From 1ab535ea8c105a4fbe288353a69c3914bea94027 Mon Sep 17 00:00:00 2001 From: Fr4nz D13trich Date: Sat, 22 Nov 2025 13:52:14 +0100 Subject: [PATCH] Repo created --- .gitignore | 26 + .idea/inspectionProfiles/Project_Default.xml | 41 + _docs/.htaccess | 3 + _docs/baseline_add_a_photo_white_48.png | Bin 0 -> 1725 bytes _docs/baseline_delete_white_48.png | Bin 0 -> 670 bytes _docs/baseline_filter_vintage_white_48.png | Bin 0 -> 2640 bytes _docs/credits.html | 97 + _docs/devices.html | 221 + _docs/exposure_locked.png | Bin 0 -> 2022 bytes _docs/exposure_unlocked.png | Bin 0 -> 1794 bytes _docs/focus_mode_auto.png | Bin 0 -> 1612 bytes _docs/focus_mode_continuous_picture.png | Bin 0 -> 1964 bytes _docs/focus_mode_edof.png | Bin 0 -> 2378 bytes _docs/focus_mode_fixed.png | Bin 0 -> 933 bytes _docs/focus_mode_infinity.png | Bin 0 -> 2458 bytes _docs/focus_mode_locked.png | Bin 0 -> 1324 bytes _docs/focus_mode_manual.png | Bin 0 -> 1614 bytes ...ogle_material_design_icons_LICENSE-2.0.txt | 202 + _docs/help.html | 1546 ++ _docs/history.html | 1946 ++ _docs/ic_exposure_white_48dp.png | Bin 0 -> 969 bytes _docs/ic_gps_fixed_white_48dp.png | Bin 0 -> 3823 bytes _docs/ic_launcher.png | Bin 0 -> 12307 bytes _docs/ic_mic_white_48dp.png | Bin 0 -> 678 bytes _docs/ic_pause_circle_outline_white_48dp.png | Bin 0 -> 2716 bytes _docs/index.html | 326 + _docs/info.html | 83 + _docs/popup.png | Bin 0 -> 706 bytes _docs/privacy_oc.html | 117 + _docs/settings.png | Bin 0 -> 1327 bytes _docs/share.png | Bin 0 -> 936 bytes _docs/stylesheet.css | 5 + _docs/switch_camera.png | Bin 0 -> 2547 bytes _docs/take_photo.png | Bin 0 -> 11238 bytes _docs/take_video.png | Bin 0 -> 8593 bytes androidx_LICENSE-2.0.txt | 202 + app/build.gradle | 67 + .../opencamera/AvgInstrumentedTests.java | 17 + .../opencamera/HDRInstrumentedTests.java | 17 + .../opencamera/HDRNInstrumentedTests.java | 17 + .../opencamera/InstrumentedTest.java | 7311 +++++++ .../opencamera/MainInstrumentedTests.java | 13 + 
.../opencamera/PanoramaInstrumentedTests.java | 17 + .../opencamera/PhotoInstrumentedTests.java | 13 + .../net/sourceforge/opencamera/TestUtils.java | 1681 ++ .../opencamera/VideoInstrumentedTests.java | 13 + .../sourceforge/opencamera/test/AvgTests.java | 71 + .../opencamera/test/HDRNTests.java | 47 + .../sourceforge/opencamera/test/HDRTests.java | 85 + .../opencamera/test/MainActivityTest.java | 17623 ++++++++++++++++ .../opencamera/test/MainTests.java | 109 + .../opencamera/test/MultiCameraTests.java | 18 + .../opencamera/test/Nexus7Tests.java | 36 + .../opencamera/test/OldDeviceTests.java | 48 + .../opencamera/test/PanoramaTests.java | 61 + .../opencamera/test/PhotoCamera2Tests.java | 42 + .../opencamera/test/PhotoTests.java | 109 + .../opencamera/test/TempTests.java | 15 + .../opencamera/test/VideoTests.java | 95 + .../res/drawable-hdpi/ic_launcher.png | Bin 0 -> 3619 bytes .../res/drawable-ldpi/ic_launcher.png | Bin 0 -> 1352 bytes .../res/drawable-mdpi/ic_launcher.png | Bin 0 -> 1859 bytes .../res/drawable-xhdpi/ic_launcher.png | Bin 0 -> 3960 bytes app/src/androidTest/res/values/strings.xml | 6 + app/src/main/AndroidManifest.xml | 159 + app/src/main/assets/androidx_LICENSE-2.0.txt | 202 + ...ogle_material_design_icons_LICENSE-2.0.txt | 202 + app/src/main/assets/gpl-3.0.txt | 674 + .../sourceforge/opencamera/AudioListener.java | 185 + .../sourceforge/opencamera/GyroSensor.java | 665 + .../sourceforge/opencamera/HDRProcessor.java | 2362 +++ .../opencamera/HDRProcessorException.java | 19 + .../sourceforge/opencamera/ImageSaver.java | 4607 ++++ .../opencamera/JavaImageFunctions.java | 4934 +++++ .../opencamera/JavaImageProcessing.java | 269 + .../sourceforge/opencamera/KeyguardUtils.java | 34 + .../opencamera/LocationSupplier.java | 357 + .../opencamera/MagneticSensor.java | 216 + .../sourceforge/opencamera/MainActivity.java | 6963 ++++++ .../opencamera/MyApplicationInterface.java | 3937 ++++ .../MyAudioTriggerListenerCallback.java | 106 + 
.../net/sourceforge/opencamera/MyDebug.java | 21 + .../opencamera/MyPreferenceFragment.java | 1099 + .../sourceforge/opencamera/MyTileService.java | 63 + .../opencamera/MyTileServiceFrontCamera.java | 63 + .../opencamera/MyTileServiceVideo.java | 63 + .../opencamera/MyWidgetProviderTakePhoto.java | 59 + .../opencamera/OpenCameraApplication.java | 30 + .../opencamera/PanoramaProcessor.java | 3028 +++ .../PanoramaProcessorException.java | 20 + .../opencamera/PermissionHandler.java | 354 + .../opencamera/PreferenceKeys.java | 386 + .../PreferenceSubCameraControlsMore.java | 244 + .../opencamera/PreferenceSubGUI.java | 127 + .../opencamera/PreferenceSubLicences.java | 122 + .../opencamera/PreferenceSubLocation.java | 19 + .../opencamera/PreferenceSubPhoto.java | 299 + .../opencamera/PreferenceSubPreview.java | 115 + .../opencamera/PreferenceSubProcessing.java | 96 + .../opencamera/PreferenceSubRemoteCtrl.java | 18 + .../opencamera/PreferenceSubScreen.java | 80 + .../PreferenceSubSettingsManager.java | 246 + .../opencamera/PreferenceSubVideo.java | 245 + .../opencamera/SaveLocationHistory.java | 157 + .../opencamera/SettingsManager.java | 278 + .../opencamera/SoundPoolManager.java | 81 + .../sourceforge/opencamera/StorageUtils.java | 1689 ++ .../net/sourceforge/opencamera/TakePhoto.java | 42 + .../sourceforge/opencamera/TextFormatter.java | 123 + .../sourceforge/opencamera/ToastBoxer.java | 12 + .../cameracontroller/CameraController.java | 815 + .../cameracontroller/CameraController1.java | 2010 ++ .../cameracontroller/CameraController2.java | 9594 +++++++++ .../CameraControllerException.java | 11 + .../CameraControllerManager.java | 28 + .../CameraControllerManager1.java | 58 + .../CameraControllerManager2.java | 246 + .../opencamera/cameracontroller/RawImage.java | 56 + .../preview/ApplicationInterface.java | 277 + .../preview/BasicApplicationInterface.java | 692 + .../opencamera/preview/CanvasView.java | 72 + .../opencamera/preview/Preview.java | 9300 ++++++++ 
.../opencamera/preview/VideoProfile.java | 114 + .../preview/VideoQualityHandler.java | 226 + .../preview/camerasurface/CameraSurface.java | 19 + .../preview/camerasurface/MySurfaceView.java | 117 + .../preview/camerasurface/MyTextureView.java | 90 + .../remotecontrol/BluetoothLeService.java | 532 + .../remotecontrol/BluetoothRemoteControl.java | 309 + .../remotecontrol/DeviceScanner.java | 542 + .../remotecontrol/KrakenGattAttributes.java | 26 + .../opencamera/ui/ArraySeekBarPreference.java | 242 + .../opencamera/ui/DrawPreview.java | 3217 +++ .../opencamera/ui/FolderChooserDialog.java | 445 + .../net/sourceforge/opencamera/ui/MainUI.java | 3368 +++ .../opencamera/ui/ManualSeekbars.java | 256 + .../opencamera/ui/MyEditTextPreference.java | 157 + .../sourceforge/opencamera/ui/PopupView.java | 1884 ++ app/src/main/res/anim/fade_in.xml | 5 + app/src/main/res/anim/slide_in_bottom.xml | 7 + app/src/main/res/anim/slide_out_bottom.xml | 7 + .../main/res/animator/button_animation.xml | 31 + .../baseline_add_a_photo_white_48.png | Bin 0 -> 583 bytes .../baseline_bedtime_white_48.png | Bin 0 -> 597 bytes .../baseline_bluetooth_white_48.png | Bin 0 -> 422 bytes .../drawable-hdpi/baseline_check_white_48.png | Bin 0 -> 238 bytes .../drawable-hdpi/baseline_close_white_48.png | Bin 0 -> 312 bytes .../baseline_delete_white_48.png | Bin 0 -> 192 bytes ...eline_face_retouching_natural_white_48.png | Bin 0 -> 1098 bytes .../baseline_filter_vintage_white_48.png | Bin 0 -> 867 bytes .../baseline_folder_open_white_48.png | Bin 0 -> 248 bytes .../baseline_highlight_white_48.png | Bin 0 -> 256 bytes .../baseline_panorama_horizontal_white_48.png | Bin 0 -> 533 bytes .../baseline_photo_library_white_48.png | Bin 0 -> 419 bytes .../baseline_portrait_white_48.png | Bin 0 -> 361 bytes .../baseline_remove_red_eye_white_48.png | Bin 0 -> 744 bytes .../baseline_rotate_left_white_48.png | Bin 0 -> 708 bytes .../baseline_rotate_right_white_48.png | Bin 0 -> 730 bytes 
.../baseline_shutter_speed_white_48.png | Bin 0 -> 1324 bytes .../baseline_text_fields_red_48.png | Bin 0 -> 570 bytes .../baseline_text_fields_white_48.png | Bin 0 -> 135 bytes app/src/main/res/drawable-hdpi/flash_auto.png | Bin 0 -> 613 bytes app/src/main/res/drawable-hdpi/flash_off.png | Bin 0 -> 436 bytes app/src/main/res/drawable-hdpi/flash_on.png | Bin 0 -> 297 bytes .../ic_burst_mode_white_48dp.png | Bin 0 -> 325 bytes .../drawable-hdpi/ic_colorize_white_48dp.png | Bin 0 -> 438 bytes .../drawable-hdpi/ic_exposure_red_48dp.png | Bin 0 -> 945 bytes .../drawable-hdpi/ic_exposure_white_48dp.png | Bin 0 -> 357 bytes .../res/drawable-hdpi/ic_face_red_48dp.png | Bin 0 -> 1672 bytes .../res/drawable-hdpi/ic_face_white_48dp.png | Bin 0 -> 1022 bytes .../ic_fast_forward_white_48dp.png | Bin 0 -> 367 bytes .../drawable-hdpi/ic_gps_fixed_red_48dp.png | Bin 0 -> 1730 bytes .../drawable-hdpi/ic_gps_fixed_white_48dp.png | Bin 0 -> 1012 bytes .../drawable-hdpi/ic_gps_off_white_48dp.png | Bin 0 -> 887 bytes .../drawable-hdpi/ic_hdr_on_white_48dp.png | Bin 0 -> 314 bytes .../ic_help_outline_white_48dp.png | Bin 0 -> 1174 bytes .../ic_info_outline_white_48dp.png | Bin 0 -> 953 bytes .../drawable-hdpi/ic_launcher_take_photo.png | Bin 0 -> 2811 bytes .../drawable-hdpi/ic_mic_off_white_48dp.png | Bin 0 -> 713 bytes .../res/drawable-hdpi/ic_mic_red_48dp.png | Bin 0 -> 1112 bytes .../res/drawable-hdpi/ic_mic_white_48dp.png | Bin 0 -> 606 bytes .../ic_more_horiz_white_48dp.png | Bin 0 -> 213 bytes .../ic_pause_circle_outline_white_48dp.png | Bin 0 -> 1822 bytes .../ic_photo_camera_white_48dp.png | Bin 0 -> 666 bytes .../ic_photo_size_select_large_white_48dp.png | Bin 0 -> 420 bytes .../ic_play_circle_outline_white_48dp.png | Bin 0 -> 1938 bytes .../ic_power_settings_new_white_48dp.png | Bin 0 -> 787 bytes .../res/drawable-hdpi/ic_save_white_48dp.png | Bin 0 -> 391 bytes .../ic_slow_motion_video_white_48dp.png | Bin 0 -> 942 bytes .../drawable-hdpi/ic_text_format_red_48dp.png | Bin 0 
-> 1030 bytes .../ic_text_format_white_48dp.png | Bin 0 -> 505 bytes .../drawable-hdpi/ic_timelapse_white_48dp.png | Bin 0 -> 1150 bytes .../res/drawable-hdpi/ic_timer_white_48dp.png | Bin 0 -> 901 bytes .../drawable-hdpi/ic_touch_app_white_48dp.png | Bin 0 -> 603 bytes .../drawable-hdpi/ic_videocam_white_48dp.png | Bin 0 -> 234 bytes app/src/main/res/drawable-hdpi/settings.png | Bin 0 -> 726 bytes app/src/main/res/drawable-hdpi/share.png | Bin 0 -> 513 bytes .../res/drawable-mdpi/auto_stabilise_icon.png | Bin 0 -> 291 bytes .../drawable-mdpi/auto_stabilise_icon_red.png | Bin 0 -> 375 bytes .../baseline_add_a_photo_white_48.png | Bin 0 -> 422 bytes .../baseline_bedtime_white_48.png | Bin 0 -> 428 bytes .../baseline_bluetooth_white_48.png | Bin 0 -> 304 bytes .../drawable-mdpi/baseline_check_white_48.png | Bin 0 -> 221 bytes .../drawable-mdpi/baseline_close_white_48.png | Bin 0 -> 251 bytes .../baseline_delete_white_48.png | Bin 0 -> 151 bytes ...eline_face_retouching_natural_white_48.png | Bin 0 -> 723 bytes .../baseline_filter_vintage_white_48.png | Bin 0 -> 589 bytes .../baseline_folder_open_white_48.png | Bin 0 -> 198 bytes .../baseline_highlight_white_48.png | Bin 0 -> 203 bytes .../baseline_panorama_horizontal_white_48.png | Bin 0 -> 391 bytes .../baseline_photo_library_white_48.png | Bin 0 -> 302 bytes .../baseline_portrait_white_48.png | Bin 0 -> 266 bytes .../baseline_remove_red_eye_white_48.png | Bin 0 -> 527 bytes .../baseline_rotate_left_white_48.png | Bin 0 -> 481 bytes .../baseline_rotate_right_white_48.png | Bin 0 -> 479 bytes .../baseline_shutter_speed_white_48.png | Bin 0 -> 835 bytes .../baseline_text_fields_red_48.png | Bin 0 -> 423 bytes .../baseline_text_fields_white_48.png | Bin 0 -> 105 bytes app/src/main/res/drawable-mdpi/dro_icon.png | Bin 0 -> 597 bytes app/src/main/res/drawable-mdpi/expo_icon.png | Bin 0 -> 582 bytes .../res/drawable-mdpi/exposure_locked.png | Bin 0 -> 2053 bytes .../res/drawable-mdpi/exposure_unlocked.png | Bin 0 -> 1534 
bytes app/src/main/res/drawable-mdpi/flash_auto.png | Bin 0 -> 449 bytes app/src/main/res/drawable-mdpi/flash_off.png | Bin 0 -> 330 bytes app/src/main/res/drawable-mdpi/flash_on.png | Bin 0 -> 243 bytes .../res/drawable-mdpi/focus_mode_auto.png | Bin 0 -> 1084 bytes .../focus_mode_continuous_picture.png | Bin 0 -> 936 bytes .../focus_mode_continuous_video.png | Bin 0 -> 936 bytes .../res/drawable-mdpi/focus_mode_edof.png | Bin 0 -> 1858 bytes .../res/drawable-mdpi/focus_mode_fixed.png | Bin 0 -> 686 bytes .../res/drawable-mdpi/focus_mode_infinity.png | Bin 0 -> 1092 bytes .../res/drawable-mdpi/focus_mode_locked.png | Bin 0 -> 1146 bytes .../res/drawable-mdpi/focus_mode_manual.png | Bin 0 -> 1193 bytes .../ic_burst_mode_white_48dp.png | Bin 0 -> 239 bytes .../drawable-mdpi/ic_colorize_white_48dp.png | Bin 0 -> 324 bytes .../drawable-mdpi/ic_exposure_red_48dp.png | Bin 0 -> 616 bytes .../drawable-mdpi/ic_exposure_white_48dp.png | Bin 0 -> 260 bytes .../res/drawable-mdpi/ic_face_red_48dp.png | Bin 0 -> 1092 bytes .../res/drawable-mdpi/ic_face_white_48dp.png | Bin 0 -> 694 bytes .../ic_fast_forward_white_48dp.png | Bin 0 -> 272 bytes .../drawable-mdpi/ic_gps_fixed_red_48dp.png | Bin 0 -> 1122 bytes .../drawable-mdpi/ic_gps_fixed_white_48dp.png | Bin 0 -> 687 bytes .../drawable-mdpi/ic_gps_off_white_48dp.png | Bin 0 -> 602 bytes .../drawable-mdpi/ic_hdr_on_white_48dp.png | Bin 0 -> 191 bytes .../ic_help_outline_white_48dp.png | Bin 0 -> 801 bytes .../ic_info_outline_white_48dp.png | Bin 0 -> 655 bytes .../drawable-mdpi/ic_launcher_take_photo.png | Bin 0 -> 1681 bytes .../drawable-mdpi/ic_mic_off_white_48dp.png | Bin 0 -> 484 bytes .../res/drawable-mdpi/ic_mic_red_48dp.png | Bin 0 -> 801 bytes .../res/drawable-mdpi/ic_mic_white_48dp.png | Bin 0 -> 436 bytes .../ic_more_horiz_white_48dp.png | Bin 0 -> 161 bytes .../ic_pause_circle_outline_white_48dp.png | Bin 0 -> 1248 bytes .../ic_photo_camera_white_48dp.png | Bin 0 -> 446 bytes 
.../ic_photo_size_select_large_white_48dp.png | Bin 0 -> 304 bytes .../ic_play_circle_outline_white_48dp.png | Bin 0 -> 1343 bytes .../ic_power_settings_new_white_48dp.png | Bin 0 -> 556 bytes .../res/drawable-mdpi/ic_save_white_48dp.png | Bin 0 -> 273 bytes .../ic_slow_motion_video_white_48dp.png | Bin 0 -> 659 bytes .../ic_stat_notify_take_photo.png | Bin 0 -> 927 bytes .../drawable-mdpi/ic_text_format_red_48dp.png | Bin 0 -> 516 bytes .../ic_text_format_white_48dp.png | Bin 0 -> 353 bytes .../drawable-mdpi/ic_timelapse_white_48dp.png | Bin 0 -> 790 bytes .../res/drawable-mdpi/ic_timer_white_48dp.png | Bin 0 -> 628 bytes .../drawable-mdpi/ic_touch_app_white_48dp.png | Bin 0 -> 423 bytes .../drawable-mdpi/ic_videocam_white_48dp.png | Bin 0 -> 178 bytes app/src/main/res/drawable-mdpi/iso_icon.png | Bin 0 -> 539 bytes app/src/main/res/drawable-mdpi/nr_icon.png | Bin 0 -> 472 bytes app/src/main/res/drawable-mdpi/popup.png | Bin 0 -> 305 bytes .../res/drawable-mdpi/popup_flash_auto.png | Bin 0 -> 1517 bytes .../res/drawable-mdpi/popup_flash_off.png | Bin 0 -> 1220 bytes .../main/res/drawable-mdpi/popup_flash_on.png | Bin 0 -> 1056 bytes .../res/drawable-mdpi/popup_flash_red_eye.png | Bin 0 -> 1427 bytes .../res/drawable-mdpi/popup_flash_torch.png | Bin 0 -> 1060 bytes app/src/main/res/drawable-mdpi/raw_icon.png | Bin 0 -> 693 bytes .../main/res/drawable-mdpi/raw_off_icon.png | Bin 0 -> 707 bytes .../main/res/drawable-mdpi/raw_only_icon.png | Bin 0 -> 1024 bytes app/src/main/res/drawable-mdpi/settings.png | Bin 0 -> 538 bytes app/src/main/res/drawable-mdpi/share.png | Bin 0 -> 383 bytes .../main/res/drawable-mdpi/switch_camera.png | Bin 0 -> 2737 bytes app/src/main/res/drawable-mdpi/take_photo.png | Bin 0 -> 7475 bytes .../res/drawable-mdpi/take_photo_pref.png | Bin 0 -> 817 bytes app/src/main/res/drawable-mdpi/take_video.png | Bin 0 -> 5316 bytes .../res/drawable-mdpi/take_video_pref.png | Bin 0 -> 380 bytes .../drawable-mdpi/white_balance_locked.png | Bin 0 -> 2617 
bytes .../drawable-mdpi/white_balance_unlocked.png | Bin 0 -> 1760 bytes .../baseline_add_a_photo_white_48.png | Bin 0 -> 763 bytes .../baseline_bedtime_white_48.png | Bin 0 -> 778 bytes .../baseline_bluetooth_white_48.png | Bin 0 -> 504 bytes .../baseline_check_white_48.png | Bin 0 -> 341 bytes .../baseline_close_white_48.png | Bin 0 -> 389 bytes .../baseline_delete_white_48.png | Bin 0 -> 239 bytes ...eline_face_retouching_natural_white_48.png | Bin 0 -> 1473 bytes .../baseline_filter_vintage_white_48.png | Bin 0 -> 1190 bytes .../baseline_folder_open_white_48.png | Bin 0 -> 322 bytes .../baseline_highlight_white_48.png | Bin 0 -> 333 bytes .../baseline_panorama_horizontal_white_48.png | Bin 0 -> 689 bytes .../baseline_photo_library_white_48.png | Bin 0 -> 543 bytes .../baseline_portrait_white_48.png | Bin 0 -> 461 bytes .../baseline_remove_red_eye_white_48.png | Bin 0 -> 1033 bytes .../baseline_rotate_left_white_48.png | Bin 0 -> 934 bytes .../baseline_rotate_right_white_48.png | Bin 0 -> 941 bytes .../baseline_shutter_speed_white_48.png | Bin 0 -> 1781 bytes .../baseline_text_fields_red_48.png | Bin 0 -> 739 bytes .../baseline_text_fields_white_48.png | Bin 0 -> 111 bytes .../main/res/drawable-xhdpi/flash_auto.png | Bin 0 -> 793 bytes app/src/main/res/drawable-xhdpi/flash_off.png | Bin 0 -> 511 bytes app/src/main/res/drawable-xhdpi/flash_on.png | Bin 0 -> 347 bytes .../ic_burst_mode_white_48dp.png | Bin 0 -> 415 bytes .../drawable-xhdpi/ic_colorize_white_48dp.png | Bin 0 -> 528 bytes .../drawable-xhdpi/ic_exposure_red_48dp.png | Bin 0 -> 1159 bytes .../drawable-xhdpi/ic_exposure_white_48dp.png | Bin 0 -> 447 bytes .../res/drawable-xhdpi/ic_face_red_48dp.png | Bin 0 -> 2259 bytes .../res/drawable-xhdpi/ic_face_white_48dp.png | Bin 0 -> 1344 bytes .../ic_fast_forward_white_48dp.png | Bin 0 -> 447 bytes .../drawable-xhdpi/ic_gps_fixed_red_48dp.png | Bin 0 -> 2316 bytes .../ic_gps_fixed_white_48dp.png | Bin 0 -> 1379 bytes 
.../drawable-xhdpi/ic_gps_off_white_48dp.png | Bin 0 -> 1189 bytes .../drawable-xhdpi/ic_hdr_on_white_48dp.png | Bin 0 -> 320 bytes .../ic_help_outline_white_48dp.png | Bin 0 -> 1578 bytes .../ic_info_outline_white_48dp.png | Bin 0 -> 1279 bytes .../drawable-xhdpi/ic_launcher_take_photo.png | Bin 0 -> 4008 bytes .../drawable-xhdpi/ic_mic_off_white_48dp.png | Bin 0 -> 902 bytes .../res/drawable-xhdpi/ic_mic_red_48dp.png | Bin 0 -> 1557 bytes .../res/drawable-xhdpi/ic_mic_white_48dp.png | Bin 0 -> 819 bytes .../ic_more_horiz_white_48dp.png | Bin 0 -> 267 bytes .../ic_pause_circle_outline_white_48dp.png | Bin 0 -> 2561 bytes .../ic_photo_camera_white_48dp.png | Bin 0 -> 894 bytes .../ic_photo_size_select_large_white_48dp.png | Bin 0 -> 542 bytes .../ic_play_circle_outline_white_48dp.png | Bin 0 -> 2700 bytes .../ic_power_settings_new_white_48dp.png | Bin 0 -> 1074 bytes .../res/drawable-xhdpi/ic_save_white_48dp.png | Bin 0 -> 504 bytes .../ic_slow_motion_video_white_48dp.png | Bin 0 -> 1256 bytes .../ic_text_format_red_48dp.png | Bin 0 -> 1000 bytes .../ic_text_format_white_48dp.png | Bin 0 -> 657 bytes .../ic_timelapse_white_48dp.png | Bin 0 -> 1548 bytes .../drawable-xhdpi/ic_timer_white_48dp.png | Bin 0 -> 1241 bytes .../ic_touch_app_white_48dp.png | Bin 0 -> 768 bytes .../drawable-xhdpi/ic_videocam_white_48dp.png | Bin 0 -> 290 bytes app/src/main/res/drawable-xhdpi/settings.png | Bin 0 -> 1018 bytes app/src/main/res/drawable-xhdpi/share.png | Bin 0 -> 666 bytes .../main/res/drawable-xhdpi/take_photo.png | Bin 0 -> 11238 bytes .../res/drawable-xhdpi/take_photo_pref.png | Bin 0 -> 1540 bytes .../main/res/drawable-xhdpi/take_video.png | Bin 0 -> 8593 bytes .../res/drawable-xhdpi/take_video_pref.png | Bin 0 -> 579 bytes .../drawable-xxhdpi/auto_stabilise_icon.png | Bin 0 -> 483 bytes .../auto_stabilise_icon_red.png | Bin 0 -> 463 bytes .../baseline_add_a_photo_white_48.png | Bin 0 -> 1123 bytes .../baseline_bedtime_white_48.png | Bin 0 -> 1130 bytes 
.../baseline_bluetooth_white_48.png | Bin 0 -> 620 bytes .../baseline_check_white_48.png | Bin 0 -> 383 bytes .../baseline_close_white_48.png | Bin 0 -> 544 bytes .../baseline_delete_white_48.png | Bin 0 -> 343 bytes ...eline_face_retouching_natural_white_48.png | Bin 0 -> 2383 bytes .../baseline_filter_vintage_white_48.png | Bin 0 -> 1723 bytes .../baseline_folder_open_white_48.png | Bin 0 -> 470 bytes .../baseline_highlight_white_48.png | Bin 0 -> 462 bytes .../baseline_panorama_horizontal_white_48.png | Bin 0 -> 1020 bytes .../baseline_photo_library_white_48.png | Bin 0 -> 764 bytes .../baseline_portrait_white_48.png | Bin 0 -> 715 bytes .../baseline_remove_red_eye_white_48.png | Bin 0 -> 1540 bytes .../baseline_rotate_left_white_48.png | Bin 0 -> 1372 bytes .../baseline_rotate_right_white_48.png | Bin 0 -> 1353 bytes .../baseline_shutter_speed_white_48.png | Bin 0 -> 2741 bytes .../baseline_text_fields_red_48.png | Bin 0 -> 1088 bytes .../baseline_text_fields_white_48.png | Bin 0 -> 124 bytes app/src/main/res/drawable-xxhdpi/dro_icon.png | Bin 0 -> 1667 bytes .../main/res/drawable-xxhdpi/expo_icon.png | Bin 0 -> 1644 bytes .../main/res/drawable-xxhdpi/flash_auto.png | Bin 0 -> 1077 bytes .../main/res/drawable-xxhdpi/flash_off.png | Bin 0 -> 759 bytes app/src/main/res/drawable-xxhdpi/flash_on.png | Bin 0 -> 487 bytes .../ic_burst_mode_white_48dp.png | Bin 0 -> 613 bytes .../ic_colorize_white_48dp.png | Bin 0 -> 725 bytes .../drawable-xxhdpi/ic_exposure_red_48dp.png | Bin 0 -> 1624 bytes .../ic_exposure_white_48dp.png | Bin 0 -> 616 bytes .../res/drawable-xxhdpi/ic_face_red_48dp.png | Bin 0 -> 3451 bytes .../drawable-xxhdpi/ic_face_white_48dp.png | Bin 0 -> 2083 bytes .../ic_fast_forward_white_48dp.png | Bin 0 -> 652 bytes .../drawable-xxhdpi/ic_gps_fixed_red_48dp.png | Bin 0 -> 3622 bytes .../ic_gps_fixed_white_48dp.png | Bin 0 -> 2095 bytes .../drawable-xxhdpi/ic_gps_off_white_48dp.png | Bin 0 -> 1800 bytes .../drawable-xxhdpi/ic_hdr_on_white_48dp.png | Bin 0 
-> 456 bytes .../ic_help_outline_white_48dp.png | Bin 0 -> 2463 bytes .../ic_info_outline_white_48dp.png | Bin 0 -> 1985 bytes .../ic_launcher_take_photo.png | Bin 0 -> 6772 bytes .../drawable-xxhdpi/ic_mic_off_white_48dp.png | Bin 0 -> 1326 bytes .../res/drawable-xxhdpi/ic_mic_red_48dp.png | Bin 0 -> 1856 bytes .../res/drawable-xxhdpi/ic_mic_white_48dp.png | Bin 0 -> 1220 bytes .../ic_more_horiz_white_48dp.png | Bin 0 -> 389 bytes .../ic_pause_circle_outline_white_48dp.png | Bin 0 -> 3904 bytes .../ic_photo_camera_white_48dp.png | Bin 0 -> 1309 bytes .../ic_photo_size_select_large_white_48dp.png | Bin 0 -> 815 bytes .../ic_play_circle_outline_white_48dp.png | Bin 0 -> 4092 bytes .../ic_power_settings_new_white_48dp.png | Bin 0 -> 1699 bytes .../drawable-xxhdpi/ic_save_white_48dp.png | Bin 0 -> 751 bytes .../ic_slow_motion_video_white_48dp.png | Bin 0 -> 1885 bytes .../ic_stat_notify_take_photo.png | Bin 0 -> 3574 bytes .../ic_text_format_red_48dp.png | Bin 0 -> 1455 bytes .../ic_text_format_white_48dp.png | Bin 0 -> 961 bytes .../ic_timelapse_white_48dp.png | Bin 0 -> 2368 bytes .../drawable-xxhdpi/ic_timer_white_48dp.png | Bin 0 -> 1941 bytes .../ic_touch_app_white_48dp.png | Bin 0 -> 1151 bytes .../ic_videocam_white_48dp.png | Bin 0 -> 437 bytes app/src/main/res/drawable-xxhdpi/iso_icon.png | Bin 0 -> 1445 bytes app/src/main/res/drawable-xxhdpi/nr_icon.png | Bin 0 -> 1177 bytes app/src/main/res/drawable-xxhdpi/raw_icon.png | Bin 0 -> 1925 bytes .../main/res/drawable-xxhdpi/raw_off_icon.png | Bin 0 -> 1777 bytes .../res/drawable-xxhdpi/raw_only_icon.png | Bin 0 -> 3052 bytes app/src/main/res/drawable-xxhdpi/settings.png | Bin 0 -> 1510 bytes app/src/main/res/drawable-xxhdpi/share.png | Bin 0 -> 969 bytes .../baseline_add_a_photo_white_48.png | Bin 0 -> 1498 bytes .../baseline_bedtime_white_48.png | Bin 0 -> 1539 bytes .../baseline_bluetooth_white_48.png | Bin 0 -> 935 bytes .../baseline_check_white_48.png | Bin 0 -> 540 bytes .../baseline_close_white_48.png | Bin 
0 -> 747 bytes .../baseline_delete_white_48.png | Bin 0 -> 451 bytes ...eline_face_retouching_natural_white_48.png | Bin 0 -> 3242 bytes .../baseline_filter_vintage_white_48.png | Bin 0 -> 2353 bytes .../baseline_folder_open_white_48.png | Bin 0 -> 657 bytes .../baseline_highlight_white_48.png | Bin 0 -> 598 bytes .../baseline_panorama_horizontal_white_48.png | Bin 0 -> 1460 bytes .../baseline_photo_library_white_48.png | Bin 0 -> 1077 bytes .../baseline_portrait_white_48.png | Bin 0 -> 1038 bytes .../baseline_remove_red_eye_white_48.png | Bin 0 -> 2085 bytes .../baseline_rotate_left_white_48.png | Bin 0 -> 1845 bytes .../baseline_rotate_right_white_48.png | Bin 0 -> 1831 bytes .../baseline_shutter_speed_white_48.png | Bin 0 -> 3650 bytes .../baseline_text_fields_red_48.png | Bin 0 -> 1486 bytes .../baseline_text_fields_white_48.png | Bin 0 -> 129 bytes .../main/res/drawable-xxxhdpi/flash_auto.png | Bin 0 -> 1437 bytes .../main/res/drawable-xxxhdpi/flash_off.png | Bin 0 -> 957 bytes .../main/res/drawable-xxxhdpi/flash_on.png | Bin 0 -> 629 bytes .../ic_burst_mode_white_48dp.png | Bin 0 -> 871 bytes .../ic_colorize_white_48dp.png | Bin 0 -> 1000 bytes .../drawable-xxxhdpi/ic_exposure_red_48dp.png | Bin 0 -> 2165 bytes .../ic_exposure_white_48dp.png | Bin 0 -> 888 bytes .../res/drawable-xxxhdpi/ic_face_red_48dp.png | Bin 0 -> 4693 bytes .../drawable-xxxhdpi/ic_face_white_48dp.png | Bin 0 -> 2752 bytes .../ic_fast_forward_white_48dp.png | Bin 0 -> 788 bytes .../ic_gps_fixed_red_48dp.png | Bin 0 -> 4851 bytes .../ic_gps_fixed_white_48dp.png | Bin 0 -> 2748 bytes .../ic_gps_off_white_48dp.png | Bin 0 -> 2501 bytes .../drawable-xxxhdpi/ic_hdr_on_white_48dp.png | Bin 0 -> 588 bytes .../ic_help_outline_white_48dp.png | Bin 0 -> 3278 bytes .../ic_info_outline_white_48dp.png | Bin 0 -> 2633 bytes .../ic_mic_off_white_48dp.png | Bin 0 -> 1714 bytes .../ic_more_horiz_white_48dp.png | Bin 0 -> 533 bytes .../ic_pause_circle_outline_white_48dp.png | Bin 0 -> 5319 bytes 
.../ic_photo_camera_white_48dp.png | Bin 0 -> 1837 bytes .../ic_photo_size_select_large_white_48dp.png | Bin 0 -> 1095 bytes .../ic_play_circle_outline_white_48dp.png | Bin 0 -> 5581 bytes .../ic_power_settings_new_white_48dp.png | Bin 0 -> 2167 bytes .../drawable-xxxhdpi/ic_save_white_48dp.png | Bin 0 -> 1031 bytes .../ic_slow_motion_video_white_48dp.png | Bin 0 -> 2541 bytes .../ic_text_format_red_48dp.png | Bin 0 -> 1934 bytes .../ic_text_format_white_48dp.png | Bin 0 -> 1297 bytes .../ic_timelapse_white_48dp.png | Bin 0 -> 3149 bytes .../drawable-xxxhdpi/ic_timer_white_48dp.png | Bin 0 -> 2473 bytes .../ic_touch_app_white_48dp.png | Bin 0 -> 1516 bytes .../ic_videocam_white_48dp.png | Bin 0 -> 591 bytes .../main/res/drawable-xxxhdpi/settings.png | Bin 0 -> 1962 bytes .../main/res/drawable/circle_background.xml | 8 + app/src/main/res/drawable/key_visualizer.xml | 5 + .../main/res/drawable/key_visualizer_red.xml | 5 + .../main/res/drawable/shortcut_gallery.xml | 9 + .../drawable/shortcut_ic_face_white_48dp.xml | 9 + .../shortcut_ic_photo_camera_white_48dp.xml | 9 + .../shortcut_ic_videocam_white_48dp.xml | 9 + .../main/res/drawable/shortcut_settings.xml | 9 + .../main/res/drawable/take_photo_selector.xml | 6 + .../main/res/drawable/take_photo_shutter.xml | 9 + .../drawable/take_photo_shutter_pressed.xml | 5 + .../res/drawable/take_video_recording.xml | 17 + .../main/res/drawable/take_video_selector.xml | 6 + .../main/res/drawable/take_video_shutter.xml | 11 + .../drawable/take_video_shutter_pressed.xml | 11 + .../res/layout/activity_device_select.xml | 42 + app/src/main/res/layout/activity_main.xml | 627 + .../main/res/layout/alertdialog_edittext.xml | 23 + .../main/res/layout/alertdialog_textview.xml | 12 + .../res/layout/arrayseekbarpreference.xml | 19 + app/src/main/res/layout/listitem_device.xml | 14 + .../main/res/layout/myedittextpreference.xml | 29 + .../res/layout/popupview_arrayoptions.xml | 30 + app/src/main/res/layout/popupview_button.xml | 11 + 
.../main/res/layout/popupview_radiobutton.xml | 11 + app/src/main/res/layout/popupview_switch.xml | 14 + .../main/res/layout/popupview_textview.xml | 20 + app/src/main/res/layout/stamp_image_text.xml | 19 + app/src/main/res/layout/toast_textview.xml | 13 + app/src/main/res/layout/widget_layout.xml | 22 + .../res/layout/widget_layout_take_photo.xml | 22 + app/src/main/res/menu/main.xml | 10 + .../res/mipmap-anydpi-v26/ic_launcher.xml | 6 + .../mipmap-anydpi-v26/ic_launcher_round.xml | 6 + app/src/main/res/mipmap-hdpi/ic_launcher.png | Bin 0 -> 5610 bytes .../mipmap-hdpi/ic_launcher_background.png | Bin 0 -> 1686 bytes .../mipmap-hdpi/ic_launcher_foreground.png | Bin 0 -> 5288 bytes app/src/main/res/mipmap-mdpi/ic_launcher.png | Bin 0 -> 3368 bytes .../mipmap-mdpi/ic_launcher_background.png | Bin 0 -> 1121 bytes .../mipmap-mdpi/ic_launcher_foreground.png | Bin 0 -> 3073 bytes app/src/main/res/mipmap-xhdpi/ic_launcher.png | Bin 0 -> 8114 bytes .../mipmap-xhdpi/ic_launcher_background.png | Bin 0 -> 2168 bytes .../mipmap-xhdpi/ic_launcher_foreground.png | Bin 0 -> 7554 bytes .../main/res/mipmap-xxhdpi/ic_launcher.png | Bin 0 -> 13065 bytes .../mipmap-xxhdpi/ic_launcher_background.png | Bin 0 -> 3637 bytes .../mipmap-xxhdpi/ic_launcher_foreground.png | Bin 0 -> 12862 bytes .../main/res/mipmap-xxxhdpi/ic_launcher.png | Bin 0 -> 19126 bytes .../mipmap-xxxhdpi/ic_launcher_background.png | Bin 0 -> 5181 bytes .../mipmap-xxxhdpi/ic_launcher_foreground.png | Bin 0 -> 19078 bytes app/src/main/res/raw/mybeep.ogg | Bin 0 -> 4535 bytes app/src/main/res/raw/mybeep_hi.ogg | Bin 0 -> 4597 bytes app/src/main/res/values-az/arrays.xml | 187 + app/src/main/res/values-az/strings.xml | 322 + app/src/main/res/values-be/arrays.xml | 245 + app/src/main/res/values-be/strings.xml | 366 + app/src/main/res/values-ca/arrays.xml | 310 + app/src/main/res/values-ca/strings.xml | 1027 + app/src/main/res/values-cs/arrays.xml | 245 + app/src/main/res/values-cs/strings.xml | 370 + 
app/src/main/res/values-de/arrays.xml | 302 + app/src/main/res/values-de/strings.xml | 963 + app/src/main/res/values-el/strings.xml | 444 + app/src/main/res/values-es/arrays.xml | 289 + app/src/main/res/values-es/strings.xml | 601 + app/src/main/res/values-fr/arrays.xml | 245 + app/src/main/res/values-fr/strings.xml | 453 + app/src/main/res/values-hu/arrays.xml | 301 + app/src/main/res/values-hu/strings.xml | 508 + app/src/main/res/values-it/arrays.xml | 289 + app/src/main/res/values-it/strings.xml | 967 + app/src/main/res/values-ja/arrays.xml | 290 + app/src/main/res/values-ja/strings.xml | 853 + app/src/main/res/values-ko/arrays.xml | 187 + app/src/main/res/values-ko/strings.xml | 333 + app/src/main/res/values-nb/arrays.xml | 289 + app/src/main/res/values-nb/strings.xml | 557 + app/src/main/res/values-pl/arrays.xml | 290 + app/src/main/res/values-pl/strings.xml | 1024 + app/src/main/res/values-pt-rBR/arrays.xml | 245 + app/src/main/res/values-pt-rBR/strings.xml | 333 + app/src/main/res/values-pt-rPT/arrays.xml | 253 + app/src/main/res/values-pt-rPT/strings.xml | 427 + app/src/main/res/values-ru/arrays.xml | 289 + app/src/main/res/values-ru/strings.xml | 966 + app/src/main/res/values-sl/arrays.xml | 310 + app/src/main/res/values-sl/strings.xml | 969 + app/src/main/res/values-sw600dp/dimens.xml | 8 + .../main/res/values-sw720dp-land/dimens.xml | 9 + app/src/main/res/values-tr/arrays.xml | 245 + app/src/main/res/values-tr/strings.xml | 369 + app/src/main/res/values-uk/arrays.xml | 289 + app/src/main/res/values-uk/strings.xml | 512 + app/src/main/res/values-vi/arrays.xml | 289 + app/src/main/res/values-vi/strings.xml | 787 + app/src/main/res/values-zh-rCN/arrays.xml | 269 + app/src/main/res/values-zh-rCN/strings.xml | 969 + app/src/main/res/values-zh-rTW/arrays.xml | 289 + app/src/main/res/values-zh-rTW/strings.xml | 966 + app/src/main/res/values/arrays.xml | 1045 + app/src/main/res/values/colors.xml | 9 + app/src/main/res/values/dimens.xml | 9 + 
app/src/main/res/values/strings.xml | 1099 + app/src/main/res/values/styles.xml | 34 + app/src/main/res/xml-v25/shortcuts.xml | 62 + app/src/main/res/xml/preferences.xml | 222 + .../preferences_sub_camera_controls_more.xml | 181 + app/src/main/res/xml/preferences_sub_gui.xml | 161 + .../main/res/xml/preferences_sub_licences.xml | 29 + .../main/res/xml/preferences_sub_location.xml | 35 + .../main/res/xml/preferences_sub_photo.xml | 336 + .../main/res/xml/preferences_sub_preview.xml | 237 + .../res/xml/preferences_sub_processing.xml | 29 + .../res/xml/preferences_sub_remote_ctrl.xml | 50 + .../xml/preferences_sub_settings_manager.xml | 26 + .../main/res/xml/preferences_sub_video.xml | 171 + .../main/res/xml/widget_info_take_photo.xml | 8 + .../sourceforge/opencamera/test/UnitTest.java | 1264 ++ build.gradle | 17 + .../metadata/android/en-US/images/icon.png | Bin 0 -> 19126 bytes google_material_design_icons_LICENSE-2.0.txt | 202 + gpl-3.0.txt | 674 + gradle.properties | 5 + gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 53636 bytes gradle/wrapper/gradle-wrapper.properties | 6 + gradlew | 160 + gradlew.bat | 90 + makesrcarchive.bat | 46 + opencamera_source.txt | 60 + settings.gradle | 4 + 589 files changed, 130568 insertions(+) create mode 100644 .gitignore create mode 100644 .idea/inspectionProfiles/Project_Default.xml create mode 100644 _docs/.htaccess create mode 100644 _docs/baseline_add_a_photo_white_48.png create mode 100644 _docs/baseline_delete_white_48.png create mode 100644 _docs/baseline_filter_vintage_white_48.png create mode 100644 _docs/credits.html create mode 100644 _docs/devices.html create mode 100644 _docs/exposure_locked.png create mode 100644 _docs/exposure_unlocked.png create mode 100644 _docs/focus_mode_auto.png create mode 100644 _docs/focus_mode_continuous_picture.png create mode 100644 _docs/focus_mode_edof.png create mode 100644 _docs/focus_mode_fixed.png create mode 100644 _docs/focus_mode_infinity.png create mode 100644 
_docs/focus_mode_locked.png create mode 100644 _docs/focus_mode_manual.png create mode 100644 _docs/google_material_design_icons_LICENSE-2.0.txt create mode 100644 _docs/help.html create mode 100644 _docs/history.html create mode 100644 _docs/ic_exposure_white_48dp.png create mode 100644 _docs/ic_gps_fixed_white_48dp.png create mode 100644 _docs/ic_launcher.png create mode 100644 _docs/ic_mic_white_48dp.png create mode 100644 _docs/ic_pause_circle_outline_white_48dp.png create mode 100644 _docs/index.html create mode 100644 _docs/info.html create mode 100644 _docs/popup.png create mode 100644 _docs/privacy_oc.html create mode 100644 _docs/settings.png create mode 100644 _docs/share.png create mode 100644 _docs/stylesheet.css create mode 100644 _docs/switch_camera.png create mode 100644 _docs/take_photo.png create mode 100644 _docs/take_video.png create mode 100644 androidx_LICENSE-2.0.txt create mode 100644 app/build.gradle create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/AvgInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/HDRInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/HDRNInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/InstrumentedTest.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/MainInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/PanoramaInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/PhotoInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/TestUtils.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/VideoInstrumentedTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/AvgTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/HDRNTests.java create mode 
100644 app/src/androidTest/java/net/sourceforge/opencamera/test/HDRTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/MainActivityTest.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/MainTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/MultiCameraTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/Nexus7Tests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/OldDeviceTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/PanoramaTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoCamera2Tests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/TempTests.java create mode 100644 app/src/androidTest/java/net/sourceforge/opencamera/test/VideoTests.java create mode 100644 app/src/androidTest/res/drawable-hdpi/ic_launcher.png create mode 100644 app/src/androidTest/res/drawable-ldpi/ic_launcher.png create mode 100644 app/src/androidTest/res/drawable-mdpi/ic_launcher.png create mode 100644 app/src/androidTest/res/drawable-xhdpi/ic_launcher.png create mode 100644 app/src/androidTest/res/values/strings.xml create mode 100644 app/src/main/AndroidManifest.xml create mode 100644 app/src/main/assets/androidx_LICENSE-2.0.txt create mode 100644 app/src/main/assets/google_material_design_icons_LICENSE-2.0.txt create mode 100644 app/src/main/assets/gpl-3.0.txt create mode 100644 app/src/main/java/net/sourceforge/opencamera/AudioListener.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/GyroSensor.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/HDRProcessor.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/HDRProcessorException.java create mode 100644 
app/src/main/java/net/sourceforge/opencamera/ImageSaver.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/JavaImageFunctions.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/JavaImageProcessing.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/KeyguardUtils.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/LocationSupplier.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MagneticSensor.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MainActivity.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyApplicationInterface.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyAudioTriggerListenerCallback.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyDebug.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyPreferenceFragment.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyTileService.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyTileServiceFrontCamera.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyTileServiceVideo.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/MyWidgetProviderTakePhoto.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/OpenCameraApplication.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PanoramaProcessor.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PanoramaProcessorException.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PermissionHandler.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceKeys.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubCameraControlsMore.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubGUI.java create mode 100644 
app/src/main/java/net/sourceforge/opencamera/PreferenceSubLicences.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubLocation.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubPhoto.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubPreview.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubProcessing.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubRemoteCtrl.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubScreen.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubSettingsManager.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/PreferenceSubVideo.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/SaveLocationHistory.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/SettingsManager.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/SoundPoolManager.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/StorageUtils.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/TakePhoto.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/TextFormatter.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ToastBoxer.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController1.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController2.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraControllerException.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraControllerManager.java create mode 100644 
app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraControllerManager1.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraControllerManager2.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/cameracontroller/RawImage.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/ApplicationInterface.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/BasicApplicationInterface.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/CanvasView.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/Preview.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/VideoProfile.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/VideoQualityHandler.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/CameraSurface.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MySurfaceView.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MyTextureView.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothLeService.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothRemoteControl.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/remotecontrol/DeviceScanner.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/remotecontrol/KrakenGattAttributes.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/ArraySeekBarPreference.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/DrawPreview.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/FolderChooserDialog.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/MainUI.java create mode 100644 
app/src/main/java/net/sourceforge/opencamera/ui/ManualSeekbars.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/MyEditTextPreference.java create mode 100644 app/src/main/java/net/sourceforge/opencamera/ui/PopupView.java create mode 100644 app/src/main/res/anim/fade_in.xml create mode 100644 app/src/main/res/anim/slide_in_bottom.xml create mode 100644 app/src/main/res/anim/slide_out_bottom.xml create mode 100644 app/src/main/res/animator/button_animation.xml create mode 100644 app/src/main/res/drawable-hdpi/baseline_add_a_photo_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_bedtime_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_bluetooth_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_check_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_close_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_delete_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_face_retouching_natural_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_filter_vintage_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_folder_open_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_highlight_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_panorama_horizontal_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_photo_library_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_portrait_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_remove_red_eye_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_rotate_left_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_rotate_right_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_shutter_speed_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/baseline_text_fields_red_48.png 
create mode 100644 app/src/main/res/drawable-hdpi/baseline_text_fields_white_48.png create mode 100644 app/src/main/res/drawable-hdpi/flash_auto.png create mode 100644 app/src/main/res/drawable-hdpi/flash_off.png create mode 100644 app/src/main/res/drawable-hdpi/flash_on.png create mode 100644 app/src/main/res/drawable-hdpi/ic_burst_mode_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_colorize_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_exposure_red_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_exposure_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_face_red_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_face_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_fast_forward_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_gps_fixed_red_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_gps_fixed_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_gps_off_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_hdr_on_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_help_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_info_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_launcher_take_photo.png create mode 100644 app/src/main/res/drawable-hdpi/ic_mic_off_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_mic_red_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_mic_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_more_horiz_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_pause_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_photo_camera_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_photo_size_select_large_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_play_circle_outline_white_48dp.png 
create mode 100644 app/src/main/res/drawable-hdpi/ic_power_settings_new_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_save_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_slow_motion_video_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_text_format_red_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_text_format_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_timelapse_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_timer_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_touch_app_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/ic_videocam_white_48dp.png create mode 100644 app/src/main/res/drawable-hdpi/settings.png create mode 100644 app/src/main/res/drawable-hdpi/share.png create mode 100644 app/src/main/res/drawable-mdpi/auto_stabilise_icon.png create mode 100644 app/src/main/res/drawable-mdpi/auto_stabilise_icon_red.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_add_a_photo_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_bedtime_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_bluetooth_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_check_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_close_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_delete_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_face_retouching_natural_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_filter_vintage_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_folder_open_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_highlight_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_panorama_horizontal_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_photo_library_white_48.png 
create mode 100644 app/src/main/res/drawable-mdpi/baseline_portrait_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_remove_red_eye_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_rotate_left_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_rotate_right_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_shutter_speed_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_text_fields_red_48.png create mode 100644 app/src/main/res/drawable-mdpi/baseline_text_fields_white_48.png create mode 100644 app/src/main/res/drawable-mdpi/dro_icon.png create mode 100644 app/src/main/res/drawable-mdpi/expo_icon.png create mode 100644 app/src/main/res/drawable-mdpi/exposure_locked.png create mode 100644 app/src/main/res/drawable-mdpi/exposure_unlocked.png create mode 100644 app/src/main/res/drawable-mdpi/flash_auto.png create mode 100644 app/src/main/res/drawable-mdpi/flash_off.png create mode 100644 app/src/main/res/drawable-mdpi/flash_on.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_auto.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_continuous_picture.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_continuous_video.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_edof.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_fixed.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_infinity.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_locked.png create mode 100644 app/src/main/res/drawable-mdpi/focus_mode_manual.png create mode 100644 app/src/main/res/drawable-mdpi/ic_burst_mode_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_colorize_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_exposure_red_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_exposure_white_48dp.png create mode 100644 
app/src/main/res/drawable-mdpi/ic_face_red_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_face_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_fast_forward_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_gps_fixed_red_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_gps_fixed_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_gps_off_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_hdr_on_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_help_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_info_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_launcher_take_photo.png create mode 100644 app/src/main/res/drawable-mdpi/ic_mic_off_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_mic_red_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_mic_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_more_horiz_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_pause_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_photo_camera_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_photo_size_select_large_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_play_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_power_settings_new_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_save_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_slow_motion_video_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_stat_notify_take_photo.png create mode 100644 app/src/main/res/drawable-mdpi/ic_text_format_red_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_text_format_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_timelapse_white_48dp.png create mode 100644 
app/src/main/res/drawable-mdpi/ic_timer_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_touch_app_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/ic_videocam_white_48dp.png create mode 100644 app/src/main/res/drawable-mdpi/iso_icon.png create mode 100644 app/src/main/res/drawable-mdpi/nr_icon.png create mode 100644 app/src/main/res/drawable-mdpi/popup.png create mode 100644 app/src/main/res/drawable-mdpi/popup_flash_auto.png create mode 100644 app/src/main/res/drawable-mdpi/popup_flash_off.png create mode 100644 app/src/main/res/drawable-mdpi/popup_flash_on.png create mode 100644 app/src/main/res/drawable-mdpi/popup_flash_red_eye.png create mode 100644 app/src/main/res/drawable-mdpi/popup_flash_torch.png create mode 100644 app/src/main/res/drawable-mdpi/raw_icon.png create mode 100644 app/src/main/res/drawable-mdpi/raw_off_icon.png create mode 100644 app/src/main/res/drawable-mdpi/raw_only_icon.png create mode 100644 app/src/main/res/drawable-mdpi/settings.png create mode 100644 app/src/main/res/drawable-mdpi/share.png create mode 100644 app/src/main/res/drawable-mdpi/switch_camera.png create mode 100644 app/src/main/res/drawable-mdpi/take_photo.png create mode 100644 app/src/main/res/drawable-mdpi/take_photo_pref.png create mode 100644 app/src/main/res/drawable-mdpi/take_video.png create mode 100644 app/src/main/res/drawable-mdpi/take_video_pref.png create mode 100644 app/src/main/res/drawable-mdpi/white_balance_locked.png create mode 100644 app/src/main/res/drawable-mdpi/white_balance_unlocked.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_add_a_photo_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_bedtime_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_bluetooth_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_check_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_close_white_48.png create mode 100644 
app/src/main/res/drawable-xhdpi/baseline_delete_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_face_retouching_natural_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_filter_vintage_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_folder_open_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_highlight_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_panorama_horizontal_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_photo_library_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_portrait_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_remove_red_eye_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_rotate_left_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_rotate_right_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_shutter_speed_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_text_fields_red_48.png create mode 100644 app/src/main/res/drawable-xhdpi/baseline_text_fields_white_48.png create mode 100644 app/src/main/res/drawable-xhdpi/flash_auto.png create mode 100644 app/src/main/res/drawable-xhdpi/flash_off.png create mode 100644 app/src/main/res/drawable-xhdpi/flash_on.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_burst_mode_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_colorize_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_exposure_red_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_exposure_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_face_red_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_face_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_fast_forward_white_48dp.png create mode 100644 
app/src/main/res/drawable-xhdpi/ic_gps_fixed_red_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_gps_fixed_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_gps_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_hdr_on_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_help_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_info_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_launcher_take_photo.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_mic_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_mic_red_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_mic_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_more_horiz_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_pause_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_photo_camera_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_photo_size_select_large_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_play_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_power_settings_new_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_save_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_slow_motion_video_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_text_format_red_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_text_format_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_timelapse_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_timer_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_touch_app_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/ic_videocam_white_48dp.png create mode 100644 app/src/main/res/drawable-xhdpi/settings.png create mode 100644 
app/src/main/res/drawable-xhdpi/share.png create mode 100644 app/src/main/res/drawable-xhdpi/take_photo.png create mode 100644 app/src/main/res/drawable-xhdpi/take_photo_pref.png create mode 100644 app/src/main/res/drawable-xhdpi/take_video.png create mode 100644 app/src/main/res/drawable-xhdpi/take_video_pref.png create mode 100644 app/src/main/res/drawable-xxhdpi/auto_stabilise_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/auto_stabilise_icon_red.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_add_a_photo_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_bedtime_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_bluetooth_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_check_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_close_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_delete_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_face_retouching_natural_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_filter_vintage_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_folder_open_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_highlight_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_panorama_horizontal_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_photo_library_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_portrait_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_remove_red_eye_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_rotate_left_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_rotate_right_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_shutter_speed_white_48.png create mode 100644 
app/src/main/res/drawable-xxhdpi/baseline_text_fields_red_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/baseline_text_fields_white_48.png create mode 100644 app/src/main/res/drawable-xxhdpi/dro_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/expo_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/flash_auto.png create mode 100644 app/src/main/res/drawable-xxhdpi/flash_off.png create mode 100644 app/src/main/res/drawable-xxhdpi/flash_on.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_burst_mode_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_colorize_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_exposure_red_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_exposure_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_face_red_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_face_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_fast_forward_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_gps_fixed_red_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_gps_fixed_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_gps_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_hdr_on_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_help_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_info_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_launcher_take_photo.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_mic_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_mic_red_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_mic_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_more_horiz_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_pause_circle_outline_white_48dp.png create mode 100644 
app/src/main/res/drawable-xxhdpi/ic_photo_camera_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_photo_size_select_large_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_play_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_power_settings_new_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_save_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_slow_motion_video_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_stat_notify_take_photo.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_text_format_red_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_text_format_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_timelapse_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_timer_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_touch_app_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/ic_videocam_white_48dp.png create mode 100644 app/src/main/res/drawable-xxhdpi/iso_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/nr_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/raw_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/raw_off_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/raw_only_icon.png create mode 100644 app/src/main/res/drawable-xxhdpi/settings.png create mode 100644 app/src/main/res/drawable-xxhdpi/share.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_add_a_photo_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_bedtime_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_bluetooth_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_check_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_close_white_48.png create mode 100644 
app/src/main/res/drawable-xxxhdpi/baseline_delete_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_face_retouching_natural_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_filter_vintage_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_folder_open_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_highlight_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_panorama_horizontal_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_photo_library_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_portrait_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_remove_red_eye_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_rotate_left_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_rotate_right_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_shutter_speed_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_text_fields_red_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/baseline_text_fields_white_48.png create mode 100644 app/src/main/res/drawable-xxxhdpi/flash_auto.png create mode 100644 app/src/main/res/drawable-xxxhdpi/flash_off.png create mode 100644 app/src/main/res/drawable-xxxhdpi/flash_on.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_burst_mode_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_colorize_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_exposure_red_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_exposure_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_face_red_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_face_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_fast_forward_white_48dp.png create mode 100644 
app/src/main/res/drawable-xxxhdpi/ic_gps_fixed_red_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_gps_fixed_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_gps_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_hdr_on_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_help_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_info_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_mic_off_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_more_horiz_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_pause_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_photo_camera_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_photo_size_select_large_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_play_circle_outline_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_power_settings_new_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_save_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_slow_motion_video_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_text_format_red_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_text_format_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_timelapse_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_timer_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_touch_app_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/ic_videocam_white_48dp.png create mode 100644 app/src/main/res/drawable-xxxhdpi/settings.png create mode 100644 app/src/main/res/drawable/circle_background.xml create mode 100644 app/src/main/res/drawable/key_visualizer.xml create mode 100644 app/src/main/res/drawable/key_visualizer_red.xml create 
mode 100644 app/src/main/res/drawable/shortcut_gallery.xml create mode 100644 app/src/main/res/drawable/shortcut_ic_face_white_48dp.xml create mode 100644 app/src/main/res/drawable/shortcut_ic_photo_camera_white_48dp.xml create mode 100644 app/src/main/res/drawable/shortcut_ic_videocam_white_48dp.xml create mode 100644 app/src/main/res/drawable/shortcut_settings.xml create mode 100644 app/src/main/res/drawable/take_photo_selector.xml create mode 100644 app/src/main/res/drawable/take_photo_shutter.xml create mode 100644 app/src/main/res/drawable/take_photo_shutter_pressed.xml create mode 100644 app/src/main/res/drawable/take_video_recording.xml create mode 100644 app/src/main/res/drawable/take_video_selector.xml create mode 100644 app/src/main/res/drawable/take_video_shutter.xml create mode 100644 app/src/main/res/drawable/take_video_shutter_pressed.xml create mode 100644 app/src/main/res/layout/activity_device_select.xml create mode 100644 app/src/main/res/layout/activity_main.xml create mode 100644 app/src/main/res/layout/alertdialog_edittext.xml create mode 100644 app/src/main/res/layout/alertdialog_textview.xml create mode 100644 app/src/main/res/layout/arrayseekbarpreference.xml create mode 100644 app/src/main/res/layout/listitem_device.xml create mode 100644 app/src/main/res/layout/myedittextpreference.xml create mode 100644 app/src/main/res/layout/popupview_arrayoptions.xml create mode 100644 app/src/main/res/layout/popupview_button.xml create mode 100644 app/src/main/res/layout/popupview_radiobutton.xml create mode 100644 app/src/main/res/layout/popupview_switch.xml create mode 100644 app/src/main/res/layout/popupview_textview.xml create mode 100644 app/src/main/res/layout/stamp_image_text.xml create mode 100644 app/src/main/res/layout/toast_textview.xml create mode 100644 app/src/main/res/layout/widget_layout.xml create mode 100644 app/src/main/res/layout/widget_layout_take_photo.xml create mode 100644 app/src/main/res/menu/main.xml create mode 100644 
app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml create mode 100644 app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml create mode 100644 app/src/main/res/mipmap-hdpi/ic_launcher.png create mode 100644 app/src/main/res/mipmap-hdpi/ic_launcher_background.png create mode 100644 app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png create mode 100644 app/src/main/res/mipmap-mdpi/ic_launcher.png create mode 100644 app/src/main/res/mipmap-mdpi/ic_launcher_background.png create mode 100644 app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png create mode 100644 app/src/main/res/mipmap-xhdpi/ic_launcher.png create mode 100644 app/src/main/res/mipmap-xhdpi/ic_launcher_background.png create mode 100644 app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png create mode 100644 app/src/main/res/mipmap-xxhdpi/ic_launcher.png create mode 100644 app/src/main/res/mipmap-xxhdpi/ic_launcher_background.png create mode 100644 app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png create mode 100644 app/src/main/res/mipmap-xxxhdpi/ic_launcher.png create mode 100644 app/src/main/res/mipmap-xxxhdpi/ic_launcher_background.png create mode 100644 app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png create mode 100644 app/src/main/res/raw/mybeep.ogg create mode 100644 app/src/main/res/raw/mybeep_hi.ogg create mode 100644 app/src/main/res/values-az/arrays.xml create mode 100644 app/src/main/res/values-az/strings.xml create mode 100644 app/src/main/res/values-be/arrays.xml create mode 100644 app/src/main/res/values-be/strings.xml create mode 100644 app/src/main/res/values-ca/arrays.xml create mode 100644 app/src/main/res/values-ca/strings.xml create mode 100644 app/src/main/res/values-cs/arrays.xml create mode 100644 app/src/main/res/values-cs/strings.xml create mode 100644 app/src/main/res/values-de/arrays.xml create mode 100644 app/src/main/res/values-de/strings.xml create mode 100644 app/src/main/res/values-el/strings.xml create mode 100644 
app/src/main/res/values-es/arrays.xml create mode 100644 app/src/main/res/values-es/strings.xml create mode 100644 app/src/main/res/values-fr/arrays.xml create mode 100644 app/src/main/res/values-fr/strings.xml create mode 100644 app/src/main/res/values-hu/arrays.xml create mode 100644 app/src/main/res/values-hu/strings.xml create mode 100644 app/src/main/res/values-it/arrays.xml create mode 100644 app/src/main/res/values-it/strings.xml create mode 100644 app/src/main/res/values-ja/arrays.xml create mode 100644 app/src/main/res/values-ja/strings.xml create mode 100644 app/src/main/res/values-ko/arrays.xml create mode 100644 app/src/main/res/values-ko/strings.xml create mode 100644 app/src/main/res/values-nb/arrays.xml create mode 100644 app/src/main/res/values-nb/strings.xml create mode 100644 app/src/main/res/values-pl/arrays.xml create mode 100644 app/src/main/res/values-pl/strings.xml create mode 100644 app/src/main/res/values-pt-rBR/arrays.xml create mode 100644 app/src/main/res/values-pt-rBR/strings.xml create mode 100644 app/src/main/res/values-pt-rPT/arrays.xml create mode 100644 app/src/main/res/values-pt-rPT/strings.xml create mode 100644 app/src/main/res/values-ru/arrays.xml create mode 100644 app/src/main/res/values-ru/strings.xml create mode 100644 app/src/main/res/values-sl/arrays.xml create mode 100644 app/src/main/res/values-sl/strings.xml create mode 100644 app/src/main/res/values-sw600dp/dimens.xml create mode 100644 app/src/main/res/values-sw720dp-land/dimens.xml create mode 100644 app/src/main/res/values-tr/arrays.xml create mode 100644 app/src/main/res/values-tr/strings.xml create mode 100644 app/src/main/res/values-uk/arrays.xml create mode 100644 app/src/main/res/values-uk/strings.xml create mode 100644 app/src/main/res/values-vi/arrays.xml create mode 100644 app/src/main/res/values-vi/strings.xml create mode 100644 app/src/main/res/values-zh-rCN/arrays.xml create mode 100644 app/src/main/res/values-zh-rCN/strings.xml create mode 100644 
app/src/main/res/values-zh-rTW/arrays.xml create mode 100644 app/src/main/res/values-zh-rTW/strings.xml create mode 100644 app/src/main/res/values/arrays.xml create mode 100644 app/src/main/res/values/colors.xml create mode 100644 app/src/main/res/values/dimens.xml create mode 100644 app/src/main/res/values/strings.xml create mode 100644 app/src/main/res/values/styles.xml create mode 100644 app/src/main/res/xml-v25/shortcuts.xml create mode 100644 app/src/main/res/xml/preferences.xml create mode 100644 app/src/main/res/xml/preferences_sub_camera_controls_more.xml create mode 100644 app/src/main/res/xml/preferences_sub_gui.xml create mode 100644 app/src/main/res/xml/preferences_sub_licences.xml create mode 100644 app/src/main/res/xml/preferences_sub_location.xml create mode 100644 app/src/main/res/xml/preferences_sub_photo.xml create mode 100644 app/src/main/res/xml/preferences_sub_preview.xml create mode 100644 app/src/main/res/xml/preferences_sub_processing.xml create mode 100644 app/src/main/res/xml/preferences_sub_remote_ctrl.xml create mode 100644 app/src/main/res/xml/preferences_sub_settings_manager.xml create mode 100644 app/src/main/res/xml/preferences_sub_video.xml create mode 100644 app/src/main/res/xml/widget_info_take_photo.xml create mode 100644 app/src/test/java/net/sourceforge/opencamera/test/UnitTest.java create mode 100644 build.gradle create mode 100644 fastlane/metadata/android/en-US/images/icon.png create mode 100644 google_material_design_icons_LICENSE-2.0.txt create mode 100644 gpl-3.0.txt create mode 100644 gradle.properties create mode 100644 gradle/wrapper/gradle-wrapper.jar create mode 100644 gradle/wrapper/gradle-wrapper.properties create mode 100755 gradlew create mode 100644 gradlew.bat create mode 100644 makesrcarchive.bat create mode 100644 opencamera_source.txt create mode 100644 settings.gradle diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..46ed441 --- /dev/null +++ b/.gitignore @@ -0,0 +1,26 @@ +_archive/ 
+_devdocs/ +_docs/*.jpg +_docs/ads.txt +_docs/app-ads.txt +_docs/gplay.html +_other/ +_saved/ + +.gradle/ + +.idea/* +!.idea/inspectionProfiles + +build/ +app/release/ +gfx/ +testdata/ + +*.db +*.iml +*.apk +*.ap_ + +local.properties +uninstall.bat diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000..136b777 --- /dev/null +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,41 @@ + + + + \ No newline at end of file diff --git a/_docs/.htaccess b/_docs/.htaccess new file mode 100644 index 0000000..1f8de97 --- /dev/null +++ b/_docs/.htaccess @@ -0,0 +1,3 @@ +RewriteEngine On +RewriteCond %{HTTP:X-Forwarded-Proto} !https +RewriteRule ^ https://%{HTTP_HOST}%{REQUEST_URI} [R=301,L] diff --git a/_docs/baseline_add_a_photo_white_48.png b/_docs/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..17a587b41c4d79237ddb7572b51191359e35af4f GIT binary patch literal 1725 zcmV;u215CXP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D22x2xK~#8N?VM>( z>P!^J1s6o3f(Sk?xJ40>7~%WB@Do5Z#x(*iR7FwLfTD7J{-MVkEK^$BnS$K;y}0&N ziuB*J%$XUkYkPaUPoj^TeoAzZ0vR9$GQd<0T3T8f8X6+wi?_G8TrNjO?_$b>pw53S zv`W^K$PhC?3S@v3$N(vj0a73Xq(BBpfeerW86X8RKni4l>+0%?4)yW-{lmkD+pL8gI>+9>Iqa!l<`qtLg(8Q6!2|TC0y}e4M zLN;%HeTc!e6N$vg$OzfI>4hAmYSfwpxX>d>+UxahZf*_^4wB7}-l<~HD49%h*Qlq; zJT%(c+TyE`qh(%16>Ftht!6TrkB<+s?O|$8nt5ZI5^e@(1OjFr9;nyr&=F7Z_V)Ju z{hiK!r#A)qDG>Dd`1s-Bf$S!FDhF8x2_zbghQnd9n~(w#&q19mQJUe^AR@>Vy>NO=yG)cK4B;nBBOx}F)f-nkOFBL7#N_V22vm`{r&xP)IbVk zfE368QvzWyj-svsC+XKc4YxgtHrogcKuV=jtyaSrS96!0&4bi6zr(cMqGXmk>^7%ZZ!QJZ3GqeGhTwGl2?Cik%=#(%_g&5yH3+kMl zoZR2vlWhY8-OS9)^z<~xh_jM|IL_lhor8mer>7^fd4R+ejYfHC)X5wKb@uo7`Kkku z;E#`w1*IhC1X3f*aG4l|98nXAL>3z00H&dLKqE7g>lSb>#E_NG&d$ha0oi0~Y6|KE zgTYogAnx7W9V#8FnBHtW-XWDr*&ir%Cd5!~6h$GUh0)Pb+}p~^irxSRC+FtokZABS zpU+2UzXRcRcXzo5nnMD$Dd!Ut6Y+T5mneStv0oFsfKNA`d^(3*@Un19q7 
zVg*8mW_Dg~H@Cm5S%grj*Xwj-8?geF%Vp-&HAjJ%Z}n?DNDr6GMMq-bxVKrXKz_fU zxgn0@6K1{@Gw!S*Rv;u%W>3H|M=;-tX9&|zVK6@{uQfFk0rs}AUnCH-8$k2HrKK#D zO3Xns$BnMS1!4|)Fp%Ri0jSqf@ET_U;R1!jVdjbm1hVNwQ0M06hKwfAe*}X;GHfGU zAicpZoXE~hJ1#spIAG4A1OfpreT<55fx!IyJTntno@s|Vdh`FtK3v-1zc?X~*Xu=B zNyc{+MakuIKk_409ldo&`?Dw-oDc}WiI|lIF>-!>Za&tKcMw^6>LA(ibsyl6KzOQc z7LE{W>2$iiE8$n_PKP$@j(pdkRh(NF!yO(T9zH%klFb5MhKvqJQ1=9(r9vkGYPA}? zhMAK8VqyNDNhjClp^>2PmyV5%t*@_hai31E&Ep0(Ha7HD{wy;(I?79(RvZ-wAQ`n; z`Lh5bD;|#vQU^FK5WvmEVzEdhVzXnyF;H-9R{0D3p8z6x0cYLZV%!lV#pK&`bT&sqtvZX4@AKu0S>FvN|y@MJxGBJ zkOFDZER&cLh~p|v_k$WrM-5B~^pAY=Nky|?$CN;&QVDgJjGhL!d3Se5MgvmFfI+IS4y; T69FUK00000NkvXXu0mjfdCU!K literal 0 HcmV?d00001 diff --git a/_docs/baseline_delete_white_48.png b/_docs/baseline_delete_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..318f8e78d1211e7931a498b6d49eb7dda4049166 GIT binary patch literal 670 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sBugD~Uq{1qt-3{1wJE{-7;ac}SJ&3oh^;*gjwXfGQYA=l70 z>-+QyMlLOt78mZmmTR(X6LrjIUZ1i>vv3B5^5~Pzad7i`)1Cxu>aFH>2%-Cn;v%dgPBMg!wkj*wgyRt zYcp?OuzPVdcg4TnXaTSzH^npmstG(-c=4}Rb%UL~^r??;o@;McfU1XUqz&UHvyEJT z_OBb(xo76tI>cVzf9d$W%;axx7|%Wb?7e60p8TX|Gi-G?pOkx0wfEneRj+JytCiFG zZeBK;9j15a_~VUlF4sGrkuqMM8n%|D;-Bi-@4x@96;Lz0YsU zsPx#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D3FS#dK~#8N?VSr# z8aounK@b&iX$7i+QfuE+`~Lnj{Q~Xu;k4R1P^pSmQQp3I-1FyV?`0Gu*(6K0z&7^bH@3?&cuIiz`FVt# ziQdY$!9M(kW4wfC1R(G3WHOm6P4aI4O_guMT7ndUi~Jg^mwMvd$d87V*_k+4*R9~;<;$K$gsX%xnWpuYv< z1PBBIb(P`A1~%!w(qKtrFiwD2ELKq&er#a#)~#D*mNW+A1elzhoMM~G(P`T$$^@X$ z(XcElfBW|BJ^aHq!U_AC8+vG(W}C{P{0IwxGN3GTb924rQWnY_zY!yELRS$_ncaAS z7<)Bz;7!%<-@jSP=exYTtW&zvY&N58*ER5pab;zNrPPwkIx4ECZxdgzt%+k`*6uiSp2KzBch~?$w?)zSrRLa)YR%3T}_rGh= zO}Rlg{jk%D?+41Yy}jM0vT=oXu7fC(C*+mNDj&cWLe0a655ovM)6>)4nGMvGwRIKA z_9_0(%*>Q=O^WeJGq}5C_Qu z(%;7U`MJe@>I?Eo-Q}>~?+=bb4CHrxeZ72odKzQ9ZiG7oq2O-c2T+)7HhWI}YTvJ4 zzk!wa!ZI# zrz12!RY(AOifrjF2=n?2#847kI)1K;6PK6Ja0l;0DzW&M}K4+2vAwcA@T{U!V 
zq2U#j4UV?HzFv{_d>bVK*wD?$KZ@}eH$a(kx!ffj4Z^YA(kK-G;D-A7^XH0_kYpOR z$741H!tBw}(Iopx8Oj9!7?Fgh&;#2JB|ZBYfxW%G1`DkLgju#*DOv=uAt5ksX3b?XnfBV+8m71<{@v?6QmIsT$_$mIMP<^mz(ctJiA19L>eVYt^*o|cfc#_n z3Y8^5WtyFvn^V5zRha;k+3x3KD6>*aOHBo3x_|%v^!)rh_f$?Eln8(o>Cc`$vqUF5 z;himJsj;N%;M&LoFDM&@BR?Ds6M5NgDWnCkEUShU=}x>J=OmL!>8DAj;M$`3#_@2p z*RNkkL!pp#4^K(}tUPr3DFK^NE+LhBh`b;y4lj-{O(=o?c*! zY`QiqP1kFw%0EQJN20LSfb9}OSODC3=q9AJs|j0VGk~zbc4-6Gpk9oU?@-vl7S%@TBaw$=IZL|R4f+LHi8E)#Wi&0Kb=ks*Qd}4@P3t{=PD?~qeqXz_$(#c zr3~1Fm+Gi|n}vmikmzD?ApywyrF%WYfPKuu##j>8rm@|QfeqM#O*it6k4&*77otr@ z%j(6epdc7adi(b6jLdxj=fZ}Y<5DEJ(5yag0jL04!%sg@X^7c)^j}_HZer>=`)MFJ zhCCi$S?r$cR>j+(jzor6xYq|zp|q{v&Wz%LFs)ex2 zRVtOZ9ErOZWFxZ-6q!ReveW4__xJZ3{T4Ln>}_F!73TP``u`6c4T37Wa(sN;owUVv^@W}{T9}2vl1j;DvrQ~_Wyx#cJug1V z(_5Z?qSzG-Bm<#LI8OxaR=!C~fZqsM3T-Sy9oIN+m#tOXA=n}jlr1Y#iLoI_a!uNeD3DH$sOJuscB2t%+zwphwMj1_<$$f5@a2hyzo zpVz^vewH*2V+8;^J3E&$tpMlZ^ExbPBt{DW&)wYItT_<@=WJ|jR897NV6*_BQmI7o z!bU^{9E)?R)vA`Ir7AIA0Du=Z+uPe*pFzMeIJWmCkI8sK06=|te?Fh@R`4M38;-%T zY?twPMgTy)qVBi(`-#9jzl~$Ogr@`m + + +Open Camera + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera Credits

+
+ +

< Main Page.

+ +

Open Camera is written by Mark Harman. Additional credits:

+
    +
  • App icon by Adam Lapinski.
  • +
  • Improvements/help for video frame rates (including high speed) by George Joseph.
  • +
  • Support for the video picture profiles implementation and custom Log profiles JTVideo, JTLog and JTLog2 design by JT Haapala.
  • +
  • Smart Housing Bluetooth LE support by Edouard Lafargue
  • +
  • Improved selfie stick button support by Lau Keat Hwa.
  • +
  • Option to storing yaw/pitch/roll in Exif user comment by Joshua.
  • +
  • Option for filenames to be based on UTC (Zulu) time by David Pletcher ( lpm_sourceforge AT cathedralcanyon DOT net , https://www.cathedralcanyon.net ).
  • +
  • Support for manual ISO for old camera API on Asus Zenphone 2 Z00A and Z008 by Flávio Keglevich ( fkeglevich AT gmail DOT com ).
  • +
  • Changing icons for pause/resume video by Johan Ejdemark ( johanejdemark AT hotmail DOT com).
  • +
  • Various improvements (including for lock screen behaviour) by Daniel Zhang.
  • +
  • Option to use milliseconds in filenames by Rob Emery ( opencam AT mintsoft DOT net).
  • +
  • Azerbaijani translation by Eldost ( l-dost AT mail DOT ru ).
  • +
  • Brazilian translation by Kaio Duarte.
  • 
  • +
  • Catalan translation by Cambrells.
  • +
  • Chinese Simplified translation by Michael Lu ( yeskky AT gmail DOT com ), tumuyan ( tumuyan AT gmail DOT com ) and Tommy He.
  • +
  • Chinese Traditional translation by You-Cheng Hsieh ( yochenhsieh AT gmail DOT com ) and Hsiu-Ming Chang.
  • +
  • Belarusian translation by Zmicer Turok.
  • +
  • Czech translation by Jaroslav Svoboda ( multi DOT flexi AT gmail DOT com , http://jaroslavsvoboda.eu ).
  • +
  • French translation by Olivier Seiler ( oseiler AT nebuka DOT net ) and Eric Lassauge ( lassauge AT users DOT sf DOT net ).
  • +
  • German translation by Ronny Steiner, Sebastian Ahlborn, Carsten Schlote, Wilhelm Stein, Jochen Wiesel.
  • +
  • Greek translation by Wasilis Mandratzis-Walz.
  • +
  • Hungarian translation by Báthory Péter.
  • +
  • Italian translation by Valerio Bozzolan, Stefano Gualmo ( s DOT gualmo AT gmail DOT com ), Renato Giliberti.
  • 
  • +
  • Japanese translation by Mitsuse and Yanagimoto Yoshiaki.
  • +
  • Korean translation by Halcyonest.
  • +
  • Norwegian Bokmål translation by Imre Kristoffer Eilertsen ( imreeil42 AT gmail DOT com ).
  • +
  • Polish translation by Jacek Buczyński and Grzegorz Koryga.
  • + +
  • Russian translation by maksnogin ( maksnogin AT gmail DOT com ), Grigorii Chirkov, Dmitry Vahnin aka JSBmanD, Aleksey Khlybov, Ilya Pogrebenko.
  • +
  • Slovenian translation by Peter Klofutar.
  • +
  • Spanish translation by Mario Sanoguera ( sanogueralorenzo AT gmail DOT com , https://play.google.com/store/apps/developer?id=Mario+Sanoguera ; Sebastian05067, https://forum.xda-developers.com/member.php?u=6302705 ) and Gonzalo Prieto Vega.
  • +
  • Turkish translation by Serdar Erkoc ( serdarerkoc2004 AT yahoo DOT com ).
  • +
  • Ukrainian translation by Olexandr ( https://sourceforge.net/u/olexn13/ ).
  • 
  • +
  • Vietnamese translation by Khánh Trần ( crhanh AT gmail DOT com ).
  • +
  • Earlier versions (pre-Material Design) have an icon/logo by Cosmin Saveanu ( http://aboutfoto.wordpress.com/ ).
  • +
+ +

Also see licence for third party files.

+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/devices.html b/_docs/devices.html new file mode 100644 index 0000000..075ba97 --- /dev/null +++ b/_docs/devices.html @@ -0,0 +1,221 @@ + + + +Open Camera Device Compatibility + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera Device Compatibility

+
+ +

< Main Page.

+ +

This page provides some guidance on possible issues of Open Camera on various Android™ devices. Please note the following:

+
    +
  • Sometimes behaviour can be affected by things like operating system version or can differ between variants of the same + model. Behaviour may also change over time depending on different versions of Open Camera, and different + versions of device operating system. So something listed here isn't a guarantee of that behaviour on a + particular model.
  • +
  • This information is provided "as is" with no warranties - if you need to be certain of how Open Camera works on a + particular device, ultimately you'll have to test it yourself.
  • +
+ +

General notes

+ +

If you're just interested in taking photos with non-advanced features (without using "Camera2 API"), then most things + should work on most devices, as far as I can tell. I do occasionally get bug reports of things which seem to be device + specific, but not enough to draw conclusions about things not working on particular devices. The most commonly reported + issues seems to be:

+
    +
  • Video is one of the most difficult things working across different Android devices - on some devices recording comes out + corrupted. In some cases this is only on some resolutions, or it may be + all.
  • +
  • If you're wanting to save to external SD cards, you'll need to follow the advice at + the FAQ . Note that in some cases, it seems that SD cards + can't be selected even when using the Storage Access Framework option - this is a device issue, and + something out of my control. If you're wanting to have lots + of storage for taking photos or videos, it's probably better to make sure you get a device with plenty of + internal storage (internal storage is faster anyway, so means faster taking of photos, and more reliable high + resolution video recording).
  • +
+ +

If you're interested in enabling Camera2 support for advanced features (manual focus, manual exposure, expo bracketing, HDR), + be aware that some devices have poor support for Camera2 (even if they support the API, the + implementations seem to have problems). Thankfully things seem to be improving on that front with newer + devices. Please read the details below on what I know about different devices.

+ +

Also note that just because a manufacturer advertises a particular camera feature, it doesn't mean that Open Camera can use + it. Unfortunately some manufacturers limit some features to the "stock" camera application, and don't make it available + through to third party cameras. This tends to be more advanced features - 4K video, high photo resolutions, high frame + rate video, RAW.

+ +

Device specific notes

+ +

Google Nexuses/Pixels

+ +

In general, Google Nexuses and Pixels have worked well for Open Camera.

+ +

Camera2 API on the Nexus 6 works well (there are some minor issues, e.g., manual exposure doesn't work well when recording + video). It's hard to be sure about other Nexuses though.

+ +

Similarly Camera2 API works well on the Pixel 6 Pro. Open Camera also supports Google's HDR+ mode on the Pixels with Pixel Visual Core + (including the Pixel 6 Pro). As of Open Camera 1.50, Night Sight on the Pixel 6 Pro is available via the photo mode X-Night. + As of Open Camera 1.50, all of the Pixel 6 Pro's cameras are available to use by zooming in or out. + As of Open Camera 1.54, you can also select individual cameras directly (tested on the Pixel 6 Pro).

+ +

Color effects don't work on the Nexus 7.

+ +

Huawei

+ +

I've had reports of expo bracketing and HDR in Camera2 mode not working properly on some Huawei devices + (Huawei P8 lite 2017, P9). See here for details.

+ +

I've also had reports of RAW/DNG images being saved with red/blue swapped. See +here for details.

+ +

Nokia

+ +

I've tested Open Camera with the Nokia 8. Everything seems to work as far as I can tell, including Camera2 API with full +manual controls, RAW and 120fps video.

+ +

OnePlus

+ +

I've tested Open Camera with the OnePlus Pad. Everything seems to work as far as I can tell, including Camera2 API with full + manual controls, and RAW.

+ +

The OnePlus 3T had problems related to Camera2 API and manual exposure:

+
    +
  • Manual exposure sometimes fails (the preview corrupts or the device may freeze for a few moments), this also + includes HDR.
  • +
  • Low light scenes show the wrong ISO and shutter speed, both on-screen and in the saved Exif info (although the + photos themselves still come out okay); also a knock on effect of this bug is that HDR and expo bracketing + don't work in low light. Manual ISO also doesn't work above 799 (the images still come out with ISO at 799).
  • +
+

See this thread for more + details.

+ +

The OnePlus 5 seems to have the same problems with Camera2 API as the OnePlus 3T (see above). Issues with RAW images have been + reported for third party camera applications - see + here, + here and + here. As of August 2017, + this + seems to have been fixed. + But as of December 2017, there seem to be + additional + problems with RAW on Android 8. +

+ +

The OnePlus 3T and 5 are rather old devices - as I say, the more recent OnePlus Pad does not seem to have these problems.

+ +

Samsung

+ +

I have tested Open Camera on a Samsung Galaxy S24+ (Exynos SM-S926B) and Galaxy S10e (Exynos SM-G970F). Mostly things work, including with Camera2 + API. Known issues are:

+
    +
  • Night mode is available via X-Night.
  • +
  • Slow motion and high speed frame rate video doesn't work on the Galaxy S10e (see below for more details), but fine on the Galaxy S24+.
  • +
  • The "Image quality" setting has no effect for JPEGs on the Galaxy S10e (unless post-processing options such as auto-level or + photo stamp are applied). This has also been reported for other Samsung devices; I also have the same + issue with other third party camera applications on my S10e. However the image quality setting does work on the Galaxy S24+. See + this thread + for details.
  • 
  • +
  • The photo shutter sound always plays at maximum volume on Camera2 API, this seems to be + a Samsung issue - a workaround is to turn off the shutter sound + via Settings/More camera controls/"Shutter sound".
  • +
+ +

All the Galaxy S10e and Galaxy S24+ rear cameras are available (including telephoto for the S24+), you can switch by zooming in or out. + As of Open Camera 1.54, you can also select individual cameras directly. + Also the two modes for the front camera + ("cropped" and "wide") are available to Open Camera.

+ +

At least some Samsung Galaxy devices support the camera extension modes (X-Night, X-Bokeh, X-Bty) (including the Galaxy S10e and Galaxy S24+; +in general this is more likely to be available for the flagship S devices running Android 12+).

+ +

The Samsung Galaxy S24+ at least supports Ultra HDR JPEG image format.

+ +

More generally I have occasionally tested on various Samsung devices using their remote test labs - although useful, this is limited + compared to owning a real device (especially when the test labs are dark!)

+ +

I've had reports of the audio being out of sync with video on the Galaxy S7 and S8 when in Camera2 API mode.

+ +

Older Samsung devices (e.g., Galaxy S5) didn't have 4K video recording available for third party camera applications. In some + cases it could be enabled with the "Force 4K" option, but this only works on some devices (in some cases whether it works + depends on which variant of a device). As of the Galaxy S10e at least, 4K video is available.

+ +

Some Samsung devices do not have any "scene modes" (in some cases this can depend on even which variant of a device is + used).

+ + + +

At least some Samsung devices don't seem to have support for high speed frame rates for video for third party camera applications. + Filmic have documented issues for the + S9 and S9+, and + Note 9 (these articles + are for Filmic Pro, but the issues faced likely affect all third party camera applications, including Open Camera).

+ +

On a related note, the Galaxy Note 4 and 5 were used with Open Camera to film + the + world's first 4K feature film shot on a phone.

+ +

Sony Xperia

+ +

To enable the 23MP photo resolution, you need to set Settings/"Camera API" to "Camera2 API". Someone has reported to me this works on the + Sony XA1 (G3123) (Android 8), I'm not sure about older devices.

+ + +

Sony devices don't seem to support + RAW/DNG at the time of writing.

+ +

I've had a report that manual white balance temperature doesn't work (Sony Xperia X Compact).

+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/exposure_locked.png b/_docs/exposure_locked.png new file mode 100644 index 0000000000000000000000000000000000000000..3b2aa4bdf39e1cac46524e0cd42bfe7dd602ff9c GIT binary patch literal 2022 zcmZvdc|6mPAICr2T-#ikD+z`8I&zhCY2=)JM}Fi8bCxl*lv|dl%wi%EMh8pIxxSOW zGvvsW%tUC0E=1(`&JjQT`TIV8f4ttW$Mf&k>+yQ0Ibm!iU`j9m03__~tekh<_qW7E zcJI`KXtP}dMLOGB0@ZJoXLdp8lEt4E06@=#Z}|!DGW4>YM|7GVN4xheSNWJC^J zW<1!n$stSIiO&K1;}9r0ntdTRZDOm6AjgSGn-9brpLyL&p5Gf!-RU#&=|c2>|6cjZ z=-H%Zt2oLKq}}1=Br)k&Y}MaODz>_ioP#E{XQ_{MU%n4)<4qA5n&wP*Ca?EP+3m{Q zrKo5J>J5<3o$ljw8ZECfoYa%8pQ%ar0sFn$=HZs zzZ=V?gMkhKy@`olgsVBrUH)=Qi<1e231)bmecpK}F;b`KOuwk=nT*MCuLH=%{^H7= z`7tU#UEzg|+u6f{5k;*Ga-c)}+6rapITuzUR-QTl%W93%cT-X(A57v0gI=zmQuT2E z2JL$j+o!}-QS=M-x6~nLT-5dUHmZvgZ3J^$NH*UZA9zly^XvdyZ{gJ3ymb1^jJu!> zGDeo7DEzeInTdFObe_Fib1T3T8G|cVrsgkwzzJ+vhV5Jz4@IS)S*~LvpOd-dAMOgW z+XA}sbwTatE;oub<4UQX^68B{vADyXVxC7QvcN1&tAz=vy+-CRw-eV$%Mz0E8m&nc zFLG*m>ZBX9GI~inDo|!z(6d1spVSJs^v<)lEd%cdff~qC%38$Yuj>mZqy6JV&bBoj z($ejm?mpJZ^BKa(3QYEzuWTnCS^STc%-xQh851E*=%$w;(*tPfi%-K23Pme@0?nbJe8&ou6DshEa66Y7!BW1?Y z@LO<&r{no1g;}A3xZWZ5oAFmEwv3o_{}C#LyiK(#=prqFj)Jz*(?5nZVok3PU2gB` z!DjC3Qxlg8B@rX<@LGoFFJO0)D;{2~4M{*+1LL1Xe3%;aBL0HhE%kG3HG@y(w=Tuz zr%(Jn#&vzLzf06bXt6b(=CSz~rjT#XHKouzb%6`zTs7$Ic`bus< zS`D3il7(>iLlK`3c&SNu8QJw2+I*ChFTFNBd`CAVNlTCPyk_fVn$-=V3pVxLhEq{G zEz#`E8oIVV8sm01pg=xM0f{bk$xQotQe(tequ5cy!ket9D(gtiQ8n;AM=jBbVAlSb z-UL^>|LN-`iL6s9Llv?ul$ShXXox>ou&P~UQS(K&VM4eWsP7U*g}$p;hoOlgeBl>+ zrwbm~cg2U5z*Nrp!P(>I$mvz<;9%?V9Ia`psS=fBo*7&%Cc zAQ+88yWEf-=Pt@LrUZeK=jJ>FTNp@?%?Eq#kz8Rh+F%>z`7c7@aa&){70L;sMj+iT-gI%Isz-w(xPptI`Xg8er&`W9m z8_F|3N|!_xYYFUL*1CX;U{Lt&mV?Lq?WV8( z15q{rSX%?$_4S1Y#luBC&`n?LQv#$~8o5ALJv|jyMhh5NkNsm~buOYt4WEhbn*LTj zdjj8KJ~AxPsLGg4MCvm-T*K=cn42M*`&f3C!8``-@ zq?cTm{S1enNnv+%G-E=9wRL;Zk8^9SiT+ru>|o5bWU;!r@?5!~Llt^$hvrSh#D9FV x-wYXe*u)8Agp8tWmPa_EhX1cDI?Df?5pkaWinkuTxBHs`duxnUwWV+Je*sf2y%zug literal 0 HcmV?d00001 diff --git a/_docs/exposure_unlocked.png b/_docs/exposure_unlocked.png new file mode 
100644 index 0000000000000000000000000000000000000000..e784ce18ad4878999b7109ad37b4ee7169cc6a54 GIT binary patch literal 1794 zcmZuyc{JOJ7XBrch6Jgmw#K71GK{@ZRa=PEh>4|^&`J@t*AiQcwj{4=irU6h(P$~H zQUpOp(`qcu=zEDGh`vxE460t2JT)fTzBuQ-citcK$6daA?m73~@7!~5p`Xw9YN~pw z005}rJ#qeW8?h%4Ww~CezGyEupmhK5u|UTda$OD-W8J*n0HCW3x=U7)V-2v^4 zAKDYpK@%@304U$W`)%32-TFee1J9Kr z5sJN(sqWF5+k}Nc_4}fq*VjmR!I-s$)M-0I>#l!uX<3=h<2vd76SP z8P)*#HOp&^3j_l3AFdG4(Bk_`E_Z{wUnP$47Kxm!$Sj6`OV6``fhAiJq7m@0;p+%> zs1rM`I-uWHS22AomLypp14E`}W^R4>`0*%Fu@PXgSPRHwMX48gozg)Q5cn7r`?b`o zJHv(3OYtThT5w}tIrp=a58>&Td)=>Pv)QIqr3Om$^wiXeZ<^v}!f#mUt;}V(N$8k2 z>X3YZ$z<}e4Pv%5Q`{8UHJjLbW&vldDpr0}W#9=KH%20nP3`oQxQZ)jBqM=$3Bmc& zsA0e)q`wr~(dGG50P(63XUoUeH%QVmT?`P%_R$cW(h2LaF~inMpXRT|3fYrh*B5Cs z?Z5@PhKC9#6OMwmTGFGGVeJ0?9fuPqvRNCK>su2vJ;K7n&;L@O`+M>f4Rw|*+=-za zh{(x#+}qVah!{d>JOp(wEW;zg=Fw@+6h4m z3TD*4%?x9_95)x1JAPYmzT^NXvjH#aq_~KW0L1=~#mVL45jyL)FEwp(Ls0Nduj2h0Ws|c=(;oQcK%2!aczt=XKH0_EpDdG$D@tzJA#Du&5CHn zK!s=2vW13Zbnc0FB`^EZC8Kb%gGub#YlNc(MypsY#$TQ#(y>t3M0D=384n-nFm7;37-F(F$&@p00uXuSw!$(frY3ELRPW}(3Xw{Hz8?ixvpO`^dE zqJpP&9dS!wT;nN$>D>qM15nq2;mgr>`SZ!EQfYc;E4LsW_~a>JrV>cNBv_IY{Ua-e z8Skw2!ILHGJvJ)JbaB;fd(yRj4))12v<{OBx^Ywg^^=kGO)PP`Do>WSv`hXMd$4t7 zgFvFzOP=qzrbJwLc?%4qbqu#l=i#H(op+o3TV%t-!$GyX2EQoGbsTKtOa&So%2?Kq z;P|d8kPd0YOe_Q0`*b+s_{6$O*ViC=1(?ERSzb4PK7H+)lCfr?iLDtt!q8S0F#6D@ zUbqoo;-S#s9X?}1P`ds+GQsY)eqaW@;T{=LKsXiDJsOEpMuR5TGtS4x{z~+7k22)0 ztVI716eQC4Tzd6|4!p$U_Ornqz;1{*OJF(;;;1CcZ8JJUA(V{v&KY6?kj~04xkacv zwNBkWLDP9BX!(mMm)w92A&uWzhdL|ChKHR!yScQ8s@*ME_(6uZPPPmF?xw>^+6k}9 zj`!xk(w&%>0UtyD!Q`gMmc6Rc{=k?g$lgdvd_#Uq{tUDMg*YsKRjOx)6eKRfy?_z= zmI3zd=H`7?UmqgTenxt#Kua4;R&?cM=jCCXcikb>bIu0QepR`Mtb8~J&(n<#;U<3e zfPm;+`l+fQ-b@PEiYlr`KdNqDPz(cDHnH^KBQYDtY1&j?!k2OPUrmaZWWINvXl8-_ z`20oNIpeC;%ctGnya&hC{+44WS((G{&jP*K%7K-!?3Grz)+Y++&U|c>N5NGc)ndpPQjbwBYZLjMP zXEsJ9e?L%u+FP#-9O;O`1b-mn{x@ZVqBZu+mB;=qCbJX*^O}1IC|C8*Nc(>}B|8^~ z%*@O-`Fwtb)f_k6H1BtfUeQY0E5YBn&QRCyLaY#~U&ogj_d?8P@Cwo>_NwJ*5|5Y> 
zfiA9Q#)9U!i6?&y;?=w`d}|wH=FoXl?iT_+k$v|R4xQ%GYo4Oj#m~-}TUmWwyYnnm+6R literal 0 HcmV?d00001 diff --git a/_docs/focus_mode_auto.png b/_docs/focus_mode_auto.png new file mode 100644 index 0000000000000000000000000000000000000000..7e48c33a32bccdcd0e3eefd092a610803a017922 GIT binary patch literal 1612 zcmc&#`#;kQ82*mTt*MYooYYxjPMvbuX+tz*?yR{Ljf#om)=0@^Zpo*ZQi;iqBq_E` zCSh}HXmh_;7^g6rxmGsXSwEfs;G7?x_xU{U``i0`-sjD5b+&~lX(|B#0I@?`W28*{ z2?emUww&}AB%8-D7&$*<=zR;{K>Pmo$9FQ(Uv;s920X$bALJv5u4ru+S zZDtyfWXYqmScz5R#&C-R;eKltw-wS*_%bXEe*F0Cyg*=rk7w4^nNDaPe&9`GuLEdm?i0xwCh5GSa=G^u zLta;`MCYJ~mLO+q)6>%t`WoaRrRaQtY!D-obHtfZQ=?2Gkq*b`Hzd37v0Rw)Orgo% z-QoECc6p0$X_Bkn>NKwgm8Pk!9beU9xHeMXy5jXKaYD$t>>wE)9*$IFW-!lDbn+KD z*=2Ksu(urP^ofLSR8lj=<26Q`WTT-d-3`^%aC7nj-BRh1NI(kKvwIzPxeC>XvXpJO2mbWW1XibU#o9(dPUzEvL!zq99>`D--LRcdq;lJzlP`3kchbpletlkVyN7 zFGD|4OEBeBD%|Y#r3WC`ef!C|x#uq!f|XYw>|oXgTD<0Q-Qx56CXaoXC8fenU^3$S zfcNG+F~BhIgxZzXmKLi}dEvo)xr@O;;v5u)WDO13c#1pK+mM=(k#VP^qhnuR!aKTA zzN7BnW!PXxGWyeclc$*hvA&}vjE@7;i&t?opFTl_1>k!{MMd}c2`~(6wyA2gItwqi zDBgs`#Fk-^$HuaxH%BGi9P3F(gwdm#1%irS%p};JDih*|T%&&eYNk3-IKHzlToG}) z%EyH!)){N;&pUJ@J>96%>&uwUV}*zs7E4mQcQ=mcPEj~OAPx#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D2SG_hK~#8N?VRgR z8ch_&!Bs365%7Yvg=nRT#>A#E{{MbAP17b#n^dgTG6M)-4xPYea{nV|qaW3&L4W!1Q8EXE2jHa1pST3U*8 z*I0}c0LEWmU!SEJz$4~joB(a(kB^V@h}jq=K+ud2j1eG+#s@|S5LDv>?F9(3@qyL? z1l{;RTLIpJ@qv~CycOdE?F4vB#s^yc9?-AxZ*Fcv7Z(?y+uPfVR;zV?cXxMvb#-+~ zV<~FS@PE@9Z3K{f^4r3IMx$|fbaXUAW9i2M&Co&s)o!OP2+fXiu~>{Wo6RKulkfO0 zfY&}BVw|3yUheJfHONiP^004cUx-Nqq2IrMKLOEmxm<&XeZzMF@DzUg_N{^^(@R*0 z*viUEXl7=n%)_3e#SHLMgt@u7P(GheFDxv0`^T9;q@holnwokvY`?UxwY$4prnYAp zlU4$NendDvJ`Vl<{hNFLV_DWkGMOwV5{V35YqDQKAjGd4|=3)}KFr4$sfem5ZQd79<%HDEH6&{rh)fYip}|aBxuT<(d#` zd3iZAIXQXAL(0%L4V4beD89D0x8vWxe?Kah%gt`rg$S#ws~P%KnjvXj2dK!7=vwpS zUaUf)Q0+y8PoF+14aiyx092)@rBW$L@#C=DbrHl>rl{Hr091)UX=mo=&!3JWU>3~J z&yTAr)|3dKV#}Z^5gLs~Pa>dL3x~tvOBdn-P6+;xqZ0P%P{$4%qe+uM6^ng!8lRIE4? 
z5g-zYRH$GHbGL)Dv$N~_`}=9`Y7dqz@Uta%JKz;ok-9)c04h{&UvobIPg1d1Jm;=4 z?C>7F0B>k1J8VzJmfH#^|n2)6&OD_BK+`D4#hobI_FL$Ic-PYyygKGhh-M}Z>L3y?EHFZ@ueRte5rPn9i9T?XFBoc{JC;v}3VNCFSNwr#? zbBY5bg6n#~wE&a>MY!1ke5gs;&r2v9N;t&dGcibEp*8L@Qs1_+u8MhGErdySNOK@{wTnj*jO3JBPOeT}Z z+%*m_s(c9-G~-k)VwW;Ejl=1?OI7)<1z>E`F)Gw1xoI3u-<>j0ksSdd0yG+pBW`v8 zABrvF+%*RJ&PPuE-|3$FK|}z450`t-fe*$0eC`@UHk(a2`G2}++eOHJ5D|dBP}(UD z@K%m0ceMxEPb!tNU1+0wZBYUw@`|fe40wDAnwuTqNm^J~5c^wxQ>)bq+;okYQHsUl zn4KE}v$L~PB!u`5W13mwo3oJJ*lmBrSgBMJd%;^wfM&Cqa@xv)S+KacD1Lwj&JLw) z#j`!ACnqO0%7DaHfJNN`UjKKR1(a7?*gef%5oiW@wheDXC?|1pwrR(p0FB2Hiz4$sMUci2#5C^0-rR z24F0>Bk90wk_gHK0DSNT_g)9aLSSBAUT#ubWPJsgB7#x@$oOd-e9?;t>2%uq`t|F4 zKA*2iEnAo%f>HqhP94F)7u`evFxZC=AL1Ju8_Ca~KVM3$qUtV!XQ!p$;0r!`fk(On z(GW){4Xk6MY_(cVQ#SH!jTk3h;n1aS;9AJ%ky7%I7I=DG4H>_Tvlp<=-j5J%+JZc# zpKIwTf-mO<;p_#x{y&HaZqZEy8n@IYvdU z=UKT*)|eyq$>g;g`kotScc3oUwJO$Ceg%+~>B#bJn`lY_ zDKXD8?GajR0tI(e^5{)oLhH>-C3%Z%dV)7d*mr!P?yaI6fN=!g%Ly4fi@a6;zlfq; zqqjg|@BOmU(!L3LLbUj!UT(Z>Ie}0z(BBVZ_7@fuXa@NC;i~I)jf|CnBUVjqZOH-X zv9CNHFByaRSx))9GW*y4EtRuq=c#7_v#~AgtSnGKmFGiGx^?IBa!hPgRFtBa);=vQ zQY*g;OuafaI~zho!46ao&80jZ&EfaSVX;_{gM$O*l9&h_lIw{2aI8nTw5%+zwzl># zCEWL^XLUvlyf%D|7;SZ%^3cN^;GYRGeW=<T2u-Zfy27nW@+o)cg;7%fhY(8qLfBnW>vwQ9@N_qJZ-cCsosB15 zBSVfrjF0aj7CC8tAHe-PPqceRBH7Ne=+a5A1#P`SjhOUk1A7&dcD7nLd8?aS30*{c z^z#h33yLwi@8YbkwRe@IP+XRrY3C<7R?)Cu%+Qhjp$<8WGi5DT!s+$(Maa}aQz~&l z(P^icYP4_=?;e&AAMd7rriMKl$kT(;f*!9w^&9h=21eyl*A_^Z~;0y3mF%X{g4?IANm4rIMo%mqAl_osE27k~Jv# z*T07T`if-}Q2JIDYleRhcRMEuD=+RJBtv?N(?linc;AebeI!EA9bf3(ng1YC=yCWT^nhE|x39is2P~Ij^?5scH z(H>|Cd=QuFRtvGvq)>t(;IQgQ-8iYg_oCL^2TK|Nw~aV4#1HcIjtm5J63&;{kM^Zp zmb%wOgMB6jPyOLF8iVCk0pk069vm~lAyl-hDdt)wwGMOs97WFMHs!SFO z{MGv0a;*X-S1#oc5c2E63H8`DcML?WeQpMU#$^D`==y>Pi>oxne8|X%dX10aJ#`ey!-`m%}|Cb5}!K)2Wq9X&qfk8nXtDbj!SR?NCMTEzT%8`Lx zNocdqBRi9E*X#H3@IYn<6qm|)X~s@g@%d$DT*CNb*Zy-exkvJF#o0a*b{Fn`N=ZXk z^DUrqys^3Ya#F(sr}d#B4<~dZ3Z{e=CKF1G0{UfSWUNx_Vs6~9(hwIpyh7*~I^VWp zeNzY6Gl>?mk{ 
z;;m|TRbNrdF&y05Gv)Oes!v#ACW}g?YCiE}4BeU0gr+4D7f<3rp(`fEj2zD1WvQs- z1eIO_=|%9U42E~~)M(kxR}i;xNhEcfZ!WbI`eo8UU=oG0_StF$gf&X~_U5Y4INdA4 zYA*MHE$yX3u|%AVi;dAW8ADrcqy*6@Oek(9xSnZ}`@U;KWVn%ntWrVGaY_4G@tm2XR5yzcH{X7W^w=I_e5@-RSRkH$Ob7TU2Ppl&2y+lfiR=!ezQvq49r%ZE{-7;ac}SX`bz~$I3!B=EcNw?R-Uo0 za`O+0-k@228;>F$ji5nZP{dD)tSW-TlHx+X@i{{EdiHR98SPtUS` zUnK6ZH_m_iwrzIDpB|ZYHf`m_j3ZY!Pn|k-^4z&|*B4rDT(k4DUU{^`55L~)%e2qT zJ^R4&c{an2JGqRjSFg^zu{u77@%r`anX9ixRaI2%;GeGkaNWex_19OI?Y`^Z?{2&L zYS!}Gw{OQk`gAhCxP4w)>u$GaZRc0V#m4TPVf~&<_`tt^{|bLFzxm(-vFZMs z?y*ahzq@%ea`XN7*B=|115Mx2*VpG4wf0)~WV=Ylp4AsK=9tYs8!rEU|ARQ@5Ur^* zZJrsJPmgBRYIu0La{cw{ua^IH4)461_dPe}nOEYrhS{@c*X_P5H*dP+yd{!5TXxsa z-N8A3a!gE&j=h|rRyMapah~d#zdtHm_KB!9?A^Qf@rPK8uah0w+1YzPC|#KBXxZ?m zwXZ&O21A?w(T}e}&St&aqY=dL@m7iGpV#Mq8wM<{Vf(>7pYa*b*X`x$M|By_F+AV_ z5k=p_3bsb=jfswq_M7y?chAZz@4s6=fBt;w@87?_e_Yn7tEU&YZvFatj^foHmpzoP ziEG_y|L)zpe--~0a1@C*EIW-b3Tz&|hznjz=hp{hwrniu1LZpgPgg&ebxsLQ0E;1& AasU7T literal 0 HcmV?d00001 diff --git a/_docs/focus_mode_infinity.png b/_docs/focus_mode_infinity.png new file mode 100644 index 0000000000000000000000000000000000000000..649110367e5b3c8d27ad805aa513314d3c657033 GIT binary patch literal 2458 zcmb_e_g4}O7e>N~+p@y>YQE-PSuSwlDm5ijY$%lFC=t}G%)LrYEk|YQYZPW?3R>>W z365}^N}@S&q83^xj*s_zf5P|u@Z5Wzd+smy+~+yxNp^9z6A^+40RR9I2YWbz+fn~Q zfR9^eUfea}Hl7fKofV+GS9Xa@K)#kvmH>SoC{J=1u{M{&ZWCZ>(iBX8D*o{cW=_oVbR+LvqUdDIbOTNyb0BteDm(F ziq?${zh1t5_l*DDyk3KCYUr4gVcy({ANf8*`_FZ}M|On23p{G#b2`Rz>N=?Zv4n+z zEuiOrCTt~(=;(uz+THUpJNcmamP5D|H90va*<;5#dwP5S$w*J{O(B?^!O#~?p^fD& zG@2(Yt1DklLBX3-(?}T`Lk8Tuc{3D?)m6&ep>%YtM2m@*-*QpUsdgzII9nun#G(5n zSX=;fwgxj%e+&XisYJ0gzTo(khBab=3VS|0%_gIV`haBMdmbjy-6508iEM|OOk9*;EG(R3!*}0_B{uunquox}RBDw2*%$k5 zk$#&mS%v1&N$|`dl)jd>mX^)u#Vu!&b6#FvIl60rcj8MVhrUixr7B2y``y1^xBPru zQP{Yz(7dqyV?#iQr@vrtA8^pQWytNPhtf|P5*W)e;NW}TQ`ss4=lWHbZSu&X* zZyuGu=pHFhm}7q;iT2b6A*E$4esMNBTkXRU)|B5_RaI3R`g7dX#5C}ffswwxer!ax ztQ2ea*ZjHVjg5Z8r0!N((qJ?cN7VBd`)uni*=<5*S#I|W8+vP{M 
z3ts$KTU#3#8F5FRxH@pcLO)j%yI6aJ>Zmtchyp^bc#64~Uoo&46Ngx<6Z8(&e>C|9 z*U=x_2Y_ii;xMTYS5c88@c8p7K9@<8_{g+tu;bbO#I&@u)yc_8!<9%Ufabd}1z3d9 zO7?Ms)>SW?Hs1kLNhME+Q0%0U_GQ1^WU45Vp^RK0QA2)h?@B-fm|lAymk}|{B;TI; zGy&McM8jPgWvj2s7==c+;9lq-5-!RjdN=Wn`$qu#6xt3H;5yf9B}>ZCp8fH~;$109 z4iL&!8l_mo%iCM5W{90o%BiZVs?Q2}FZDI9?v53Gq)rk5)wj9zHzp5786O|#_p6n7 zIxHa`ESXAz76Vxg++Tg{s7;Eu^$`KF?0oI-SMlM~Y`tz2o8aa`+8>P{fMfo7_DMt6 zfKYF0Y8tpDZdyrOJtZ@>4oqFR=-n88CvMv&ydh3L`o@hSu~bTo<81UU+cltq0JI&_ zwft4A$BxDOo_@+r*0aAN<(u}vF4jU?ul~^C zdsTN?`#90~XtGZqK0f&6EYca25B3`6jWO{5RV=-_hZT_Xv^>5jJf>Urxl(GAEhcY9 z5-F6vEu5}NHT6J`o;T{KV6Kv?Hx4aAOP#0r8x6FyABcuWxa-+k6OJ!HKM+%%(ahVN|p)61p>We)Z@e;!2Lk09a!(3M~T=>8kK41yWV zqc3Rkv+)yJ0aIcSsuh#cu>edhT?jifd}>+~(5POMK@RkA{mGuVf-0G+CLbE)adc1p z0M|fn!<7cME@0H=Nk4JM>juITnvNli^nGd?*K3Prm8hKI{fc0?QDdaMdK zfBf50eG@vb?c|5yhs^o;B4*wL(Z3=PGb0{ue|{cBH7@Xx?}S^fsl2o6Rog{gHoLTADIq@qSU8* z5HA_7lReqt5oyf_enEz`x?YKTDEefMgXMbVu_Q)SP{Kq9lCkJ5Xn|UKIoWr*_3_Uq zU7SDb&rOZ3I^z(Ixs;&Mn@W!4RPyUtHQrWzO&uN81qS1Lx!9kQT8Bl(BvN`pkE)Ha^kNv+fCE?7R6#eU63@|v5O(UXW!%3QE#nftc2=;>ep?jGn?a^3#^{wv2e zDaI1H#0Xse{-Se71?&@p-H>XMqmd)D&e8K9wZgk7%8H&>Y*%qxZhJ}(=ov%+te!U~ zzaJ-&N=r*i5G8XZTWPj$a*K*omRNYNmK|(iH>Q+uLus4qPM!jOgGK3xk%wXmgI;G~ zXe6FAQJygl%V7$TUKQV|yy(>&7d%EJ5*PON>m-+-+CIt=RbaN7?CtM0BxEqdK)53@ z{o#XqY*&=@ur_QbxmzOQ$cNq&rn% zryQ2h^vW`bsMyHNQjrtv7s6oCe!hCSBQB)c?nx(|)*Y1Ag@Yc5)x6orq4ccx>i>XI rE^Q-)kox%lQ}Xz4bUEw&;{fmm5p&e9cXyPVrvMH%&hT<8RNQ|6nDn7Y literal 0 HcmV?d00001 diff --git a/_docs/focus_mode_locked.png b/_docs/focus_mode_locked.png new file mode 100644 index 0000000000000000000000000000000000000000..bcf7034e63c12e3d882a091574fd206e3a226117 GIT binary patch literal 1324 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sBufiR<}hF1en%|TBW$B>G+w{vd$KXMRhcg#DwDRj>wyBDiJ z9{m6R?EV8El1%nhbZ&2G30R@9Wy9yoi(e|e*>vd3nK$$F70Z@g{$_E%#A@#{e?LDr z&Bk8UU)lzWOfv)-&vY=PxiAc1n0-(8?#%Pd|Nh=~58i%ZCLP8D_Kh z`Yr$bcx{7ggz|yUd(NFY)wNj5cK6n;Q={VImRY{vvuDqetgTWhEo;M8KmT3VnI_r$ 
zktrlQfAQ9Et*LJ+mijXr&6_vRX=`|i?|tna(Fv!Y-rBu;x4qidZ;?N5Xv@jT-D*B~ zz(deYu7$g`slKKr#=^qlN7++mo$k_v*a-Dh>DIz(hA2C&Go^QJQfq}?KmWZqZvE1z z()iT=D@K*a3&XB{RJ70S=WJRKzcwuXuz^XIzk%O@fB*hPuagAS;O^+8`jgO6WUA}zzVf#?K7iGKe z-Mt&@7+Thm`Z(e44K~%@0~(C0SFb)=vD%e&RaZ}5e!lUl+nPb&e6GCysxz(E`@pV^ zyk1g`hD;ZXwr}6Qc}k5luPXa0CF3c7vNz~lb3AFjfp_vZreu~C(_Z#%ubXo>=+kWz zsor-jdk-vCv3BD)kz&-CHFN)wriF}eL`B<;BO*GQ`A}QLJpUZO?)I18e_Ov^o_uZTwoDiE6ff~77oMoUQd#Xixp=YPzI`&@SIl}P zcqJ7T=K9rlI`dj9&zdt^qswX5pOi~xmIg{4{~>Yj#$uq%G{f^xkx8uHxuflk`*$DT z&5;(lUrMu7y7w4Ec(A6nppftatpoS(-1)NXu7P1AzvkDfyT?^|PF?d-iWP|Zd-38# z>-V~DcNNTzbVLXmr)H}PGOoH>{c8U2Z>dk0IIiiKIVtL*p2J^byIr!k9nP$Kopqmk z3wHo}%<(6A=~5j#WAwx|R`R}O`mlt_H>zD~mEp_#tx>wZC+(XyMI2*WBhaurBvInrpIe%zj^&%r>Ir3k zST6|47+j1yY-gNJOP9O3V2$o}$F%|tx0_cvrOVx34CcOU`u6Rc*N;3F)#Xh_8s46d zH@Y=sU3_`?@Zq4PL75M8t4{r6shrUcl)8Cr@!yj>GV}MHXP!GF@RjVBpWW}u3MTr0 zpZztguPNZ-1gZZwFFo&1UfNL43AP&UabQ;6!riebP&v1?w#M1KL)0)hNV}8c!0zqo>FJFx z{WmCrB586}yv0ZHt+K~6b8~W@`2LJL&t4Ju`{z$d`Igv)Yg`Q89bWriy0gUbPzh2Y jf$Sh6g(Ezd)-iQ_RKD^%Wu6kSgk$h@^>bP0l+XkK<_ckC literal 0 HcmV?d00001 diff --git a/_docs/focus_mode_manual.png b/_docs/focus_mode_manual.png new file mode 100644 index 0000000000000000000000000000000000000000..c4341672b3ad641f5c9d11526af2583016a44a64 GIT binary patch literal 1614 zcmc&#{WsGK82<`mq!e{CdE3-*L|iVe7@8t1%(-ld2vZcNh1zAN6gygymrNAqC1RDg z-3(ju(!_ARE=HH6HE+2?X5+5=N8I~4=kq-0^ZDU9&pFRIpNH&?I<2MxQ2_uz&D{;@ zE9;~mR0hfFTgQ<1vL=W3J?#R}xrYTZp@4Jtat44`nW~ZyMVSW2x?%7Du!s4Ba^2sD ze*pl+S$Cwf-;JQfq6uFJM)R&(n#GI-RV#}`Tolof){;1+)_(OdBRp!&*(g39^B@c4y>O+;BdH$dI+Maj;`*M z`z>#y>k{#rkSd(Ic;!m1Db#e_!@+Prxlnm!QZ~c4>R!_Kl{W+${5YB|5?z;8r}&8G zErA3sP~}%m$liF+e=U^ns0$vI7{}5<`9(z|&f;kPISV_x&EA5JSZ7kh;zVTX(QHl% zUZfwkl3|jWw$Zc`a=8(E^l5+c)Q1ly?rJ`hGW12%EApDcx&4(xf?5iNGRftlI4H0} z%0c$$*@`9RKJeA^smILBx|l<(puhRdghlD`2OmBKf)Hl=4VxH@ryM7^>K&IeO^C7^ z=S>b~yf60^+-$^3OQ7-N3Rn`22IFs!CfPtZXf)d9z9F?qh{xj{o~yQ2)EJ5Bl)D<% zBxE1mdLq)$!`UW=AB71B_`X0PlDnMBXMFxj1s*z9wp3wZSlC3fKeT(@bi(TEa&wM^ 
z7wba$?YXtkYXmX~;f?CJSwW;TXvsHLIRvF#{4TMG5up?lwq?*6-0^t3-0h;+rf{EA z#X{xOB`28ue)0ll>?bV=d^DOyMdn4f=v}advFGOI8m6rtcpf#cnMk8+wg_0fXx(%8 zXQ^*P;LGYDcT&`RcM*m>O=2X4Ep>*TKxdD8567e#>WOZ!k%XW_!((WaCSIW;? zPJ=!C$BI_%Yv8AG_Ngu<)i&a_z%2c%{F2hrzW)ATCI{C%bY)?y-B?aC5S(a|Tkz_t z5*4zCNeiwT6T-~5k-bh*Zjpu)OXFh^s7$@EAwr3|gO;s3uc>mUdsGK3)+z%Qv*U7X z;UuC#M+_vNN);d(NFvW%%D~n>Iq>HAB`XMS?%JkjPtH*2s{jq3v&cVSRd##AwoE+w zTN52w$ppf$FC(-RQ7E}Os4XKx+iUINs(r$GUS6KKO=Pfkl3TFRE*2Bkhbqs`YHMq2 z2(A+>vpJr)dLFN@uMb`)EqFgi94fET(LGeC=~OCwo&Xt0-yuc?m@Ectxt`3-{nGMi zX4!ta?e$Ikpu3zXt9sfY#s5W6Yi9Q>h9xqWABJ^0u(yD{oX$?ykpi8vl`ULc+*<#V zD5i6|)qe+?pKZ*>$c1^IZ5blzL8Ndv{B7duytcq{yBF9?_8Ur)*}j|8U$QJrxGKeN zpm9cpd4WL~IKK4=+x8f#1|)7XAE!hp_i2tEaa1@(VkS-pfe^$ItAR+Xwt(#2Y{Bkg zx701iz`eNZe+|EgwiqzWJgot_Ujm-1j)S*1@+7i^x1h zLpNf*^zs{Qv^wN5?Ox*sZGk~SsGfMEdBl<9O%zb_N&v z{O|M@B7}_89TTptY|L(~uiu}m2Yof@_W} + + +Open Camera Help + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera Help

+
+ +

< Main Page.

+ +

Contents:

+ + +
+ + + + + + + +
+ +

Quickstart

+ +

Simply point, and press the blue circular camera icon to take a photo. +If your device supports focus areas, you can touch the part of the screen you want to focus. Touching an area will also +(if your device supports it) control the exposure level (e.g., so clicking on a bright area will adjust the exposure +so that it becomes less bright). Double tapping will reset the focus and exposure area.

+ +

To zoom, use the slider next to the take photo button, or do a multi-touch "pinch" +gesture, or a double-tap-drag gesture. You can also control via the volume keys on your phone or tablet - by default, pressing them will take a +photo, but you can change this to zoom in/out from the Settings.

+ +

You can press the gallery icon to view your photos +(by default saved in the OpenCamera folder) - it will show the most recent image or video. The gallery button's +icon will also show a thumbnail for the most recent image/video.

+ +

The screen display is kept on when Open Camera is running as the foreground app (if +you want to switch off the display, do so on your device manually).

+ +

On-Screen User Interface

+ +

Shutter button - Click the blue circular icon to take +a photo. In some cases, you can also hold (long press) for a continuous burst:

+
    +
  • Photo mode must be Standard or Fast Burst
  • +
  • Settings/"Camera API" must be set to "Camera2 API".
  • +
  • Continuous burst only supported on some devices and resolutions.
  • +
+ +

Switch to video iconSwitch to video mode - Clicking the
+ smaller video icon next to the shutter button will switch to video mode. The shutter button will now show a smaller
+ blue dot in a white circle to indicate video mode. Pressing the shutter button will now start and stop video recording.
+ You can return to photo mode by clicking the
+ smaller photo iconSwitch to photo icon.

+
    +
  • When recording video, the main shutter button will turn red to indicate this. A smaller blue shutter button will show, + pressing this will take a photo while recording video. Note that different + photo modes are not supported when taking photos while recording video, nor is RAW supported. The photo resolution will not + in general match the resolution set in preferences, rather it will be set automatically (usually to match the aspect ratio + of the video resolution). +
  • +
+ +

Switch Camera iconSwitch camera - Switches between front and back camera (if your +device has both front and back cameras). If your device has more than one front and/or back camera, then this will switch +between the first front and back camera.

+ +

Switch multi-camera iconSwitch multi-camera icon - + This icon only shows on devices with more than one front and/or back cameras, and allows you to switch between those + cameras. For example, a device might have two back cameras, one standard and one ultra-wide, this icon will switch between + the standard and ultra-wide camera. + Some devices also support cameras that are made up of multiple lenses. The device will automatically switch cameras as required + when zooming in or out, but this menu allows you to choose a specific physical lens (e.g. telephoto) to use (requires Camera2 API). + If Settings/On screen GUI/"Multiple cameras icon" is disabled, then this icon will not show; instead the "Switch camera" + icon can by used to cycle through all the cameras. + Note that some other devices do not allow third party applications to access their multiple cameras at all, in which case Open Camera isn't + able to use them.

+ +

Lock Exposure iconExposure lock - Click to lock or unlock the exposure. +When locked, the icon will change to Exposure Locked icon. Locking the exposure means +the exposure doesn't change based on the brightness of the current scene (though you can still adjust the exposure +compensation). Note that this isn't guaranteed to work on all devices (doesn't seem to work on Galaxy S3, Nexus 6).

+ +

Exposure Compensation iconExposure compensation, ISO and Manual White Balance - Clicking this will bring +a panel with various controls:

    +
  • + ISO (top row) - (Not supported on all devices.) A higher ISO setting means the camera is more sensitive to light, though may + also result in more noise. This mimics the film speed on traditional film cameras. Select "AUTO" to switch back to + automatic ISO mode. + If Camera2 API is used, then selecting a non-auto ISO will bring up sliders allowing direct control over the ISO and exposure + time (in place of the exposure compensation slider). You can also select "M" to switch straight to manual mode, keeping to the + current ISO value. +
  • +
  • + Exposure compensation (slider) - A higher value increases the exposure, so that pictures come out brighter in low light; a + lower value makes pictures darker. One unit of EV changes the brightness of the captured image by a factor of two. +1 EV + doubles the image brightness, while -1 EV halves the image brightness. Set to 0 for the default exposure. + See Exposure compensation. + (Only available if the camera supports control of the exposure.) +
  • +
  • + ISO and shutter speed sliders - If Camera2 API is used, and a non-auto ISO mode is selected, instead of the exposure + compensation sliders, two sliders will appear allowing you to control the ISO more finely, and (if supported) the exposure + (shutter speed) time. +
  • +
  • + Manual white balance - If Camera2 API is used, and manual white balance is enabled (from the popup menu), then this + will also show a slider allowing you to control the white balance temperature. +
  • +
+

To get rid of this panel, either click the Exposure button again, or click elsewhere on the screen.

+ +

Popup iconPopup menu - Opens the popup menu for quick access to changing +various options:

    +
  • Flash - Typical options are off, auto, on and torch mode. For front cameras without a real flash, flash and torch options +will be available which instead work by making the screen light up (note, front screen flash "auto" is only available with Camera2 API).
  • +
  • Focus - Controls the focusing method used. Available options depend on your device. These may include:
      +
    • Continuous focus iconContinuous (default) - + The camera focuses continuously. This is generally the best mode - the preview will usually always be in focus, and taking photos + is quick when the preview is already in focus. Note that you can still manually focus by touching the screen. +
    • +
    • Auto focus iconAuto - + The camera focuses when you take a photo, or when you touch to focus. +
    • +
    • Infinity focus iconInfinity - + The camera focus will be fixed for objects far away. +
    • +
    • Macro focus iconMacro - + This behaves similarly to auto focus, but is optimised for close-up objects. +
    • +
    • Locked focus iconLocked - + The camera does not focus, unless you manually touch to focus. +
    • +
    • Manual focus iconManual - + A slider appears allowing you to manually control the focus distance (only available if Camera2 + API is used). Also see the options "Focus assist" and "Focus peaking" under Settings/Camera preview/ + which may be useful when using manual focus. +
    • +
    • Fixed focus iconFixed - + The focus remains fixed. +
    • +
    • Fixed focus iconEDOF - + Extended depth of field (EDOF). Focusing is done digitally and continuously. +
    • +
  • +
  • Photo Mode - You can also choose different photo modes:
      +
    • STD - Standard operation: takes a single photo.
    • +
    • NR - Enables Noise Reduction mode. (Only available on some devices, + and if Camera2 API is used.)
    • +
    • DRO - Enables Dynamic Range Optimisation mode.
    • +
    • HDR - Enables High Dynamic Range mode.
    • +
    • Pano - Enables Panorama mode.
    • +
    • []]] - Enables Fast Burst mode. Takes multiple images in quick succession. (Only available on some + devices, and if Camera2 API is used.) In this mode, you can change the number of photos to take from the + popup menu.
    • +
    • Expo {} - Enables Auto Exposure Bracketing (AEB) mode. Takes multiple images at different exposure + levels. See Settings/"Photo settings" for options to control the number of images and stops. These images can + be processed in other applications to create HDR images. For Android, you can try my own + Vibrance HDR.
    • +
    • Focus {} - Enables Focus Bracketing + mode. (Only available on some devices, and if Camera2 API is + used.) This mode takes a series of photos each with a different focus distance. Two sliders appear, allowing you + to change the "source" and "target" focus distance. In this mode, you can change the number of photos to take from + the popup menu, and the option "Add infinite distance" if enabled will mean an extra photo is taken, at infinite focus distance. + Also on the popup menu, enabling "Auto source" will mean that the "source" focus distance will be set automatically via + continuous focus (or touching to focus will also select the source focus distance in this mode). Manually adjusting the + source focus distance slider will exit "Auto source" mode. + Focus bracketing is typically used with + Focus stacking software to merge the images into a single + photo. Note that whilst taking a set of focus bracketed photos, you can cancel the set by pressing the "take photo" + button again. Also see the options "Focus assist" and "Focus peaking" under Settings/Camera preview/ + which may be useful when adjusting the focus distances.
    • +
    • X- modes - These extension modes enable device specific algorithms or effects that manufacturers have exposed to + third party applications (via Android's camera extensions API) (requires Android 12; only available on some devices, + and if Camera2 API is used). +
        +
      • X-Auto: Allows the device to choose which algorithm to use based on the current scene. Note this differs to + "STD" mode in that it allows the use of the other camera extensions e.g. Night mode for low light scenes.
      • +
      • X-Night: Improves image quality under low light conditions.
      • +
      • X-Bokeh: Blurs the background of photos. This is typically intended when taking portraits of people.
      • +
      • X-Bty: Face retouch or "beauty", applies cosmetic effects to people's faces.
      • +
      + Note many features may be unavailable when using an extension mode, including flash and manual controls. +
    • +
  • +
  • Auto-level - Enable the auto-level feature for photos (see +below). (Only available if the device has enough memory.)
  • +
  • Aperture - Allows changing the current camera aperture. Large numbers mean smaller aperture, which means less + light is allowed into the camera. (Only available on some devices, and if Camera2 API is used.)
  • +
  • Camera resolution - Change the photo resolution (also available under Settings/Photo
+Settings/"Camera resolution").
  • +
  • Video resolution - Change the video resolution (also available under Settings/Video
+Settings/"Video resolution").
  • +
  • Speed - This allows you to record video at either a faster rate (time lapse) or slower + rate (slow motion). Note that slow motion is only available on some devices, and requires + Camera2 API to be enabled. Also note that sound is not recorded in time lapse or slow motion + modes.
  • +
  • Timer - Set a timer for taking photos or recording video. Also available under +Settings/Timer.
  • +
  • Repeat - Take a repeated set of photos when the take photo button is pressed. The time +interval between each repeated photo can be set under Settings/"Repeat mode interval".
  • +
  • Grid - Whether to display one of a choice of grids on the camera preview. Also available +under Settings/Camera preview/"Show a grid".
  • +
  • White balance - Choose a method to control how the white balance is set. +(Only available if the camera supports different white balance +settings.) If Camera2 API is enabled, then you can also set "manual". In this mode, manual +control over the white balance temperature is available from the exposure compensation icon +Exposure Compensation icon.
  • +
  • Scene mode - Choose a scene mode to apply. (Only available if the camera supports scene +modes.)
  • +
  • Color effect - Choose a color effect to apply. (Only available if the camera supports +color effects.)
  • +
+ +

Settings iconSettings - Click to open the Settings. (If your phone/tablet +has a hardware menu button, pressing that should also open the settings.)

+ +

Gallery - Click to launch the Gallery app, to view the most recent
+photo/video (by default saved in the OpenCamera folder). If you get the message "No Gallery app available", then you should install
+a Gallery app.
+You can also "long press" on the Gallery icon - this will let you switch between the recent save locations, or take you straight to a
+dialog to choose a save location if additional locations have not yet been defined. See
+Save location under Settings/More camera controls for more details.

+ +

Pause video iconPause video - When recording +video, this icon allows you to pause and then resume video recording. (Requires Android 7.0 or higher.)

+ +

In continuous focus mode, a white circle shows to indicate that the camera is focusing. When touching to focus, or in + autofocus mode, a white square is shown (depending on the focus mode), which turns green to indicate that the auto-focus was + successful, or red if it was not.

+ +

The on-screen display also shows the remaining battery left (green/red status bar in the corner), and optionally +the zoom level (if zoomed in), the remaining free storage space on the device, and the current angle orientation +of the camera. If "Store location data" is enabled (off by default), then a small GPS icon +Location/GPS icon will appear at the top left +when the location is available (you can still take photos when the GPS icon doesn't show, +it's just that location data won't be stored in the photo). A dot shows to the top-right of the GPS icon to +indicate the accuracy (green for accurate, yellow for less accurate, grey if waiting for location but using a recently +cached location). If the location isn't available, a dash will be shown through the gps icon.

+ +

Auto-level feature

+ +

All mobile phone cameras will rotate the photo depending on the orientation of the camera, but only to the nearest 90 degrees - so the +photos look right whether you hold the device in "portrait" or "landscape" mode. But Open Camera has the option to rotate the +photos so they are perfectly level, so your shots come out looking perfectly level every time!

+ +

+Screenshots demonstrating auto-level +

+ +

The above shows a rather exaggerated example - in practice, you can probably take better photos, but this feature ensures they +come out perfectly level, without you having to edit them afterwards in a photo editor. Of course you won't always want this - +perhaps you're going for artistic 45-degree shots - so this is an option. By default it is disabled. To enable, open the +Popup menu, and enable "Auto-level". Note that this feature is memory intensive - it will not be available +on devices with low memory. Even where it is available, the performance of taking photos will be +slower.

+ +

Also note that the feature reduces the available space in the image - because rotating an image
+ makes it no longer fit into a rectangular image, so we have to crop it. So it's still advisable
+ to try to hold the camera reasonably level when using this feature.
+ When auto-level is enabled, an on-screen rectangle will display the frame of the resultant
+ rotated photo (note that this won't necessarily be 100% accurate depending on device/resolution,
+ in the same way that normally the preview frame may not perfectly match the resultant photo -
+ this will especially be true if Settings/Camera preview/"Preview size" is set to
+ "Maximise preview size").

+ +

Note that auto-levelling will not occur if the device is pointing up or down.

+ +

Noise Reduction

+ +

In Noise Reduction photo mode, Open Camera takes a burst of photos which are then automatically merged, to improve photo quality. +This is particularly useful in low light scenes to reduce noise. + +

+ +

Some things to note about Open Camera's NR feature:

+
    +
  • In dark scenes, NR will also apply pixel binning, merging 4 pixels into 1 to reduce noise. Therefore in such cases, the
+resultant photo resolution will be halved in width and height.
  • +
  • In bright scenes where there is a high dynamic range, NR mode will perform better than Standard at capturing the dynamic range. +This is similar to what Open Camera's HDR mode can achieve. Also see NR vs HDR.
  • +
  • Taking photos with NR is significantly slower than regular photos.
  • +
  • Although Open Camera applies auto-alignment to the images, it's still best to try to hold your device steady while the photo is +being taken. Aside from reducing the risk of ghosting effects, the more steady the camera is, the more effective the noise reduction +algorithm is in reducing noise and enhancing detail.
  • +
  • If you have a Google Pixel with Pixel Visual Core, you should get Google's HDR+ photos when using Open Camera's Standard photo mode, so there +is generally little benefit to using Open Camera's NR mode on these devices. See "Does Open Camera support HDR+" in the +FAQ for more details.
  • +
  • If the photo mode "X-Night" is available, this may get better results for low light scenes.
  • +
+ +

In Noise Reduction photo mode, an additional "NR mode" option will appear on the popup mode. This defaults to Normal, but +you can change to "Low Light" mode, which further improves results in dark scenes. If Open Camera detects poor light in this +mode, it will take a burst of images for a duration of around 5 seconds. For best results, use a tripod, or try to hold the +camera as steady as possible.

+ +

DRO

+ +

Dynamic Range Optimisation (DRO) is a technique that optimises the dynamic range available in the image. In particular, dark +regions will have their brightness boosted to bring out the detail. This mode is useful for capturing scenes with a wide range +of brightness (e.g., on a bright sunny day) as well as being useful to automatically optimise photos in low light scenes. Also +see DRO vs HDR.

+ +

HDR

+ +

High Dynamic Range Imaging (HDR) is a technique where the camera takes multiple shots at different exposures, and combines them +into a single image. A typical problem in photography is that a scene may contain a brightness range that is wider than what can be +captured in a single shot. Varying the exposure (whether by touching on the screen, or exposure compensation or manual exposure) might +make darker regions brighter, but leave other areas over-exposed. Or reducing the exposure to prevent over-exposure may result in +the rest of the scene being too dark. HDR uses an algorithm to combine the best parts of each image, and adjusts the colors so that +the full range of brightness values are captured in the scene:

+ +

+Images demonstrating HDR +

+ +

The left set of three images show the individual exposures, the right the final HDR image.

+ +

Some things to note about Open Camera's HDR feature:

+
    +
  • Taking photos with HDR is significantly slower than regular photos (although note that HDR will give much faster performance if Camera2 API is enabled, see Settings - not all devices support this).
  • +
  • HDR isn't so good for scenes with movement, due to combining an image from multiple shots. Open Camera does apply "deghosting" +to reduce the problem, but this isn't perfect.
  • +
  • Although Open Camera applies auto-alignment to the images, it's still best to try to hold your device steady while the photo is +being taken.
  • +
  • HDR can sometimes result in less accurate color reproduction (due to trying to estimate the colors from multiple exposures).
  • +
  • Some camera apps use "HDR" to mean "apply a whacky-looking filter". Whilst HDR filters can be used to apply a rather +unreal or vivid image, for now Open Camera's HDR is more geared towards capturing the range of exposures. Also note that many camera +apps that advertise "HDR" don't even make use of multiple images - this is more a case of filtering, than HDR.
  • +
  • If you have a Google Pixel with Pixel Visual Core, you should get Google's HDR+ photos when using Open Camera's Standard photo mode, so there +is generally little benefit to using Open Camera's HDR mode on these devices. See "Does Open Camera support HDR+" in the +FAQ for more details.
  • +
+ +

DRO vs HDR

+ +

Both DRO and HDR share in common that they are ways to handle wide ranges of brightness. They each have their pros and cons:

+
    +
  • DRO requires only a single image from the sensor, so shots are fast to take, and fine for scenes with movement, unlike HDR.
  • +
  • Photos will still take longer to process compared with Standard photo mode, but it will still be faster than HDR.
  • +
  • HDR will in general be better at scenes with a high range of brightness values. It can take advantage of the information from +three separate images, rather than reprocessing a single image.
  • +
+ +

NR vs HDR

+ +

In scenes with high dynamic range, HDR should do better at capturing the high dynamic range (although NR should still be better +than Standard photo mode). Though NR still has the advantage that it is less prone to ghosting and misalignment. NR is also better +suited to working in a wide range of scenes.

+ +

In summary: NR is better if you just want a "works best in most cases" option. HDR may be a better choice specifically in +scenes with high dynamic range, that also don't have movement in the scene.

+ +

Panorama

+ +

Panorama photo mode allows creating wide photos, by stitching together multiple separate photos.

+ +

To take a panorama image, hold your device in portrait orientation, and click to take a photo to start the panorama. Two blue circular dots +will then appear. Rotate your device about the device's axis, either to the left or right, to move the centred white circle over one of the +blue dots, which will allow the camera to capture another image. After each new image is captured, keep rotating your device to cover each new +blue dot that appears in turn. If the device is tilted to one side too much, an icon will appear to indicate you need to rotate the device +clockwise or anti-clockwise.

+ +

Click the tick icon to save the panorama, or the cross icon to cancel. Or the panorama will automatically save after 10 images have been +captured.

+ +

For best results, try to keep your device steady, apart from rotating about the device's axis. Note that panorama photos can take time to process and save.

+ +

Note that the following options are not supported with panorama:

+
    +
  • Timer.
  • +
  • Auto-repeat.
  • +
  • Immersive mode.
  • +
  • Pause after taking photo.
  • +
+ +

Settings

+ + + + + + + + + +

Camera Controls:

+ +

Face detection - If this is enabled, then the camera will automatically try to detect faces, and use +them for the focus, metering (exposure) and white balance. Faces will be drawn as yellow squares when detected.

+
    +
  • If this option is enabled, then you will not be able to touch to manually select the focus area, and + the white balance option will have no effect (since the face recognition will be used to determine these).
  • +
  • If Camera2 API is enabled, then the scene mode option will also have no effect (again since the face recognition + option overrides this).
  • +
  • If you are using an accessibility service such as Google Talkback, information about the number and + location of detected faces will be reported when face detection is enabled.
  • +
+ +

Timer - Set a timer for taking photos or recording video. Press the take photo/video button again to +cancel the timer.

+ + + +

Repeat mode interval - Specify the delay (if any) between photos in repeat mode. Note that if a delay +is selected, this does not include the time taken to auto-focus and take each photo. Similarly, "No +delay" still means there will be some time between each photo. If you want to take a burst of photos, use the +Fast Burst photo mode, or long press the "take photo" button in Standard or Fast Burst photo modes. +(Burst mode only supported on some devices; requires Camera2 API to be enabled).

+ +

More camera controls... - Select to access the following controls:

+ +

Touch to capture - This option allows you to take a photo, or start and stop video recording, + just by either touching or double-tapping on the preview screen. Note that "Single touch" means + you will no longer be able to touch to select a focus or metering area; while "Double tap" means + you will not be able to reset focus/metering areas by double tapping. +

+ +

Pause after taking photo - If ticked, after taking a photo the display will pause, with options to share +Share icon or delete +Trash icon the +photo. To keep the photo and continue, touch the screen, press back, or take another photo. Note that this isn't supported +when holding the shutter button to take a continuous burst of photos.

+ +

Shutter sound - Whether to play a sound after taking a photo. (Not supported on all devices.) + For Camera2 API, this also controls whether to play a sound for start/stop video +recording.

+ +

Timer beep - Whether to beep when the timer is counting down, or for the repeat mode delay (see below).

+ +

Voice timer countdown - Whether to give a voice countdown when the timer is counting down, or for the repeat mode +delay (see below).

+ +

Volume keys - You can set what happens when your device's volume keys are pressed:

    +
  • Take photo or start/stop video (depending on photo/video mode). On Android 7+, volume down will instead pause/resume video when recording video.
  • +
  • Trigger an autofocus - or if in manual mode, change the focus distance in/out. In this mode, holding down both volume keys +will take a photo (or start/stop video). This makes your volume keys behave more like a physical camera button - hold down one +key to focus, then both to take a photo.
  • +
  • Zoom in/out.
  • +
  • Change the exposure compensation (or if in manual ISO mode and using Camera2 API, change the ISO).
  • +
  • Switch auto-level on/off.
  • +
  • Change the device's volume as normal.
  • +
  • Do nothing.
  • +
+ +

Audio control options - If enabled, this allows taking a photo (or starting video recording, depending on the mode) +by making a noise. An on-screen microphone button Microphone icon will appear, to +start/stop listening. The "loud noise" option will listen for any noise (so you can remotely take a photo by saying "cheese", +whistling, or whatever you prefer). Note that leaving the listening turned on may use additional battery. +Note that this can't be used to stop video recording - if you want to have some remote control on video recording, +see the "Max duration of video" option.

+ +

Audio control sensitivity - This controls how sensitive Open Camera is to noises, if "Audio control" is set to "Loud noise". +If you find it's taking photos too often unintentionally, or isn't responding to your sounds, try adjusting this option.

+ +

Bluetooth LE remote control - Open Camera supports connecting to some specific "smart housing" cases via the + options in these settings. See "Remote device type" for supported types. At the time of writing, only one make/model + is supported. Once connected via Bluetooth, it should be possible to control Open Camera from the device. + The on-screen display of Open Camera will also display information from the housing (temperature and depth).

+ +

Lock photo/video orientation - Normally the orientation of the photo/video will be rotated by some multiple of +90 degree such that the orientation looks right - e.g. if your device is held in portrait, the resultant image/video will +be in portrait. This option allows fixing the camera to either be in portrait or landscape. Note that if +auto-level is also enabled, it will have the effect of aligning photos to the nearest +90 degrees.

+ +

Save location - Select the folder to store the resultant photos or videos in.

+
    +
  • On Android 9 or earlier: This opens a file dialog. Click on a folder (or "Parent Folder") to navigate through + the filesystem. Select "New Folder" to create a new folder in the currently displayed folder. Select "Use Folder" + to choose the currently displayed folder. Note that on Android, there are some folders that cannot be written + to - Open Camera will display a message if you try to use one of these folders.
  • +
  • On Android 10 or later: This opens a dialog to type the name of the folder. This + will be a subfolder of DCIM on your internal storage. You can specify subfolders with the "/" + character. For example, specifying Camera/holiday will save inside DCIM/Camera/holiday/ + on your internal storage.
  • +
  • If "Storage Access Framework" is enabled: Then on any Android version, this option + will show up the Android standard file chooser - navigate to the desired folder, and click "SELECT" or + "ALLOW ACCESS" (wording varies depending on Android version).
  • +
+

Once you have specified a new save location, you can long press on the Gallery icon to quickly switch between recent save + locations. If you want to save to an SD card, see "How can I save to my external SD card?" under the FAQ.

+ +

Storage Access Framework - If selected, Open Camera will instead use the Android +Storage Access Framework. This +has some advantages, such as using the standard Android file picker, and being the only way to save to SD cards on Android 5+. +In some cases it may allow you to save to cloud or local storage providers provided by other apps or services. +Furthermore on Android 10+, it is the only way to save outside of the DCIM/ folder.

+ + + +

Save photo prefix - This option allows you to customise save filenames for photos.

+ +

Save video prefix - This option allows you to customise save filenames for videos.

+ +

Time format for filename - By default, Open Camera uses the local timezone for the save filenames, but you +can also select UTC (Coordinated Universal Time / Zulu Time). +For the latter option, a "Z" will be appended to the filename (e.g., "IMG_20160524_155116Z.jpg"). The Z (Zulu) suffix +is a standard convention for identifying UTC timestamps.

+ +

Use milliseconds in filename - If enabled, the datestamp used for filenames will include + milliseconds (e.g. "IMG_20250312_210531.350.jpg")

+ +

Allow vibration feedback - Whether to allow haptic vibration feedback, for example when + adjusting some controls. Note this also requires touch interactions to be enabled in your + device's settings, this is usually under the "Sound and vibration" settings or similar.

+ +

Show camera when locked - If you have a lock screen on your device (e.g., PIN to unlock), if this option + is enabled Open Camera will show above the lock screen - i.e., if locked, you won't have to enter the PIN to + use Open Camera. The device still needs to be unlocked in order to go to the Settings or Gallery. If you would + prefer Open Camera to always be unavailable when your device is locked, this option should be disabled.

+ +

Perform auto-focus on startup - Whether Open Camera should auto-focus when starting the camera. Some devices +have a bug where the flash turns on when this happens, so a workaround is to disable this option.

+ +

Allow long press actions - Some icons support a "long press" action (touching and holding on the icon). For example, +long press on the gallery icon to change the save location, or in some cases long press on the shutter icon will enable a +burst. If you prefer, you can disable these long press actions.

+ +

Calibrate level angle - The options Auto-level, "Show angle" and "Show angle line" +rely on your device's ability to detect which orientation it's being held in (the accelerometer). On some devices this might not +be calibrated correctly. If so, you can use this option to calibrate the accelerometer (or reset the calibration back to the +default behaviour).

+ +

Camera preview... - Select to access the following controls:

+ +

Preview size - By default, Open Camera matches the aspect ratio of the preview (the image that is + displayed on the phone/tablet's display) with that of the photo resolution ("Match photo size (WYSIWYG)" mode). The + advantage is that what you see in the preview will match what will be in the resultant photo + ("What-You-See-Is-What-You-Get") - though this may mean you will have "black bars" on the display in order to do + this. If instead you select "Maximise preview size", then the camera preview will be as large as possible, trying to + fill the available space. However if the resolution of the photo is a different aspect ratio to that of your device, + this will result in the preview being cropped. In video mode, the preview is always in WYSIWYG mode.

+
    +
  • Note that Android 4.4.3 introduced a bug + which means that the aspect ratio will be wrong if you select "Maximise preview size" (i.e., the preview will look + squished). To avoid this problem, stick with "Match photo size".
  • +
  • Also note that even in "Match photo size" mode, on some devices and resolutions, it may not be possible to match + the preview to the resultant photo/video exactly; in such cases, Open Camera will try to match as closely as + possible.
  • +
+ +

Ghost image - You can overlay a previous photo. This is useful for aligning photos (e.g., for stop + motion animation). This can also be used to create a custom "grid", by selecting an image with transparency.

+
    +
  • Last photo taken - When you take a photo, a ghost of that photo will be displayed. The ghost is + reset either by leaving and returning to Open Camera, switching between photo/video modes, or turning + this option off.
  • +
  • Selected image - Select a specific image on your device to be displayed as a ghost. + Note that for this option, the ghost image will not be displayed if Open Camera is running + when your device is locked. If the image doesn't show at all, it may be that Open Camera was unable to + load the selected image.
  • +
+ +

Ghost image opacity - If a "Ghost image" is selected, this option controls how transparent/opaque the ghost +image should be drawn with.

+ +

Focus assist - If enabled, this will show a zoomed in view on the camera preview when in manual focus mode, and you + are changing the manual focus distance. Similarly in focus bracketing mode, the preview will zoom in when changing the focus + distances to bracket between. Note not supported when recording video.

+ +

Show zoom - Whether to display the current zoom level of the camera (when zoomed in).

+ +

Show angle - Whether to display the orientation angle of the device's camera.

+ +

Show angle line - Whether to display a horizontal "level" line that visually indicates the orientation of the device.

+ +

Show pitch lines - Whether to display horizontal pitch lines that visually indicate the pitch of the device.

+ +

Angle highlight color - This controls the color to be used for the angle display and "level" line when the camera is nearly + level; and also for the 0 degree pitch line when the camera is held at nearly zero pitch.

+ +

Show compass direction - Whether to display the compass direction of the device's camera.

+ +

Show compass direction lines - Whether to display vertical lines that visually indicate the compass direction of the device.

+ +

Show battery - Whether to display the current battery level.

+ +

Show time - Whether to display the current time.

+ +

Show camera ID - For devices with multiple front/back cameras. Whether to display the currently used camera ID. + This will be a value starting from 0, that distinguishes between the different cameras on your device.

+ +

Show free memory - Whether to display the remaining storage space of the device.

+ +

Show ISO - If selected, the current ISO, exposure time and frame rate (FPS) will be displayed (only available if Camera2 + API is used). The text will be shown in red when the auto-exposure routine is scanning.

+ +

Show a histogram - Allows displaying an on-screen histogram (only available if Camera2 API is used). Note that the + histogram reflects the currently displayed on-screen preview, and will not necessarily be accurate for the final resultant photograph, + especially if modes such as NR, DRO, HDR are used. Also note that displaying a histogram may use more battery. + The following options are available:

+
    +
  • RGB Colors - Displays a histogram for each color channel.
  • +
  • Luminance - The brightness is computed as a weighted average of the RGB components: 0.299*R + 0.587*G + 0.114*B.
  • +
  • Value - The brightness is computed as the maximum of the RGB components.
  • +
  • Intensity - The brightness is computed as an unweighted average of the RGB components.
  • +
  • Lightness - The brightness is computed as an average of the minimum and maximum of the RGB components.
  • +
+ +

Show zebra stripes - Allows displaying on-screen zebra stripes (also known as + zebra patterning) to show whether the + image is over-exposed (only available if Camera2 API is used). Note when using photo modes NR and HDR, + a region that is shown as over-exposed on-screen may not necessarily be over-exposed in the resultant image, + since these modes are able to better handle a high dynamic range. Also note that enabling zebra stripes + may use more battery.

+ +

Zebra stripes foreground/background colour - If "Show zebra stripes" is enabled, these options allow you to +choose the colours of the stripes.

+ +

Focus peaking - Allows displaying on-screen highlights to indicate edges (contours) that are in-focus + (only available if Camera2 API is used). This is particularly useful in conjunction with manual focus mode, to help show + which regions of the image are in focus. Note that enabling focus peaking may use more battery.

+ +

Focus peaking color - This controls the color to be used for the highlights when "Focus peaking" is enabled.

+ +

Show audio level meter - If selected, when recording video an on-screen meter will display the current audio level.

+ +

Show a grid - Whether to display one of a choice of grids on the camera preview. Grids are useful in photography to help + compose your image. Options are:

+
    +
  • 3x3 - helps with applying the rule of thirds.
  • +
  • Phi 3x3 - 3x3 grid with ratios 1:0.618:1.
  • +
  • 4x2
  • +
  • Crosshair
  • +
  • Golden - displays a Golden spiral (or technically, + a Fibonacci spiral). You can use this to improve your photography.
  • +
  • Golden Triangles
  • +
  • Diagonals
  • +
+ +

Show a crop guide - A crop guide is a rectangle displayed on-screen, which has the specified aspect ratio + (if different to the photo/video aspect ratio). This is useful if you plan to crop the resultant photos or videos + to a particular aspect ratio. For photos, the crop-guide requires "Preview size" to be set to WYSIWYG mode.

+ +

Show thumbnail animation - Whether to display the moving thumbnail animation when taking a photo.

+ +

Show border when taking photo - Whether to display a border effect when taking a photo.

+ +

Rotate preview - this option rotates the on-screen preview by 180 degrees (i.e., upside down). Most users + won't ever need this, but this option can be useful if you are using Open Camera with equipment such as zoom lenses + which invert the image. Note that this doesn't rotate the resultant photos/videos - you'll still have to rotate those + yourself afterwards - but this will correct the preview so that you can still see what you are shooting properly.

+ +

On screen GUI... - Select to access the following controls:

+ +

UI placement - Allows you to choose between various layouts for the on-screen user interface icons.

+ +

Immersive mode - Allows you to choose between various modes which affect the behaviour of the user-interface, in order to +make it more immersive:

+
    +
  • Off (default) - Don't use immersive mode, and on-screen virtual navigation buttons are always visible.
  • +
  • Hide on-screen virtual navigation buttons - After a short delay, any on-screen virtual navigation buttons will disappear.
  • +
  • Hide GUI - After a short delay, any on-screen virtual navigation buttons will disappear, along with most of the GUI buttons. +To exit this immersive mode, either touch on-screen, or swipe in from the top/bottom sides when the device is held in landscape (or +left/right sides if held in portrait).
  • +
  • Hide everything - Same as "Hide GUI", but when in immersive mode everything will disappear except the camera preview. This +mode probably isn't useful for most people (since you need to exit immersive mode even to take a photo), but is available if you wish +to only have the preview showing.
  • +
+ +

Show face detection icon - Whether to display an on-screen icon for enabling or disabling face detection. See + Settings/"Face detection" above for more details.

+ +

Show flash icon - Whether to display an on-screen icon for cycling through flash modes. If this is enabled, then +flash modes won't show on the popup menu. Also note that it is not possible to enable the torch with this method.

+ +

Show focus peaking icon - Whether to display an on-screen icon for enabling or disabling focus peaking. See + Settings/Camera preview/"Focus peaking" above for more details.

+ +

Show auto-level icon - Whether to display an on-screen icon for enabling or disabling auto-level. See + the Auto-level feature above for more details.

+ +

Show stamp photo icon - Whether to display an on-screen icon for enabling or disabling photo stamp. See + Settings/Photo settings/"Stamp photos" for more details.

+ +

Show custom text stamp photo icon - Whether to display an on-screen icon for setting a custom text to stamp onto + resultant photos. See + Settings/Photo settings/"Custom text" for more details.

+ +

Show store location data icon - Whether to display an on-screen icon for enabling or disabling storing location + data (geotagging). See Settings/Location settings/"Store location data (geotagging)" + for more details.

+ +

Show RAW icon - Whether to display an on-screen icon for cycling through RAW modes (Standard, Standard+RAW, RAW only). + See Settings/Photo settings/"RAW" for more details.

+ +

Show auto white balance lock icon - Whether to display an on-screen icon for locking or unlocking auto white + balance.

+ +

Show auto exposure lock icon - Whether to display an on-screen icon for locking or unlocking auto exposure.

+ +

Show zoom slider controls - Whether to display a slider to control zoom.

+ +

Show "Take Photo" icon - Whether to display the shutter button +for taking a photo (or recording video). Uncheck if you'd +rather take photos by other methods (e.g., if your device has a hardware shutter button, or using the volume keys).

+ +

Show on-screen messages - Open Camera will sometimes display temporary on-screen text with information (in some + cases using "toasts"). This option can be used to disable them.

+ +

Show What's New dialog - When updating to a major new version, Open Camera displays a dialog explaining the new +features and other options. You can disable this dialog from being displayed if you prefer.

+ +

Multiple cameras icon - This option only shows on devices that allow explicitly switching between more than +one front and/or back cameras. When +enabled (the default), such devices will have two on-screen icons: one to switch between front/back cameras, and another +to switch between the multiple front or back cameras (and if supported, selecting a physical lens). If this option is disabled, only a single icon will show, which +will cycle between all cameras (selecting a specific lens is not possible with this method).

+ +

Keep display on - Whether to force keeping the screen display on, while the main Open Camera UI is active.

+ +

Force maximum brightness - Whether to force the screen display to maximum brightness.

+ +

Photo and Video Settings:

+ +

Photo settings - Select to access the following controls:

+ +

Camera resolution - Select the resolution of photo images taken by the camera.

+
    +
  • If auto-level is enabled, images will in general come out as a + slightly lower resolution (due to the rotation and cropping that's required). +
  • +
  • Note that in some photo modes, the actual photo mode may be different to that selected here. + For example, advanced photo modes such as HDR, NR, Fast Burst, Expo Bracketing may have a + maximum supported resolution - if the resolution selected here is too high, the largest + allowed resolution will instead be used. To see the resolution currently used (or to change + it), see the "Camera resolution" setting on the popup menu. +
  • +
  • The resolution setting is ignored altogether in Panorama mode. +
  • +
+ +

Optimise focus for... - How to optimise the behaviour when using continuous focus mode. + Latency means take the photo as soon as possible - on modern devices, the result will typically + already be in focus. If this isn't the case, select "Quality" to ensure that the scene is focused. + (Requires Android 12+ and Camera2 API; on other devices this will default to "Quality".)

+ +

Save preview shots - When enabled, a short video will be saved alongside each photo, + containing shots from the moment before the photo was taken. Not supported for expo bracketing, + focus bracketing or panorama modes. Typically the video will contain 12 shots, over approximately + just over a second. Note that this is not intended to be a 30fps (or better) video, rather the intent + is to save a burst of photos. You can use various gallery apps to export individual frames (usually by + selecting "Edit" when viewing the video). + (Requires Android 8+ and Camera2 API.) +

+ +

Image quality - The image quality of saved JPEG (including Ultra HDR) or WebP images. Higher means better quality, but the image files will take up +more storage space. Note that 100% does not necessarily mean there is no lossy compression, rather that there is minimum +compression. Also note this option has no effect if "Image format" is set to PNG.

+ +

Image format - The image file format for saving photos:

+
    +
  • JPEG - This is the fastest option. Also note that only JPEG and Ultra HDR JPEG support saving various photo + metadata (Exif), including camera details, location, and artist/copyright tags.
  • +
  • Ultra HDR JPEG - This is a format that saves additional high dynamic range data, for viewing on + HDR displays, but is backwards compatible with JPEG (so resultant images can still be viewed by older + JPEG viewers, and on standard non-HDR displays). (Requires Android 14+ and Camera2 API, and even then, + only available on devices that support Ultra HDR for third party applications.) This is not supported for + NR, HDR, Pano or "X-" extension photo modes - in these modes, images will be saved as regular JPEG.
  • +
  • WebP - This is an alternative (lossy) file format to JPEG. It offers smaller file sizes, + but saving takes longer, and some applications may not support WebP images. Note that WebP + images are generated by converting from a JPEG with 100% quality (since the Android camera API does not + have native WebP output).
  • +
  • PNG - This is a lossless format, but note that file sizes will be large, and saving will + take significantly longer. Note that this doesn't mean there will be absolutely no loss in quality: + cameras on Android offer images in JPEG or RAW (DNG) file formats. When using PNG, Open Camera + receives the JPEG with the quality set to 100%, and converts to PNG - this means that there is minimal + lossy compression. The only way to have truly lossless is via RAW (see option below).
  • +
+ +

RAW - Only available if Camera2 API is used. If set to "Standard and DNG (RAW)", then photos +will also be saved in RAW (DNG) format. If set to "DNG (RAW) only" (requires Android 7), then photos +will only be saved in DNG format. DNG stands for +"digital negative", and contains the full +uncompressed and unprocessed information from your camera. Please note the following points:

+
    +
  • Some gallery applications don't recognise DNG files - if this is the case, you won't even see them listed. + Either install a gallery application that does, or use specialised RAW viewer or editing tools. + Or transfer to a PC to use DNG editing applications there. Note that newer versions of Android + (7 onwards) seem to have better support for DNG images.
  • +
  • If your gallery application doesn't show DNG files, a file explorer application may also be useful, + to delete DNGs if you don't want them (DNGs take up a lot of space - and won't be deleted when you delete + the corresponding JPEG from a Gallery application).
  • +
  • Note that various processing options such as DRO photo mode, "Stamp photos" and "Auto-level" will only apply + to the JPEG images, and not the DNG images. This is not a bug - the DNGs represent the RAW output + of the camera device. This also means that these options have no relevance for photos in "DNG (RAW) only" mode, therefore such + options will not show on the popup menu.
  • +
  • Options to store extra Exif tags ("Store compass direction", "Artist", "Copyright") are only supported for + JPEG formats.
  • +
  • Some devices only support saving RAW images in the Standard or DRO photo modes. Higher end devices will also support saving RAW + images also in Expo Bracketing and Focus Bracketing modes, as well as HDR where "Save all images for HDR mode" is enabled. + The X- modes do not support saving RAW images. + The on-screen "RAW" icon will show if RAW is enabled for the current photo mode.
  • +
+ +

Allow RAW for expo bracketing - If this option is disabled, the RAW images won't ever be saved in expo bracketing or HDR + photo modes.

+ +

Allow RAW for focus bracketing - If this option is disabled, the RAW images won't ever be saved in focus bracketing photo + mode.

+ +

Noise Reduction original images - When using Noise Reduction mode, if + this option is enabled, then the input images from the burst of images taken will be saved, as well as the + final NR photo. You can choose to save either a single image from the burst, or all images. This may be useful + if you want to use external image stacking applications to merge the images. Note that these images will + typically be noisier than a typical photo from Standard mode, because the device's noise reduction algorithms + are disabled (this seems counter-intuitive, but in order to improve quality by merging multiple images, noise + reduction algorithms need to be applied after the merging, not before). +

+ +

Save all images for HDR mode - When using HDR mode, if this option is enabled, +then the three base exposure images will be saved as well as the final HDR photo. This is useful if you want +to use external HDR applications (such as my own +Vibrance HDR for Android, +or various HDR applications for PC) to +create the final HDR image (although if you don't want Open Camera's HDR mode at all, you can instead use the +Exposure Bracketing Photo Mode). Note this will make saving slower, especially if options like "Stamp photos" or +Auto-level are also used.

+ +

HDR tonemapping - When using HDR mode, the high dynamic range image needs to be converted +back to a regular image, using a process called tonemapping. This option allows you to choose some different +tonemapping algorithms.

+ +

HDR contrast enhancement - When using HDR mode, in some (bright) scenes a local contrast +enhancement algorithm is applied to improve the look of the image. It also gives such images a look that is stereotypically +associated with "HDR". If you prefer not to apply this at all, you can change this option from "Smart" to "Off". Or you can +choose "Always" to have it applied in all cases.

+ +

Exposure Bracketing - Specifies the total number of images to save in Exposure Bracketing Photo Mode +(Camera2 API only).

+ +

Exposure Bracketing Stops - Specifies the number of stops to subtract/add from the "base" +exposure level to the darkest/brightest image, in Exposure Bracketing Photo Mode. An increase of 1 +stop means a doubling of the amount of light. So a setting of "2", with 3 images, will produce +images with exposure set to [-2, 0, +2]. For 5 images, this will produce [-2, -1, 0, +1, +2].

+ +

Panorama auto-crop - Normally panoramas are cropped to give a rectangular result. +Disabling this option will show the full extent of the panorama, but will leave wavy black borders.

+ +

Panorama original images - Whether to save the original shots in Panorama mode. This can be useful +if you want to use a third party application to stitch the images together to create a panorama. Note that these original +images are always saved in PNG format. This option can also be useful for debugging purposes, to send me example images when +reporting problems with panorama - for this, it's best to select to include the debug XML file. Note the XML files will be +saved inside Android/data/net.sourceforge.opencamera/files/, and you'll typically need +a third party File Explorer application to see and delete XML files.

+ +

Front camera mirror - Normally for front cameras, the preview will behave like a mirror, but resultant +photos will still be as the camera (or other people) view the scene. This option can be used to mirror the +resultant photo, so the resultant photo matches the mirrored image you see on the screen.

+ +

Remove device EXIF data - Whether to remove device EXIF metadata from JPEG photos. Note that + this will not remove exif tags applied by other Open Camera settings that apply EXIF metadata (e.g. + location/geotagging, artist, copyright etc). Those other options are independent and will override this + setting. Also note that RAW/DNG and videos are not affected.

+ +

Artist - If text is entered in this setting, then the text will be stored in the image's Exif metadata as the + "Artist" tag. Only supported for JPEG formats. Not supported for RAW photos (DNG format).

+ +

Copyright - If text is entered in this setting, then the text will be stored in the image's Exif metadata as the + "Copyright" tag. Only supported for JPEG formats. Not supported for RAW photos (DNG format).

+ +

Stamp photos - Option to add a date and timestamp to the resultant photos. If "Store +location data" is enabled (see "Location settings" below), then the current location latitude +and longitude coordinates, and altitude, will also be stamped on the resultant photos (if the location is known). +Similarly for "Store compass direction". Note that if this option is enabled, then it will take +longer to save the photo. Also see "Video subtitles".

+ +

Datestamp format, Timestamp format - If "Stamp photos" is enabled, these options allow +extra control over the date and time formatting. Also used for Video settings/"Video subtitles".

+ +

GPS stamp format - If "Stamp photos" is enabled, this allows extra control over the GPS +formatting. Also used for Video settings/"Video subtitles".

+ + + +

Distance unit - If "Stamp photos" is enabled, this controls whether to use metres (m) or +feet (ft) when recording the GPS altitude. Also used for Video settings/"Video subtitles".

+ +

Custom text - Here you can enter some text to be stamped onto resultant photos (e.g., +this could be used for a copyright image). Note that if this option is enabled, then it will take +longer to save the photo. Also note that this option is only supported for photos, not video.

+ +

Font size - Sets the font size used for text for options "Stamp photos" or +"Custom text".

+ +

Font color - Sets the font color used for text for options "Stamp photos" or +"Custom text".

+ +

Text style - Whether to render the text on the image with a shadow background effect, for +options "Stamp photos" or "Custom text".

+ +

Use alternative flash method - (Camera2 API only.) Unfortunately many devices have poor support for the +Camera2 API. A common issue is poor flash behaviour (either flash doesn't fire, or photos are over or under exposed). +If so, enabling this option may help - this uses an alternative algorithm for flash (using the torch to simulate flash +as a workaround). Note that this is enabled by default for Samsung devices.

+ +

Enable dummy capture HDR/expo fix - (Camera2 API only.) Enable this option if your device has problems taking photos +in HDR or Exposure Bracketing photo modes, specifically if some expo images come out with the same exposures. This option +takes an additional "dummy" image which may resolve such problems. Note that "Enable fast HDR/expo burst" (below) must +be enabled for this option to have an effect.

+ +

Enable fast HDR/expo burst - (Camera2 API only.) Disable this option if your device has problems taking photos +in HDR or Exposure Bracketing photo modes (disabling this option will result in a longer delay between the photos being +taken, but may give more stable behaviour if your device is having problems with this).

+ +

Allow photos whilst recording video - (Camera2 API only.) Some devices support taking photos whilst recording video, +but there's the problem that enabling such functionality may cause problems with regular video recording. If you have problems +recording video with Camera2 API enabled, try disabling this option.

+ + + +

Video settings - Select to access the following controls:

+ +

Video resolution - Select the resolution of videos taken by the camera.

+ +

Enable digital video stabilization - Video stabilization reduces the shaking due to the motion of the camera in +both the preview and in recorded videos. This enables a digital method in the camera driver, and may be unnecessary if +your device supports optical image stabilization (OIS).

+ +

Video format - Allows choice of various video file formats and codecs. Please test before using, as some may +not work properly on all devices! Also note:

    +
  • WebM does not support recording audio (at the time of writing, it seems encoding in Vorbis audio format is + not supported on + Android).
  • +
  • WebM does not support storing location data ("Store location data" option).
  • +
  • If using 3GPP, then restarting video when hitting maximum filesize will not be seamless (even on Android 8).
  • +
+ +

Video picture profiles - Enables different color profiles for recording video. Only available if Camera2 + API is used, and only supported on some devices. Additional notes:

    +
  • The various "log" profiles enable a "flat" color profile, and are intended to create videos + for further editing in post production.
  • +
  • Note that it may be necessary to increase the video bitrate when shooting with a "log" profile + or with high custom gamma.
  • +
  • When "Gamma" is selected, you can specify the gamma value used with the next option "Video gamma value".
  • +
  • This feature is somewhat experimental, please test if it fits your need before shooting your masterpiece!
  • +
+ +

Max duration of video - This option can be used to set a maximum duration of the video. If set, video recording +will stop after the specified time (unless already stopped earlier).

+ +

Restart video after max duration - If a max duration has been set (see above), this option can be used to make +the video automatically stop and restart the specified number of times. So this can be used to take a video for a long +period, broken up into multiple video files. If a max duration has not been set, then this option has no effect.

+ +

Maximum file size of video - This allows you to set a maximum file size for videos. Note that many Android devices +set a maximum file size (typically around 2GB or 4GB), and there is no way Open Camera can work around such a limitation (and +using exFAT doesn't get round it). This option allows you to set a smaller maximum file size - note that it can only +be used to reduce the device's maximum file size (so if a device has a 2GB limit, you can't increase it by setting this option to +a larger value). Note that the value is approximate - typically +the resultant videos may be slightly smaller. Note that if you are using this option together +with "Max duration of video", then - if "Restart on maximum file size" is enabled - hitting the maximum file size will cause a +restart that doesn't reset the max duration timer, nor does it count as one of the number of restarts. E.g., if you requested a +maximum duration of 30m, with 1 restart, but the video hits the maximum file size after 20m, rather than getting two times 30m +videos, you'd get four videos, of lengths 20m, 10m, 20m, 10m (i.e., the 30m videos are split at the maximum file sizes). If +"Restart on maximum file size" is disabled, then hitting the maximum file size will always cause the video to end without +restarting, even if you've set "Restart video after max duration".

+ +

Restart on maximum file size - Whether to automatically restart if the maximum file size is met. As noted above, almost all +Android devices have a maximum file size for videos, even if you don't explicitly set one. So it's advisable to keep this option to true, +so that Open Camera will restart as soon as possible if you're recording video, and hit this limit. Note that on devices that +are not running Android 8 or later, there will still be a loss of a few seconds while the video stops and restarts. On Android 8 +or later, the resume should be seamless (unless using 3GPP video file format).

+ +

Record audio - Whether to record audio when recording a video.

+ +

Audio source - Select the audio source for recording video. The effect of this depends on your device - +if it supports an external microphone, you may be able to use this by selecting "External mic". The other options may +provide different settings affecting the resultant audio (e.g., automatic gain control), though this behaviour is +device specific. These options are just controls for the Android MediaRecorder API, and so the exact behaviour is +up to the device.

+ +

Audio channels - If recording audio with video, this option allows you to specify mono or stereo recording. +Note that most devices do not support stereo recording. Even for devices that do support this, you may need to modify +the "Audio source" option to another value for this to work.

+ +

Lock screen when recording video - If enabled, the GUI will be locked when recording video (i.e., the GUI +won't respond to touch presses). You can use this to prevent accidental presses that might change settings or stop +recording. To unlock the GUI, swipe the screen (in any direction). Note that this won't prevent the video being +stopped if you press your device's Home, Recent Apps or Power button (it is not possible for apps to override the +behaviour of these buttons).

+ +

Video subtitles - This option is analogous to the "Stamp photos" option, but rather than embedding text into +the video itself, it stores the text in a separate subtitles +(".SRT") file. Most decent video players should support +SRT files, and use them to display the information as subtitles. The subtitles will record the date and time. If "Store location data" is enabled (see +"Location settings" below), then the current location latitude and longitude coordinates will also be recorded (if the location is +known). Similarly for "Store compass direction". Note that you can control the formatting style for date, time and location using + the options under the "Photo settings" menu (Datestamp format, Timestamp format, GPS stamp format, Distance unit).

+
    +
  • Note that on Android 10, using this option means the ".SRT" files will show in most gallery apps as separate unplayable video + files. A workaround is to enable Settings/More camera controls/"Storage Access Framework".
  • +
  • On Android 11+, this option is only available if Settings/More camera controls/"Storage Access Framework" is + enabled. This is due to changes in Android 11 which affect how applications are able to save files.
  • +
+ +

Video bitrate (approx) - If set to a value other than "default", the default video bitrate is overridden. Higher values mean better + quality video, but the files take up more disk space. Note that some values may be unsupported by your device, and may + cause the recording to fail - in some cases, this can cause problems with the camera that require a reboot to fix. So + please test before using. Also note that the bitrate setting is approximate - the resultant video file will typically be slightly different + to that requested.

+ +

Video frame rate (approx) - If set to a value other than "default", the camera will try to match this frame + rate.

+
    +
  • This is very approximate, as frame rate depends on many factors such as your device and lighting + conditions, and this is only a "recommendation" to the camera driver, so there is no guarantee that the + resultant video's frame rate will match with the requested value.
  • +
  • Some frame rate values may be unsupported by your device, and cause the recording to fail, so please test before + using.
  • +
  • Even if video recording is successful, if achieving a specific FPS is desired, please check the resultant video's + frame rate rather than assuming that it was achieved.
  • +
  • For best results (especially for 120fps or higher), please set Settings/"Camera API" to "Camera2 API".
  • +
  • This setting is ignored in slow motion mode, where instead Open Camera will choose an appropriate high speed frame + rate.
  • +
+ +

Force 4K UHD video (may not work on all devices) - Enable recording in 4K UHD (3840x2160) on the back camera (if ticked, +this overrides the setting in "Video resolution"). This is provided for some phones that don't properly expose their 4K video resolution to 3rd party camera apps +(and so 4K resolution doesn't show in the Video resolution option above). +It turns out that some such devices can be made to record in 4K resolution if it's requested, but on other devices this won't work. +If you enable this on a device that doesn't support it, you may either get an error +message when you try to record, or it may succeed but create a video where the resolution isn't 4K, or may even result in a crash! So +please test this out first. I've tested this successfully on a Samsung Galaxy S5 and Note 3, but even there it only works on some +variants of those devices. If this doesn't work, it isn't a bug in Open Camera, it's because your device doesn't support 4K recording +for third party camera apps. (If this option doesn't show up at all, it's either because your device already lists 4K in the Video +resolutions options above, or because Open Camera thinks this is a device that probably doesn't support 4K video.)

+ +

Critical battery check - If a device runs out of power while recording video, in theory the video should safely stop in time. +However in some cases this doesn't happen in time (if the video file is large, post-processing may still be occurring when the device +switches off), causing the entire video file to be corrupted! To reduce this risk, Open Camera will stop video recording when the +battery level is low (3%), but before the device is about to shut off. If for some reason you don't want this behaviour, you can +switch this option off.

+ +

Flash while recording video - If enabled, the camera flash will flash every second while recording video. This +isn't something most people will need, but it can be useful if the phone is being operated remotely, as a signal that the +video is still recording.

+ +

Location settings - Select to access the following controls:

+ +

Store location data (Geotagging) - If selected, then photos will be tagged with the current +location. Location data will also be stored in videos (though only for devices that record in MPEG4 or +3GPP formats).

+ +

Store compass direction - If selected, then photos will be tagged with the compass direction. +Only supported for JPEG formats. Not supported for RAW photos (DNG format) or videos.

+ +

Store yaw, pitch and roll - If selected, then photos will be tagged with the device's yaw, pitch and roll. + Note that Exif data does not have direct support for this, instead it will be written as a string in the Exif data's + User Comment for the image. Only supported for JPEG formats. Not supported for RAW photos (DNG format) or videos.

+ +

Require location data - If "Store location data" is enabled, then also enabling this option means that +photos and videos can only be taken if location data is present (this can be useful if you need pictures/videos to +have location data in them).

+ +

Processing settings - Select to access the following controls:

+ +

Anti-banding - Some kinds of lighting, such as some fluorescent lights, flicker at the rate of the power supply +frequency (60Hz or 50Hz, depending on country). While this is typically not noticeable to a person, it can be visible to +a camera. If a camera sets its exposure time to the wrong value, the flicker may become visible in the viewfinder as flicker +or in a final captured image, as a set of variable-brightness bands across the image. Therefore, the auto-exposure routines +of the camera include antibanding routines that ensure that the chosen exposure value will not cause such banding. The choice +of exposure time depends on the rate of flicker, which the camera can detect automatically, or the expected rate can be +specified by using this option.

+ +

Edge mode algorithm - Only available if Camera2 API is used. Allows control over the algorithm used by the camera + driver for applying edge enhancement. Edge enhancement improves sharpness and details in the captured image, though on + some devices you may prefer to turn it off if it introduces undesirable effects. Note that this setting is ignored in + Noise Reduction (NR) photo mode.

+ +

Noise reduction mode algorithm - Only available if Camera2 API is used. Allows control over the algorithm used by the camera + driver for applying noise reduction. On some devices you may prefer to turn it off if it introduces undesirable effects. + Note that this setting is not related to Open Camera's Noise Reduction (NR) photo mode (and in fact this setting is ignored + in NR photo mode).

+ +

Misc:

+ +

Online help - Load this web page.

+ +

Camera API - If set to "Camera2 API", this enables support for the Camera2 API that was introduced +in Android 5. Changing this setting will cause Open Camera to restart. Camera2 API enables more advanced features +(including manual ISO/exposure, manual focus, HDR, exposure bracketing). +Note that not all Android 5+ devices have full support for the Camera2 API (Open Camera will only show this +option if at least one camera reports either "LIMITED" or "FULL" support for the API; "LEGACY" only devices are not supported). +Also note that even if devices support Camera2 API, some +devices have poor support. +These are not necessarily bugs in Open Camera, but problems with manufacturer support for Camera2 API. If you have +problems with flash behaviour, try the "Use alternative flash method" setting under "Photo Settings". +Please see here for more details on device compatibility.

+ +

About - Provides various debug information about the app and your device's camera. You can also copy +this information to the clipboard.

+ +

Privacy policy - Open Camera's privacy policy.

+ +

Open Source licences - Licences for files used in Open Camera.

+ +

Settings manager - Select to access the following controls:

+ +

Save settings - Open Camera supports saving all of its settings to a file, so that you can + restore them later. This could potentially be used for saving different profiles or presets. + The files are saved inside Android/data/net.sourceforge.opencamera/files/backups/ . + Please note that all backup files are removed if Open Camera is uninstalled (or you clear Open + Camera's data in your device's Settings/Apps), unless you manually copy them elsewhere.

+ +

Restore settings - Restores all settings from a previously saved settings file (see + "Save settings"). A file dialog will appear allowing you to choose the settings file. Be warned + that selecting a file will mean all of Open Camera's settings will be overwritten with the + saved version! Also beware of using this to transfer settings between different devices - there + is the risk that settings on some devices may be incompatible with other devices. Also if the + saved settings file specified a save location, this may not be valid on the new device (or if + using "Storage Access Framework", you may have to reselect the folder in Open Camera, to grant + write permission for the folder). Note that on Android 10+, the file dialog will only let you + select a file inside Android/data/net.sourceforge.opencamera/files/.

+ +

Reset settings - Resets all Open Camera settings to their default. Selecting this option will cause +Open Camera to restart. Note that this will not delete any saved settings (see above options).

+ +

Widgets and tiles

+ +

Open Camera comes with a "Take Photo" widget. You can place this on your homescreen. When clicked +on, it launches Open Camera and takes a photo immediately.

+ +

On Android 7, Open Camera supports Quick Settings Tiles, to launch Open Camera in photo mode +("Camera"), video mode ("Record video") or front camera mode ("selfie").

+ +

On Android 7.1, Open Camera supports application shortcuts. Press and hold the Open Camera icon +to show additional shortcut options.

+ +

Remote control

+ +

Some remote control buttons and selfie sticks work by transmitting a volume key command, which +by default will take a photo, but you can change this from the Settings.

+ +

Open Camera also supports some remote control via a Bluetooth or USB keyboard:

+
    +
  • Function key or numeric keypad "*": toggle popup menu
  • +
  • "/": toggle exposure menu
  • +
  • Space or numeric keypad "5": Activate shutter (take photo/video), unless the exposure or popup menu is open, + and you are in highlight selection mode (see below).
  • +
  • Up/down arrows, or numeric keypad "8" and "2": if the exposure or popup menu is open, this enters highlight selection + mode:
      +
    • For the popup menu, move the highlighted row up or down to select a row, then press Space/"5", then you can change + the selected icon in that row with the up/down arrows, then press Space/"5" again to select the button.
    • +
    • For the exposure menu, move the highlighted row up or down to select a row, then press Space/"5", then you + can change the selected value or slider with the up/down arrows.
    • +
    + +
  • +
  • "+" or "-": Zoom in or out.
  • +
+ +

Frequently Asked Questions

+ +

How can I save to my external SD card? - This depends on your Android version:

+
    +
  • Android 5.0 onwards - enable Settings/More camera controls/"Storage Access Framework", and this should allow you +to save to external SD cards. If when choosing a folder, you only see "Recent", you may need to click on the +three dots at the top right to open the menu, to enable showing the drives (e.g., "Show internal storage").
  • +
  • Android 6.0 onwards - From Android 6, some devices support "Adoptable Storage" allowing you to +select to use +an SD card as internal storage. Note that not all devices support this, even if running Android 6 or later. If your device +doesn't support this, or you want to instead use an SD card as "portable storage", you'll have to use +the Storage Access Framework method as with Android 5.
  • +
+ +

Can you implement disabling shutter sound for my phone? - + If Open Camera shows the option Settings/"Camera API", then changing to "Camera2 API" means you'll be able to disable + shutter sounds under "Settings/More camera controls...". + When not using Camera2 API, if the option "Shutter sound" under "More camera controls..." isn't shown, + then it's not available. There + are possible workarounds for some of these devices (which is why some third party camera applications + may be able to silence the shutter), though the issue is these don't work on all devices, and tend + to use methods that Google now discourage. The fault is with the device for not supporting the + standard method for cameras to disable the shutter sound on Android. In particular, if under Settings/About + you see that "Can disable shutter sound?" says No, it means the device's camera API is telling 3rd + party camera apps that shutter sound can't be disabled (so either it can't do it, or the API is lying + - either way, this should be reported to your manufacturer).

+ +

Photos or videos fail to save! - Firstly, if you're trying to save to an +external SD card, see "How can I save to my external SD card?" above. Otherwise:

+
    +
  • If Settings/More camera controls/"Storage Access Framework" is enabled, + in some cases the permission may be lost, try rechoosing the save location (from + Settings/More camera controls/"Save location").
  • +
  • If not using Storage Access Framework, but you have changed the save location, + it may be you've chosen somewhere where applications don't have permission to save + files.
  • +
  • Or sometimes simply restarting the devices fixes such problems.
  • +
+ +

I switched to a new phone, and now something doesn't work! - Google's auto-backup will +typically transfer settings to a new phone, but this may mean a camera-specific setting is no +longer relevant. In particular, if you set a non-default save location, it may be that the path is +not valid on the new device, or if using Settings/More camera controls/"Storage Access Framework", +you may need to rechoose the save location (from Settings/More camera controls/"Save location") to +grant permission to the new device. You can use Settings/Settings manager/"Reset settings" to reset +Open Camera to its original state, to rule out any issues from an Android backup from another +device.

+ +

My pictures are being rotated/cropped! - This likely means the auto-level +option is on. (If they're being rotated even when the phone is held level, it may mean the accelerometer sensor on your +device isn't calibrated.) It's off by default, but you may have accidentally switched it on. To turn off, go to the "popup" menu +and untick Auto-level.

+ +

Why doesn't Open Camera support dual / multiple cameras? - Open Camera supports cameras + that are made available to third party applications, although you may need to set Settings/"Camera API" + to "Camera2 API". When using Camera2 API, many devices expose multiple cameras via the zoom - zooming out to less + than 1x switches to the ultra-wide camera, zooming in automatically switches to the telephoto when + required. On other devices, the cameras can be manually switched by using the + switch multi-camera icon. + Note that some devices don't allow third party applications to use their extra cameras, either + via zoom or by explicitly switching to the camera. In such cases Open Camera cannot access them.

+ +

But another third party camera app can access the extra cameras on my device, why can't Open Camera? - + On some devices, it may be possible to access the camera by ignoring what the device claims, and trying to + access the camera IDs anyway. This is a hack - on other devices, this will lead to buggy behaviour where + cameras are exposed that hang or otherwise don't work. The problem here is that the device does not + support exposing the cameras to third party camera applications via the Android camera API.

+ +

But can't you use the hacky method to access the extra cameras anyway? - Put it this way: + you paid hundreds of pounds for a device from a large company with lots of resources, but you + want the free application to do the extra work to workaround the device's limitation, even when it's a hacky + method? Sometimes I do implement workarounds for device limitations - but it is risky to do so here.

+ +

Why doesn't Open Camera support the maximum video resolution on my device? - If you +are using Camera2 API, make sure that you're not in slow motion mode (see "Speed" under +the popup menu), and you don't have a non-default frame rate set (under Settings/Video settings). If +a high speed frame rate is in use, then this usually limits the maximum video resolution. If this +isn't the case, then it may be that the device isn't exposing the highest video resolution to third +party applications (e.g., this is common for 4K/UHD video resolutions on older devices, especially +if Camera2 API isn't enabled).

+ +

Why doesn't the FPS/bitrate setting for video work? - These settings only give "recommendations" to the +camera, and there is no guarantee that they will be met. For best chance of success, try setting Settings/"Camera API" to "Camera2 API".

+ +

But my camera can do 60/120FPS, so why can't Open Camera? - High frame rates often are achieved only by the +"stock" camera app (or "mods" of it) because these are written for a specific device and don't have to go through +the standard Android camera API. Some devices do now support high speed frame rates when Camera2 API is enabled.

+ +

Why doesn't Open Camera show 23MP resolution on my Sony Xperia, only 8MP? - This was a problem on older devices + and/or with the old Camera API because of Sony not making this available to third party camera applications. On newer Sony + devices, this should become available if you set Settings/"Camera API" to "Camera2 API".

+ +

Why does the resolution of my photos not match the specified camera resolution? - This happens if +auto-level is enabled. The image is rotated to be level, which means the +resolution (and aspect-ratio) will change.

+ +

Why can't I change the ISO? - Even if your device supports ISO, this may not be made available through the +standard Android API for 3rd party camera apps to use.

+ +

Why doesn't touch to focus work? - Touching the screen should allow you to choose a particular region to focus +on. If this doesn't work:

+
    +
  • Check the focus mode (under the popup menu), for best results you usually want the + Continuous or + Auto focus modes.
  • +
  • Check that Settings/"Face detection" is disabled (focus regions aren't possible in face detection mode).
  • +
  • Some devices or cameras (especially front cameras) don't support touch to focus.
  • +
+ +

I get "FAILED TO OPEN CAMERA" - In some cases this is fixed by restarting +Open Camera, or otherwise make sure nothing else is using the camera (including the +torch being enabled). In some cases a phone gets into a state where the camera can't +be opened, which is fixed by rebooting. If it persists even after restarting your +device, try other camera applications in case the camera is faulty.

+ +

Why has Open Camera stopped working properly? - If something stops working in Open Camera +first try a reboot of your device. If that doesn't resolve the problem, try resetting +the settings to the defaults (under Settings/Settings manager/"Reset settings"), or try reinstalling +the app (or go to your device's App Settings and select "Clear data" for Open Camera) to +reset it to its initial conditions. Obviously ideally this shouldn't happen, but can be a way of +working around any unresolved bugs that appear. If something stops working in an upgrade to a new +version of Open Camera, and the problem isn't resolved by a reinstall/Clear data, please let me +know, but in the meantime you can install the older versions from + +https://sourceforge.net/projects/opencamera/files/ .

+ +

Why doesn't the preview display match the resultant photo/video? One of them is cropped. - Firstly, make +sure that Settings/Camera preview/Preview size is set to "Match photo size (WYSIWYG)". However if that doesn't fix +the problem, this is a limitation on some devices and photo/video resolutions (it happens if the device doesn't offer +a "preview" with the same aspect ratio as the chosen photo/video resolution). A workaround may be to try a different +resolution for photos and/or videos.

+ +

Why isn't Open Camera available in my language? - I can only speak English I'm afraid. Please contact me +if you're willing to do a translation (this doesn't require any knowledge of Android programming, it's just a case +of translating a set of strings in a text file).

+ +

Why is the non-English translation of my language incomplete? - Scene modes and color effects aren't +currently translated, as these are just strings returned by the camera. Also note that even if I get someone to +translate Open Camera, when I later add new features/options, this may require additional strings which aren't +translated. I don't have a team of paid translators, so it's not always possible to keep translations up to date :)

+ +

The non-English translation is wrong! - I can only speak English, and am dependent on other people to +offer translations. If you think a particular translation is inaccurate, please let me know.

+ +

Why is the screen forced to maximum brightness? - If you don't like this feature, you can switch it off +by going to Settings/On screen GUI/Force maximum brightness.

+ +

Does Open Camera support selfie sticks / bluetooth remotes? - Open Camera has support for some selfie sticks +though different sticks work in different ways, so it's hard to guarantee this. Sticks and bluetooth remote controls +which work by triggering a volume key press should work (and the behaviour can be configured by +Settings/More camera controls/"Volume keys").

+ +

Why is auto-level slow? - This feature requires doing a decompress of the JPEG data, followed by a +rotation of a multi-megapixel image, then recompressing, which typically results in a short pause on most devices. +And as devices get faster CPUs, they typically come with cameras with even bigger megapixels! This is why I've made +it optional (and you can set the volume control to quickly switch it on and off if you like).

+ +

Why is auto-level for photos only? - Doing auto-level for video is a massively harder +problem. This wouldn't be possible in real-time - rotating images causes a noticeable pause as it is, imagine +having to do that for every frame. Also the rotation angle wouldn't be constant, so it's a much harder problem +figuring out what the correct result should actually be.

+ +

Can I launch a different gallery app when I press the gallery icon? Why doesn't Open Camera +have its own gallery app? - If you have more than one Gallery app on your device, you should be +given the choice which to use when you press the gallery icon. If one app is already set up as the +default and you want to change it, then go to the App Settings for that app, and under +"Launch by default" (or something like that) it will list if it is set as the defaults for any +actions, with an option to clear them. There are plenty of gallery apps for Android, and it seems +better for users to have this choice, rather than Open Camera having its own custom gallery.

+ +

Clicking on the thumbnail icon only shows the photo briefly? - This can happen if you've changed the save location +for photos/videos to one that is not typical (e.g., not inside DCIM/ ). Some gallery applications will not show a photo in +such cases.

+ +

Why does Open Camera have ads? - Open Camera does not have ads in the application (there may be ads on the online +webpage you're reading now, but not in the app). There are however some clones on Google Play with ads inserted. +Please ensure that you've downloaded from one of the places listed above +on this page.

+ +

Does Open Camera support features like manual controls for exposure/ISO/focus, and RAW? - +Set Settings/"Camera API" to "Camera2 API" for such features (if the option isn't there, it's not supported on your device). +This is turned off by default as some devices have poor behaviour.

+ +

Why can't I select the Camera2 API? - Some devices only have +"legacy" Camera2 support, which means it isn't any better than the original Camera API. Open Camera doesn't support +enabling Camera2 on such devices.

+ +

Does Open Camera support HDR+, or use the Pixel's Visual Core? If you have a Google Pixel with Pixel Visual Core, then Open Camera +should be using HDR+ when the following settings are set: Photo Mode Standard, flash off (or auto if the flash doesn't fire), +white balance auto, color effect none, no exposure compensation, no manual ISO/exposure, no RAW. HDR+ should be +supported in both old and Camera2 API. See +this thread for more discussion. +Note that some Google Pixels (e.g., 3a) do not have a Pixel Visual Core chip, and may not support HDR+ in third party +camera applications (see this thread).

+ +

Why isn't Panorama supported on my device? - To support panorama in Open Camera, this requires +a gyroscope and compass, and at least 256MB of "large heap" memory (note, this isn't the same as the device's RAM). +Bear in mind that even if your device supports panorama, with Open Camera I have to support thousands of Android devices, +and I don't have the luxury of targeting functionality towards one particular device.

+ +

Why doesn't Open Camera's HDR images look like other HDR camera apps? - There are a great many different ways +of applying a HDR algorithm, some may do better or worse in different circumstances; some may look more or less pleasing +depending on what you are after. Also note that some camera apps use "HDR" to mean "apply a whacky-looking filter". +Whilst HDR filters can be used to apply a rather unreal or vivid image, for now Open Camera's HDR is more geared towards +capturing the range of exposures.

+ +

Why doesn't Open Camera's HDR photos look as good as other HDR photos I see? - Firstly, see the previous question. +Beyond that, many HDR photos you see on the Internet may have been manually processed in HDR software that allows the +user to tweak settings for optimal results for a given image. Such photos may have had additional processing done. Open +Camera's HDR algorithm tries to get the best results for most purposes automatically, but isn't going to beat manual +editing.

+ +

I ran out of space while recording video, and the video is corrupted, how can I get it back? - Firstly, this sadly +isn't an Open Camera bug - it's an issue that the video recording API doesn't stop or report an error when storage +runs out. As a workaround, Open Camera does try to stop videos when storage space is low (although note this workaround +may not be feasible in some cases if saving to non-default locations). In order to recover a file, you can try +"MP4Fix Video Repair Tool" by Smamolot (com.smamolot.mp4fix) (not affiliated with me or Open Camera!).

+ +

My device ran out of power while recording video, and the video is corrupt! How can I get it back? - This can happen if +the device is very slow at processing the video file after stopping, and this doesn't finish when the device powers off. Open +Camera will stop the video in advance of the device shutting down to help reduce this risk (see "Critical battery check" option), +but if this still happens, the best hope is to try MP4Fix, see the previous question. Note that if this happens, it isn't an +Open Camera bug - it's a problem that will happen on any device where the device shuts off before the video can be processed.

+ +

Does Open Camera support external USB cameras? - Unfortunately Open Camera does not support USB cameras. These don't +seem to use the standard Android camera APIs, and it would be a lot of work to add support for these.

+ +

I don't like the UI! - The UI for Open Camera has improved significantly over the versions (both in terms +of having a consistent look, and the operation), so this criticism seems to have gone down, but there's always room +for improvement! However, in order for me to improve, please be specific: comments like this could mean all sorts of +things, such as the style of the icons, the arrangement of the icons, wanting more things on the main screen, wanting +fewer things on the main screen, preferring swipes to icons, wanting it easier to change certain options, or even that +some devices may have a bug that I'm not aware of. Also bear in mind that some preferences may be a matter of opinion +and it's not possible to get an app that satisfies everyone (e.g., some camera apps hide everything behind popup menus +that you swipe to enable; others have as much on screen as possible - I try to achieve a balance in Open Camera).

+ +

Why is the UI cluttered? - Under Settings/"On screen GUI", there are options to disable various controls and +so on from the main view.

+ +

Can I use the Open Camera source code in my app? - The Open Camera source is available under the GPL (see +Licence), and can be used for free, including commercially, if you follow the terms of that +licence (this means making the source of your app available under a GPL-compatible licence).

+ +

Contacting me (bugs etc)

+ +

If you experience a crash, and Google offers to "Report", please do so (if you've installed via F-Droid, +please see here).

+ +

If you have a question or what seems to be a bug, please first read FAQ. Also if something +no longer seems to work properly, try a reboot of your device, or if that fails to fix, try resetting Open Camera +settings to the defaults (under Settings/Settings manager/"Reset settings").

+ +

If there's still a problem, please check other third party camera applications to see if they have the same + problem or not. (It's not enough to try your device's "stock" camera - in some cases, devices may have bugs for + third party camera applications that don't affect the stock camera.)

+ +

If you find a bug, please +report it here (please check for existing tickets first). +It is helpful to supply the "About" information - please go to Settings/About, then click "Copy to clipboard", then you +can paste the information into your web browser, email or whatever.

+ +

For more general questions or things like feature suggestions, please use the +forums. +For some enquiries you may prefer to use email. +Please contact me at mark.harman.apps@gmail.com. + +Please note that I get a lot of emails +for Open Camera these days - I try to reply to as many as I can, but this is not always feasible. I do however +read every email and forum post.

+ +

Note that whilst I welcome reviews/ratings, they are not a good way for reporting bugs (I may +miss it, there's only a limited number of characters for me to reply).

+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/history.html b/_docs/history.html new file mode 100644 index 0000000..ea17dc8 --- /dev/null +++ b/_docs/history.html @@ -0,0 +1,1946 @@ + + + +Open Camera History + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera History

+
+ +

< Main Page.

+ +
+Version 1.55 (2025/08/18)
+
+FIXED   Crash on some devices when starting preview using camera vendor extensions, now fail
+        gracefully instead.
+FIXED   Crash related to zoom on some older devices when starting with Camera2 API.
+FIXED   Fixed possible crash related to focusing with Camera2 API.
+FIXED   Store geotagging properly on devices that always had longitude set to 0.0 (e.g.
+        Fairphone 5).
+FIXED   Dialog for poor magnetic sensor wasn't showing any text.
+FIXED   Ensure the info text for capture progress in x- extension modes remains visible until 100%.
+FIXED   Zoom wasn't available in video mode after restarting, if photo mode was set to Panorama.
+ADDED   Announce current camera info for accessibility (e.g. Talkback) on startup and when switching
+        camera.
+ADDED   More crop guides: 65:24 and 3:1.
+ADDED   Catalan translation (thanks to Cambrells).
+UPDATED Shutter button now changes to a red square when recording video.
+UPDATED Smooth zoom transition when using zoom seekbar (for Camera2 API).
+UPDATED Removed -/+ controls for zoom and exposure compensation (these "zoom controls" are now
+        deprecated in Android).
+UPDATED Also hide settings icon when taking a photo.
+UPDATED Show current save location in settings.
+UPDATED Don't block UI thread when first starting camera preview (for Camera2 API with Android 14+).
+
+Version 1.54.1 (2025/04/07)
+
+FIXED   Crash in 1.54 due to trying to change exposure compensation beyond min or max values.
+
+Version 1.54 (2025/04/03)
+
+FIXED   Device specific crashes when starting with Camera2 API (related to camera vendor
+        extensions).
+FIXED   Manual white balance for Pixel devices.
+FIXED   Returning from manual to auto white balance mode had incorrect colours until preview or
+        camera was restarted.
+FIXED   REC709 and sRGB video picture profiles gave errors on some devices (e.g. Pixel).
+FIXED   Device specific fixes for slow motion and high speed video.
+FIXED   Some devices didn't support highest photo resolutions for focus bracketing mode.
+FIXED   Don't enter immersive mode when in background.
+FIXED   Some devices (e.g. Galaxy S24+) didn't layout UI correctly when switching directly between
+        landscape and reversed landscape orientation.
+FIXED   Placement of on-screen text (e.g. zoom indicator) when in reversed landscape orientation and
+        focus seekbar was visible.
+FIXED   Sometimes didn't layout UI correctly for landscape vs reversed landscape when in
+        split-screen mode.
+FIXED   Shifted positions of icons (when "along top") to avoid camera privacy indicator on some devices
+        e.g. Samsung Galaxy.
+FIXED   Icons weren't smoothly rotating the first time the device was rotated (for Camera2 API).
+FIXED   Duplicate entry of 0.1s for manual exposure time.
+ADDED   New Settings/Photo settings/"Save preview shots". Enables saving a short video alongside
+        photos, containing shots from the moment before the photo was taken (requires Android 8+ and
+        Camera2 API).
+ADDED   Support for choosing a specific physical lens (e.g., specifically choosing telephoto lens)
+        on supported devices (requires Android 9+ and Camera2 API).
+ADDED   Support for Ultra HDR on supported devices, under Settings/Photo settings/"Image format"
+        (requires Android 14+ and Camera2 API).
+ADDED   New "Auto source" mode for focus bracketing: in this mode, the source focus distance will be
+        automatically set via continuous focus or touch to focus.
+ADDED   Haptic/vibration feedback for adjusting some seekbars. Can be disabled under
+        Settings/More camera controls/"Allow vibration feedback". Note this also requires touch
+        interactions to be enabled in your device's settings.
+ADDED   New option Settings/More camera controls/"Use milliseconds in filename" (thanks to
+        Rob Emery).
+UPDATED Now requires Android 5+, Android 4.x no longer supported (sorry to anyone still on those
+        devices - but latest AndroidX libraries now require Android 5+).
+UPDATED Improved performance for Camera2 API (on Android 12+) for taking photos with continuous
+        focus mode, by optimising for latency. If this results in out of focus shots on your device,
+        change the new option Settings/Photo settings/"Optimise focus for..." to Quality.
+UPDATED Improved performance for resuming application with Camera2 API on devices that support
+        camera vendor extensions.
+UPDATED Double tapping will now cancel focus and metering area.
+UPDATED Single handed touch zoom by double tap and drag up/down.
+UPDATED Improvements for edge-to-edge mode support on Android 15.
+UPDATED Allow some more dialogs to run while still showing camera preview.
+UPDATED Support longer exposure time (0.5s) on Samsung Galaxy S devices.
+UPDATED OnePlus devices on Android 14+ now default to Camera2 API for new installs.
+UPDATED No longer force max brightness by default for new installs.
+UPDATED Removed "dim on-screen virtual navigation buttons" option for "Immersive mode" (this is now
+        deprecated on Android, and few devices support it anymore).
+UPDATED Changed shutter sound on Samsung devices with Camera2 API (workaround for Samsung specific
+        bug where Camera2 API sounds are always 100% volume).
+UPDATED Made upright detection in Panorama photo mode less strict.
+UPDATED Improved exposure compensation seekbar so it's easier to return to an exposure compensation
+        of 0.
+UPDATED No longer using renderscript for image processing algorithms.
+
+Version 1.53.1 (2024/06/04)
+
+FIXED   Bug in 1.53 where video subtitles option was incorrectly disabled when SAF enabled, also
+        fixed a related crash when recreating fragment.
+
+Version 1.53 (2024/05/28)
+
+FIXED   Device specific crashes when starting with Camera2 API (related to camera vendor
+        extensions).
+FIXED   Crash when saving images if unable to create thumbnail.
+FIXED   ANRs and poor performance if using Storage Access Framework when save folder had lots of
+        files (due to trying to measure free memory).
+FIXED   HDR images coming out green on Samsung Qualcomm devices.
+FIXED   If force destroyed when in settings, the camera would be incorrectly opened when application
+        was recreated (camera should only be reopened when leaving settings).
+FIXED   Long pressing on the shutter button in video mode meant nothing happened when releasing
+        touch.
+FIXED   Clicking on gallery icon when using Storage Access Framework would open contacts on some
+        devices.
+FIXED   Thumbnail wasn't ignoring deleted files when using Storage Access Framework (thanks to
+        Daniel Zhang).
+ADDED   Camera vendor extensions show percentage progress on supported Android 14 devices.
+ADDED   Long press on switch camera icons now bring up a menu to jump to any camera (for devices
+        that expose multiple cameras).
+ADDED   New option for on-screen icon to enable or disable focus peaking.
+ADDED   Support for themed/monochrome application icon (Android 13).
+UPDATED Improved user interface icons for shutter, switch photo/video and switch camera.
+UPDATED Smoother zoom for Camera2 API.
+UPDATED Improvements for loading thumbnails for gallery icon (including fixing orientation for
+        X-NIGHT portrait images on Pixel 6 Pro).
+UPDATED Improvements to lock screen behaviour when running above lock screen (thanks to
+        Daniel Zhang).
+UPDATED Improvements for popup menu and exposure UI when using large font sizes.
+UPDATED Made user's font size preference apply to on-screen text.
+UPDATED Changes in preparation for back button behaviour for future Android versions.
+UPDATED Better compatibility when debug option Settings/Photo settings/"Enable fast HDR/expo burst"
+        is disabled: also change the preview exposure. Turning this option off fixes HDR/expo for
+        Samsung Galaxy devices. New installs on Samsung Galaxy devices now disable fast HDR/expo
+        burst by default.
+UPDATED Updated Chinese Simplified translation (thanks to tumuyan).
+
+Version 1.52 (2023/08/13)
+
+FIXED   Crash related to multi-camera devices.
+FIXED   Possible crash when failing to save with Storage Access Framework.
+FIXED   Jittery zoom when using multitouch pinch but pinching slowly.
+FIXED   Don't show zebra stripes, focus peaking or histogram, when displaying resultant photo for
+        "Pause after taking photo" option.
+FIXED   Problem where clicking on gallery icon would sometimes go to a "base" image instead of HDR
+        image, when saving HDR photos with base images (for Android 10+).
+FIXED   Collapse notification panel when launching from a quick settings tile.
+FIXED   Some info toasts weren't showing (e.g., when cancelling SAF dialog, or denying location
+        permission).
+FIXED   Problem where if setting Video Picture Profiles to non-default value caused camera to fail
+        to open, the Video Picture Profiles setting would no longer show to be able to set back to
+        default.
+FIXED   Allow trying to switch between photo and video mode if camera fails to open (in some cases
+        the failure may be specific to the mode).
+FIXED   Aspect ratio and other fixes in split-screen and multi-window modes.
+FIXED   Problem on some tablets where zoom seekbar showed under navigation bar in landscape
+        orientation in some circumstances.
+ADDED   Support for zoom with camera vendor extensions (for supported Android 13+ devices).
+ADDED   Support for displaying on-screen ISO and exposure time with camera vendor extensions (for
+        supported Android 13+ devices).
+UPDATED Made pinch zoom more sensitive (better support for modern devices with higher zoom levels).
+UPDATED "Touch to capture" option now supports starting and stopping video.
+UPDATED Applied a timeout of 2 seconds for focusing with original camera API.
+UPDATED Improved performance for NR photo mode.
+UPDATED Drop support for notifications for background saving, due to Android 13 permissions faff.
+UPDATED No longer allow a screenshot of the camera preview to show in "recent apps" view (for
+        Android 13+).
+UPDATED No longer cancel panorama when moving device orientation too far in wrong direction.
+UPDATED Made more text scale according to user's font size preference.
+
+Version 1.51.1 (2023/01/02)
+
+FIXED   Fix crashes for Camera2 API.
+
+Version 1.51 (2022/12/21)
+
+FIXED   Gallery thumbnail had incorrect orientation on some Android 10+ devices.
+FIXED   Focus bracketing images came out underexposed on some devices since
+        version 1.50 (e.g. Pixel 6 Pro).
+FIXED   Problems with NR, fast burst and long manual exposures on some devices (e.g., Pixel 6 Pro).
+FIXED   Face detection on-screen icon shouldn't show in camera vendor extension modes (as not
+        supported).
+FIXED   For Camera2 API, red eye flash was incorrectly being shown even on devices that didn't
+        support it.
+FIXED   Not saving location exif information for Camera2 API on some devices (e.g., Pixel 6 Pro).
+FIXED   Crash when recording video on some devices and resolutions (e.g. Pixel 6 Pro at 1920x1440) if
+        those resolutions didn't support the same frame rate as other resolutions.
+FIXED   Don't display error message if using volume keys to turn auto-level on or off in RAW or
+        Panorama mode (unless device doesn't support auto-level at all).
+ADDED   New option Settings/Photo settings/"Remove device EXIF data" to remove device metadata from
+        JPEG photos.
+ADDED   Shading for auto-level and crop guides, to darken the preview outside of the region of
+        interest.
+ADDED   Display message to hold device steady when using X-Night photo mode.
+ADDED   New option Settings/Photo settings/"HDR tonemapping" to choose tonemapping algorithm used
+        for HDR photo mode.
+UPDATED Applied a timeout of 1 second for focusing with Camera2 API.
+UPDATED Made it easier to zoom to 1x zoom using seekbar on devices that can zoom out to ultra-wide
+        camera.
+UPDATED Make zoom seekbar snap to powers of two (for Camera2 API).
+UPDATED No longer switch to manual mode in DRO and NR photo modes, as on some devices this meant
+        losing the benefit of manufacturer algorithms.
+UPDATED Volume key down now supports pause/resume video on Android 7+ when recording video, if
+        option for volume keys is set to "Take photo (or start/stop video recording)".
+UPDATED Apply a dimmed effect when reopening camera or switching modes (for Camera2 API).
+UPDATED Improved look of on-screen level line.
+UPDATED On-screen pitch and compass lines now show smaller intervals as camera zooms in.
+UPDATED Improvement to HDR algorithm for dark scenes.
+UPDATED Camera2 extension night mode now adds "_Night" to filename.
+UPDATED Default to Camera2 API for some devices (will only take effect for new installs or if
+        resetting settings).
+UPDATED Default to flash off instead of flash auto.
+UPDATED DRO, HDR, NR modes no longer activate showing full on-screen info toast when opening camera.
+UPDATED Use system toasts without custom views when appropriate.
+UPDATED Display current value for photo stamp font size and colour in preference summary.
+
+Version 1.50.1 (2022/06/08)
+
+FIXED   Crash on OPPO devices for old camera API introduced in 1.50.
+
+Version 1.50 (2022/06/04)
+
+FIXED   HDR photos came out black on some Samsung Galaxy devices with Android 12.
+FIXED   Problems with flash on Camera2 API (Samsung Galaxy, OnePlus, Pixel 6 Pro). Galaxy and
+        OnePlus devices therefore no longer default to using the "alternative flash method".
+FIXED   Problems with expo, HDR and long manual exposures on some devices (e.g., Pixel 6 Pro).
+FIXED   Granting only approximate location permission on Android 12 would turn geotagging option
+        back off.
+FIXED   On-screen text looked strange on Android 12.
+FIXED   Gallery icon overlapped with navigation bar if using widescreen resolution with UI in left
+        or right handed mode.
+ADDED   Support for Android 12's camera extensions API. When using Camera2 API, on selected devices
+        advanced photo modes are now available (e.g., Night on Pixel 6 Pro; Night, Bokeh and Beauty
+        on some Galaxy devices).
+ADDED   Improved support for devices with multiple camera devices, where extra cameras are exposed
+        via zooming in and out (e.g., Pixel 5/6).
+ADDED   New debug option Settings/Photo settings/"Enable dummy capture HDR/expo fix". Please enable
+        this if you are having problems with HDR or expo bracketing mode on Samsung Galaxy devices
+        with Android 11+ (specifically if some expo images come out with the same exposures).
+UPDATED Removed "use addresses" and "say cheese" options. Sorry about that, but
+        this is due to new data privacy requirements on Google Play: although
+        these used standard Android APIs, information was not available for these APIs to satisfy
+        data privacy requirements.
+UPDATED Now targeting Android 12. For remote control device options, new bluetooth permissions are
+        used instead of requiring location permission.
+UPDATED Move gallery icon slightly to avoid overlapping with Android 12 camera privacy icon.
+UPDATED Made pinch zoom smoother, to allow finer control.
+
+Version 1.49.2 (2022/01/13)
+
+FIXED   Dialog for "Save settings" shouldn't allow multiple lines.
+FIXED   Crash for NR photo mode on some devices since version 1.49.
+UPDATED Switched to using AppCompat AppCompatActivity.
+UPDATED Photo stamp custom text now uses AppCompat libraries to support latest emoji.
+UPDATED Made appearance of info "toasts" more consistent.
+
+Version 1.49.1 (2021/09/20)
+
+FIXED   Crop guides weren't drawn correctly in portrait orientation in 1.49.
+FIXED   Diagonals grid wasn't drawn correctly in portrait orientation in 1.49.
+
+Version 1.49 (2021/09/07)
+
+FIXED   Crash when failing to save photos/videos with mediastore (Android 10+ if not using Storage
+        Access Framework).
+FIXED   Crash related to original camera API.
+FIXED   Crash when using photo stamp with auto-level when angle is zero.
+FIXED   Couldn't exit immersive mode on Android 11.
+FIXED   Behaviour where widescreen preview aspect ratios show under on-screen navigation bar wasn't
+        working properly on Android 11.
+FIXED   Manual white balance had inverted effect.
+FIXED   Video subtitles file didn't work properly when video file restarted due to max filesize.
+FIXED   Taking a photo in RAW only, then clicking on the gallery thumbnail would sometimes
+        incorrectly open an earlier non-RAW photo or video.
+FIXED   Corrected pitch and compass line lengths for portrait vs landscape orientations.
+FIXED   Single and double tap options to take photo weren't working correctly in panorama mode.
+FIXED   When using manual ISO seekbar, sometimes incorrect ISO button would be highlighted.
+UPDATED Now supports portrait and landscape system orientations, rather than being locked to
+        landscape system orientation.
+UPDATED Double tap to take photo option no longer performs a touch to focus, this now only happens
+        from a single tap.
+UPDATED Improved performance when opening camera and clicking on gallery icon (mainly relevant for
+        using Storage Access Framework with Android 10+ when save folder has large number of files).
+UPDATED Set max preview exposure time to be 1/5s instead of 1/12s, for when using manual exposure.
+UPDATED Support longer exposure time (1/5s) on some Samsung Galaxy S devices.
+UPDATED Improvements to brightness levels for Noise Reduction, DRO and HDR photo modes (images
+        coming out too dark in some cases).
+UPDATED Improvement to Noise Reduction photo mode quality (improved ability to distinguish noise
+        from ghosting effects).
+UPDATED Improvement to Noise Reduction photo mode to avoid overexposing in lower light scenes.
+UPDATED Improved choosing when to use 8 images for Noise Reduction photo mode.
+UPDATED Optimisations for DRO and NR photo modes on Samsung devices.
+UPDATED Accessibility improvement, set hints for EditTexts.
+UPDATED Now targeting Android 11. Due to changes in Android 11 this means "video subtitles" option
+        is now only available when saving with
+        Settings/More camera controls/"Storage Access Framework" enabled.
+UPDATED Updated some user interface icons.
+
+Version 1.48.3 (2020/11/20)
+
+FIXED   Possible crash for panorama if failing to crop due to poor transformations; now fails
+        gracefully.
+FIXED   Crash on EXTERNAL devices with Camera2 API that didn't support querying the view angles.
+FIXED   Photos would sometimes fail to save on some devices with Storage Access Framework, when some
+        options were enabled (options like DRO, HDR, auto-level, photostamp that require
+        post-processing; custom Exif tags like artist or copyright; or when using geotagging with
+        Camera2 API).
+FIXED   Fix for HDR scenes with both very bright and very dark regions, result would be over
+        exposed.
+FIXED   Fixed possible misalignment for HDR scenes with very bright or very dark images.
+FIXED   Corrupt videos could be left over if video failed to start.
+FIXED   Possible problem taking photos on some devices with LIMITED Camera2 API support.
+FIXED   Possible problem with default edge mode and noise reduction mode behaviours on some devices
+        with LIMITED Camera2 API support.
+FIXED   UI would become sluggish if camera or storage permission denied with "Don't ask again".
+UPDATED Now supporting "scoped storage" on Android 10+. This means storage permission is no longer
+        required on Android 10+. However this means the following changes:
+          * Saving outside of DCIM/ is no longer possible unless using the Storage Access Framework
+            option. If you had set up a custom save folder outside of DCIM/ and are on Android 10+,
+            it will be reset to the default DCIM/OpenCamera/ folder. If you want to continue saving
+            outside of DCIM/, you can enable
+            Settings/More camera controls/"Use Storage Access Framework" and choose a new folder.
+          * If using Video subtitles option, then the .SRT files will show up in gallery
+            applications, unless Settings/More camera controls/"Use Storage Access Framework" is
+            enabled.
+        Note that these changes are required due to changes being made in Android that applications
+        are required to support.
+UPDATED Use seekbar for more settings (audio control sensitivity, image quality, photo stamp font
+        size).
+UPDATED Debug XML files for panorama now saved in Android/data/net.sourceforge.opencamera/files/.
+UPDATED Camera now closed when in settings or preview otherwise in background.
+
+Version 1.48.2 (2020/07/12)
+
+FIXED   Manual focus and focus bracketing seekbars weren't being hidden when in immersive mode.
+FIXED   Video subtitles would stop before end of video on some devices when using Storage Access
+        Framework.
+UPDATED Switched to AndroidX support library.
+UPDATED Artist, Copyright exif tags option now supported for devices running Android 6 or earlier.
+UPDATED Selecting remote device type for Bluetooth remote control now calls Open Camera's
+        DeviceScanner directly; DeviceScanner activity no longer exported.
+
+Version 1.48.1 (2020/05/02)
+
+FIXED   Crash on devices with Camera2 API where camera reports no picture, video or preview
+        resolutions, instead fail to open camera gracefully instead.
+FIXED   Fix switch camera buttons behaviour if a camera with ID greater than 0 failed to open.
+FIXED   Some devices lost custom video profiles in 1.48.
+ADDED   If camera fails to open, display ID of current camera that we tried to open.
+
+Version 1.48 (2020/04/22)
+
+FIXED   Taking front camera photos with frontscreen torch was slow.
+FIXED   When using "Pause after taking photo", touching to unpause no longer
+        triggers auto focus, or taking another photo for "Touch to capture".
+FIXED   Take photo widget issue.
+FIXED   Camera specific hardware keys such as volume keys shouldn't take effect in settings etc.
+FIXED   Don't set optical image stabilization if video digital stabilization is enabled in video
+        mode.
+FIXED   Seamless video restart on maximum filesize (for Android 8+) wasn't broadcasting video files
+        except the last one, meaning they were taking longer to show up in mediastore gallery.
+FIXED   Recording video on Android 8+ could leave zero-size files if size approached the maximum
+        filesize, but a restart did not occur.
+FIXED   Problem of on-screen level angle overlapping with shutter icon when using a widescreen
+        preview aspect ratio.
+FIXED   Incorrect layout for on-screen text when using "icons along top" with wide-screen aspect
+        ratio and device held in upside-down landscape orientation.
+FIXED   Focus seekbars overlapped with histogram in widescreen aspect ratio when using "Icons along
+        top" UI placement.
+FIXED   Was incorrectly offering manual white balance even if camera didn't support this
+        (inconsistency that the manual white balance option showed, even though the manual white
+        balance temperature seekbar wasn't shown).
+FIXED   Optional on-screen icons (such as flash, RAW) weren't updating correctly if switching to a
+        camera that didn't support that feature.
+FIXED   Don't show on-screen flash icon in video mode (since this icon doesn't support torch, and
+        flash auto/on not supported in video mode).
+FIXED   Preview texture buffer size (for Camera2 API) could be set incorrectly after changing aspect
+        ratios.
+FIXED   Auto-level photos could never be full resolution (for when angle was 0).
+FIXED   Update on-screen time format more often when changing device settings.
+FIXED   USB/bluetooth keyboard control bug when navigating popup menu, if icons were displayed with a
+        horizontal scrollbar.
+ADDED   New icon for switching between multiple cameras. If your device has multiple front and/or
+        back cameras, then the existing icon to switch cameras will switch between the first front
+        and back camera; the new icon will instead cycle between the multiple front or back cameras.
+        If you prefer the old behaviour, then disable
+        Settings/On screen GUI/"Multiple cameras icon".
+ADDED   Current camera ID now displayed on-screen (next to date/time) for devices with multiple
+        front/back cameras. This can be disabled under Settings/Camera preview/"Show camera ID".
+ADDED   Aperture control, for devices that support this. (Camera2 API only.)
+ADDED   Flash on and torch now supported for manual ISO/exposure.
+ADDED   Option to specify REC709 or sRGB profile for video recording.
+ADDED   New custom gamma profile option for video recording.
+ADDED   New video profiles JTVideo, JTLog and JTLog2 (thanks to JT Haapala).
+ADDED   New option for alpha value to use for ghost image option.
+ADDED   More zebra stripe values 93-99%.
+ADDED   Options to control zebra stripe colours.
+ADDED   Option for storing device's current yaw/pitch/roll in Exif user comment for photos (thanks
+        to Joshua).
+ADDED   New option Settings/More camera controls/"Allow long press actions" to disable long press
+        actions.
+UPDATED Auto-level feature now shows on-screen rectangle to show the frame of the resultant photo.
+UPDATED Improvements for log profiles for video recording. Please note that this means the behaviour
+        of these profiles has changed!
+UPDATED On devices with on-screen navigation buttons, camera preview can now display under these
+        buttons if required for wide aspect ratio (requires Android 5+).
+UPDATED New immersive mode option to hide navigation buttons only when in immersive mode; existing
+        option for hiding navigation buttons now renamed to say "dim".
+UPDATED Show toast with camera id on startup if camera isn't set to the default camera for front or
+        back facing. Toast for cameras also displays whether ultra-wide, when using Camera2 API.
+UPDATED HDR and NR photo modes now limited to maximum resolution of 22 megapixels (to avoid risk of
+        running out of memory on devices with large camera resolutions).
+UPDATED Improved performance when displaying ghost image larger than device's resolution.
+UPDATED Popup menu now displays extra information for resolutions (MP for photos, descriptive name
+        like FullHD, VGA etc for video).
+UPDATED Don't set video digital stabilization when in photo mode.
+UPDATED Some preferences are now shown as disabled if only relevant for another nearby option that
+        isn't currently enabled.
+UPDATED Moved video bitrate and frame rate options to debugging section.
+UPDATED Improved UI support for "external" cameras (if detected/supported with Camera2 API).
+UPDATED Improved placement of on-screen text (zoom, video recording time etc) to avoid focus
+        seekbars in landscape mode.
+UPDATED Improved look of on-screen text for manual/exposure sliders.
+UPDATED Exposure icon now highlights red when exposure UI is open.
+UPDATED Exposure UI now auto-opens when switching to manual white balance (as exposure UI contains
+        the manual white balance temperature seekbar).
+UPDATED More repeat mode options (100x, 200x, 500x).
+UPDATED Optimisation for reading most recent photo/video for thumbnail.
+
+Version 1.47.3 (2019/10/20)
+
+FIXED   Grids were being drawn too faintly.
+UPDATED Camera2 API is now a "list" selection rather than a boolean switch.
+UPDATED Updated some icons for newer more consistent material design look.
+UPDATED Changed notification icon to be white.
+UPDATED Minor accessibility improvements.
+UPDATED Clarify in settings that PNG image format is not truly lossless.
+UPDATED Moved licences from about to separate preference category.
+
+Version 1.47.2 (2019/09/04)
+
+FIXED   Some devices (e.g., Samsung) played shutter sound etc when Camera2 API enabled even when
+        phone was on silent/vibrate.
+FIXED   When using "Icons along top" UI placement on devices with notches/cutouts, on-screen text
+        could be covered up by icons in widescreen aspect ratios.
+FIXED   Don't display ghost image whilst frontscreen flash is enabled.
+FIXED   Info toast string for video mode could be too long in some cases for high speed frame rates.
+ADDED   Support for EXTRA_DURATION_LIMIT, EXTRA_SIZE_LIMIT, EXTRA_VIDEO_QUALITY for when called from
+        video (ACTION_VIDEO_CAPTURE) intent.
+UPDATED Minor performance improvement for panorama.
+
+Version 1.47.1 (2019/08/17)
+
+FIXED   Crash in 1.47 when saving photo with SAF and geotagging, when using Camera2 API.
+FIXED   Crash in 1.47 for panorama photo mode if a new photo is taken when a previous panorama is
+        still being processed.
+FIXED   Crash for panorama if no matches found when aligning images.
+
+Version 1.47 (2019/08/14)
+
+FIXED   Disallow switching between auto and manual mode while recording video (this previously
+        crashed).
+FIXED   Crashes related to histogram, zebra stripes, focus peaking - don't crash if we have
+        problems.
+FIXED   Don't close Open Camera if bluetooth service (for remote control) fails to initialise.
+FIXED   Crash related to focus assist if camera closed.
+FIXED   Crashes related to multiple instances of Open Camera and Renderscript.releaseAllContexts().
+FIXED   Some devices (e.g., LG G6) defaulted to front camera, hopefully fixed to default to back
+        camera on all devices that have a back camera.
+ADDED   New panorama photo mode (requires Android 5+, note not all devices are supported).
+ADDED   New option Settings/Photo settings/Text style/"Text with shaded background" for photo stamp
+        option, to draw with a rectangular shaded background (i.e., bringing back the old behaviour
+        that was replaced by the newer outline shadow text).
+UPDATED Notifications now displayed on Android 8+ when Open Camera is in background when images are
+        still being processed/saved.
+UPDATED Improved exposure panel UI, and made on-screen text (zoom, video timer etc) more compact.
+UPDATED Camera ID is now displayed when switching cameras, on devices with more than two cameras.
+UPDATED Set navigation bar colour to black (needed for some devices, e.g., Samsung).
+UPDATED Improved performance on Android 7+ when saving photos when using Storage Access Framework
+        (no longer need to save to a temporary file to handle Exif tags).
+UPDATED When saving "base images" for Noise Reduction mode, keep JPEG quality at 100% even if using
+        post processing options.
+UPDATED Updated Renderscript to 64-bit code.
+
+Version 1.46 (2019/05/18)
+
+FIXED   Problems with toasts being displayed for seekbars on Android 9.
+FIXED   Face detection stopped working when recording video (on old Camera API).
+FIXED   Wasn't displaying crop guides in some cases (when the crop guide aspect ratio matched the
+        photo/video resolution's aspect ratio, but this didn't match the preview's aspect ratio).
+FIXED   Improvement to on-screen placement of pitch and compass lines.
+FIXED   Toast for enabling/disabling face detection (via on-screen icon) was hidden by subsequent
+        info toast from restarting the camera.
+FIXED   Fixed some crashes for old camera API.
+FIXED   Problem with Camera2 API where with manual exposure time, preview would display a maximum
+        exposure time of 1/12s rather than what the manual exposure time had been set to.
+FIXED   When using "red eye" flash, on-screen flash icon (with Camera2 API) was being shown all the
+        time, when this actually behaves similar to flash auto.
+FIXED   Avoid "capturing" text from being covered by focus bracketing sliders in focus bracketing
+        photo mode and landscape orientation.
+FIXED   Don't allow continuous fast burst mode (when holding down shutter button) in standard photo
+        mode with RAW only (previously this appeared to work, but no images were saved).
+FIXED   Crashes with Camera2 API related to multithreading issues.
+FIXED   Problem with camera preview and UI orientation if user switched to reverse landscape
+        orientation whilst in settings or an on-screen dialog was displayed.
+FIXED   When using "Icons along top" UI placement, on-screen text could appear below the icons
+        if icons showed above the camera preview, when popup menu opened.
+FIXED   Layout problem on Nexus 7 with "Icons along top" UI placement when going to settings and
+        back.
+FIXED   Free storage space should now be correct for external SD cards when using Storage Access
+        Framework.
+ADDED   New option Settings/Camera preview/"Show a histogram" to display an on-screen histogram.
+        (Camera2 API only.)
+ADDED   New option Settings/Camera preview/"Show zebra stripes" to display on-screen zebra stripes
+        to indicate over-exposed regions. (Camera2 API only.)
+ADDED   New option Settings/Camera preview/"Focus peaking" to display on-screen highlights to
+        indicate in-focus edges. (Camera2 API only.)
+ADDED   RAW/DNG now enabled on higher end devices for expo bracketing and focus bracketing photo
+        modes, as well as HDR when saving the base expo images. Use the new options Settings/Photo
+        settings/"Allow RAW for expo bracketing" and "Allow RAW for focus bracketing" if you want to
+        enable RAW for standard photo mode, but not for expo/focus bracketing modes.
+ADDED   New option Settings/Photo settings/"Use addresses" to display GPS coordinates in the form
+        of an address (for photo stamp and video subtitles).
+ADDED   Support for remote control via bluetooth/USB keyboards.
+ADDED   New option under Settings/On screen GUI/ to enable on-screen icon for enabling location
+        data.
+ADDED   New option under Settings/On screen GUI/ to enable on-screen icon for cycling through flash
+        modes (instead of on the popup menu).
+ADDED   New option under Settings/On screen GUI/ to enable on-screen icon for cycling through RAW
+        modes.
+ADDED   Support for the Kraken Smart Housing (thanks to Edouard Lafargue). See Settings/More camera
+        controls/"Bluetooth LE remote control...".
+ADDED   Added some more common manual shutter speeds (1/10 to 1/50 now changes in steps of 1/5).
+UPDATED Default UI layout is now "icons along top" (see Settings/On screen GUI/UI placement to
+        change back to old layouts).
+UPDATED Improved DRO algorithm for some scenes (e.g. stop making clouds so unnatural).
+UPDATED Noise Reduction "NR Mode" (normal or low light) is now preserved; the on-screen "NR" icon is
+        now highlighted in yellow in low light mode.
+UPDATED Separated some options under "Settings/On screen GUI" preference screen out into the new
+        screen "Camera preview".
+UPDATED Made exposure UI panel more compact.
+UPDATED On-screen icon for RAW now displayed as "RAW ONLY" if only saving RAW.
+UPDATED Display number of remaining images to save when "processing".
+UPDATED Play notification sound (if shutter sound not disabled) when focus bracketing completes.
+UPDATED Also added option for 150 focus bracketed images.
+UPDATED Code to try to prevent video corruption when running out of space now enabled when using
+        Storage Access Framework.
+
+Version 1.45.2 (2019/01/20)
+
+FIXED   Don't allow focus assist when recording video.
+
+Version 1.45.1 (2019/01/18)
+
+FIXED   Crash when saving settings if SAF enabled, and save location does not correspond to a file
+        based directory.
+FIXED   Some devices in Camera2 mode lost support for modes requiring burst or manual ISO/exposure.
+FIXED   Improve popup view for small screens, to help some languages with longer strings.
+
+Version 1.45 (2019/01/14)
+
+FIXED   Crash if going to background during focus bracketing.
+FIXED   Crash for Camera2 API on devices with no color effects.
+FIXED   Crashes related to saving with storage access framework.
+FIXED   If Open Camera is running above screen lock, don't require screen to be unlocked just to
+        show an info dialog.
+FIXED   Prevent exposure panel from overlapping with on-screen icons on small devices.
+FIXED   Problems with popup view and left handed UI (e.g., wouldn't show when first opened).
+FIXED   Unlimited repeat mode didn't work on some languages (if this affected you, you'll need to
+        re-select the option for unlimited, then it should start working).
+FIXED   If Repeat Mode changed in Settings, the same option on popup menu would sometimes not be
+        updated.
+FIXED   Don't pause on-screen preview when capturing in fast burst mode.
+FIXED   Don't support expo bracketing, focus bracketing or fast burst photo modes if called from a
+        "Take photo" intent.
+FIXED   Taking photos could hang on some devices with Camera2 API when using repeat mode and
+        continuous focus.
+FIXED   ISO manual slider wasn't updating when ISO was changed by using the ISO buttons.
+FIXED   Code meant to increase exposure time for dark scenes in Noise Reduction mode could end up
+        reducing exposure time in some cases.
+FIXED   Wasn't requesting audio permission (for Android 6+) for "Audio control" options.
+ADDED   New "Low light" mode for Noise Reduction photo mode.
+ADDED   Continuous burst mode: holding down shutter button in standard or fast burst photo modes
+        will now enable a continuous burst. (Camera2 API only.)
+ADDED   Focus assist option (Settings/On screen GUI/"Focus assist"), allows auto-zooming when
+        adjusting manual focus distance.
+ADDED   Support for different photo image formats (Settings/Photo settings/"Image format"): WebP,
+        PNG.
+ADDED   Support for different video file formats (Settings/Video settings/"Video format"): H264,
+        HEVC, 3GPP, WebM (WebM is without audio support).
+ADDED   New options to save and restore all settings to a file (see Settings/"Settings manager").
+ADDED   New option Settings/On screen GUI/UI placement/"Icons along top", which enables a new user
+        interface layout where control icons are laid out opposite where the "take photo" icon is
+        (top of the screen in portrait, or left of the screen in landscape).
+ADDED   New options under Settings/On screen GUI/ to enable on-screen icons for enabling face
+        detection, auto-level, photo stamp, custom text stamp, white balance lock; also the
+        exposure lock icon is now optional.
+ADDED   Timelapse capture rates of 120x and 240x.
+ADDED   More auto-repeat options.
+ADDED   Larger options for maximum filesize for video (up to 9GB).
+ADDED   2:1 crop guide.
+ADDED   New "fine" setting for video flat (log) profile.
+ADDED   New lower noise sensitivity for "loud noise" audio trigger.
+ADDED   Vietnamese translation (thanks to Khánh Trần).
+UPDATED Improved HDR algorithm to avoid increasing noise in darker scenes. If you prefer the
+        original behaviour, see Settings/Photo settings/"HDR contrast enhancement" and set this to
+        "Always". Or, you can also set this to "Off" to disable this in all cases.
+UPDATED Improvements to contrast enhancement (as used by HDR, DRO, Noise Reduction): avoid
+        increasing contrast too much in dark regions.
+UPDATED Improvement to Noise Reduction mode colours for some scenes.
+UPDATED Improved quality of deghosting algorithm for Noise Reduction mode (manages to deghost with
+        less loss of quality).
+UPDATED Improved auto-alignment for Noise Reduction mode for dark scenes or when zoomed in.
+UPDATED Allow maximum of 2s instead of 0.5s manual exposure time in Noise Reduction mode.
+UPDATED Made user interface buttons larger.
+UPDATED Focus bracketing mode now supports up to 200 images.
+UPDATED Focus bracketing mode can now be cancelled by pressing the take photo button again.
+UPDATED Focus bracketing mode makes the shutter sound per shot.
+UPDATED Burst images are now labelled starting from "_0" in the filename suffix (to be consistent
+        with expo and focus bracketing).
+UPDATED Make it clearer that Storage Access Framework should be enabled to allow saving to SD cards.
+UPDATED Photo and video resolutions on popup menu now ordered left to right from smallest to
+        largest, rather than largest to smallest.
+UPDATED Ensure photo resolutions are sorted in order (by area).
+UPDATED Video frame rates now specify which are "high speed" (for Camera2 API); also make it clearer
+        if video resolution in settings is for high speed mode.
+UPDATED Improved performance when changing video speed on popup menu.
+UPDATED "Say cheese" voice control now remains on.
+UPDATED Minor improvements for accessibility.
+UPDATED Expo bracketing, focus bracketing, noise reduction and fast burst modes now display number
+        of photos being taken in each burst.
+UPDATED Allow Camera2 mode to be used on more devices (devices that have both some LEGACY cameras,
+        and some cameras with at least LIMITED support).
+UPDATED Content description for exposure lock now updates to say whether clicking will lock or
+        unlock exposure.
+
+Version 1.44.1 (2018/09/23)
+
+FIXED   Crash in 1.44 for ghost image option if selected image could not be loaded due to
+        SecurityException.
+FIXED   Crash with RAW (DNG) in some cases due to threading issue when "Pause after taking photo"
+        enabled.
+
+Version 1.44 (2018/09/18)
+
+FIXED   Crash if activity is destroyed as photo is taken.
+FIXED   Crash if camera reopened 128 times in a single instance of the application.
+FIXED   Fixed some crashes for old camera API.
+FIXED   Fixed crash with Camera2 API when using expo/HDR if fast burst disabled, and camera
+FIXED   In Camera2 API mode, wasn't checking to see if video stabilization option is supported.
+FIXED   Settings/Preview size/"Maximise preview size" option didn't always work correctly on some
+        devices (e.g., Nokia 8).
+FIXED   Fixed a crash related to failing to save with storage access framework.
+FIXED   Switching from slow motion back to regular speed wouldn't reset exposure level on some
+        devices.
+FIXED   Prevent manual focus slider from overlapping with on-screen icons.
+FIXED   Photos taken while recording video were being saved at 100% quality if photo mode was DRO or
+        HDR, even though the photo mode isn't relevant for photos when recording video.
+FIXED   Don't show fast burst options on popup mode when in video mode.
+FIXED   Problem of toasts for seekbars disappearing when continually moving seekbar.
+FIXED   GUI not updating properly when using auto-repeat with timed interval in Camera2 API mode.
+FIXED   Norwegian strings for video bitrate preferences were incorrect.
+ADDED   New photo mode "NR" Noise Reduction, takes a burst of photos and aligns and merges to reduce
+        noise. (Camera2 API only, only supported on high end devices.)
+ADDED   New photo mode "Focus {}" to enable Focus Bracketing. (Camera2 API only.)
+ADDED   New option Settings/"Ghost image", to overlay either last photo taken, or a selected image
+        on your device.
+ADDED   New option Settings/On screen GUI/"Show audio level meter", to display on-screen audio level
+        when recording video.
+ADDED   New option Settings/Video settings/"Video flat (log) profile" to record video with a flat
+        profile. (Camera2 API only.)
+ADDED   New options under Settings/"Processing settings..." for setting Edge Mode and Noise
+        Reduction Mode filtering options. (Camera2 API only.)
+ADDED   New option Settings/Photo settings/"Distance unit" to allow using feet instead of metres for
+        GPS altitude on photo stamp and video subtitles.
+ADDED   On-screen flash icon (to show whether flash will fire or not) now supported for front screen
+        flash.
+ADDED   New Camera2 API debug option under Settings/"Photo settings" to disable taking photos whilst
+        recording video feature (if your device has problems recording videos with Camera2 API
+        enabled, try disabling this).
+ADDED   Camera2 support for high resolution photo resolutions (required for supporting highest
+        resolutions on some devices, e.g., Nokia 6, Samsung Galaxy S6).
+ADDED   Launching from intent now recognises request for front or back camera
+        (android.intent.extras.CAMERA_FACING, android.intent.extras.LENS_FACING_FRONT,
+        android.intent.extras.LENS_FACING_BACK, android.intent.extra.USE_FRONT_CAMERA).
+ADDED   Video bitrate options for 150Mbps and 200Mbps (may not work on all devices).
+ADDED   New video audio source options: UNPROCESSED (requires Android 7) and VOICE_RECOGNITION.
+ADDED   Partial Greek translation (thanks to Wasilis Mandratzis-Walz).
+ADDED   New option to disable ever showing the "What's New" dialog (under Settings/On screen GUI/
+        "Show What's New dialog").
+UPDATED No longer wait for image queue to be saved to go to settings, or background.
+UPDATED On Android 8+ when restarting video due to hitting maximum filesize, transition to new file
+        is now seamless.
+UPDATED Improved behaviour of seekbars for manual ISO, shutter speed, white balance (they now select
+        sensible "round" values).
+UPDATED Show pitch lines etc even when device is near vertically down or up; also highlight the 90
+        degree pitch line when almost vertically down or up.
+UPDATED Don't show auto-level on-screen icon when device is near vertically down or up (since auto
+        level won't occur in that situation).
+UPDATED Always show scrollbar on popup menu (so it's more obvious that it can be scrolled).
+UPDATED Show FPS on-screen when recording video (Camera2 API only).
+UPDATED Made disabled buttons on popup menu easier to see in bright light.
+UPDATED Made on-screen level lines clearer.
+UPDATED New material design gallery icon (used when no last thumbnail found, and also for shortcut
+        icons).
+UPDATED If an option that requires compass direction is set, warn user if compass is unreliable.
+UPDATED Current manual focus distance is now always saved.
+UPDATED Show video bitrate on info toast if not set to default.
+UPDATED More preferences in settings now show the currently selected value.
+UPDATED Battery optimisation: don't use magnetic compass sensor unless required for user
+        preferences.
+UPDATED Camera opening on background thread (for smoother operation) now enabled for Android 6
+        devices (was already supported for Android 7+).
+UPDATED Workaround for bug where popup menu appears too large (off-screen), reopening the popup at
+        least should fix this without having to restart Open Camera.
+
+Version 1.43.3 (2018/04/20)
+
+FIXED   Fixed some crashes.
+ADDED   Added video capture rate, capture rate factor and high speed info to About debug dialog.
+UPDATED Fix toast message if slow motion video fails.
+
+Version 1.43.2 (2018/04/13)
+
+FIXED   Fixed some crashes.
+
+Version 1.43.1 (2018/04/12)
+
+FIXED   Crash when selecting antibanding setting on devices with no support for antibanding; this
+        option is now removed on such devices.
+FIXED   Crash when using Storage Access Framework to select "Downloads" on Android 8+.
+FIXED   Fixed some other crashes.
+
+Version 1.43 (2018/04/10)
+
+FIXED   Fixed crash when sharing images (if not using Storage Access Framework) on some Android 7+
+        devices.
+FIXED   Fixed crash related to devices that don't support video.
+FIXED   Fixed some crashes for when Camera2 API enabled.
+FIXED   Problems when taking HDR or expo photos quickly without pause, the second photo would use
+        incorrect exposure.
+FIXED   Problems with popup view layout on some non-English languages.
+FIXED   On-screen icons (e.g., for HDR) didn't show after restarting.
+FIXED   When using volume keys to switch auto-level on/off, user interface wasn't updating properly.
+FIXED   Face detection mode on Camera2 API wasn't affecting resultant exposure etc.
+FIXED   Bug with video subtitles (.SRT) option, overlapping subtitles could appear briefly when
+        pausing/resuming video.
+FIXED   Bug on some devices (e.g., Nokia 8) where manual sliders (e.g., for manual ISO) could only
+        go up to 1 less than the true maximum allowed value.
+FIXED   Some stored "values" for zh-rCN shouldn't have been translated.
+ADDED   New Fast Burst photo mode (Camera2 API only).
+ADDED   Slow motion video (Camera2 API only).
+ADDED   Time lapse video (requires Android 5).
+ADDED   Torch option for front cameras without flash, which lights up the screen.
+ADDED   Option to save only in RAW/DNG format (requires Android 7).
+ADDED   New options under "Photo settings" to support Artist and/or Copyright Exif tags in images
+        (requires Android 7).
+ADDED   On-screen icons for DRO and Expo Bracketing modes.
+ADDED   On-screen icon for when audio is disabled in video mode.
+UPDATED Now targeting Android 8.1.
+UPDATED Improved appearance of on-screen text and photo stamp text (draw fonts with outline, rather
+        than shaded background).
+UPDATED Improved layout of on-screen information.
+UPDATED Improved look of popup menu.
+UPDATED Improved support for setting frame rates (including high speed support) when using Camera2
+        API. Thanks to George Joseph.
+UPDATED Video frame rate preference is now stored per-camera (as with video resolution).
+UPDATED Burst mode now renamed to Repeat mode (note, some translations may still refer to "burst").
+UPDATED Some preferences in settings now show the currently selected value.
+UPDATED Improved performance when going to settings in portrait mode.
+UPDATED Don't request audio permission (or show toast if not available) if record audio option for
+        video is disabled.
+UPDATED Zoom now resets when pausing the application, switching camera, or switching between photo
+        and video modes (not resetting zoom tends to confuse users; this change also makes behaviour
+        consistent with other camera applications).
+UPDATED For "Pause after taking photo" option, pressing Back when preview is paused now unpauses the
+        preview and keeps the photo, rather than exiting the application.
+UPDATED Don't block UI when image saving queue is full, instead simply don't allow more photos to be
+        taken until queue is no longer full. Size of image saving queue is also increased, depending
+        on memory of device.
+UPDATED Datestamp format (for "Stamp photos" option) yyyy/mm/dd replaced with yyyy-mm-dd (to match
+        ISO 8601 standard).
+UPDATED Camera2 API now sets CONTROL_ENABLE_ZSL for standard and DRO photo modes on Android 8+.
+UPDATED "Use background thread" debug option removed (now defaults to using background thread for
+        saving photos).
+
+Version 1.42.2 (2017/12/30)
+
+FIXED   Fixed some crashes.
+FIXED   Problems with alignment of some dialogs in portrait mode on some devices (possibly an
+        Android 8 issue).
+ADDED   On-screen face icon displayed when face detection is enabled.
+UPDATED Enable 60fps on Android 7+ for GUI rendering.
+
+Version 1.42.1 (2017/12/10)
+
+FIXED   Crash related to front camera fake flash on devices with auto focus and Camera2 API.
+FIXED   Possible fix for boot loop on LineageOS (this is a LineageOS bug exposed by using Android
+        Studio 3; workaround is to upgrade to build tools 27.0.1).
+FIXED   Change in 1.42 for using suffix "_0" etc instead of "_EXP0" etc was only done for HDR expo
+        images; now done for Expo {} photo mode too.
+FIXED   Text style "Plain text" for "Stamp photos" was still showing a shadow effect.
+FIXED   Workaround for Google bug where crash can be reported if application is open when it's being
+        updated ( https://issuetracker.google.com/issues/36972466#comment14 ).
+UPDATED Now targeting Android 8.
+UPDATED New adaptive icon for Android 8 (thanks to Adam Lapinski).
+UPDATED Improve Camera2 quality for Samsung Galaxy S7 and S7 Edge (set EDGE_MODE_OFF and
+        NOISE_REDUCTION_MODE_OFF).
+UPDATED Updated Norwegian Bokmål translation (thanks to Imre Kristoffer Eilertsen).
+UPDATED Updated Polish translation (thanks to Jacek Buczyński).
+
+Version 1.42 (2017/11/19)
+
+FIXED   Fixed some crashes.
+FIXED   Bug since 1.41 where if camera failed to open, it wasn't possible to switch to other
+        cameras.
+ADDED   Application shortcuts (requires Android 7.1).
+ADDED   Face detection now supports accessibility services (e.g., if using Google Talkback,
+        information on the number and position of detected faces will be spoken).
+ADDED   Link to GPL on About dialog.
+UPDATED Expo images (either in "Expo {}" photo mode, or HDR when "Save all images for HDR mode" is
+        selected) are now saved with suffix "_0" etc, instead of "_EXP0" etc. This means Google
+        Photos knows to group the images together.
+UPDATED Performance improvements.
+UPDATED "Show angle" no longer enabled by default.
+
+Version 1.41.1 (2017/10/21)
+
+FIXED   Crash in 1.41 when opening popup menu in video mode on devices without flash.
+FIXED   Crash in 1.41 related to taking photos on burst mode with no delay when camera closing.
+UPDATED Auto-repeat burst mode now closes when going to settings (not safe to be taking photos in
+        background when settings may change).
+
+Version 1.41 (2017/10/15)
+
+FIXED   No longer shows exposure flashes on the screen when taking HDR photos with Camera2 API.
+FIXED   "Pause after taking photo" option was pausing preview after stopping video recording.
+FIXED   Minor bug in HDR auto-alignment.
+ADDED   Take photos while recording video.
+ADDED   What's New dialog now displays to user new features/changes.
+ADDED   Norwegian Bokmål translation (thanks to Imre Kristoffer Eilertsen).
+UPDATED Switch camera icon now next to take photo button, for easier one-handed use.
+UPDATED Switching to video mode now done by selecting the smaller video icon next to the take photo
+        button; similarly switch back to photo mode by selecting the smaller photo icon next to the
+        record video button.
+UPDATED ISO controls now moved entirely to the exposure icon.
+UPDATED Changing ISO values is faster (except for switching between auto and manual mode on Camera2
+        API).
+UPDATED Improved performance by closing camera on background thread when application pauses.
+UPDATED HDR improvements (ghost removal, and brightening of too dark images).
+UPDATED Expo bracketing and HDR photo modes now have maximum exposure time of 0.5s (for Camera2
+        API).
+UPDATED Use material design icons for location/GPS.
+UPDATED Updated Italian translation (thanks to Stefano Gualmo).
+
+Version 1.40 (2017/08/18)
+
+FIXED   Front screen flash wasn't showing if in "everything" immersive mode.
+FIXED   Shouldn't show "RAW" on-screen icon when RAW is enabled, but it's not
+        supported by the current camera.
+FIXED   Fixed title of calibrate level angle dialog (shouldn't be "About").
+FIXED   Various crashes.
+FIXED   Don't show two toasts when changing scene modes (bug in 1.39).
+FIXED   For DRO and HDR modes with Camera2 API, don't set ISO value outside of
+        supported range.
+ADDED   Polish translation (thanks to Jacek Buczyński).
+UPDATED Improvements to HDR algorithm for brightness levels (problems with HDR
+        photos coming out too dark).
+UPDATED Changing color effect or white balance is now faster, and no longer
+        closes the popup (unless switching to manual white balance); for
+        Camera2 mode, changing scene mode is also faster.
+UPDATED Improved performance when leaving Settings, if certain preferences
+        haven't been modified.
+UPDATED Close camera on background thread.
+UPDATED Pause icon now changes when pausing video (thanks to Johan Ejdemark).
+UPDATED Make pause icon easier to see against bright backgrounds.
+UPDATED Larger gap between bottom of zoom slider and edge of screen (reduce
+        risk of hitting device capacitive buttons when zooming out).
+UPDATED Don't show ISO info on debug window that's specific to old camera API,
+        when using Camera2 API.
+UPDATED Performance improvements: reduced memory allocations.
+UPDATED Make sure exposure lock icon is set whilst camera is opening.
+UPDATED White balances, scene modes, color effects are now shown as "user
+        readable" strings (except for old Camera API where there are device
+        specific values). This also means these strings can be translated in
+        future versions.
+UPDATED About debug info now lists information on supported exposure
+        compensation and manual ranges.
+
+Version 1.39 (2017/06/28)
+
+FIXED   For Camera2, don't show flash of previous preview image when resuming
+        application.
+FIXED   Crash when exiting (related to cleaning up renderscript object).
+UPDATED Camera now opens on background thread (for smoother operation) on
+        Android 7+.
+UPDATED Performance improvements for opening popup view.
+
+Version 1.38.2 (2017/06/01)
+
+FIXED   Various crashes.
+UPDATED Performance improvements for opening popup view.
+
+Version 1.38.1 (2017/05/22)
+
+FIXED   Various crashes.
+FIXED   Don't show expo bracketing for original camera API if exposure
+        compensation is not supported.
+UPDATED Performance improvements.
+
+Version 1.38 (2017/05/13)
+
+FIXED   Crash with video subtitles option when GPS is lost.
+FIXED   Fixed various other crashes.
+FIXED   Photo stamp now shows in correct orientation, on devices that store
+        orientation as Exif tag (e.g., Samsung devices).
+FIXED   Still show flash symbol (for flash auto) even if "Show ISO" option is
+        disabled (for Camera2 API).
+FIXED   Minor bug in DRO/HDR image generation.
+ADDED   Manual white balance temperature (Camera2 API only).
+ADDED   HDR and expo bracketing now available for original Camera API (HDR
+        still requires Android 5+) (Camera2 API will still be much faster).
+ADDED   On-screen icons now displayed to indicate options being enabled, for
+        RAW, auto-stabilise, HDR, photo stamp.
+ADDED   Hungarian translation (thanks to Báthory Péter).
+ADDED   Ukrainian translation (thanks to Olexandr).
+UPDATED Improved HDR algorithm for areas that are over-exposed even on the
+        darkest image.
+UPDATED More Exif tags preserved when saving in modes that require re-saving
+        the image (e.g., auto-stabilise, photo stamp, DRO, HDR) (requires
+        Android 7). Performance also improved for such modes on Android 7.
+UPDATED Improved ISO and exposure time scaling for the manual sliders.
+UPDATED Don't leave gaps between various on-screen info text (time, free
+        memory, etc) if some of them are disabled.
+UPDATED Number of exposure bracketing stops now refers to the full half-range.
+UPDATED "Auto-stabilise" option now renamed to "Auto-level", to avoid confusion
+        with image stabilisation.
+UPDATED Performance improvements.
+
+Version 1.37 (2017/02/12)
+
+FIXED   Crash on devices not supporting scene mode.
+FIXED   Crash if failed to start audio listener (for "loud noise").
+FIXED   Problems with focusing on OnePlus 3T with Android 7 on Camera2.
+FIXED   Problems with "alternative" flash method for Camera2 API on OnePlus 3T.
+FIXED   Improved support for standard flash on Camera2 API on OnePlus 3T.
+FIXED   Support for long manual exposure times (with Camera2 API) (e.g.,
+        OnePlus 3T).
+FIXED   Remove duplicate entries from ISO buttons (e.g., for OnePlus 3T).
+FIXED   Problems with "fast HDR/expo burst" on Nexus 6 now fixed (you should be
+        able to reenable this under "Photo Settings", for faster HDR and expo
+        bracketing shots).
+FIXED   Shutter sound option wasn't working for Camera2 API unless a photo had
+        previously been taken in the same session.
+ADDED   Display flash symbol for Camera2 API in flash auto mode, to indicate
+        whether flash will fire.
+UPDATED Improved battery life when going idle.
+UPDATED Improved support for flash auto decision on when to take a photo, when
+        using "alternative" flash method and/or HDR, expo bracketing for
+        Camera2 API (it now matches the decision when not using the alternative
+        flash method).
+UPDATED Finer control for manual seekbars.
+UPDATED Display "Capturing..." when manual exposure time is 0.5s or longer.
+UPDATED Support face detection on more devices (e.g., OnePlus 3T) with Camera2
+        API.
+UPDATED Improved popup view so selected button is centred rather than aligned
+        left.
+UPDATED Continuous focus is now the default for new installs on all devices.
+
+Version 1.36.2 (2017/01/01)
+
+FIXED   Save location folder dialog wasn't working from Settings in 1.36.
+
+Version 1.36.1 (2016/12/31)
+
+FIXED   Fixed a couple of crashes.
+
+Version 1.36 (2016/12/28)
+
+FIXED   "Loud noise" audio control wasn't working.
+FIXED   Remember focus mode for video mode.
+FIXED   Focus circle for continuous focus mode would sometimes show green.
+FIXED   Improved behaviour for photo/video mode when launched from a take photo
+        or record video intent (no longer keeps switching back to photo mode
+        when pause/resuming if called from a photo intent; now allows switching
+        to photo mode if called from a video intent).
+FIXED   Make line-spacing for toasts more consistent.
+FIXED   Improved "Max duration of video", was stopping up to a second
+        prematurely.
+ADDED   New photo mode "DRO" (dynamic range optimisation) (requires Android 5).
+ADDED   Quick settings tiles to open camera (requires Android 7): tiles for
+        photo, video and selfie mode.
+ADDED   Pause/resume video recording (requires Android 7).
+ADDED   New option to mirror photos for front camera (Settings/Photo settings/
+        "Front camera mirror").
+ADDED   Option to calibrate the level angle (accelerometer) (see Settings/More
+        camera controls/"Calibrate level angle").
+ADDED   New option to display on-screen pitch lines (Settings/On screen GUI/
+        "Show pitch lines") and compass direction lines (Settings/
+        On screen GUI/"Show compass direction lines").
+ADDED   Camera2 API popup menu now has "ISO M" option to switch straight to
+        manual exposure mode, but defaulting to the current ISO value.
+ADDED   New option "Video subtitles" (Settings/Video settings/). This creates a
+        subtitle file (in .SRT format) displaying date and time; and also GPS
+        location and direction if those options are enabled.
+ADDED   New option to disable showing the "take photo" button (under Settings/
+        On screen GUI/"Show take photo icon").
+ADDED   3 seconds entry for "Max duration of video" option.
+UPDATED HDR now has auto-alignment for better quality photos.
+UPDATED Moved white balance/scene mode/color effect options lower down the
+        popup menu.
+UPDATED No longer auto-stabilise or display level angle when device pointing
+        nearly up or down.
+UPDATED Apply low pass filter for compass direction.
+UPDATED Continuous focus mode is now the default for back camera for Nexus and
+        Pixel devices.
+UPDATED About/debug info now shows whether disabling shutter sound is supported.
+UPDATED Updated Russian translation (thanks to Grigorii Chirkov).
+UPDATED Updated Slovenian translation (thanks to Peter Klofutar).
+
+Version 1.35 (2016/10/30)
+
+FIXED   Crashes for Galaxy Ace 4, Galaxy S Duos 3 (to do with Exif data).
+FIXED   Workaround for Android 7 bug where rotated seekbars don't show
+        properly.
+FIXED   Problem that after pausing and resuming in manual ISO mode, the
+        on-screen ISO was shown continually in red.
+FIXED   Improved handling of camera errors - display error rather than just
+        allowing preview to freeze.
+ADDED   New "Enable fast HDR/expo burst" option under "Photo settings" for
+        Camera2 mode. Disable this if your device has problems taking photos in
+        HDR or Exposure Bracketing photo modes.
+UPDATED Improvements to HDR algorithm: improved tonemapping; local contrast
+        enhancement.
+UPDATED Improved performance of on-screen toasts, gives smoother update for
+        seekbars (e.g., manual focus, exposure).
+UPDATED Launching Open Camera always opens in camera mode (rather than gallery,
+        if the user had previously gone to the gallery).
+UPDATED Improved look of seekbars, particularly for exposure.
+UPDATED Folder chooser now contains entry to jump to DCIM folder.
+UPDATED Timeout for camera2, give up waiting after 10s rather than freezing.
+UPDATED Updated Russian translation (thanks to Grigorii Chirkov).
+UPDATED Updated Slovenian translation (thanks to Peter Klofutar).
+UPDATED Now supporting Android 7.
+UPDATED Now compiling with Android Studio instead of Eclipse.
+
+Version 1.34 (2016/10/08)
+
+FIXED   Crash on Camera2.
+FIXED   Crash for Asus ZenFones with Android 5.0.
+FIXED   Face detection and touch to focus not working properly with Camera2 and
+        "Rotate preview" option; this should also hopefully fix problems for
+        devices with unusual camera orientations (Nexus 5X).
+FIXED   Face detection wouldn't switch off with Camera2 API without restart.
+FIXED   For file dialog, still allow going up to parent folders even if a
+        folder contains no readable files.
+FIXED   Pause preview option now shows exactly the saved photo (no lag, and
+        works with options such as auto-stabilise, photostamp, HDR).
+FIXED   Nexus 6 got wrong date/time when using Camera2 API with geotagging
+        enabled.
+ADDED   New photo mode Exposure Bracketing. Only supported for devices with
+        Camera2 API (and Camera2 must be enabled).
+ADDED   New "Use alternative flash method" option under "Photo settings" for
+        Camera2 mode. This offers a possible workaround for devices that have
+        poor flash behaviour with Camera2 API.
+ADDED   Screen flash to simulate flash auto/on modes for front cameras without
+        flash ("auto" requires Camera2).
+ADDED   Material design icons for Settings.
+UPDATED Now supporting Android 6 permissions. You'll have to "allow"
+        permissions for camera and storage for Open Camera to work. Additional
+        permissions requested when required are microphone (for video) and
+        location (for geotagging).
+UPDATED HDR mode now supports flash modes.
+UPDATED Extra protection for video recording and running out of storage space -
+        if not using Storage Access Framework, and saving to internal memory,
+        videos should now stop rather than becoming corrupt when running out of
+        storage. Note this can't be supported for Storage Access Framework or
+        SD cards due to Android limitations.
+UPDATED Stop video when battery level critical, to give extra protection when
+        device runs out of battery (see
+        Settings/Video settings/"Critical battery check" to disable this
+        behaviour).
+UPDATED Gallery icon now right-most.
+UPDATED On-screen icons now have transparent icons.
+UPDATED Improved look of popup-menu: headings are larger and in bold.
+UPDATED New material design icon for exposure.
+UPDATED On-screen ISO (for Camera2 API) displays red when auto-exposure routine
+        is scanning.
+UPDATED Manual exposure times longer than 1s now shown directly rather than as
+        a reciprocal.
+UPDATED Thicker lines for grid, auto-focus grid.
+UPDATED Battery icon shows red at 15% instead of 30%; also now flashes at 5% or
+        less.
+UPDATED On-screen icons now rotate with animation.
+UPDATED Reorganised some preference settings.
+UPDATED Set metadata tags for sound files beep.ogg and beep_hi.ogg.
+UPDATED Updated French translation (thanks to Eric Lassauge).
+UPDATED Updated Slovenian translation (thanks to Peter Klofutar).
+UPDATED Show compass direction option now defaults to off.
+
+Version 1.33 (2016/08/29)
+
+FIXED   Hang when starting with Camera2 API on Android N.
+FIXED   Problems with popup menu on some languages and screen sizes.
+FIXED   Problems with Camera2 and flash: no longer does excessive flash; also
+        fixed problem where with flash auto, flash would sometimes misfire
+        resulting in too dark photos.
+FIXED   Problem with Pause Preview option: if a photo with RAW was saved, then
+        another photo without RAW saved and deleted, the previous RAW was also
+        deleted.
+ADDED   New photo mode HDR. Only supported for devices with Camera2 API (and
+        Camera2 must be enabled).
+ADDED   Support for manual ISO for old camera API on Asus Zenphone 2 Z00A and
+        Z008 (thanks to Flávio Keglevich).
+ADDED   Portuguese translation.
+ADDED   Slovenian translation (thanks to Peter Klofutar).
+UPDATED Folder history (via Gallery long click) now supported when using Storage
+        Access Framework.
+UPDATED When switching from auto to manual ISO mode in Camera2 API, exposure
+        time now defaults to the current (auto) exposure time.
+UPDATED Updated Russian translation (thanks to Dmitry Vahnin aka JSBmanD).
+
+Version 1.32.1 (2016/07/24)
+
+FIXED   Crash on some devices when taking photos with RAW enabled.
+
+Version 1.32 (2016/07/19)
+
+FIXED   Don't repeatedly focus when volume key held down (for "focus" volume
+        key option). Hopefully this may help for devices with physical focus
+        buttons.
+FIXED   Border to indicate photo is being taken no longer remains for "Pause
+        after taking photo" option.
+FIXED   Now plays sound for start/stop video recording (if shutter sound
+        enabled) for Camera2 API.
+ADDED   Support for RAW (DNG) files. Only available in Camera2 mode, and if
+        your device supports it. Note that most Gallery apps don't recognise
+        DNG files, instead use specialised applications like Google Snapseed
+        and Adobe Photoshop Lightroom, or transfer to a PC.
+UPDATED If volume keys are set to "focus", then holding down both volume keys
+        will take a photo (this makes your volume keys behave more like a
+        physical camera button - hold down one key to focus, then both to take
+        a photo).
+UPDATED Allow one more photo to be processed in background at a time.
+UPDATED Improved timing of shutter sound for Camera2 API.
+UPDATED Enabling auto-stabilise now shows an info dialog. Select "Don't show
+        again" to prevent the dialog from showing again.
+UPDATED Thumbnail for most recent photo/video now restricted to Open Camera's
+        save folder when using Storage Access Framework (this was already the
+        case when not using Storage Access Framework).
+UPDATED 5s and 10s options for maximum video duration.
+
+Version 1.31 (2016/06/19)
+
+FIXED   Crash on Camera2 API (checkArgumentNonnegative).
+FIXED   Photo filename's date/time and date/time used for photostamp now match
+        better with time the photo was actually taken, rather than when the
+        image was saved.
+FIXED   Improved Take Photo widget behaviour so it focuses in continuous focus
+        mode.
+ADDED   New option for filenames to be based on UTC (Zulu) time (thanks to
+        David Pletcher).
+UPDATED Photos now processed in background thread, for smoother operation (if
+        this causes problems, you can disable it under Settings/Photo settings/
+        Use background thread).
+UPDATED Touching the screen with continuous focus mode in photo mode now causes
+        an autofocus.
+UPDATED Gallery button shows animation effect to indicate saving images.
+UPDATED Photo/video info toast now indicates if auto-stabilise option is on.
+UPDATED Don't display pointless toast for front/back camera any more.
+UPDATED Updated Russian translation (thanks to Grigorii Chirkov).
+
+Version 1.30.1 (2016/06/01)
+
+FIXED   Crash in 1.29 on some devices on startup.
+
+Version 1.30 (2016/05/29)
+
+FIXED   Crash in some cases when failing to save photo.
+FIXED   Crash in some cases when opening settings.
+FIXED   Focus/metering regions and face detection now works properly with
+        Camera2 when zoomed.
+FIXED   Audio control icon still showed if audio control was disabled, when
+        previously set to "loud noise" option.
+UPDATED Popup menu now uses the more standard three dots icon.
+UPDATED Popup menu also shows indicator for flash being off.
+UPDATED Show zoom seekbar closer to the edge of screen if -/+ controls not
+        enabled; always show manual focus distance seekbar closer to the edge.
+
+Version 1.29 (2016/05/06)
+
+FIXED   Still show Camera2 API option for Android N's LEVEL_3 hardware level.
+FIXED   Problems with flash mode on Camera2 API - now seems to be working
+        better on my Nexus 6 at least...
+FIXED   Sluggish zooming with Camera2 API.
+FIXED   With Camera2 API, if starting up with flash auto or on, and in focus
+        modes other than auto, macro or locked, then flash wouldn't fire.
+FIXED   Problems on some devices where we claimed failed to take photo, even
+        though it had succeeded (some Samsung devices call the autofocus
+        callback more than once).
+FIXED   Video recording icon would get stuck on red ("playing") if failed to
+        save video file.
+FIXED   Too long string for 3x3 grid for French translation on popup menu.
+ADDED   Proper support for continuous focus mode for photo mode.
+ADDED   Graphical border effect when taking a photo (if you don't like this,
+        you can disable it under Settings/On screen GUI/"Show border when
+        taking photo").
+ADDED   Animation effect for auto-focus square.
+UPDATED Improved on-screen graphics (line drawings now have proper
+        density-independent thickness rather than being hairline).
+UPDATED On-screen ISO display (for Camera2) simplified and moved to top of
+        screen.
+UPDATED Use Switches instead of Checkboxes in Settings where appropriate.
+UPDATED Content description for "switch photo/video mode" button now updates to
+        indicate whether it will switch to photo or video mode. Content
+        description for "switch camera" button now updates to indicate whether
+        it will switch to the front or back camera.
+UPDATED Improved content descriptions for buttons on popup menu (focus/flash
+        buttons now say which mode they are).
+UPDATED Disabled flash options for manual ISO mode in Camera2 API (doesn't work
+        properly; plus flash is less useful when setting ISO manually anyway).
+UPDATED Switching to ISO auto and back to manual now resets the exposure time
+        to default.
+UPDATED Debug info now includes device language setting.
+UPDATED Default to FullHD rather than higher video resolutions, if possible.
+
+Version 1.28.1 (2016/03/30)
+
+FIXED   Crash when going to "Photo settings" on devices with Czech language
+        (bug introduced in 1.28).
+
+Version 1.28 (2016/03/25)
+
+FIXED   Broadcast File-based Uri when using Storage Access Framework, instead
+        of SAF Uri (this fixes Owncloud crashing for auto-upload photos/videos
+        option; this might also fix problems with photos/videos not showing up
+        on SD card until after a reboot?)
+FIXED   Unable to get location from network providers on some pre-Android 6
+        devices (e.g., Galaxy Nexus, Nexus 7) since v1.27 due to Android OS
+        bug, have worked around this.
+FIXED   If using Storage Access Framework, and save location was on external SD
+        card, the on-screen free memory would show internal storage. (Note that
+        with this fix, it may be that the free memory is not shown for external
+        SD cards, but that's better than showing an incorrect value.)
+FIXED   Toast for hitting max filesize on video recording wasn't shown for very
+        long.
+FIXED   Was incorrectly showing "n repeats to go" toast even if it wasn't going
+        to repeat (e.g., due to user stopping video recording).
+FIXED   Use unicode character for degrees symbol (fixes warning in Android
+        Studio).
+FIXED   Corrected content description of take photo icon for when switched to
+        video mode (for accessibility).
+FIXED   Don't show thumbnails of earlier photos/videos when device is locked.
+FIXED   Fixed various FindBugs errors.
+ADDED   New options to take photo/record video by making a noise, or responding
+        to the voice command "cheese".
+ADDED   Option to control maximum filesize of videos.
+ADDED   New option to disable performing auto-focus on startup (disable this if
+        you have the bug where flash turns on on startup).
+ADDED   Belarusian translation (thanks to Zmicer Turok).
+ADDED   Czech translation (thanks to Jaroslav Svoboda).
+ADDED   Japanese translation (thanks to Mitsuse).
+ADDED   Turkish translation (thanks to Serdar Erkoc).
+UPDATED Video now automatically restarts (rather than stopping) if the device's
+        maximum filesize (typically ~2GB), or the user specified limit, is hit.
+        (Option also added to not restart, if you prefer.)
+UPDATED Don't show info toasts so often - only if an unusual non-default
+        setting is set, or when switching between photo and video modes.
+UPDATED Improved quality of take photo/video icons.
+UPDATED Improved look of focus indicator.
+UPDATED Display countdown timer as hours:minutes:seconds when 60 seconds or
+        more.
+UPDATED No longer show pointless toast for stopping video recording.
+UPDATED Display if face detection is turned on in info toast.
+UPDATED Level line now has a crossbar to indicate the vertical angle better.
+UPDATED Reorganised the Settings ("Lock screen when recording video" moved to
+        "Video settings", "Rotate preview" moved to "On screen GUI").
+UPDATED Minor performance improvement to startup time.
+UPDATED Condense popup view by combining some titles into the buttons.
+UPDATED Updated Chinese Simplified translation (thanks to Michael Lu).
+
+Version 1.27 (2015/10/25)
+
+FIXED   Crash on Android 6 when accessing popup menu.
+FIXED   Crashes on Android 6 if users have denied permissions (currently Open
+        Camera won't request permissions at runtime, and please don't expect
+        well-defined behaviour if you've blocked a permission, this is a quick
+        fix to prevent crashes).
+FIXED   Problems when holding down volume/focus/camera key to focus.
+FIXED   Camera2 API photos sometimes taken before focused; autofocus square
+        sometimes turned green immediately before focus actually succeeded.
+FIXED   Take photo icon would momentarily switch to video icon when going to
+        settings.
+FIXED   Video bitrates now correctly documented as bps rather than bits.
+ADDED   3 second option for timer/repeat mode.
+ADDED   German translation (thanks to Ronny Steiner and Sebastian Ahlborn).
+UPDATED Improved selfie stick button support (thanks to Lau Keat Hwa).
+UPDATED Improved icons for app, and take photo (thanks to Adam Lapinski).
+UPDATED Updated French translation (thanks to Olivier Seiler).
+UPDATED Updated Russian translation (thanks to Vitamin).
+
+Version 1.26 (2015/07/06)
+
+FIXED   Crashes on some devices if camera couldn't be opened.
+FIXED   Crash in Camera2 API to do with focusing.
+FIXED   Crash in Camera2 API when camera not available.
+FIXED   Crash if pressing volume keys to change exposure when camera couldn't
+        be opened.
+FIXED   Taking photo would sometimes hang if flash on, and taking a photo
+        whilst already focusing.
+FIXED   "Show thumbnail animation" preference was ignored since v1.24.
+FIXED   Saving of Exif tags (including GPS compass direction) now handled
+        properly when Open Camera is called via another app to take a photo
+        (note that some 3rd party apps may still override the Exif tags).
+FIXED   Was sometimes displaying "0" for timer countdown and playing a beep
+        when taking a photo on timer.
+FIXED   If a photo was taken before startup autofocus completed, flash was
+        incorrectly turned off.
+FIXED   If torch is on, don't turn it off and on when Open Camera is starting
+        up.
+FIXED   Now switches to photo mode automatically when called from a photo
+        intent (ACTION_IMAGE_CAPTURE, ACTION_IMAGE_CAPTURE_SECURE,
+        INTENT_ACTION_STILL_IMAGE_CAMERA,
+        INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE).
+FIXED   Reenable exposure compensation when using non-default ISO for original
+        Camera API.
+ADDED   Support for Android 5's Storage Access Framework. This should allow
+        saving to SD cards, but has to be enabled under Settings/More camera
+        controls/"Use Storage Access Framework".
+ADDED   New option "Touch to capture" - take photos by touching or
+        double-tapping anywhere on the preview.
+ADDED   New option to allow disabling "toast" messages.
+ADDED   New grids: 3x3 Phi, Crosshair, Golden (Fibonacci) Spirals, Golden
+        Triangles, Diagonals.
+ADDED   New crop guides: 1:1, 5:4, 7:5.
+ADDED   Options for date, time and GPS formats (including 12/24 hour choice),
+        and font color, for date and time stamps.
+ADDED   Option for highlight color to use for angle display and level line,
+        when camera is nearly level.
+ADDED   Option for mono or stereo audio when recording video (stereo only
+        supported on some devices).
+ADDED   Support for INTENT_ACTION_VIDEO_CAMERA intent (so Open Camera can now
+        be launched in video using OK Google's "record a video") and
+        ACTION_VIDEO_CAPTURE.
+ADDED   Support for ACTION_IMAGE_CAPTURE_SECURE intent.
+ADDED   Optional voice countdown for timer.
+ADDED   Info toast now shows if timer and/or burst mode enabled.
+ADDED   More repeat mode intervals, now up to 2 hours.
+ADDED   New "Text style" option to render "stamp" text without a shadow
+        background effect.
+ADDED   Azerbaijani translation (thanks to Eldost).
+ADDED   Brazilian translation (thanks to Kaio Duarte).
+ADDED   Chinese Traditional translation (thanks to You-Cheng Hsieh).
+ADDED   French translation (thanks to Olivier Seiler).
+ADDED   Korean translation (thanks to Halcyonest).
+UPDATED Choice of grid now available on the popup menu.
+UPDATED Focus mode remembered for photo mode even when switching to video mode
+        and back.
+UPDATED Improved timer beep sound; also plays a higher pitch sound for one
+        second to go.
+UPDATED Record video button now turns red when recording video.
+UPDATED Popup menu now shows symbol for red eye mode.
+UPDATED Make icons transparent again, when using Material Design theme.
+UPDATED Made popup menu slightly more transparent.
+UPDATED Slight tweak to colours of take photo/video icons.
+UPDATED "Timer beep" preference moved to "More camera controls..." page.
+UPDATED Updated Chinese Simplified translation (thanks to Michael Lu).
+UPDATED Updated Russian translation (thanks to Vitamin).
+UPDATED Updated Spanish translation (thanks to Mario Sanoguera).
+
+Version 1.25 (2015/04/23)
+
+FIXED   Crash when clicking on settings if camera couldn't be opened.
+FIXED   Various other crashes.
+
+Version 1.24 (2015/04/18)
+
+FIXED   Problem on Nexus 6 where torch didn't turn off until going to flash
+        off mode (this came back again - possibly due to Android 5.1 update!).
+FIXED   Problem that a toast wouldn't clear if new toasts were repeatedly
+        created.
+FIXED   Toast for changing exposure compensation is no longer on top of the -/+
+        buttons.
+FIXED   Some UI controls had missing contentDescription attribute (needed for
+        accessibility).
+FIXED   Corrected Italian translation for "Save location" setting.
+FIXED   Avoid slowdown when repeatedly pressing switch camera or switch video
+        buttons.
+ADDED   New option to enable Android 5's Camera2 API! Currently experimental.
+        Restricted to only some devices (e.g., Nexus 6).
+ADDED   True manual focus mode, which allows setting the focus distance with a
+        slider (only if Camera2 API is enabled).
+ADDED   Option to display current ISO on screen (only if Camera2 API is
+        enabled).
+ADDED   If using non-auto ISO, a specific ISO value and exposure time can now
+        be selected via sliders by clicking the exposure compensation button
+        (only if Camera2 API is enabled).
+ADDED   Chinese Simplified translation (thanks to Michael Lu).
+UPDATED Focus Manual mode renamed to Focus Locked; also it no longer refocuses
+        when switching to this mode.
+UPDATED Improved look of on-screen level bar - now easier to see against white
+        background; it also becomes a double bar as well as turning green when
+        nearly level (for accessibility).
+UPDATED Angle display on-screen is now underlined as well as turning green when
+        nearly level (for accessibility).
+UPDATED Level angle no longer flips between "-0.0" and "0.0" when near zero
+        angle.
+UPDATED Option to reset settings now restarts app.
+
+Version 1.23 (2015/02/16)
+
+FIXED   Some devices (Nexus 5, Nexus 6) had dark preview/photos in low light,
+        and frame rate not as smooth as it could be in good light.
+FIXED   Problem on some devices (including Nexus 5 and Nexus 6) where torch
+        didn't turn off until going to flash off mode (have actually fixed it
+        this time!).
+FIXED   Crash on some Sony Xperia devices when in video mode.
+FIXED   Fix for devices where there is no write access for DCIM (thanks to
+        https://sourceforge.net/u/olevs ).
+FIXED   Wasn't displaying free memory when first run, if OpenCamera folder
+        didn't already exist.
+FIXED   If GPS lost, old possibly out of date position was still being used.
+FIXED   Gallery now shows/goes to latest image/video in the chosen Open Camera
+        save folder, rather than the latest image/video on the device. Also
+        fixed problem if there existed a file with a datestamp in the future -
+        these are no longer shown, even if located in the save folder.
+FIXED   Some devices displayed "ISO ISO100" etc on popup menu.
+FIXED   Pressing "No" to "Clear Save History" left screen as all black.
+FIXED   Don't create corrupted video file if video recording stopped too
+        quickly after starting (when no video data is received).
+FIXED   Font size used for date/geo stamping photos now scales sensibly with
+        photo resolution.
+FIXED   Thumbnail created after taking a photo didn't always reflect actual
+        image saved, if using auto-stabilise or date/geo stamp options.
+FIXED   Popup button wasn't being hidden in immersive mode, on devices with
+        flash.
+FIXED   Don't show "Force 4K UHD video" option if 4K mode is already supported
+        by the device in the standard video resolutions.
+ADDED   Italian translation (thanks to Valerio Bozzolan).
+ADDED   Options to customise photo and video filenames.
+ADDED   New option "Keep display on", can now disable the behaviour to keep the
+        screen on.
+ADDED   Burst mode control now available on popup menu.
+ADDED   A custom text can now be stamped onto photos.
+ADDED   New option to choose font size for date/geo/text stamping on photos.
+ADDED   Can now change video resolution from popup menu.
+ADDED   Crop guide 1.5 (3:2).
+ADDED   Audio source can now be set to "default" or "Optimised for voice".
+UPDATED GPS listener now prefers GPS provider to Network provider if both are
+        available.
+UPDATED GPS listener now sets min time of 1s, to improve battery usage.
+UPDATED Now uses Material Design theme style on Android 5.
+UPDATED Now uses dark theme for Settings (more consistent with rest of app;
+        better for not ruining night vision; sorry if you preferred the old
+        look, but Android doesn't seem to cope well with mixing themes in the
+        same Activity).
+UPDATED "Photo and video settings" preference screen now split up into "Photo
+        settings", "Video settings" and "Location settings".
+UPDATED Exposure level now displayed in units of "EV".
+UPDATED Toast now displays if focus, ISO, color effect or white balance modes
+        are non-default.
+UPDATED Popup menu now supports horizontal scrolling if there are too many
+        flash, focus or ISO buttons.
+UPDATED Popup menu photo resolution switcher now displays megapixels of each
+        resolution.
+UPDATED Switching resolutions from popup menu is now smoother.
+UPDATED Removed android.hardware.camera.autofocus as being a required feature
+        (this meant Open Camera didn't show up in Google Play on devices
+        without auto-focus camera).
+
+Version 1.22 (2015/01/04)
+
+FIXED   Crash if camera couldn't be opened, and GUI buttons were pressed.
+FIXED   Crash that could occur if camera closes whilst taking a photo.
+FIXED   Crash on Galaxy Nexus in rare circumstances when switching camera.
+FIXED   Pressing take photo button didn't cancel burst mode when not using
+        timer.
+FIXED   If failed to create video file when recording video, UI wasn't put back
+        into proper state.
+FIXED   EXIF tags DateTimeOriginal and DateTimeDigitized weren't being saved in
+        some cases (if using auto-stabilise, geotagging or stamp photo
+        options).
+ADDED   New immersive mode (requires Android 4.4 KitKat or higher), under
+        Settings/On screen GUI/Immersive mode. You can set the option to use
+        KitKat's immersive mode, either hiding the GUI, or even hiding
+        everything except the camera preview. Touch the preview or swipe in
+        from side to exit immersive mode. Also added an option to not even dim
+        the on-screen virtual navigation buttons.
+ADDED   Option to reset all settings to default.
+ADDED   Options to control whether time and/or battery status are displayed
+        on-screen.
+ADDED   Can now set burst mode to unlimited.
+ADDED   New option to only allow taking photo/video if GPS location data is
+        available.
+ADDED   Tagging photos with GPS direction is now a separate option ("Store
+        compass direction" rather than "Store location data"). If you are
+        upgrading and want photos to remain tagged with GPS direction, please
+        enable the new option.
+UPDATED Can now choose new save location from recent save folders picker.
+UPDATED Improved look of recent save folders picker.
+UPDATED Popup menu button now also shows indicator for torch.
+UPDATED If camera only supports one scene mode etc, no point offering the
+        option to the user.
+UPDATED "Take Photo" shortcut is now done as a widget rather than a shortcut
+        (if you previously had this shortcut on your homescreen, you may need
+        to re-add it as a widget).
+
+Version 1.21 (2014/11/15)
+
+FIXED   Broadcasts of Camera.ACTION_NEW_PICTURE, com.android.camera.NEW_PICTURE
+        and Camera.ACTION_NEW_VIDEO intents weren't supplying the Uri correctly
+        - auto-uploading for photos and videos with Owncloud should now work.
+FIXED   Corrected look of popup animation when in left-handed UI mode.
+FIXED   If called via an intent, and returning the bitmap via a parcel, the
+        bitmap wasn't being resized to a small size if auto-stabilise option
+        was enabled.
+ADDED   Option to stamp photos with date and time; and also GPS if location
+        data is enabled.
+ADDED   New application shortcut "Take Photo", which automatically takes a
+        photo after opening Open Camera.
+ADDED   Lock screen widgets, to allow calling Open Camera from the lock screen,
+        and taking a photo from the lock screen (requires Android 4.2).
+UPDATED Popup menu button now shows flash indicator (for flash auto or on).
+UPDATED Cleaned up settings - removed some options that are more easily
+        selected from the popup menu (ISO, white balance, scene mode, color
+        effect, auto-stabilize).
+UPDATED New material design icon.
+
+Version 1.20 (2014/09/21)
+
+FIXED   Crash (NumberFormatException in Parameters.getPreviewFpsRange()) on
+        startup for mb526.
+FIXED   Problems if settings window was opened while timer was active (timer is
+        now cancelled when going to settings).
+ADDED   New popup menu, allows quick access to changing: flash, focus, ISO,
+        white balance, scene mode, color effect, camera resolution, timer.
+UPDATED New icon (by Cosmin Saveanu).
+
+Version 1.19 (2014/09/08)
+
+FIXED   Crashes on startup due to invalid parameters being set.
+FIXED   Location info was lost when switching camera or changing scene mode.
+FIXED   Focus rectangle was shown after switching from video to photo, and
+        wouldn't disappear until a focus occurred.
+FIXED   Accents for Spanish translation.
+ADDED   New option to display crop guides.
+ADDED   Option to rotate preview 180 degrees (useful if using Open Camera with
+        adapters that invert the image).
+UPDATED Material Design icons and colours.
+UPDATED Improved look of toasts to match Android 4.4 look.
+UPDATED Made changing resolution on the popup menu smoother - now possible to
+        quickly go through the resolutions without a pause every time.
+UPDATED Removed some pointless toasts.
+
+Version 1.18 (2014/08/28)
+
+FIXED   Problem on some devices (including Nexus 5) where torch didn't turn off
+        until going to flash off mode.
+FIXED   Problem on some devices (e.g., Galaxy S5) if in video mode, and focus
+        mode is not continuous, and user went to settings and back, then tried
+        to record - video would hang.
+ADDED   Spanish translation (thanks to Mario Sanoguera).
+UPDATED If camera can't be opened, touching the screen now tries to reopen the
+        camera.
+UPDATED Allow installation of app onto external storage.
+
+Version 1.17 (2014/08/24)
+
+FIXED   Crash during auto-stabilise if unable to rotate bitmap (out of memory?)
+        now instead reports being unable to auto-stabilise.
+FIXED   Crash if failed to start camera preview.
+FIXED   Crash when changing flash mode, if camera was lost.
+FIXED   Problem where photos were being taken out of focus on some devices (bug
+        introduced in v1.16).
+FIXED   "Save location" option didn't work if folder didn't exist (this also
+        meant it didn't work when the app is first installed, unless a
+        photo/video was taken first) (bug introduced in v1.16 with the new file
+        chooser dialog).
+ADDED   New options for volume keys: focus, and switch auto-stabilise on/off.
+UPDATED Allow changing flash mode while recording video (so torch can be
+        switched on and off).
+UPDATED Zoom -/+ controls are now disabled by default, to reduce clutter (for
+        those upgrading, you can change this in Settings/On screen GUI).
+UPDATED Use more subtle/natural colors for red/green/blue in UI; improve look
+        of take photo/video icon.
+
+Version 1.16 (2014/08/17)
+
+FIXED   Device freeze when recording video on some Samsung devices (e.g.,
+        Galaxy S2, and some Galaxy S3 variants).
+FIXED   Fail to take photo if in manual focus mode, and picture was taken
+        whilst focusing.
+FIXED   Changing left/right handedness of UI didn't update until app
+        paused/resumed.
+FIXED   Problems with left-handed UI when going to settings and back, icons
+        would shift to incorrect positions.
+FIXED   When geotagging was enabled, this didn't take effect until the app was
+        paused and resumed (or restarted).
+FIXED   If zoomed in, then switch camera or app paused, the camera preview
+        would reset to being unzoomed on some devices (e.g., Nexus 7).
+UPDATED Save folder location is now chosen via a GUI, rather than having to
+        type the path.
+UPDATED If exposure is non-zero, the exposure is displayed on the photo/video
+        toast.
+UPDATED Photo/video toast now displays scene mode if not auto.
+UPDATED Video toast now displays if audio recording is disabled.
+UPDATED Photo/video toast now displays for longer.
+UPDATED Improved behaviour of left-handed UI, to make it more consistent with
+        the behaviour of the right-handed UI.
+UPDATED Minor improvements to alignment of on-screen text.
+UPDATED New icon for switching between photo and video mode.
+UPDATED Virtual buttons now dimmed.
+ADDED   Support for video stabilization.
+ADDED   Option to disable showing the zoom slider control (Settings/
+        On screen GUI.../Show zoom slider control).
+ADDED   Russian translation (thanks to maksnogin).
+
+Version 1.15 (2014/08/02)
+
+FIXED   Crash when exiting settings, if camera wasn't opened (bug introduced in
+        v1.14).
+FIXED   More crashes due to camera drivers that aren't following Android API
+        specs (Parameters.getFocusMode() should always be non-null, but isn't
+        on some devices!)
+FIXED   If video failed due to error (e.g., hitting device max filesize,
+        running out of space, or other errors), Open Camera now stops properly
+        rather than thinking the video is still recording. Last video error is
+        also stored in the about/debug window.
+ADDED   New option to lock orientation to portrait or landscape.
+ADDED   New option Flash while recording video.
+UPDATED Touch to set focus/metering now works while recording video.
+
+Version 1.14 (2014/07/22)
+
+FIXED   Crash when clicking to switch between photo/video modes if camera
+        couldn't be opened.
+FIXED   Problem with face detection not resuming after focusing on mtk6589.
+FIXED   Workaround for aspect ratio bug introduced in Android 4.4.3:
+        http://code.google.com/p/android/issues/detail?id=70830
+        Problem that this caused with aspect ratio with video recording.
+FIXED   Open Camera now available as a choice when camera icon pressed from
+        Gallery app (Open Camera now responds to
+        android.media.action.STILL_IMAGE_CAMERA).
+FIXED   Open Camera now available as a choice when camera icon pressed from
+        Cover Lock Screen (beta) app (Open Camera now responds to
+        android.media.action.STILL_IMAGE_CAMERA_SECURE).
+FIXED   All available video resolutions are now supported.
+ADDED   New "manual" focus mode - focusing happens when you touch the screen,
+        but it doesn't do automatic focusing when taking a photo.
+ADDED   Support for more hardware buttons: camera button to take photo/video;
+        focus button; zoom in/out buttons.
+ADDED   Long press on gallery icon now shows a popup of recent save folders (if
+        more than one is available), allowing you to quickly change between
+        them.
+ADDED   Location data now stored in videos too, if geotagging option is enabled
+        (only for MPEG4 and 3GPP video formats).
+ADDED   Option for volume keys to do nothing (not even changing the device
+        volume).
+ADDED   Option to lock screen when recording video (swipe to unlock).
+ADDED   Option to limit duration of recording video (automatically stops after
+        specified time); also option to restart video a specified number of
+        times.
+UPDATED Significantly improved speed for opening and closing settings (now
+        launched as a Fragment rather than a separate Activity).
+UPDATED Open Camera now remains active rather than being blocked by a "screen
+        lock" (face/PIN unlock still required to go to Gallery or Settings).
+        This behaviour can be switched off by going to Settings/More Camera
+        Controls/Show camera when locked.
+UPDATED Broadcast Camera.ACTION_NEW_PICTURE and com.android.camera.NEW_PICTURE
+        intents for new photos, and Camera.ACTION_NEW_VIDEO for new videos.
+UPDATED WYSIWYG mode is now the default setting for the preview size.
+UPDATED Expanded number of devices which show the "Force 4K UHD video
+        (experimental)" option (remember, 4K isn't officially supported by
+        Android API and this option is experimental - if this option shows, it
+        doesn't necessarily mean it will work on your device).
+UPDATED Video time is now shown with smaller font and off-centre, so as to not
+        obscure the view so much.
+
+Version 1.13 (2014/05/24)
+
+FIXED   Crash when opening settings on devices that didn't support
+        auto-stabilise (bug introduced in v1.10).
+FIXED   Crash introduced in v1.10 to do with cancelling autofocus on some
+        devices.
+ADDED   Options for video bitrate and frame rate. Note that both of these are
+        approximate settings, and whether they can be achieved may depend on
+        your device, and other conditions such as lighting. Also note that
+        setting non-default values for bitrate or frame rate may cause video
+        recording to fail, if the values are not supported.
+UPDATED About window now shows device manufacturer and model, to help with
+        debugging.
+
+Version 1.12 (2014/05/19)
+
+UPDATED Improve support for ISO setting (didn't show up on some devices, e.g.,
+        some Galaxy S5 variants).
+
+Version 1.11 (2014/05/17)
+
+ADDED   New GUI icon to set exposure lock.
+ADDED   New option in settings to set ISO.
+ADDED   Displays photo and video resolution etc, when starting up, switching
+        between photo/video, and switching camera.
+ADDED   About window now displays all camera parameters.
+UPDATED Don't display switch camera icon if device only has 1 camera.
+UPDATED Always reset to continuous focus mode when starting camera in video
+        mode (fixes some problems on some devices).
+
+Version 1.10 (2014/05/07)
+
+FIXED   Preview wouldn't restart after taking photo with continuous focus, on
+        Galaxy Nexus.
+FIXED   Problems with aspect ratio when not using WYSIWYG preview mode and
+        recording video.
+FIXED   Camcorder profile was always being initialised from the back camera,
+        even when recording video with the front camera.
+ADDED   New (experimental!) option to support 4K UHD (3840x2160) video
+        (Settings->Photo and video settings->Force 4K UHD video). Note that 4K
+        video isn't properly supported by Android API, so this option may show
+        even if it isn't supported on your device, and may not work or even
+        crash. I've successfully tested this on a Samsung Galaxy S5 and Note 3.
+ADDED   Option to not display the -/+ zoom control.
+UPDATED All available video resolutions offered by the camera are now supported.
+UPDATED Picture and video resolution preferences display aspect ratio and
+        megapixels of each resolution.
+UPDATED Reorganised preferences into new On screen GUI page.
+UPDATED Don't change camera settings whilst camera is autofocusing (may help
+        problems/crashes on some devices).
+
+Version 1.9 (2014/03/22)
+
+FIXED   Crash on some devices when starting app or switching camera (bug
+        introduced in v1.8).
+
+Version 1.8 (2014/03/18)
+
+FIXED   Crash on startup if Network or GPS location providers not available,
+        and geotagging was enabled.
+FIXED   Crash if specified save folder was an empty string.
+FIXED   Don't ever turn on flash during autofocus when app is launched.
+FIXED   Various other crashes.
+ADDED   Option to not force screen display to maximum brightness.
+ADDED   Option to display horizontal "level" line.
+ADDED   Support for hardware menu button (now opens settings).
+ADDED   Option to display a 4x2 grid (if you previously had a 3x3 "rule of
+        thirds" grid, you'll have to reenable it under the Settings).
+ADDED   Added privacy policy (for location permission/geotagging) to intro
+        window text (needed for Nokia Store).
+ADDED   Uses setRecordingHint, may improve performance of starting video
+        recording.
+ADDED   New About option in Settings, providing debug info.
+UPDATED Don't re-autofocus before taking a photo, if camera recently
+        successfully focused due to user touching the screen.
+UPDATED Display yellow or green dot next to earth icon to indicate location
+        accuracy.
+UPDATED Display earth icon with red dash through it, if geotagging is enabled,
+        but the app doesn't have a location.
+UPDATED Current zoom is now saved when app goes idle, or switching cameras.
+UPDATED Offset zoom slider slightly so as to not interfere with Google Now
+        swipe.
+UPDATED Allow greater range of characters (including unicode) for save location
+        (now allows any character other than those reserved by filesystem).
+
+Version 1.7 (2014/01/29)
+
+FIXED   More fixes for aspect ratio - the preview display should now always
+        have a 1:1 aspect ratio (on some devices this may mean black bars are
+        shown, if there isn't a match between the camera's available preview
+        sizes, and the aspect ratio of the device's display).
+FIXED   Possible crash relating to creating thumbnails.
+FIXED   Autofocus on startup didn't always actually focus.
+FIXED   If camera doesn't support focus areas, but does support metering areas,
+        still set the metering area.
+FIXED   Was sometimes trying to set metering areas when metering areas not
+        supported by device.
+FIXED   If image is deleted after taking the photo, the thumbnail is now
+        properly updated to what is now the most recent photo or video.
+ADDED   New option to set preview aspect ratio to match the picture/video
+        aspect ratio (WYSIWYG).
+UPDATED Save folder can now be an absolute path, allowing possibility to save
+        on external SD cards (though you need to know what the path is, which
+        typically varies depending on device; I am unable to test this, so
+        please let me know if it does or doesn't work).
+UPDATED Zoom -/+ control now matches the zoom slider orientation.
+UPDATED Hide some icons when taking video, as they don't do anything.
+
+Version 1.6 (2014/01/20)
+
+FIXED   Crash when trying to access image or video resolutions in settings, if
+        camera didn't offer these settings.
+FIXED   Exposure compensation wasn't available on devices if min or max
+        exposure compensation level was equal to 0.
+FIXED   Aspect ratio problems with the preview on some devices (if you are
+        still having problems, please let me know what Android device you are
+        using).
+FIXED   Aspect ratio problems with the preview on all devices when switching
+        the camera.
+FIXED   Problem on smaller devices where on-screen text overlapped with take
+        photo button; the text is now aligned to above the button on all
+        devices.
+ADDED   Zoom can now be also controlled via on-screen slider (next to the
+        plus/minus zoom buttons).
+ADDED   Option for volume keys to control the exposure compensation.
+ADDED   Option to display compass direction of camera on-screen (defaults to
+        on, disable it in options if you don't like it).
+ADDED   Option to choose microphone for recording audio (support for external
+        microphones).
+UPDATED Exposure compensation is now set via a new button in the on-screen GUI
+        (the black and white +/- symbol) instead of the settings. Clicking this
+        will bring up a slider and plus/minus buttons to adjust the exposure
+        compensation. To get rid of the slider and buttons, either click the
+        Exposure button again, or click elsewhere on the screen.
+UPDATED Geotagging now stores image compass direction (GPSImgDirection,
+        GPSImgDirectionRef).
+UPDATED Display degree symbol for displayed on-screen angles.
+UPDATED Zoom control is now transparent.
+UPDATED Filter applied to sensor for device angle.
+
+Version 1.5 (2014/01/09)
+
+FIXED   Crash on Android 4.4 when launching for first time, or changing the
+        save folder (issue with creating the save folder, due to Android no
+        longer allowing ACTION_MEDIA_MOUNTED to be broadcast).
+FIXED   Crash if failed to open camera after switching cameras, then user tried
+        to zoom.
+FIXED   Other potential crashes (NumberFormatException on "es209ra" on startup;
+        RuntimeException on Xperia Go when taking photo; RuntimeException on
+        "maxx_ax5" when taking photo with face detection).
+
+Version 1.4 (2013/12/16)
+
+FIXED   Calculation for focus areas wasn't right for front facing cameras.
+FIXED   Exif data wasn't getting saved if auto-stabilise option was enabled
+        (this also meant that on some cameras/focus modes, the orientation
+        would have been incorrect).
+FIXED   "Toast" pop-up messages looked poor on Android 4.4.
+FIXED   Fixed potential crash if taking picture fails.
+FIXED   Touch to focus with continuous focus shouldn't show red box.
+FIXED   Crash with auto-stabilise for some angles larger than 90 degrees.
+FIXED   Crash when rotating device when viewing "Photo and video settings".
+FIXED   If device was rotated when app was idle, the UI sometimes showed with
+        the incorrect orientation.
+ADDED   Images now tagged with current location (optional, off by default). Note
+        that Open Camera now requires Location permission, for this feature.
+ADDED   Option for face detection.
+ADDED   Touch to select focus area also now sets the metering area (used to
+        determine exposure).
+ADDED   Now displays current time.
+ADDED   Option to display a 3x3 grid ("rule of thirds").
+ADDED   Now displays flashy thumbnail animation when taking a photo (you can
+        disable this in the options under "More camera controls..." if you
+        don't like this sort of thing!)
+UPDATED Gallery button now displays thumbnail of last image/video taken.
+UPDATED Clicking the Gallery button now goes to most recent image/video.
+UPDATED Made it easier to see on-screen text when underlying photo preview is
+        bright, by drawing a background with the text.
+
+Version 1.3 (2013/11/18)
+
+FIXED   Video files may not have shown up properly in other apps, or over USB,
+        until rebooting device.
+FIXED   Make sure filenames for images/videos are unique.
+ADDED   New burst mode option - take a repeated set of photos at once, or with
+        a delay.
+ADDED   Option for video resolution.
+ADDED   Display battery status on screen.
+UPDATED Reorganised settings screen to remove clutter - less commonly used
+        options have been moved to sub-screens; "Record audio?" is moved to
+        "Photo and video settings..."
+UPDATED Only show zoom level (if that option is set) when actually zoomed in.
+
+Version 1.2 (2013/11/09)
+
+FIXED   Crash when launching gallery, if no Gallery app installed on device.
+FIXED   Selecting a focus area shouldn't switch to focus mode auto.
+FIXED   Focus area wasn't getting reset when it should (e.g., changing focus
+        mode or zooming).
+FIXED   Fixed potential crash on auto focus.
+FIXED   Hide GUI options while taking a photo (otherwise problems can be caused
+        by changing options, e.g., changing focus mode).
+FIXED   Message for failing to open camera was too wide for portrait view.
+ADDED   Option for exposure compensation.
+ADDED   Show whether auto focus was successful or not (via red/green rectangle)
+        even if focus area not selected.
+UPDATED Pause after taking photo now defaults to false.
+UPDATED Made "toasts" look nicer.
+UPDATED Added simple instructions to the intro window shown on first time
+        start-up.
+
+Version 1.1 (2013/10/27)
+
+ADDED  Touch to select focus area.
+ADDED  Optional beep on timer countdown.
+ADDED  Option on whether to display the current camera zoom level on screen.
+
+Version 1.0 (2013/10/17)
+
+First release.
+
+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/ic_exposure_white_48dp.png b/_docs/ic_exposure_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..92742effaa70e54485075f619943457d7e79ef7c GIT binary patch literal 969 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85o30K$!7fntTONP@=>&q9iy!t)x7$D3zfgF*C13FE6!3!9>qM z&$#E=^Z*71W-Ct@$B>G+w{v~_ZaIjs75@+3*uX2p5LVFaVl24Zqh->DCl+_#Q?i z{yfzBK6ip>!}e)mW^-C&{5;tfEQ$O3w0bXhUHhCqPqqir|GrvpJv8_H^ZAO)lP59$ zFn|5^_m+zlt;_!@ABjEjaz+Y6?Z5x_OYGzCdfHpwXXXmnvGLCodDazsR{ohX|12}l z{O)OIYM({vsn|blPriKIf2EYd^OkJQo%5o)Px{C9GlkrkQxm{&c%%uFtghKR@pI zC#JTm^ZcfVYfkyrG;E&*Nk%8R7L+^QEQtDnm{r-UW|QN66g literal 0 HcmV?d00001 diff --git a/_docs/ic_gps_fixed_white_48dp.png b/_docs/ic_gps_fixed_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..b525cf8228adc635d5f55cb4e739664318a1fc8c GIT binary patch literal 3823 zcmcgvcQhQ@)<4FK8Vn&OO7tF`5j9FCdN71UnJAYSy@e=|L876rH|O?WCm_vs=p^9v+=cZ3RDfml-~HbnP#A zTT1M74MucmI!YZ6K&2x3?e{lant#XO3>g1BnZJWb>q%eJ9%2@)_Er|e#i)c3_ePJ7 zj_!Z6Ep)*!r#)+k+_XKtc(Get<>8+CgAcn6&k$;_lsf39M~Pg5I552SL(5&GYq$eZ zX7r#v%!MSb>)%6Pe5e5k z;p0ohh4rW1cZ60;ul#unP*PGpma=t^ju&b%&nq3R^^7lXPhdgfmoICAu zB(VD0{F^2)DpY&4aI*E@tXG>DTW>O$Su@@NtY4mI!p_bj$M$RnE5yfNfCANECqii1 zuZo`Wag)Z$azmPK!a!(Cx6ys#kN!B;Le0+F{BmgBovTwdE59**^z)nbn3r8oPcgEi zR5(_;F_uUb{z-W$B)N{CcVqoY*snGVX#C+?o z@p$Cc&4zKeVh_mSx#ZgijkHmV={$1`%o$W zb7;4NjNKmPfz&bOy>b^;cXry_)TD(;GXw6UP4h?>Xt)0pGeZ36aaq*IVzZsz3-^SR zm@47wdK>d9kJZpML#Y%|0h)n2rJ7(-Uy`GCOmEFpKP=ha%P|5I$kc4LkKHVR=TQFnf@wzjq~>U0oh zaeZa%l=IwtSb<=PeO&^H-a&bBH)GyJb5-{gCIaN&|k>|2!3Ho0xY7|WB>R7}e zn+QFtSbNBd^6h42$s37$Q5a@6hHT?nM5)IJByu97C<|o45Ft{-UU?a~l|@KvFP{F? 
z{Xp|W)9Vyubd79b2_0TsLz=AbnB!C}tyb<8{pPY0A5X}Iblsesg@$Ng&(L6~uAX6HrxE zvQ7@p!iGJ&2{I-vXxW?flO?AueUu&juoViG!bqjD2DB>bg%>_vCc{!c`Vo!PGq|3D z&=*LwH&cR@{q?}a>bpK{j`)${_SGjXG5e-5?+wjBeFUC~{yBbQ898MAMHjx%^Lf{} z{S}-A91yf@?q3dAj+z*S?M}*5bSVr;s0tP(L~h^DK6Fj41opv{-`>WHyu6ee>e7yk zwM3*!yvzcZ;!sm+KG*^0+|4}KDUG05x$>{h=Af1_lx?%Ebv$|u*7dHR*MgRmMZ1nf zM!#FqNydF@8SzTWoEGpSAfR{@n7$F-UO zElaNGQ9AQSN*GJy%Evrl@4>kqfVSd-&nLU5B2{2FTF?Vmul!K18cpPdto$)$+)6NP zP>2rHo3?Kz9-N+@W*X;IF*2&tk*_dQD!WK+3O`5|z6sFec8sKWZ(_7p*Kt`gGfwFI z*x`+Z<=b?dtznlhrJY~r)_y-03RwB){1=ADBp?PK1RwzR$Sa@F7_@i-F{wC5>L4Nfc_V+K$DHZLXv|seV<3AwZnI2K%Fl0 zgFanEUSR9aP|)5^nob%_3Fs3>9vRHKP2K&n7emech9BDfC{N4(Q+D-9mzJ9D zzfJfilIikf8c$(a9~5rK1etR~az$>pH&$0yZ)cih16~77U~Q!K$83H*N?a9mf5O;K7TI-`z4>Rix~Fw@_t18bXenwk^><1c z4Oc>x@ri;!`#*93O?Qh{D|Sk8H7>1>L+(q!S456h%FYLoA}QZd`o2}^7;~o znvjr1jn8H~g20uSzA5E}QoKPMnJY6X!ufzQbKz21`0$!T)R!=UpT(Hq+%!I=*OySM zC7-iy;j3It7L{9U;_-bh`}a**9j1Nc?W*R}!fmI~n`<~`?JUi{Sw)BJ_0`EnGr z&RU{WO=r{-kJR4@@0Ve#DT5E@%A_G!6JkDYNAEABrlxLZ`aKoq1(nSMbCCqC3C+DQ z$yco#NEeS`dx&knpk~f190J? 
zJ*P#(#vH7~;BS|dlj3sCSN8GkoE#j?TZ6IG0DWzDd;97M-GHy{+h|k6sc~5pv)Y_$ znTBPtieAKS#nd-sX<~5`URV^zOlxr3Io0%Mrxy#)Pj4S9h=Ob*ii0Toc%Vzy+9E65 z%-zDi$HTyc45+HVNdJxs;x~C6vq0>vxi2yDYKe5lb8UDN8uj-5G-y-P}a zZREq8PHI7221bmcpj}@CwvvoQM4&TfEnjmQg0pVadOZ@SF^N+gu(=U% zIk35|=nesJ4=DB6tUm>dZi0#!Zpj~qzcMR>#blifsesRz&!VG)#`&D#%f3xY+6-hIo67S6qfI)$`WAtB!UyxDORY(ALLixq zZtLpu?+qXtZ`oxq%ZUM_T2;v!b~T3@fz`9pJ=;#(?w=yXA7-ZA{c@;2g_e2GX#-(JkAhPL)AkWTD809G640W?O^vr zvhVMLfgFY)i(>}Wu4SVhs1zf`DCWXE^fP%V)$gQBhm+D|BTxF})ecNG;a_nZ`JHbs zx>`z+@rOqoZftCH#iJO-;=PN=v8mc|QOElKfGFkv9f%qn2_l?%a*u`m8^ZlxfQK8G h|9=RO(|7meF=+ATE}eRZ@FC$*po_Y7qgK;4{9j|LRRMT^to6fN$hIFw?A;O_2TthiGs5?l%`TA;YQYvD`# z{{HjL+?zW$lRdlp?6Z6JOg3CiMIH~E5*rB#2~SZ$Mg#GT`+H-eBR;p~)>;q`WLFLO zmq=CP)VqimRBI_^DI}!YSeyq_G{ifWlY*Wr5)x$R?~NQ_jz*36lH5&J*G<#W%FV;n z#S+QM)Yid`)4|dW%*XkhlQ(KBRSgM=D@ajBO6#N1VK&xB;y1sJTglLnk)=psac&9Y z{j34+1DG+H!S{l^B8B5Vg}LK`Je;`L%=ewgF93BI)B}^PQR~b?do2KL<`;Aj3*4g%ysPfy-agyx;&-4_+6R)e~+kvQ8aMhhOZj%LZcN%e?K?#_&vbzxFIay)+~59DdTw6CF9ubFXOoC5APX? zJ-9IX1ew$na!M8SQ!9Xa4?3v+npnCW2ss{J5PWZaMHSd=p+bGqpKdHB^t69e8))PZ z^j$5tPtlUJ{9PGRq=YT-tNu~^I^}ixb+vcs*L0o;nR$Wt&D%RGhQDd=57~)3nn)s! 
zGI3--M;lE(;#J$U|7QDD$);#0v2uI;{v?9a(M$F_htZ^UyN+nW&L8KA@ql&dCTn|N zH}m1r-#FsO)c11`h2ZzW_T>p>0ql=OWlQD<2I|s+OcJ~9~KG3X^OnHV6}Q+P7ZkVrjKi zM2P#i9!B*&_UQ3$dCg{FEWJ#r2*{~Voo(-cbA0+SP<~nHvOd1uRf1e*Oym2P-~yC7 zTt)McM2xqpu+aviZNh9p#q`JJxa?Ni3uEs+Yr@0OMVPO~eTZ)*Xehsr9be8UzYp3CMtolp)mVhqSA%K!z%dXTP~Q8`a|~+ z3V>vgU1Rx>=6jd2?K(F#%)B5*IawwJArn`(k6XO@`VFFW*3<3>R@Y(htR-KMkJYPPdYiW_*r-nT zontw6D9x6PA1LZM&u&c-#cm5nHR7ygH#zg1ur52C&BX3sj zQvJiXX(;Ugx8?x;zzLKweq5w(X7}MDp@uMgK9lyBX%wlQkf*;POvn=nN`NVcK;o~w zJXpC|P;3_ceMbY!W9rY$Rq}H4+r&XRE6;bA+1IkE`R!uX6YV@B|DXd_BAoJ13=T}h zm?H^G#Be%;Gykf(4trd{QCX5WU-x!qpXTl;W9*5cThdrqY8a2hv(BN3+qpCYPsE6J zRUu>%f`hDzwoYgMCa7y0$ogRaZLykZ%?ZE5UE`n5vWX?__k!6n`Z<(%1tK%Fonq#{ zr58G8E7xoehEW2NF|@Q{e1-y^RpMW{eXXRM+i7BIMP>w}5ds-U@r9I{9p^KD3?gS= zSpD;|v=rd{KSd6JZDiWt8I59<`-34uGka+|Ug17x@Fy3a`qlO`@vXgQyoh-HXJImi zQ-PDy8B+Th5)>6+U&^`H%x`V{P;=VWQZ)s--PpZoC*cS^9)4;YVKSn`gW|7jAa^6o zi$?(BPeljp+P+xGxb|G+V&WgD)!{C6`zv!}9N8OsG%OLuf}6!hsY2Zqj`Gh&gshU=gy#@qo=w3>2|#srm-a)6uBnJZFmrh=vO?&TVeT_=6YEDk6>{Fcq597SgbDs8 zn}1a!Q2Wai#DxTaAwU)Ab=o0 zoc1r?7+Mh635BnKrGwn4)`5er7^lrf?B4dPCwFC5o-UBYl;uu z;e}YiezrQ&CEg8TDNeDUy6nNxk(%ZL0EFVQ+I9u}CI`>y>rRc@S>#@G=%fIQGZ=!=pX4IEO`G{uMiv#AYp!2EZ?AK;M6#~=J*t77ETd8`$Y#`)2<)y(f7P_0ec*c zK01V`9~T<^7Yl?xh*cD9|Gx>M{_q+G0)NT8k&v%MVqj8g3OipZ~LT!>PN2Cf1H;#69jz=Wof2`62 zz`SS3Hkv-U=<}IXm{BL7SryEc-w*aj@n%41vqHM1sf*c5*KDQEb=40&RzPVj6I<*`oId$J7IVFL)mrb~SV-5bESRJ<7&U{`u zoCqg!u27&~Au5xE!>HKf-q0xDU!vs%8GoPD>7bv9;p{y4=_h^n%bU3-pA_T2C?+Kh z=t}2A-s;Ghn&ub2&AX;MGSsjUV9J0e%ng6B!gc$8O|nDm zazCh!Mjf{HxfY8e+(v}sAGI|#XaaNp46)T=XQj%!(j7;THw6~yr7*0el7*v&Lo~pk z0f_p<84{>B22pvS8j~LkrK;?@{LGyuC-pIzhI4e5-G_Kza^SB7acjO#bDuk)pzGRd>jV#|<};X|C{NS*AW3u8i(Lb%1SOl?v+qHRE8BLH(}0Wo8*bw7E5%)Xc% zDUb$)^Jg~fE;Vd#J>aTD)vZ8QS1f8ZAd#JhZnRJpDpvo=_lDS}TeU9SPj%!?*&BKp zjP-;v5rrjqpuUVcLblkTAeJV9qB=H$nuxzNAjMql;N)s_SlaTS-(tOz;t;u?+U>Ym zFkVeUgN8{6A<}S212>nQDO@40$qol%k2C~uJH`^dYT9laE|g-T?2N9P043PiHib=t zzOaK>Lb~(=znNd zq(cBE>im=mUY3tlzP(shrB`<)lWruEaberA=ZMpTBn0##Djq9vr{!+#KCCsuR}DZH 
zJl%F3-00!)d*e2q2;IqS4o1=+l@uy&-XZ^RN*lb^e!zXbce~+N5ke`yXY=hJ|Re+UQZ7Z_hUUR{;7HEwAtIX4iuY4$G+trTLRGWn5mx zYia)gDCQ5RV9X_B$V{HMbKaTanpIOX>yY+DswwdA)Y9GKs02xx%AN+kuVX$x5FQ#) zCw*(eN;DVR92-ZhB`y0Z9J4$>>UB|Fyz;-EDYoQK#;aV0E1A|zZeq2@UlPLY{QSl` z!gL2+L+Ye*@dqT|7S@YVZhsDmYS(b7G8zGo`ch^1NY)&}iWv3tQF#;*?JFP|176M2 zviGDN8^z%0X_x@7O5{!%Wb#0ZutR9XW|& zD9VY9eKUo=mn(7|HGd0s&#!jwCP3Gwjs}%3^!qFToG~7)=y)?xGd?KEs0uR_e9mSJ z|DmNBl9NuS)nneOEe$<*Ths{R<$Ofq=1s9Y+ zBTMzBlE%Il;Rabnk^OCW6O!ci8Mi&+age8_>K^GRJ@j*36A}y9b-CZ0WGQ~W%m7S& zNy3Fc+c6m&yN*zev4|L2h>iS|cpVu;)=BKl<;$^V^cXV7HH*dN%Xp_M!CZ16$|C;f zF<`K8*6DbYPd~3z5l%e(5~9Y3njx(`-xJA$BdeMB1>}s_gA;xz=*Ig}b23BUPfOYC zzZ0}y9+`Fy+?H1XvSs1I)XdgWiAyce*YKU5Z;*WKYx3QeIaUM)lY8sJqo0i69an*2 z-hWENG9_P&GKHgEc(9Anyx1~0T}g86Yl_vBg+UwWrSfJpRpKP!S_~-p@}y!=Lz%Jf zK=sH{Yb)FSe|xYV{0nD`#y4NT2Ov$AS>qdwq0`(LF302j24?LDl_3 zguFF1v9q5jc+K|*P(MR)Kz7l>=_OnP$-K4%+^@kDw;v8~={4&*kX{>EPo4Yj6hIGS zM!7{Tz~vJh1KLQH9Ha*;<&`F*>8yYHc8cr*SeLTIF2dQldsgCiNloSkWu}V>Aq;y_ z+Fkefr@mlp$5G~?g@HZ?9^M=wjU1Wlq5)SGV_YlACarCV$&P3Y$N+iLia|r*@A!}6 z=5Exn@AbOr(K3_($^uw?FJxuXQ6u!7y5j~-J^7rgk9!4IXLTHs^f7h;ZQWw$eraQ9 z-@exL=yGFCT}F$>AFmR;^E~~C)0fq(68Dt_(efj z2-CVSI{z)k_H}d{Jx~>#4ohGMNYO|(0{8M6=PMDrTQ8?`ea!0+q96{K6QEH&)N*J+ zm=FLo&G)ig7m-kO=w@a&_IW(T*76%~;ZO z2VTEQpwCy)KYbLwsvmi!j$UAtHY#Z?J9VH1!sL$|7LT)DMI)5mFYRMwTr0v1nnP2h z8p0BLRjUPaBdAmiolehn)<{BUlSLw*eHSd3M}S`xiCQ6AH_kFF0{r~*!F4S?o5F0h zlfCv_vTCkYLWg}WljBo6gXUuWN>*5QGP{`qeY7fP0xSC2r1z1SPyABISr@;l6$~7M zJXF7u(7i@2j~1bq^oT5+?`>9$9Yl|1ZG5~}BXDoQ45`&PxH|`8C8Z zcTRo=*Iaq=U6s^Me1x<$r=V5-?y@EN-esqjB-bKQfki3LmEMZf6f~?%f0^dHSnJHd zGQr0YHyLCLD^Qo@LX8`MO!Tl5Hp1h=i0IW8UvW``p&6`#N{7QEoDcSR8O`sTnFJGY zS~Q7?9*dF4rrT|ReJnw8Lz4anml5`&P0~)ek zPTJQFiwug_D}Zxlw&7Gyqb9~=m?4r-SqKl1WIsf*EJX60jsR?0BuVNCneTqpAiAwF zc1KA5n%$LM0iRx=#Y1Lazzzpnm~J?Lq?Z#PXQSBRz9twF&2kS{%`bUHGU_LmHPFck z{QV+}ZE26^F+TnqQg^+-CsYx-F6)H|cBduvKT1t1lEZ(q+ z4TQsnM+%;>6Z#?XnW!#_l32tCCXu$7Q`p-*J^*#}mv}M&*)ax~#cAaUl)(LvaWmP* z)9BM!zk|=8$Q+Afk5jCvcgl~J0)zQ*Q<(VeW5KZalzr;_0QtVumEN5*>3zmo%}yas 
ze;0k**_4|U%{6<1ja9_W#YcgA%Jv}q z1o42ve!qD4K|fxS9OVqlLYjuPlyT(jqaOL&SM^+PDrP~-X&kV&xgi$Lm^8_hXYblNW%CVu%&1(p3K1F)lM^c|JCZD-U3T9-lvc2Tk1^jkhNVF{x4 zWIe;a`1L)YqR|IX#>AeG@tkEsbRAMohoBY+4f0(>e}5s0&x)SZGN(%W3sc)^6Y9(2 zXE`-|=fq?iYAg4{ZwJ@&B#&uYAZ9EmeQQ}{s0B>u)<5Yc7Vfh%dlPSDOPo|iITy8d z?oc(9=h~;nwvEhF>0#qx3h#R8-HEn{b`t0aFtuouDL}zo3NJ9BNmRwy5cz`elbc|c zr?r4XVsYD1MC>QE-syvZXo3!Pq+xBpAQO@86VcnoWXy$4aeNmPlbV%);;=-DZjW^X z)j}F_ORzMU&r}v5tAUpJXmETmVH4d42$34!2mT%nsW2TGP%GI4HzXa zK2FfAN2c#vtRO{G5tM8ad+Qw1GO|F&ehr>;*?p|#L`kRuxDYE*M@7w}bD@s!iXWC2 zX@d)qQ+XwT6+}b6K^HMCz8QycjUoXGRu+yjzP6Q`6G?^wj`k9Bc6Nlhq^9Fwd(XG<7Kua9@bsP^-uIgo5_c78RR>!-}<4Ff&pd?;$NPOL^q zprjD=N;ZKPs%fAsVF_Y>gHQ?@92D0FGpam4vw@{-lM(GyeQuV(fQIpWcD-Rr;-?U? z#H89#19%om-o{`B*;b;YX2-jxZtCJ$;SYC<2R3QOL=uc}BOe@N@XGYe9piesUB!>3 zI#0tVlqq6fJ94n7MT9|MZ>NGtuw;JhFhy-(D$%$dcDp2pYi}6apPcNY3}P9iUfq;5 z{Qd=LGVB@7qMes~5p|5AG_zt2$Rfe?xk0haA4&>tkR(4qIB=@CubmhZ-se3Ye(<#p z$0%zd;|afGfNeQJ)TS-YcdBt;wEr?s}*Y$S1{2H<=pOB9u0~O|!M>kE#+y zb*Xs0QNnc8Sr*#2N1y=@pL!dgc$_?sv$5g5F5c6fdD6$68AhjRgFP-e6IZx0&IyhVD{oFxs%VXf60sP88HTtu@4HkpO!8-habZOe}y3)ezBm3b`kA4q(9OrBa>Qq6y+&>rhYa8Q-tiK6Mf2fHk=e=?XnN%E#K(|dMPgcB%~u|+6GmYtl~ z<>L-S)c`3z2P%wZWo|r|VeNrNkBVqLXY=%`dQCLIbMqY+okJ~1HSb&wJwp{RbYPBY z5mpf7tUxpS7$D?->EB$lEO1PSA0d?$d6ckH8#0gP&?-+ws|Uymqh(*h#!y zO|(>B)rfPJ0&{R@cxQWX@VLuedi->GoItMU%puMWY$rGyl3xWaS(cY66TToYH7}tROV|XXD;{)jz#fP)^zEs$F=J(AX4h-&(`? 
z;*gAwd~-7LpWUCslfGGoKP@^-J-l}NDZH*`_!UJ=I9_B=evfWKzhQ-DdCw(Tv$pu{1RP~XrjfDcr z{Vodj!MHTCp|M^b=u}Rh>0lcKbxV>kR!WV$-6=$WY$Xl007pvl+zTE(78`Z+nLY&UVDGMinn9AfNlj1O+##ody zU{x>~=Q63MpK63}nhQAZPM%9aPv|}(a?hak^WzDZivO9~Yf4@wm`LSNF(6f;+Y1?E$i zBuztarmdvkXyDjE#SL}3D|kDWYJ{*xiC(RQ+R)IKh< znToN$#dWmwlDpqA$n>t9Mif^G%&uUX7S0s8+vY*w3;A#w>|YOL!wCf`JSK3H{HSGf zk;PREqNToFqJqLR?=||2zYPJsuUw1R*suMhuWc)1({Cg!O}2%L6L(XuZE^(;lu1;p z#W>9dXh2orX(&aTlk*r*^|D9A_6fo~yFq3BEGl<(Iu*4C<2@$7m#QQ@&&%NI=#nJX zU@mpCl{c8(@%@CbKo+dv8Pz!Y5*U_Im&T~q6J34n_a`lk##=_F!%IOXP^XhfUFL!u z@bmQm@Wx>ITAt{_V=7Hrw3-?e&mPLCUJV%eV;(gy+F~Tu1Cm;x=DzYKd@M&>6uiRl zI>1=R+Utf4|F835n&uCstndmu$d)Nwes{y%(rVY>?$SP!^dNX}u31;?9cp ztUdbWOmG>imv&@{<^*6Ns0B%QW<&it$iKxiGWH0%D))Y+e$B|w^l=xVRN+Gu~{*8F3nlnXxm9Nj4Zk;Ol3`poYzC9ha7jh6-^}>w$q%5+IG&^{rI8~4N zV7@sOuHmnTJggWchiy^0d0evYF{KRZ8~y&AsYdk`tH68(Ik8$a_oY^%Mc0z*F?2J@ z4&`kujaatBt0@_t=qv^|ZQnzsx^P#QXf8Oscu)f!%aX{KDlkC2)T4Mt++s#Cbxa#_ zSgl86zK1AxLY^D8kxz@^Ib?MNW(G&`+jS{sdOIoo>S6`fjM>{Vn5emY+6^*nk*^?r z=$ZIA-m7TBt2oO84AunWNNxs?Uyh`^E>WKk37ad&QYH4D2+BvCa*z{ind8S{wMk?E za8Euay|o*EW#K^uAP#I>)@3L(VaLdOg|9dvYU&TPyA_0Y+qjlYt|y$4yT=xuRi3Dk zin?#5=;O6(yp~S!y)YYpX#+J7q^Lb9PAi_D72+1sRaffG2%pFr$_T3?{N(dtQkGCG z6M%1J3pCNMP&BrR@+WE$iv-}GD3jg%wxqquOfzSUK7!V0lS}C!H-onifP@LJ8=1By z?cWk(KIb4w*!jjUZkr@jH&DF6jYC`{npQXIbaEg8PGK#HnH8Twv)ZGfD8Gy$H2?O| zn6#7lB+ueBG)X}UMDZ;>hBgZg0@^ER-7f1AkOB;H6%>JAF;9qZ_-e@``{*^J-Qt}P z@7R2Kghm@L8Ij+r>y}wGp}-Bsbhz*G1!%2R$G4{nO(*+Gmpl>6elF>kqUq&U^X`&k` z27$d>N(VWRgW^+Zs1)ds95Ukz(&%g>!V4WPGMf#PWPe8C z-qgMl`25s}Yzm7%v-yxn>~KR^1=p{}1vNqyjP?vHP0E3vU&IXu+d@M4GHi%9Ck84Ri}^Na}NWS5w*_&V9?(YW)E1!n~{D8m98I}iOuWo|aiKRdKC9DIUD zb?vgiCuT@Kt8lgD^=A<3(b^3jA8_iY)KAJDorn;a(2aFE>51_n7GU6D4at*m433v+qI6vzNb` zLpOG6dwYNpsAHYzi_nUX3b%AM?VRK-)S<9~H6SA*@3Mkj5 znWv7b=Gi-Q_Zy|3fdWrydo-vRLy@AbX zg|&TdkR_WY&+Cy5*Pt)@5vKF)PD!ONuPX%&8fd7bU~Z|`3Hq#%L+{L5D|(S`>i4i0 zxp$dq?n1uNT`P}!LSSebu~kP}BwsJH!^0Y$apiEk_FHT+l1rF1DkQm&R)Ze2%@mIT zeQywTx6`rbz9->t{v}3uLkhP+l%xp}$~iTy+nLvH9*qVLyOV3_%>syj5n6^7fy|ZE 
z0&&Z*sJtcIv(b^8)2h!N)rDszp`!iCZ)rN5K>1>Fv3cH!gg8){aZv;EjyivmLUtjR z1CvMe4_E+Az0Lf42CJ+~K1z{meriqk_s&sl9im>`uZnQ&E-+8}rVaN$JWeN8Vabon z{p{X?Acn?F5J|184hq($G3ormTrJw5%whb(t;FH;KyFv9)&OxYRH%O|x%mv=c7swZ`;xM(9{9Sk+Y77W6zD+nlzW8R!Dn;m6>Wn6I>ZJB_Lx zuaG|$ZA?bCa=yIla|7av?7a`V`qN2S6(gM?c7GHTW~&(NQYTDypa%0da`Ys%4i77{ z{yO%wpMA)$|A9QpJTE4ATtrAtNL$EC(6IJxj;u99k!g1H32n&LGegay1DZy95d!XLgyh;>f)wX3xj=TamhUn9O-ps#-7mslm-_sTDYA)!^ z+E7YhB8C`A?xTDMxVvg4zi1}A59_)Kylju*Wbq=?nJWH= z-N);U^ctOfj|B=;oq6Bvyr1=Zly7=sb61V3lycd(E*B zy2P$NbOPb*#Z+9m^U3qbaduD@YnQt+dFSyPhRXGclGtivVu7uSV|uHscGhYRc_d-N z0Oa|?AW_{^Is(N|yA_6#?2cb97qg)?U-&bcWqkwF1LiOeaIUcKOZvLS&6k~V-LAhX z&Gw2^WDuy=yAyTUYOn#t&Dtt+r>%WeBcFdcq;AITK^C42Vamf(rz zYOkAjRWtGN8Tb9en_Eka7N%#&@;}MK-hLE+NAK0~@Bz<}f-q6_X*tE5Ajkgk zLE^XL!`$AjyFT7W+y#Go^@Sk&UI8PF2I9Px+pqiDV(len8yBxJLEO+S^ds$e;j_7>LjD=DX~7 zyrs8%C^yypX;|n>PArk6*p2AeZ%|1aF|c(|sm7GJ^hReYe}48F^YX2jt(vX&It;;X zQ@&p1_8JfD+@`AT^IqvV$@%E$gO2`il7934JxUYW0;+9I^qls4-lk{w{=Q|PZFKgT z$|B1iKmK=Ee0VL7R8Oav^i2BsYO*Hk-g!>4HItPlHTd1~{r;HLA925TyPi^hB|U~+ z_js>$oKoDd`!I=9A%N}{u(hq6FHy>(`nm(QNk1`;)(5OJF)K8xr5*Wmlc184&Cw0+ z%+syd=&Oy=nPx#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz0vAa{K~!i%?U&nX0x=MV`~FY0s33R(#RGV} zYA*}cQ>of*{^Cg3PO_6Y?N&_S!?>AD=Fd*D$y1hP`8|^aR811#qTB6mmdoV{nED%M zqk+1BUaz-ZtycMNx67%Zj%R4&`rLx500d|UUMy(S7NIHtvs_kS!}X0tO#pJr8rcn|=Jeq({&PNQD2rvsi2_UKxaOQl(kOUBq zCg8;Rh#?6e{=I6?>}=kk~D%1*LN@&yr3OpU@Xpt zBWMj!fUfhrxh{GyHE45PW+(++0$oH2@C^bHXt#x+j%U7`n8Y_nkN`>}FdPm)I2!`N z;!S~0r{ldsjz*&+`3*@Q5F`N5OkuFHz$Dm z7635w_P}OW+H-b3OrL}8gWFmqL1za*v554LvSTT(Gv0BAOw>tZnY5Ad}$ zU)EQEIz2Di!UyMD%1r_Y^AUQtgYy*%-1K3O6QQUGAj~)5J3WeGC}?0(NPJ9?pen!y z!BRn;v(X?4aFGNENdjEly#Tr%0@pQEgI^QyS0n*dlLS=FoeRkFFJE^bM)Dbfl>h($ M07*qoM6N<$g0KS_d;kCd literal 0 HcmV?d00001 diff --git a/_docs/ic_pause_circle_outline_white_48dp.png b/_docs/ic_pause_circle_outline_white_48dp.png new file mode 100644 index 
0000000000000000000000000000000000000000..683d5c5309d8b724822ae696a3e191aa818c38d0 GIT binary patch literal 2716 zcmZ`*c{~&T8=o9=O$uR|vs^LPLR+P|m7=+mqo&rVtSlRmqfpD{{*vz;i_9%llEl}s zYJ`@RE6h>uj4_+KP2d{KqS z-&DZcc;a1xFX9R3u3rG)&S9_Op;s^9Rbfy=s7d0NOlJT<9OPh)bS3)E=Ull9@lbqS z7R%du>$!<4mBUtuo1XunmVdeq+@a1x#Ug1)u#kpLuP!*YElYajyeeye-PSH2n5d@t z<@Qs}hPNc!x0a@w)9UY_C;xih@b(#3m|QfwoOaWop>{juODDBH1kB zWJW46j_ZYHO=V~%b}JZ|xk4H!uu#SLXoI~Wwr==--b3zgD|UU5Jz9P}>)QL`(p=M{ zfyFNf!q~YTRVKNHGY|dt)`3g)mjCnqPr!-R#Qf6%WfS4NtmwgnNPbHwc*E3+f7BG#7%Gxfbg_b!?|%Rvx^ zgQ9A;c-(Qnuh|bCJSf=#zoWywFUe+HDy#+&V}7KiIG9f}GY)j;SVV2LrD)i_s<}4K zAoTt0t1!Ae@MJp2E=l?GVIw2%>$?udgpjVbvHfl(QIa zwXSNy%DVkY0of`AYQ%9`Tm%ILRZypQ-8YN^(n%$&5t9L5IjumO0~d zEv9?x{3J6Y*p)K1Uoyrt1fI<4Uj%A|hwdXQuG2VjpqlB46&j6JRm$+V$Kh})+&}fG zP5+Evaxi3N3Ha8$3_P?w0+dqX6$awu4iOhe>E&JSEBU8*&uI}dcVHqdOR;O9y7`U9 zn`@OWpbF!ktMfmt{k6^>^bOKhd8AH`A$|%y)79ilM_9b2StXcy?zN29_((}lLZvVO zo*Glm{V@h#PuAXy7x=fls}7Wj!oNO&^4UgoKibt2y4 zNF?L#sxem+gO`WA|JD?QbC%`$7H=B;hwdxGx%D$(%$=+O@t4}if;%?%TeNa@- z`Bj>I0dN(UGbVK|40&+W*8lS5IW2*aSRiN;aSsM3#&FJF%|EYK-d;K1+1a_VEl@d1 zn8}W7^ZkaNA9zae+FMnGZwb>dAFu>Nq~T+-pa3@)x5E+XWDy~1P zO8je>?=}?4m#Q0=on5!l)A48&bg%N=ytpcJMrE4i1e$E!Z(k4%yl--eRJHGKozfPs zl5k-2)YMdgPHWEyIbWv)CN3(EC-oywBfM@*MeBP%dpYQF>V%46x`DhJd53AyqvtYx zt!{GftRQfJrk0jdQG|rD=^qf2uSY&F=;LO*^Tu>EKdSn#Y8;)w%*^}A3?XZCbDRC7 zA^W5o{&Op|%Y)Af&{e7?1`jgu*&C#I@**KZ965Y&9PawlLFV3eS zbmb@0WV7G-=)5Xf%zQ)Bh9lHo%ADN)F{b(x=3Hw$h``6W$?kvf7;PJlwPxA>@OF|M zfyXqtrhM{`euZMbe&x!4^$QOy@QwTZF@6c&jP!!^NFt^B)>bnxx&fkr$}LjNaM7D| znvw)6qvJ%mAiApC((8lsbp!j;=9n73^Sfu8`(|sU7u8q!$^XmuWS%9d<&ZS}UlVaD zDJjA0Oq0m&KQ3gqn*+zk$D59OC2Azv{XW$q)GkEU)eUNJVw_kxZM<_?FY343kPIhCp}Jo7*B)an?F`~kBlFnKe}bLetj29 zd(;t7#YPQ@QIC0KygLqo`{VI=)yajgQ+w!k8Nbp@%6@rLmJ{CgzDyiLn}PIFAn?RgF0D9 zdaWzPkzTBQ&%hNuV}DFYX^iLI(;K43QUD~RHjd36ACutMlz%#*{!_>`MW%;up^-G@ zE216JY818GuQ;9^)0(i;LLsYk^f4_WodgrH^gW3}z}Yi*g7&>~29+D%IeX^ww#?`T zCed4RvcR>_NU-FP#z4+7zTO^P){A8;YSKgF`QvC?`!f@6Wiq)M+_*qohJPo$GNvo4 
z&H1R9E!f0z0LmhS+1w^(86ir?WHU-MYaufVQyU|S2C#Ie*c66Rax-*F1=?diEK~Zu zhH=7u^l+2DX|XR>+b_Tp9Ac?!hk_T8;+GhX3&ND`1N3x(AOyefhv~KYB-yjm5LWKH zv5C=zMaZ1!OR#!*^Q~^_d}=Sq6LrxoE$*o##ZEqP5X4?RYmeR+A!F~kYoAgx6f#6Avml?D;-wlM1H!N3 zOr#D6rG4PumHGC*79!8`G@M{Nc0kA2EmC6pTR2y^@q;T-7TYH=DVD>IbP|M=bVW^~6$-rHn#}%mHEU zYM1iW?yW`|>@Ugpd#rb*eL0{%TqL@SeQfAd-Y%ua9{s}V1jEBASW?Nz^V8|(7j4Ql zIkAs+nj9^xwtW1?jwxS5kyhnja+l9F9@*J}60^esJx)-P7z+?b!$-<{rMbT9WYDDA h|ECK^Q6IJe!;epYxAvaC!v8!19BfWmS6ceV{s$779TNZm literal 0 HcmV?d00001 diff --git a/_docs/index.html b/_docs/index.html new file mode 100644 index 0000000..d73e7db --- /dev/null +++ b/_docs/index.html @@ -0,0 +1,326 @@ + + + +Open Camera + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera

+
+ + + + + +
+ + + + + + +
+ + + + + + + + +
+ +
+ +

Jump to Instructions.

+ +

Open Camera is an Open Source Camera app for Android™ phones and tablets. Features:

+
    +
  • Option to auto-level so your pictures are perfectly level no matter what.
  • +
  • Expose your camera's functionality: support for scene modes, color effects, white balance, ISO, exposure compensation/lock, selfie with "screen flash", HD video and more.
  • +
  • Handy remote controls: timer (with optional voice countdown), auto-repeat mode (with configurable delay).
  • +
  • Option to take photo remotely by making a noise.
  • +
  • Configurable volume keys and user interface.
  • +
  • Upside-down preview option for use with attachable lenses.
  • +
  • Overlay a choice of grids and crop guides.
  • +
  • Optional GPS location tagging (geotagging) of photos and videos; for photos this includes compass direction (GPSImgDirection, GPSImgDirectionRef).
  • +
  • Apply date and timestamp, location coordinates, and custom text to photos; store date/time and location as video subtitles (.SRT).
  • +
  • Option to remove device exif metadata from photos.
  • +
  • Panorama, including for front camera.
  • +
  • Support for HDR (with auto-alignment and ghost removal) and Exposure Bracketing.
  • +
  • Support for Camera2 API: manual controls (with optional focus assist); burst mode; RAW (DNG) files; camera vendor extensions; slow motion video; log profile video.
  • +
  • Noise reduction (including low light night mode) and Dynamic range optimisation modes.
  • +
  • Options for on-screen histogram, zebra stripes, focus peaking.
  • +
  • Focus bracketing mode.
  • +
  • Completely free, and no third party ads in the app (I only run third party ads on the website). Open Source.
  • +
+ +

(Some features may not be available on all devices, as they may depend on hardware or camera features, the Android version, etc.)

+ + +

Get it on Google Play.

+ +
+

+Open Camera Blog ~ +Discussion Forums ~ +Code Repository (Git) +

+ + +

Contents:

+ + + +
+ + + + + + + + +
+ +

Requirements

+ +

Open Camera requires Android 5.0 or better (versions 1.53.1 or earlier also supported 4.0.3 or better). +Some features may only be available +on some devices (it may depend on Android version, or require specific support from the camera/device).

+ +

Note that it's not possible for me to test Open Camera on every Android device out there, let alone in combination with different +Android versions (or especially alternative ROMs). Please test before using Open Camera to photo/video +your wedding etc :)

+ +

See here for some details on issues with various devices.

+ +

Instructions

+ +

Credits

+ +

Open Camera is written by Mark Harman with additional contributors, see credits for details.

+ +

Privacy policy

+ + + +

See my privacy policy for details.

+ +

Licence and Terms of Service

+ + + + +

Open Camera is released under the GPL v3 or later. The source code is +available from + +https://sourceforge.net/​projects/opencamera/files/ . + +Also see "Can I use the Open Camera source code in my app?" under the FAQ. +

+ +

Open Camera uses the AndroidX/Jetpack libraries, under Apache license version 2.0.

+ +

The following files are used in Open Camera:

+
    + + +
  • Open Camera uses icons from Google's Material Design icons - from + https://developer.android.com/​design/downloads/index.html / + https://design.google.com/icons/ / + https://github.com/google/material-design-icons/ / + https://google.github.io/material-design-icons/ / + https://fonts.google.com/icons , + by Google, under Apache license version 2.0 + (licence text also available here.) + (some cases include modifications, no need to credit me). + In particular: + baseline_add_a_photo_white_48.png, + baseline_bedtime_white_48.png + baseline_bluetooth_white_48.png, baseline_check_white_48.png, baseline_close_white_48.png, + baseline_delete_white_48.png, + baseline_face_retouching_natural_white_48.png, + baseline_filter_vintage_white_48.png, + baseline_folder_open_white_48.png, + baseline_highlight_white_48.png, + baseline_panorama_horizontal_white_48.png, + baseline_photo_library_white_48.png, + baseline_portrait_white_48.png, + baseline_remove_red_eye_white_48.png, + baseline_rotate_left_white_48.png, baseline_rotate_right_white_48.png, + baseline_shutter_speed_white_48.png, + baseline_switch_camera_white_48.png, + baseline_text_fields_red_48.png (modified from baseline_text_fields_white_48), baseline_text_fields_white_48.png, + exposure_locked.png (modified from baseline_lock_white_48 and ic_exposure_white_48dp), + exposure_unlocked.png (modified from baseline_lock_open_white_48 and ic_exposure_white_48dp), + flash_auto.png (from baseline_flash_auto_white_48), flash_off.png (from baseline_flash_off_white_48), + flash_on.png (from ic_action_flash_on), + focus_mode_continuous_picture.png and focus_mode_continuous_video.png (from baseline_loop_white_48), + focus_mode_infinity (from baseline_loop_white_48), + focus_mode_locked.png (modified from baseline_lock_white_48), + ic_burst_mode_white_48dp.png, ic_colorize_white_48dp.png, + ic_exposure_red_48dp.png, ic_exposure_white_48dp.png, ic_face_red_48dp.png (modified from ic_face_white_48dp), ic_face_white_48dp.png, 
+ ic_fast_forward_white_48dp.png, + ic_gps_fixed_red_48dp.png (modified from ic_gps_fixed_white_48dp), ic_gps_fixed_white_48dp.png, + ic_gps_off_white_48dp.png, ic_hdr_on_white_48dp.png, ic_help_outline_white_48dp.png, ic_info_outline_white_48dp.png, + ic_launcher_take_photo.png (modified from ic_photo_camera_white_48dp), + ic_mic_off_white_48dp.png, ic_mic_red_48dp.png (modified from ic_mic_white_48dp), ic_mic_white_48dp.png, + ic_more_horiz_white_48dp.png, + ic_pause_circle_outline_white_48dp.png, ic_photo_camera_white_48dp.png, ic_photo_size_select_large_white_48dp.png, + ic_play_circle_outline_white_48dp.png, + ic_power_settings_new_white_48dp.png, ic_save_white_48dp.png, + ic_slow_motion_video_white_48dp.png, + ic_text_format_red_48dp.png (modified from ic_text_format_white_48dp), ic_text_format_white_48dp.png, + ic_timelapse_white_48dp.png, ic_timer_white_48dp.png, + ic_touch_app_white_48dp.png, ic_videocam_white_48dp.png, + ic_stat_notify_take_photo.png (modified from ic_photo_camera_white_48dp), + key_visualizer_red.xml (modified from key_visualizer), key_visualizer.xml, + popup*.png (modified from ic_more_vert_white, baseline_highlight_white, baseline_remove_red_eye_white, baseline_flash_auto_white, + baseline_flash_off_white, ic_action_flash_on), + settings.png (from ic_action_settings), share.png (from ic_action_share), + switch_camera.png (modified from baseline_loop_white_48), + take_photo.png (modified from ic_photo_camera_white_48dp), take_photo_pref.png (modified from ic_photo_camera_white_48dp), + take_photo_pressed.png (modified from ic_photo_camera_white_48dp), take_photo_when_video_recording.png (modified from ic_photo_camera_white_48dp), + take_video.png (modified from baseline_videocam_white_48), take_video_pref.png (modified from baseline_videocam_white_48), + take_video_pressed.png (modified from baseline_videocam_white_48), take_video_recording.png (modified from baseline_videocam_white_48), + white_balance_locked.png (modified from 
baseline_lock_white_48), + white_balance_unlocked.png (modified from baseline_lock_open_white_48). +
    Modified versions of some of these icons are also used on this website. +
    Open Camera's app icon/logo also makes use of ic_photo_camera by Google (also Apache license version 2.0).
  • +
+ +

Note that old versions of Open Camera also used the following:

+
    + + + + + + + + + +
  • exposure_locked.png, focus_mode_locked.png, white_balance_locked.png modified from https://www.iconfinder.com/​icons/128411/​antivirus_close_forbid_hide_​lock_locked_password_privacy_​private_protection_restriction​_safe_secure_security_icon#​size=64 , by Aha-Soft, under CC BY 3.0 (no need to credit me).
  • +
  • exposure_unlocked.png, white_balance_unlocked.png modified from https://www.iconfinder.com/​icons/128416/​free_freedom_hack_lock_open_​padlock_password_secure_​security_unlock_unlocked_icon#​size=64 , by Aha-Soft, under CC BY 3.0 (no need to credit me).
  • +
  • flash_off.png, flash_auto.png, flash_on.png from https://www.iconfinder.com/​icons/62201/flash_icon#size=64, by The Working Group, under CC BY-SA 3.0.
  • +
  • flash_red_eye.png, popup_flash_red_eye.png from https://www.iconfinder.com/​icons/103177/​eye_see_view_watch_icon#​size=128 , by Designmodo / Andrian Valeanu, under CC BY 3.0 (no need to credit me).
  • +
  • flash_torch.png, popup_torch.png from https://www.iconfinder.com/​icons/51924/​bulb_light_icon#size=128 , by IconFinder - http://www.iconfinder.net , under CC BY 3.0.
  • 
  • +
  • focus_mode_macro.png from https://www.iconfinder.com/​icons/81105/​macro_mb_icon#size=128 , by Yankoa - http://yankoa.deviantart.com/ , under CC BY 3.0.
  • +
  • gallery.png from https://www.iconfinder.com/​icons/6915/​book_gallery_images_photos_​pictures_icon#size=128, by Alessandro Rei, under GPL v3.
  • +
  • settings.png from https://www.iconfinder.com/​icons/115801/​settings_icon#size=128, by Designmodo / Andrian Valeanu, under CC BY 3.0.
  • +
  • share.png from https://www.iconfinder.com/​icons/111030/​share_icon#size=128, by WPZOOM, under CC BY-SA 3.0.
  • +
  • switch_camera.png from https://www.iconfinder.com/​icons/103031/​3d_rotate_icon#size=64, by Valera Zvonko, under CC BY 3.0.
  • +
  • switch_video.png from https://www.iconfinder.com/​icons/92787/​film_photo_icon#size=32, by FatCow Web Hosting, under CC BY 3.0.
  • +
  • switch_video.png - merged from images https://www.iconfinder.com/​icons/81087/​mb_photo_icon#size=128 and https://www.iconfinder.com/​icons/81197/​mb_rec_video_icon#size=128 by Yankoa, under CC BY 3.0 (no need to credit me).
  • +
  • take_video.png, take_video_pref.png, take_video_pressed.png, take_video_recording.png from https://www.iconfinder.com/​icons/81197/​mb_rec_video_icon#size=128 , by Yankoa - http://yankoa.deviantart.com/ , under CC BY 3.0.
  • +
  • App icon/logo, take_photo.png, take_photo_pressed.png from https://www.iconfinder.com/​icons/81087/​mb_photo_icon#size=128, by Yankoa, under CC BY 3.0.
  • +
  • trash.png from https://www.iconfinder.com/​icons/115789/​trash_icon#size=128, by Designmodo / Andrian Valeanu, under CC BY 3.0.
  • +
+ +

Android, Google Play and the Google Play logo are trademarks of Google LLC.

+ +

History

+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/info.html b/_docs/info.html new file mode 100644 index 0000000..98df1c4 --- /dev/null +++ b/_docs/info.html @@ -0,0 +1,83 @@ + + + +Open Camera + + + + + + + + + + + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera

+
+ +

< Main Page.

+ + + + + + + +

Support me!

+ + + +

I am not currently accepting donations. Thanks to those who have supported me in the past!

+ +

Alternative download sites

+ +
    + +
  • Open Camera can also be installed from F-Droid.
  • + +
  • The APK files are also available from + +https://sourceforge.net/​projects/opencamera/files/ . To install the APK directly, you will likely have to enable "Unknown sources" to allow installation, in your device's Settings (usually under Security) - if so, you may wish to consider disabling the option again after installing, for security.
  • 
  • + +
+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/popup.png b/_docs/popup.png new file mode 100644 index 0000000000000000000000000000000000000000..5285d9bb7aab52336a3e1723c502e6b2ba9f8b00 GIT binary patch literal 706 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sBugD~Uq{1qucL5ULAh?3y^w370~qEv>0#LT=By}Z;C1rt33 zJG+x3dhhm<jf+FWTCO-Kv-vGghE#|`a z4lpl{U3oz|bL+jzTW`zw9|kb=FeQjEEM|~kJ)pth#%RFR5Qr|M%lhW~@9n3bek$4< zXYW+EJhv>rG>X$ghA)5n?YD*d@5{$4=kC1b>mtp#{PN3Evybu0-(~ubU%x3KnyIKT z^Z(qF@)N6fuW1ou+O&0z+pjanR=HI&o_sR(jr_*7P2X?Mn5v<#pgt$pZ~EgJJNxs; z*0@wMTBhZF-YN6aT6(h(S3@Cp(?>;~;5dg55?q^iaqUW}&rno&^1k)G#|hhiq95H; zR8z8Nc6vmwD?RzgQJb;Zr^wI3GOg{;JF}0?)0qAVqx)#t(uO^P(c&x#u>u{XjtbXU z52j>C_I{b!8`}GcY0^pUoAyc3taX$AXx2?>KWSJz`}|Xn`#@I$Ln>OJhd*(~C#I(3 z0W+?KUQqq*%*}Xk*8(OC7i=nZ{G^Q@oHM2|eR9i*6=(ok+880uGI?#P>7{hDFw;}q z9%}1T{&(bx{_y;?{70ai+VL~FcGHz>)04xT7!C3q4eVMHPjPd|IVXmNJ5GPYugk*n zx`}JT)Cp@-)s%pNulkvNEens)$!n}9*Dt^qmyo~+DC_t4>x(zZZjgBbOkxb4u6{1- HoD!M + + +Open Camera Privacy Policy + + + + + + + + + + + + + + +Open Camera icon +
+

Open Camera Privacy Policy

+
+ +

< Main Page.

+ + + + + + + +

Open Camera is developed by Mark Harman.

+ +

Open Camera accesses and records camera sensor and microphone data, which is used for the purpose +of taking photos and recording videos, to fulfil its purpose as a camera. Microphone permission is also used for the optional "Audio control" options.

+ +

Open Camera requires permission (at least for Android 9 and earlier, or using versions of Open Camera older than 1.48.3) to + "access photos, media and files on your devices" (storage permission), as this permission is required for Android to save resultant files such as photos and videos to your device.

+ +

Location permission is requested in order to deliver the optional geotagging features (for photos and videos, including stamp and subtitles options). + When relevant option(s) are enabled, your device location will be stored in photo/video/subtitle files.

+ +

Bluetooth permissions are used to allow the optional feature to discover and connect to Bluetooth LE remote control devices; + the Bluetooth remote control feature also requires location permission (on Android 11 or earlier) or + Nearby Devices permission (on Android 12 or later).

+ +

Resultant data such as photos or videos can be shared with + other apps if you use the share option in Open Camera, or when Open Camera is called by + another app on your device, or when you use the Storage Access Framework option to save + to another app or service.

+ +

Data handling procedures, data retention and deletion policies: Open Camera + does not transmit personal or sensitive information to me.

+ +

Since Open Camera also uses operating system APIs, you should review relevant privacy policies + such as for your device, manufacturer, operating system and/or Google accounts. For example:

+
    +
  • For versions 1.49.2 or earlier: the optional voice control option used the Android + speech recognition service. + When enabled, audio data is likely to be sent to remote servers by Android to perform speech recognition. + This is subject to the Data Processing Addendum for Products where Google is a Data Processor, + located at + https://privacy.google.com/businesses/gdprprocessorterms/ , as updated from time to time. + This option is no longer available in version 1.50 onwards. +
  • +
  • For versions 1.49.2 or earlier: The "addresses" option for photo stamp or video subtitles used the Android + Geocoder API. + When this option is enabled, in order to deliver this functionality the API transmits your device location data across the Internet to a + third party (which may depend on what "backend services" are installed on your device). + This option is no longer available in version 1.50 onwards. +
  • +
  • Apps/services such as cloud services on your device may auto-upload photos and videos that are saved on your device. +
  • +
+ +

If you have inquiries about my privacy policy, please contact me by email at + mark.harman.apps@gmail.com.

+ + +

    Although the Open Camera application is ad-free, the Open Camera website has ads via Google Adsense: Third party vendors, including Google, use cookies to +serve ads based on a user's previous visits to this website or other websites. Google's use of advertising cookies enables it and +its partners to serve ads based on people's visits to this site and/or other sites on the Internet. You may opt out of personalised +advertising by visiting Google's Ads Settings. The cookies of other third-party +vendors or ad networks may also be used to serve ads. You can opt out of some third-party vendors' uses of cookies for personalised advertising by visiting +www.aboutads.info.
    

+ + + +

Note that cookies are still used for serving even non-personalised ads.

+ +

In countries where the GDPR is applicable, Google's Consent Management Platform (CMP) is used to obtain consent to use +personal data for Google Adsense. In such countries, you may update your choice by either clicking on the privacy and +cookie link at the bottom of other pages on this site that serve ads, or click "Revoke or change cookie consent" from +the site menu.

+ +

In US states with relevant privacy regulations, you should be able to opt out of personalised advertising by clicking the +Do Not Sell or Share My Personal Information link at the bottom of other pages on this site that serve ads, and selecting to +Opt Out.

+ +

The Open Camera website also uses Google Analytics which uses cookies, please see their +Privacy Policy for more details.

+ +

Also see "How Google uses information from sites or apps +that use our services".

+ +

Android is a trademark of Google LLC.

+ +
+

Open Camera Privacy Policy.

+

This website uses icons from third party sources, see licences.

+

Open Camera on Sourceforge.

+
+ + + diff --git a/_docs/settings.png b/_docs/settings.png new file mode 100644 index 0000000000000000000000000000000000000000..8eb10179d6aeaf9a6ba16b120849da7c4d489007 GIT binary patch literal 1327 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85o30K$!7fntTONP^!c=q9iy!t)x7$D3u`~F*C13&(AePq0Cs% zRL{`B;7id$ph@#QT^vIy;@-~g?Y(Wlfe%N48jFQ6(;QC5bJ&D;_AqGHS6?W4 zb0U|OWB*p0-G%9l(|2&|DLR}I=gZNKT6cFx7ei@7cE--K$Jdxz{B`g9<>)({V&};) zk2*JZM-xM7LiEL=cSqi`a>Pg7i@g!nFlFV^_8Z^kChTO;j5Rjkowk5?5$o;F2(1P^ zyTB=q8!azcAIwy^Z1^zv!Y!Ex+oXcx-pfVp6+O-3ktx|zGBIvfo%1hsOuOEs%CzJ2-V96Opr~){Z>;`!H!P{T&@`ask?^w-UDYJjYwr^S@YTEl$i}JQ!cl8d~LU4d93r`kB2Vb>$Qv;D_B#vT72e`LOK zPsom+$*|qOc;jELd%6!wtuMm`6&Y?vAA0UsR9I2Ns`$WBkFS5d0*g(LDknc%z`UIxMrb8{Hy+pLP0wOetI zSCj3Lx6HNDRjU)#4hA21!L#Lo!lYNT4tz6?;5xA{Li9mnMK?=x@hn@>oo^WrJS>~G zDTs+XKlT@2LSN(>)}A}8oYM?U3`8r=%GPERg}>!Xiz6nQhLq zO1}OnYD17i-m+x}E$*HQSby>FBju&v9(Vt*_4t2iqs%gC{Y^d&PN#~Wner+ex$e=t zY~fL(quVnzm)k_m5%p&6ImDdMTDDFoXjAn>(P*xUPcl0qx7^viVRyr|RMqZjw0>paJl$}zhA!Oty4&t9lv#bLNz0&+>Nj;F`yiM^!eY|9bFCA?*CIdJ^!ID zkom6G;q+$Z=>kB_>lQw}G4V9V4sD_e3~)ITzOVitGfq!jQg@L0d|Oai+x@y&VEz>57+! 
zyBDl|9Jq*K)`#+c(_ij>_?TOuvGvlm&sN(QB(E_k_Al$#ZaDU-rvBSzg(r?O*2@_9 zmU6$CSATEoqx`VMhnwGWFW4A!`@U`Lg^IqmmDzP?x3LyT#2S2`aeu9e|J^d@-x3>R z8BLaXd*$BNzO{-$a+BNbcc-R_I~;z0BmY*m+`_=aVu#M3|9^NAQ^qno32CX*Q_oI2 zX#SA3qd=#Q#pCh5C3iw27)^}L&FAm8SGbAECoTcU*2~vyj+Ou| zc>PK6R@L%O4F}%*OV`59_wdeQE97gYR|8GStcVawQz~$A( z|1c$7r{ST|m9E3Dnz)y)EuZz0X$FtptZmOUowyQ=wy=IMcxP@>A!DnsropWxwNXE1 z(Zz^9wi;E>X(AUMH&lTff3a!$Hl3tO#)&Quv~qWA-(JPwmb~lzg>8Emz2(Zt&~a_~ zV=_^v+GbNOBQI~=oN88+ci!P2%~xNLK9tOrVX;7OO?6tHRAdC_anTDM2f{dZ3fgP~ z+9l={eg5?iCW+pcmr4(OS-HZ6k@xJIo&VW$W&5*OTjV*x5x3!vafR<)=@4xP?gBBc z4T}y6PUr-AWFaszS23tD&sn%)Desrm~Z7VKW2`iFaR@WO@^4FyRqk2a=BxR@c^ azw^J3Ub}2+;wDgTWAJqKb6Mw<&;$Uh+Kns5u;7u?9S}$Vd3_w0;3p4;wo67OSgY}Hr2{^|G008%&{|Iebur?(PkpYKfYDHXz%k5UG*S&mwEg@h`X%`POSd!odH4 z+$csUd3ddf{c#)Y59>~f((+DdBdSP+?8ejgZhXV8e$;>aom2Ht&6};QtzW)xoCbPX zpXXLw6y*iy#v2uL8lJbU_^Jp3i|6HE&fo7A0UB@!AJV_oXowknyOIQnt?;^r0pw)yBMh&?_tD ziW$RZ+Lat(GQRdHeX-hu%k>E5b@zAsktL5MZhC zfRId+lxyMptKY7kB4Z@WQB$Vu2Rnk_6(gik`M4rD-lf0Zr!O!+KmQtCL^mYX#iudi zs4g1uE}^Z}aJ6LnNN0fSRlHdc+i=wk{8HssK`*N*Syt_C!CacJULky z5D|fh4WHY!067L52m~8o5U2lgj>So4#O*NM5ucXYvKj?`O{@3lw(m>97dKB1x8!28 z6XzP0Uf3t_ek6}P@KrPNtaSh#-j)+Sc&5oaOLI{!H+ou$ON>Dxk^T}8_^W{6;A}Tn zSJ!B?FV&<(*e>mRkr2|Y+U^89VgqKQI8J@vTw(B4R=2e^WUS>eFu z?@E|*@cSj_p|yob5~QTGluPN zvvHDUa}GLnF4nf|DVUR8W}n-{K`q7L4o>9KrKL-JV&voGWUReQdKL7FB_H`-^-e;H z(ab>xub3_l1izL{HpM>6Tb>3Zu*r?iq#zWBR3nqz|lkd5qB~8&efcdXLk>{o$yKY}fX|10Ei*R7Y zja55@89IkUNS9M;P!gm3WVaSA%N+&eqNjD^MX>-u^F(kJ4`a26cbM?RnQ@ElhCApdf1WD^k;=0XAA zrSjShXx<<2M;6__nx{kDSY@$N6qvUc#0IQ`JfN6Fo0K?pZfA3dB?~pasPtYoeXWT+0dAOlI+|^%y z%ZS`PI6yim3S^TaG&XR2qIu@0hex|i=aI!yuwQ>n5NaQ#r`JfctNaqTV=Q%cj{dNg z9oJp<6geE1`PsXpmg8Md|Ef(OSMk-(ot~n~v&Srmeno|aUcG&NGRU=3829~V(g~8v zoPTZKtnwIr$J86{x2!68;25Kw8tuHL&0R+Ho{IQ+tnq7OT(d*U*-WI>QQ`R=qH)qC zeB_@LD3pwwX6&@j27RSa&hdW=7b?tbV%GHBnkPs^g_%=qMK=d$h)k{WYBi6Ewf(C- zs$b+izK2CN{_gzrJ9_Z=Sm*p&_QyN;0%hzI*$f2GUfq?2AMz-)46n+VRVl^G^_tV6 zkImAQVmM3LTOo3Vmb-%&oZWvMh3Y9Uo`}hGLV*|S`1#7-p5tuGy}XCMFwyU%F7z8x 
zZVVF#wq?eEuLFxI(iqEen4#?Q=;)~F%YwXnvo}EC?G}f#1+t3LU6cbzq@J`M0qV@; zZeIW|a_u#d$W!Axgr=MB zlWpNU&*p`S%)Q`a9J(_psj0#QQ<5uRw3y!b`1o39Iv<>w(FLK}ereUX-iS1Fi9MXt zXH?X-?jw32b%fsl;?gay9DHoq&OCukmgM@D7Ps+<3H65lI4w~zE=M8b(3~btF z9bDP7r78m-!NdenQUST%7-pi;6Uon^~*K+FvF8J=# z!zHzCN25TuhgkB@vb}Z73D!~cyDYP^&EcV;hUWa=KI;&b^tbi3jY*zxHv1)rSVyWJ zdw-w_-GwIG#%l2hAP_;Pl_d)11mgPdWES09LY;l|+e)t8%%qO_NUD|y8r=Y{;~Fb=l5#(Lx{@Tf@ekz NV2ZUgqG8-){tb9yB+O1GqRNvwp@-LT94 z_|AX+Gw;p2XYM(3&pq?bocctotI9uwJca-O@K8}fRucd~_e&6f_u&3x>|Sc~ALXGb zFAY@vda`{_VB1QmNC7}iJORoA=bpxQRWR@X08-cgUJz4enmqt;1t`i&Y5SV)XX9s? z$fa*p?5wOzE-OauB|t5u@eHgedjG!9A>2AjP{CSLV`3s<&Ii8=dnG8b@?F9TUh*L% zT;(B2$|D*px8SE>upks{Ffhp!ja}>c<=`whRSS9B?y(zrP@EcYtDPNi(1y%4o6?!d zKA5_kZJO273AIQ&k;@oU#&=yhnSFn?y{CLA4)r~g;6indoxNv=OB^xul};qi9j}X^ zeD}`#n47Mdl{I=Gh!4~Xoyp`StF)psSZ=SqD+CJ;I*|=zFMA#qFH-3e$zgNBKTPNi zNEI;Ya#AL)F#~I|BwyO;Ef8`9S7iwT+%F4@v>)RdaLAJD0yxDC6%sU(?b+{B2agKPG4^T;c3z|(+6Kav8DQyUt8j1r|D!;Fovzhm%~3k!x0&i7Q}?R7%T)6Qxx zkZ-u;YQ)LFrFKX;xm&F%>u-!eC2&$0)qr`e+8uBGCcPU}A+?26cy#Ndi+$Y#VU%AX z3p^IBzhD@P6VL!5Q|FF?e*E-Y;2LoCIYc zB8kFia|dsr4r4#?sQC(q?zQ@$t>qp0-&xBUaKPzxF-CUhC01`@$;X}G3KX>O-$>)u zJ11)P#+ZsKv>C#iX^pH?8Q5uYpxyDXJr|85H9?s^&=yhs4!z-VD84@rVoqvBo{h7R z-==h{^ZGdEfv>ERAM6Kv=odY}pdy|@TkHVU+6o1pK}#3 zW{e1Dm_md-`?L6x*M|&qyfkeqgvo>~1;C2OHFKj;e9A_?!}8PEV?w)#ydgpg>iM`e zcP=3%`y#|Ma_B~-gsN%%MaI`QI@I4cMLQ}@2s_Ylgc8bkKYwywFJV|*<1?#fcYNp} zEX5w1phLX2TMb9)4hqtc=Ak6P6sP;hsc8fNXRHdKOUhTn@nmG&+RsTT%by%^Q4knY zg%GWrE|;UZGO1K}c3Pnmobb4#*b@GgT#HEQD4dRJa>GaP>5rt-G<{iQ>`FN1&oO4> zCsZ4Zs+1@@jX#$K57)PnbCQHR+zilf1a)7KCxiLisH|@=6x^__j9eVa-{jNGfIdrM zYt?*fNWKQ}48?wi?8UunrhyQTD>CU6ZS!m3dghZ1M~MhCsFh4&s-3qjXh+mHI3V|j z!K4Jxh?j;G6x$y~MsD*)6-9pVD-kyJKE{5{8PVFT|2kxV>t5j=m8;Utv^@0>lfMN8 z10i{x8l{i5hQxf(`CEEKZwx_JUxL=ywr+U&WDYaA$3B zadn$;e`sVQ5hZ41MpZw5zIyo;OaL>G>Q?&!n(~S}#!iu&BKwepdvCID>Q`fYGODYx zW`1M`w+t$3i~^8OJd6xGiYQd}hcu?DO7}u1iOmTI|S^T2S6B6lF5^!Ni*Yp-9Q?C; zj2wp@^+`O$|Dh$m{o|pC2-Q*uJe&dB7+*SgoX{Xj5FO{l&if}Sk?0sZ%X{P`JMSA| 
zcrj2*1LN3eNPl!2p4??U6t8?;Eye$s;Mog(^zg(!eN#`(IWi zL!WT|qA)aZZq6f#0T1+|yA2u>BXaQx=9Cy&H~i`j7euB!v)7G5S5=Z99H4k?EqUr! zW%a4|EDuwappci$IXS+UdlaJ{qX}ABoN&jbc-YLG0JC?VFeoCIELeH#N(p`tx>ZU_ua$|eZO1doN9$C9kF-q#7WU?f1WbHO*^A(?riEB))&J^H>!^rR-U6* znn~9k)Z)26zVbB)m!g7nn!~NM-aRzoBaLm)!ndrLpdpp3x}-!lcpm)xx{^hiyh3*( zJ0;`<6k?O@f@U$cYStZio|hzFqxJ&=OQ~d2->JZ&hD;LxW1joKnwHiVwrtK6Zh|C7 z!WXB|&OYGe#Z-gzkLTCPE&pAn z*;6Ye31BX@IncpPY`Zyx9C@k1Ou0?;Hc_m_7?fni_G|y$)moCwY;HscZ0nr;JPSaK zrxm}fhbyjy;MyNW5pz-TRl;5z{%T`TZ$6QjW++L^>jXI9;FUG4!^4KTTje?Yb$NC_ zH0oyiISNb0x(5j>cG_K0{dmzwr?PDCXvFmEIk45~+WA|KnL5(vu)^up;d1mC+F-;i z?efcdHF+m)Y7;7!e8+$4A<@>e2biP@!eou)X1@b{ucMwsF4nB$kfFzvBPSu`qjh4M zgSI};kR}~!@%D=46aU`IMWy%~Nwe~isVHT6{P*@-S%*)&i zU5!!cu>=8Mn*dI?L)vAhL+|n$n|Yf05vPEnw-Egro8!{H;!MGwbhPdqy2tpGEfzNM zykRrsbXVT&byG}4?s1-<>qJ(0d5eURN?Uxae>B;y*YK>zZ`=!vI5x+B(J7I2}BUI-m9E85znD^T?H2GrZYc%V5f2o9|ZQpbUAN0SQ3Y z!<-uO#3n;jgKrS8=O&l`<;rOH7$E6YL)3$&4hxE?dZ?H3yl3iLT~@@D`U)C9Y(}Y51d7R5k6E>g+R-% zHgN_LRfPHO4%mMx({f}i+z1LG{5&UWl6yEahH(~Ij!37Y)^mooTj5#HWEH(C-#_?y z68RzI>S$w#zj%-@L=-b$9%*?D#v^&kRwPgeKAO9e3jXC8Mkd`R*`~2MTK?F&nUIY7 z1k5_u9X!?WDeR-GXf_#NAsEQpjx-aK4+=XF;4V?7Ok?}j&qEm+rV1#7YxKV3_oIYWGfbIJF2>-sh{?b_|U(u&gD@K3v zF#38y1Q-|(+r&Kqpc}2?pDl*y-yepWaGQtw{=3Cu1>aJ+lW4wm;$r98)tFw$_Y+!!5C`=(aWTjau% zbreJMO11-HzCt%T$3b&jv;6=p6HPR2H>)Z&8sV-BJlm+4^%3rv$#Ql%m@F%lFOhr1 zxU#MNj_9B36^o9h>f=0Ob|!S9-@AEglLF3963eCFA@izIx49ti&MVmv7(kul_glu*xIy zIdw*B$$ zGxgQnck1J(;~o>ICg|xdx=Or!90d)wY5s;S-Lo&(E0908NIk>+;_8qKq6|th64GLq z*E1_;1yqU28f2*U@l)|SExJC75IVlNMvoUiQUr;R9wUQm(QKR}AJlkp_ySHQY4|_j zG&_qBMAD+G`1r1+q-DG7YeXL(1Xo%0gd(eU4{YQ}fVW2B$q9*Xig!Dn|8{tw3-IfB z@!a+Q>682rMy5IhA}1)>amtJ_~ORp*2<2fswUQ>4JM?lmc|>8S|vHUz|aX zZ#RHv3qVr4O( zlc85vxZ!7|czfF*bsY1fSMHxMr;*qeUWx1#*w1zNS2`pI&IzGr&Rv|=62LNFFQn5|ii+7O zV}vB(NGrnZ$M-vQO`u9BL4-S~K6%LS?4+VMWFC)6_qrC71EyX(>dexpsVVLw>aaIuiMZfQa-g?&d?#{}$+iqlsg=7}db^Dgt6IlwT*hWuijah}zSg<^;f)>z(bLUkr6Bpu&H^zP zmOmHoU96#MjGB@139}FE@|_o%itrcm(-203dU$bA!zh7ISt{lkPir-oAah2hj&Z3L 
zN@)EFDNm{!Z1%U=i?{m*ex_&(N%1y`L&3YP+im1TM~9WOBWCZxSef2Ye~xz4djTR1 z=})vXCa^q(7iV~&V&%Ls?{XlWDX*Z#xDTzh>orw=A8^05nxDn(N*IbUZPl2Z9e$JW z>P<_gdNG2?6{`bfjHFi9R%p@nTR=tN<2sgBHkMbI%pbRfG}6O^(?r;2z!x$xqT+0A zr7x~LVim#0>Uj)!H$NmXp3C3g$uH`Gvk36wl}wI5r*r?j3ILe;R8}i%g)I2TKjb4)NOppVSeo@d|p} zCyRG@`&hLA{GT8Y1?RkT?7w>!zLA0S{8w6F{NFDop2UDaIvC-%)`$&yuwxgJ-q0^C~_TO9xE7V?T|e+) z{dnLa`Mw{-L7&*9y0MdrQA2Yd$>_cEg< zzm+b$1^VYSK4Jni&B`iYTN!#pmpkZ#!r>+R&+Bx1SHwnN4)rEyZ|*69Y(aEA5V*w< zwJt!Y)n5%ZPZQ`bzmLk<_o+#oHPm5ulhuX-PA7+GLl^h)#W{7>1+!Q1UIFG`)g#>G zhqyl7T)mh(rh3>8Rv?()yrgEE)}BAE_Vymg7ZGh@#?N{G;AwFo-B{8#F{WX(wUg9$ zw@AjGYbI})It+S#eEDtnQ(!XPD?oP#c%v;FfRy3K0;9-UKSn0&8Vhmgw7nPj_AF7L8ouqvQ*p9?PQb)qqi-^Cr9G<(UF3YMSd5@X|G`nv2r z6l3DnW48E}E>NIGb>YXiTJ362dZ<8(zt`@n%h!U&Z$RU-IpKV=>crn63FSleY4pba zs1L`D-UD*i)5iW)GXb4L&T{^3?KgS9nUzzS3hQfU9y8Y$Zj~RiX|Q73?Std+FK(Rr zU)6_1eL56ch9-D@+$sCv)B5|z;;dZ0hQw7Nw!(*USz;V@bfYR4ItFQz1^wx>cmmG% zw?AlR4WJ4=&$k=VdKiX8GuCGLCw+U>gO`X}LsB=D``)-;9h zZy*Pw^n}t_!{*Ctad5;gruU7wR-6v!q-DnqrVtxO&duAoGy4~XN7MaPhC%-0+nP7xB~8)-XF72Og! znfe5;`7R6{!(0lJyme)Pb=cM(xrZzw@KnasWq%HZzxq({=O|Y%x;Gp9HC+d7>0h9hm_aZ91 z@b3;z%{zI*J?Gf9-7Zt-Y{hXg9>?)Ae6ok61oW@#^$JqUahOm$2n2k#waFZCta@;^ z@o7$r!eRq^YZG=l7`90DdUDp$=t_})vSzJ#nA$O?jw$c*+_H@vYPj`>2>|8j2scfq zi8ppDrpbzeYKN+lptC!eYr2;+#f!OrdyfU&edynS$PHoZpC}6VUW)n$0Ek6pL6R>n z`1DCEZ1HKba{-xEIs=@EUaVYBZy$ZHzcDDD-7UVi4Ar0*F~O%81Fc3NU$5=I=zbiM z{gHKYASz)QgELicrR=pf=4DR1pcP|6|>cgy|H!mZKZiEr2S}XO=!ya|5IW? 
zW#QU{gwL)&NGo#^5+?exla&VWy=p}Ieh1-v%89DtM=tCXZ5@5)%Za^;(&8ee8vFVQ z^`^E6X19E?#v?lFDFXt27bWQm>3dc5GH340J|`u(J5c|y;_$%n@%5a_`)zLFWph`# zKM4uIZ&S)Ac%=Uxj@E2DT>O+cnkI64#*q_C@4TW!nIloE^6sqR{3^%-Uot}azF-|} zQNV>ls%tw!e*q%*b+GBg(#&qg%?hh|7g#N%IvJPRA!CGbycSWRZqBFBDcsmw;$jy) ziXgU+VmZ6-TokK+Fu`;s=&q0@v2kl8h-?ua5*6L)>l)bUDF~GIvvp2=PHP9huGsu4}$s=XWyZTER&P zbZg9WlaU>!KQV2GbbJ&cyqV67&!D_MVKmg`FiG$F%%ZMRDL0H4L&c^)hABbXP*gdE z+O7rG!AY&tyI4#^NKx0QSUAn(;Cq_xwXS4xKV{0rZS)36;Y?7;vs)hDz}Nax>|QL; z?lCu*Gvd}|zm}f+{H}56SAa?r6}p{_<1hLFLstwbu%E2sNSmao1AdOWwx~l1mud1k ze{?c%Xl~oOT%P}qPEpA@a(=lCRiIcu1)UU5CjkT)zG`v_2hgn{*P=7kvHpQ5;D$ao z#J6^m&sFg$HXh&%5&RV1L9t89Bz}n+tj>;*lvFpZ%&x#EU0I@V@l$kEILl06I5PPx zz0d-eURaAvCz=!wLRot+rVW%3!dS8(C3mxp%b=B$qt^dd!O zi9e05F)(o8KJ35^YHE!8exstk5>!k}SY|&@P>qGZ&=HvLJc*`?%TcBDxqIXvFdqXC zMzS4WP9BzQUTtrmA1<1XY(()gP{zXQrz;XoH}{JddBZy1dHNz!c2#&q%xxW*PwAg| z3J3X5Ym@d)#fNu{Iv+hY@q2J4Xe(SuEBO*9u&aXx#!Jv9*ZHYQ#V${{>E|WS%DC&y zW5H=&8UKGjxn0n&f}2^UxQJr@LP=7u*lu1U8`m4!EUm_!+TWF*g=i7$(IG_|yp=== z0-1=gX?Pg_ObEZ>X-910ccMSXg3}_A^^kCp1wx!qPAn5D+#0Yiu_pF??3FlD&D>;5 zy=V*=EW99-L9;|z5r&ONd)07q2%6;zy)uQ}RhDZwZZC5J#%A@{u^rEgslgTy{7yJmZ32C%8Apf_|2k~r4 zg9ZHxz`rllwI9bF^uaQ!DCP;whiIry%|b^i?wA>fp>U-o+{qkLb6z-yt>YMs>tcOI(DL8u7~#QCgjTc~9y zTnXP1WT<6kbp0AtBogXV%)5x)op>~m*h&F>o;`Yt;_}XLHs_;;pkdNcv-zu=>9HK_y%<-cq!!|TsGT$IDzPkFR{ zConC{aQsX=s6I|bKR1Gr|M6Nx8vgJvHGyIAJK{ip0H~iGf+k$^s&PwAOl1oM=AqEj z&7G3POvmpSo|Lv^?SS6%A5?c|*7FefIKm|n+<33oI6*ZfWIXdCS+f4nnx&HvH$xbC zttCGdi6wBA*Juv=L0cHiUG~NA>+K~=0@xRQGO-IC$hhAmUV_j4_M^RRdFeK~TwP4S}+%N@KS(QKIUA^d7~@O=sdlBrg0;VGopW^nH6FMD87ueBIh|5)qyg6KI#6n{rc0aVyeIwi2{U8Q(sC? 
z?OPu@?i<#zonuja*?m?iSS#HW_jQ)Vi$PNG#clJ<_t_+S zJ`)G^r9;b6$vo1I1(AzBmbJRv2JfIW9US<89~Y5n1$4#yX4dsQUm#9}-Q}-k6c?U8 zl4kcxdi}zBcl#z-@Co%|FfZarGS2WX%T&_?{Eq!X#-91!mHIZf2baIe%1Q)4Aa22YRdgTzYjrzpnr;rD7rgEyCbivJu&zf9~(ePBqd(lM^Y z<5N~Q9(~Jw=78_@XVUBKWA|5^l4i!m?c3z5B`S+NNfy>=rREq_7(-HS*w>n0E>4D5 zAs=l9Ui~76n=zG6n)OKb{7zYwQP8ozMFEjCM8OI>b`hL3Z_&X~^Jn?nueyqG8E1JP z*U1rlAN2exy@`wW^}5-u*xZT}z4ZOg4n0>ack;k(n=RYFMU?``3(waHkyn}HQJOg! z0<`eduZWd~y|c0P69NM4F|Q6Oo!)9 z5gx>h1b$#cyo#u(vv6_H{X8(j>}}gCrWTnRl!3Gs5Kit+5nOA^niWxjL!&3sgp&TY zUukDlZ#^N)mUjXTF9%SZQcTVsx(x6u3xM~L044uwk>DzBUgmGkSMBZRZ-$kD=gUT>;@AA$htHeB`>PAe><^2?nlbA*v}~x2w_6anev2o{ z=>3En)7`Zn-dBfX@7S)0x<~7 zfqOR2F(BniZ1MS6AT-3BLLLfA#b!#w(`%!#tmBG?IC+fp(3FR%YD<+YD26Gout&j4 zYga*$G!S@_!zwQRE*V91Q6|-=CjWfzc5-bH31pg+yEQknN{~IeM+r_th)662eUTo5 z2yVntm)bT3`%ONcOAeONaK-uajj(ij)Nl(boV5|E0d7?uXeE|hDG9hOtb8G(nxMI- zt@+0|wJ9SnRCD|ZR861+GS$!I_ld^+4eO7dpRxQj1A?AEM&_b2$G1%0{l+QzLl33w z(Bw=0{viDCEzV6BRq(jtl=V=OqXPApqb5W(Hx@^fS9}N8({+#)lNw-FYUaQsVIP^* zy#nFoED}20~PQo8Jon)H*%JCdJ(!seCCm1j{MyHro^ywzZ>iBJB#CM*!EO>l6 zxfLBsLQC_>$%!;+@C#Giz~NkXkBo2JPj^99{1sTjYXv#Bk}qwg>76q+-{jCX;E|KPGRuubZFQ3`$5d$M;rDN*%hpT zwQodtUKNzvhX{p8|HrPnSE7<6PDE{zI3#!V)A4MU(Oj{LSJC;$fWPGl7}oz&a*{9t z!jh!U;Tp^czB<1(6du+`xeAiMdY>Gu9ewuFf$Ae{n2_S3Tp92|bl97LE`H{9IIJ6Y zRDH`ezq*?I?mQ}p(uk>SLu!NZ-h zlW%{$M@^nJ*p#w!WITbwtQ>Ia;oa88MRB$fxgOXDj9qi4&C%X_6oyw3<7z&GXQBQ5 z+c;taJ(P0$AgjR*?B0c|V{sI<+PjEwy@z{nX(Us9lbM1}eAS(upt1T3NC)W-SNh-W zwUT>iOv;a8;*n3WS0-B&S(cFyf-TiRN>9=&h1$Sq<9wYMC(>Sq#6Rf3p1%ci=@a=S zh2(%mYe>=6u@j8(DbW1QGnI)e>`ux7sW?k-mTQ=28I``Okf6COsk?RcMYHBcoI?}` znm`KG+Xu632p_3sIYt&>_16L2G6YmN{$5FZ(lB88mVfKtT-qugXrE)1TTRFF+r0bW zD$Bd`0L^=-(A#Ia_>o{n9ANoxf*`c1JsX?emhSP*l?V!ErKbwr>MvZvbO@OW1trpO zDk>R*NN8Mub`r7^PHk*e9@Yl@NJtBxHnk2UPugbimqYh^%2drS4u?B$Ofo$Y>Xmyj zEG~=nu9Ypr4o{^Ax!})Pg^zIVo48$)Eh5G)9s;0c*8cv2(w zZ~mzTbD1TkN;gz>BkfZ7qmn%gv`(rX<5ypqhkwa`yri%nv{r0nMJAZ647p`Ub3 z^AS<5NsVHE_UosKpRJo*CfQd?l314jc~o{F*}S8>;amJlKX?wn^%#$(E6lR708|tMq$VO_tIJlM1Oo`WEZ#&%DBX 
z`hHq)U!FX&4&T!lH#w7EwBfb7fI*zREB)_wm-V-#*Ro84fqb6737#GcvXnD$>Qx}g zbOG@+FP~wznfG2S?VKgx525=4R(oM&XcN?#Z;8U4{pniGH|sYvx&S)C;FYo|qii5k ze^lKlpH?`c4|zB^J+`jy$7_VT?17k**zh2I*6=j0Datk!zMlU){>yp9^MX~+)v_ZD z6Ob=cl;Tu|9bO~jrbuK{I!Tp zU;DZP;)IFBvYE#6DOlC&)#uHKUy!(C-c@QzJ$=xIVq+u}pI?VSKb+TsKdC9@Ib*Bs z@zi1A2k{|FNtiwe+x%Bdko?;Z+#m40nA($0NBGvMv+5T$Cj+o0n|4H&I#$2*2xao} z`*lM)!QyS;I4NEbQ*8Nzc73P==9%FQTlF=yQGi%AxSEgyOKpqyXT&SoaFIbm!5 zDII^_5~)M*!tw5TDqRiBDMkN-O5n-ob4aPv2@b*WU!EU9xI92vZW3Z(lFV_Rt{cMm z>|dqtrX5ZLl^wp0o4Vegw_`7H9%O~%CVh!_fXf;d6zMr`_k>bFL0h|YaJYw*!ecHn zVA=0bk?^w+`-iD2xyBQm`)%yB0JSN?^zzqK&$L6wXWTxR=qk}m^AZrRw{zya_FTgIab#J&m}*L1DTPB^DN|*7tjupFy9uMXY<|i_rTD;u&g>u zB%x>eSeN&cwg{2taene@gQwN<`*BrgCc M%Bjj$Nt=iM55a?b@c;k- literal 0 HcmV?d00001 diff --git a/_docs/take_video.png b/_docs/take_video.png new file mode 100644 index 0000000000000000000000000000000000000000..32c12ebce0d4b15d3baa988e3a7d2816672c09a9 GIT binary patch literal 8593 zcmZX4XHe76_w^?sKxiQW>C$@` z0hJC?rK&VR0+F8b=lgqJJ}-85@9sJG+`Y52JF~lwY^+S#nE9Cj0AMpWGqM8!&}kC{ zKp9RemwRR2|5?FyrUpRO;Mw(41LC89SswsupRkZU=ud5?Kr^Rc0O0WduYi_X3d;aM zRLa~)-{HR7RxV3}%ayEELV)9(W8OvhjUWocbqa-A;(t{l=SE|;Cukd#!}$~|-8!oFBMo-xA3jfJ0MSK{FL^(#IY z(w2Sm-3_KQdt>wB< zFs|Lj=d)7m#efX`mCrO8A6WVH*Xr=yKZTX@K03l_&@2;(h+)d=BJ&cs4IjV;9rUQL z*U3IhZg%s&d04J)D!z;tH3D=z8!|FhP!$e}iGiYsmRG>QknY2@TLlEVZjM`WqH+Td z#F+EMz{BXCqz%&1-sfG9vEiX=rI+l6MsOW{h{#P7Z5{Br-(}QT|MyU}Bq%$?$T}#} ze)$zq9}(Xp0>*=ja@zRBqZJONHYKryx)Obr_-AmCSyH-{HKx7vxa+s4haFyM(g^8L zz?5AAaC`f@-`D@DWXin-;hDHo%bYAolw3vt!42*PX@ zQWBOAnkw^1If7>t!~pI`hXZUZ$9mlV2)_;2*&Rl9!8Tkw$c~GBCtr*B63Y6xh(Z3j z*aDPV&!L7$xmBC$Oyw6|f!T`mpf&u%MStd90b{81HQ-d`^VuCtwNAEZN_~djh0Q6s za1QOej`wo_aukC;V`3$r9XGSOc=B>Pjwa7dzFDaVJ|_!I966G{(JpHi3C`hb>L-&C zj1v6l^O;|6;G4Rf(8}0F@6I?j?W-*j)q?)A&K5xVYbuY%M*dZyy(9}}ck^l8;D62< zq6~a;qBa2_y91)_w>?H@6Ci~|N~9=iQ|ZI{*yRvHK^7A^twJWu5At#C2fduxYak^rFb7z;{rgY>OJ(3%_(p*|vJg+!RZEv-Ao^bEdL2>UvNG}8Y@)p@(;#2SDj}-34#!<4QOIrJQ`h{3Hh39X5q-T!UVZdW55R0yg*8 
z(hWHIbJ~^qu&Kh5LO;l&(bt9cxyWM1!*CKsZe(6Pb%@!Q-F;^Eu;8sXbG8A%&%Y-* zV4Z8?P0#NuSXM}wd*7nD&*kL?@UM_>_6kFuXQ&Zu*A{twAa7aXdPXZB?>4C!O5y!L z%7af(0(M)w$v;m_>Ef{{+Ml5B;ahpXNvd-^eHFm7T;bS3^#cR#X9~-Q)Qmbfd_zcL z@5E^kq9O}i*rl;>&WP!cc#eP-#H`(K{ z^q|tX0ZtOMu#kL@pKxnCT*>zXFqIKRN8oV!#aRW(VAC-W%si2lMWAeiS1cDN>i=rA zOu^KjJ$wN??;BD&7*w*4Hn4okD%4iNdpi@jwn|eVw83E?j5cCA5ay2o?S(G>9hTG| z##4P9?Ky|cw<{nHd~=6A4>Ch>?(C{Yzr^YeUuU9n*G!Z{lh2Ri05P5CTomxcI~sQ~#^VuynT2 z!AzA-`7*L;Eao)RMP?|-?s%B2Z)l^sy(kw*W|Fe_epYr+Ni7>Z`Y_8TIjD4q`^YY6g%B%CI2!#C#a)fO_d?L(nGt<~-2tie*d?MHwBnUa zViEGVnloc4E%KISx0u>qp%HqsK3SG;?Pal)gP{q~I_TwQ+qFXMROaD!jB#F@h94J?1QE_GXx*m{)$`hVV#; zg8A2UlW89AH!gP+s{l>*v7`SuRSbBGa|T%#j|yg9bd+4y)LuNZ3P!(1PAX1bSJgY6 z`GoSpJ3~aDJ=MOHK7JjSfGs0Lw7+wO^P zP={n}%d8W~2W*qKZ}0xzTSsD#C!0R;4OX9Xz(X={|17&S7%hO2+6%Aib+^M5W(7td zgSm-$#tYLwZwKMuu`V)GyKDP8oyHIc>ZS}ehTwfrd^!J9S;H(0Q# zRQU?NCpCXxe%3P}b$~{lX3~lWhwPg(uDn-N0WOZk+8wZ5W_`VJZyG*s7VI%r5~i0T zsZa|MNuioyZVcSVzxd?td)MUxgo*mcFCln=#eZ^T%S_@1699^N3RSA{4(VJ|dP^7v zWQpbG*i9p&pZTt$O>=$~tceF&03d zXnV1dAcU_X49jJ@VZRAJ;9ql?`cRu(and zfzdC?u`}5t>3+z<&q3c$?26|v7>8Fhc)J>X0nQGbd~3)bo_Xd7{glN$4g`0Gf%BuHj#&(n-YJ|W zV1PfTe!pKZ;N3eOn|fesW`r;+Y@HtM=c1tr?rB=*Wg&^uCy6X8y@0FS9!iU4@ zmjX%IMRPcajlIPQH9z4+Bs5&_ed-Jt3>)*^A zMy*l)?ZUS5r7Az&_F^Ga!N5gwPx6nPiNiA#>-BNQ&qwviAHz~JE6)#NW)n*XoOHB# z<_*K9WDD~(Waz-d_Wn+M8ihPLF_X3Q6^KxbV5mB%_t!WxKzN5+B1B(tXhNXp2u;A*NX zPn`xSyvn~$W^f!pfmNQsgkJ_B1=G`LUnD6!DrQBw7+ywdQCvxTkvm@#LStWA0+p&2 z#w@l@z^k_e#rCAra&}%gR%rz2Ab3gkiXTsg4XS9>_1&saOI%*5D59e)$UQH-eJ;R?CyLl)b$QACKV=Kvyw;>V>ml`e6rz}6BP z+w)vlv@^3=rkC6uVYkzUaXF=kA8ww;TMtTkzb$^htMmei1|jPIKW}u0f5}}MS!~$z z2lOd>Uj{r0p1Qb4X2MO0xJAjV(|{9@h&4ZA-^KYFsYSN@Jlc~`-K+O49sWv0NkHe* zC0E&l7OlM^felvD9V2nQe|@UN=hZT)6=#C*$L_?;eof@hV)lZdr51`$4&xIYmF27l z;)9*bNAGx6WO)AkdH9CP6D^rMh_tjM*kMZtWiHcL|AnS10h-@~{C*r^-qPrWzci3k zt&Rb+x#0=j*WJRQ7XnLklhVd%I3KUMBg3`R_&RW*3`CDr3<^C{AMghlp&<jE19g_^|l|H4fu36AYC z7smQ_-zZqB_q+<+!WmDbJUo?RR4XDii$$Z%zCUi?Q1!YBn1rKGHQ3>+`MhZ~*P4he 
zqGdbLNldWCBR4R{>{v;SJBa$l1GI*ncM^QAuow^gYHtqM;)-|nV_FTbmEgSwz*b9u z+sxMEOrXqPOK}+nHZ`U=yj&&6QLX~hg=Vn!N>~TGh)>Ec-)w^{&jiDHJReYqXmFbm z9vZUwdEI&|hUBzaa|ImuE9vV4Eu;>2JZs?ZtT(RP$SX)`bxe#*yS=wiek?s}Xrgpw z^oo`CKuhm+dKr7H!DAN`_6PQH%W20q%IOSl3-!Q(3ym49-B;PEL- zD*vmwO-_5Ac@coVuZIx;DNI0|4*q|gL1F-`NroAx%%n)E6a#kR+jqE)_2E+Og@lAO zYood>Q@AtU2CzUe8eW3OUlsjFxwr9ToTrTiH_QPa#z!8N#lFAfiiCEc@iKNy?LpIs3C6ZGO7<0Q>A?Y;0U^y6Ur)J@Qbm23dySxJxojy1^s z(JYTy@vJUyaz8Lz@UN5r2T@Yaw0^+Mblf3+O_u|q#w`q6_}#A3F|!q{hUSdfiNR-w zK2S@mK=+qWSD4xh{8*s|41Su3B)TAqH62|Iq+A7H5al6lnlp*0rA;8>?#DhX0bPss z0A45x$^!j{593{1R%;;1bTBMCYBA=ISefKL=~5l%5<-!S2i!}+KsLjyPT z(Cp$xw%dQY;sSZl2?(%FJQd@Wn$#%W{#jTKs_4gz2()qIsX2U=w?3P9x_LlBQ@TGd zHececINgtn*+uS~?Hh}@Mcl~)5hHv52rTxYM_>slaG+iTg)kFF`C8P;ffY9}25ZwV zF-uI?2u>*QpmYNV^{|VoS)3ro zn~mVjM3w-xxQOnbw0RLA%y==Nl8=pt^c;s5ndSg7vS53r|I!#h$~8tUUhGApH?eq- ziYQo1gS&~!E~*IXL0f{L%X&g7=_;r`@O(0mcsJ?{U+S?ML-13Y-J)jL~p%)Y!7L3{xXz)?h>Xj#YFzrN>ewriib$}Pb72uB5Kh^NOFA0W` zO+i9&MaMD~AAJw`fbS{tKuUzcuy@p1Hd(rO*5wD#4`wvB$7tYcuFq>)raSFo6_B|H zNDdQfyuFH9;dN$3#_fd*mWQqRoHqgGXWJ)=w3GV?ms9seY7P*WBev!Es3@s3*j9V$ zCZAo$K?NV1J%evdzcUUm%P$HpBxsn?)Hlv0pck$D)4s%m;d(kYKVkmH42b5ON?)7Hm~oF^0*Iev^rM>*=8 zo5;aCs{k@Lh<6MqDe6T_PO8Ld&y8oVgakCi#PEXsjVhDep zhSI2m_(n`ZHUvGn>lBr=Cgg9@)8xHaq%=~|Xf3qOi69CGr~Z!$Qx{Cj#wfuWgeeUD z3KQeO@63TiC2@Ms0I4#Tp*o<8E(+)8s52bp^>eTg2aUsI`roKZ{b;h{JiWBiHPVLR zPTx{$J<_JZcw8OqNo^~wJ=-qfn|B}~O5oUbnDK+MwdDs=9se%Ggxr0p=X<$>&a%GR zb}zPEW0xiEyFkA4d6IR7rfjJ0?Zt{vQ>r8Eo__@&Uj}`_Q!X41e&aRatL6@))jFZ^ z=I@m66FJXjEC80bh1-Y?%zJMyFAuzkL)_O#Nch@-@wk7b6K5z>d)n{dgL~z>oc<=g zU2a$rHHc{=fq~MG>MguE`!YPB(NNv?CBO^6hg|uz#N}^dQU~ax&l+RGe)&X3a&Mv; za{~_+d^fnU#M&hk^1XLR$_C#aXZ%&;Ke0R`3)li!2fr0$qxddJ`I*|>AXo3ViIg<) zQ$Pix>d5g{xw>b^I&F+zeW!}BlOzGmR;|yQ6cD@z*5aLjcLgF1ye=k_&&!`MYGbSe zW~tA$OI-<{Vc(4ekcwdh>jn(0naPSYK!2}om+Y99btd}Mp_m1J>^??i*j>S1Z||71NdfGVXVCs zq+s*gyD#HGe97mfhJ1q`)XmcES?-$~)!9>En^<&G+fa6G(04&DNIujaV>%Yg5cXpd znCzZ!+YqBe-~4t*qByaVi`~J#_zXOoEish-a#Zl9bco|m2O!KXX2y-}Lq~QklMlVu 
z^8&oS%S8&**OVz4!~W`GFGkIs%ZeT=YJ6yDwIJLScKbhwvCBdeGqE=BCh2`g_F*pV zmJ17NiUlij?Tc7oJM6RJRVHSquBB)~oDJ>$8DD8v+K2`Z&}IM5drJgM^Bgz~l#`?Y zqMRC4C+cg`iQ`M}-dAr_t!vWuo+bg;gqA=3UftKY@H-Zpgk3zJmYYvm-Shkyr?ly| zzwa8>r}>Bp^})dAxn8fZIQ4}`@+nAuDQ126h%Lcor&p=-FQ(pGOd<<(y2hoqzoVM2 zjWLOVaRHepBR^hFgn&(HcO+$^xvGwb@q!ldM)d6ks4S;A#>#a_me85#jvt9txeIu@ z!_cWcQ_B#2mt`>80tDH+$qg!H;q{8NA;v-ecEuXC=7D3H8#xmwDuVrPAjmNrXt2?6 zC+J5ABL0)vB9+Bvp*6<%;GVyf)iNI{A+Q?2zDi7VP?lbBf#{pPg5M7N;JeFB)i2JOCPATo8AiVtTkjN9h9AxU4$gh?H6y(oh<^tG{SEx6Ct_rTGE0W! z+{_X8U!s;PcO;PM%c%O@8ZS;wSO@nK%#yeL9rXi;hmW)N!8tm%+H!1Mzyq;HbHl)Q>T_{Z4=ujbd5Cg&5En(BI!I-S z@4fRu25;D#SO0sJ@{k3kp<3|ReX_tart8c7zIjlg+gJaS@2s3gn|M|D(#?eS1Fd=Q zM1BjyDB%0Ee7V~fU`vu=edk7b;I~ANPJVI2A}>z;d^kpAEim|CSECU^7gw~+!<+iC zY-O3b^S;C%vg!2n1jmiy?Hz0802M5=6z3x6-W@h#toASNv9l8VtoAzk=uW{HZ+mO7 zEQTH&Gdm{VjP$u2K$M~@L#u%Lm%dkpv%@@JJ$T9n6^#~_xUzjcEj!!YP^oUZ{+CLzFR(v6q@$|xGbgG_r^n^8z%qVklPQd z6^|@c+jNf-%F*cQegBn%DbejODFAN@)K|u}P~^aoTq>R4W1gX0U%YhilG#JdyUst+ZgF%)ya`KIZe=lTb8Az1d%pG_=~os7r?m$QU1y=j1RlU z*nX)1c;SKqwUDl>jKfIb?$Ob_JbJb$e4@NcF33UV0J3-^mLf@5$*dj6kBVI3 z=h~6?4U47UnA!aovhT+XDhr(W)Rj!a6JFKhNLx0M@aGz0g5up(%5qFr=FRVD4DdWt zockG>5235y>)+3g^p81!(KXZUw_;sY7?}|zt=VFMQ<-5UzYpO^u(jA8rt#c#_Y~s zRCUgznk^x_hzh+hzjG7-p0jT^#5DMS#?6J5$@<+o^;-`* zHi8i&xxwM?mi&wex`5G>^J_k(9|DveKX@M0P2VN7^cZHl0diCG?b3%6C;8;s(&whS z1;*X&y)sDt)DCXuDvRtoNFis9IoA)Oqbkx)@;k^GFXDV*K)h3XF$Ew?!$Ag1UKcx+Qjd~=K>-x|@j+jbiCg|z_ z2?1M9nkQd&%S6D+48PxYb@l?g-#+NM+)hkj?L^RQu%EA!#@qD8r~(L3Cf4{5tSySU z!9AJT$F~yMUh1DmNng~A_M|b=R?}mzxHN}vG+sxqvx85>3z-pE{_{?85k{Kewk{Zx z5XZ#fwGU3Vz3dXp)@e|bU`{P{QPXui770?QF~ms|Q*Y<>k)}`rlZTELsqXS(K&YLQ zVF55-9`}MIo8v@%Gz^8}+;?W5!0)kJbXN-_u)GRR7-C5AkI}Pb@`nz2cM=ogmtO53 zi7~JfPtNC-Zg02j7VGYt8sz@>a;zo4$p6m*tn-%4*&%s$3)7QMMd(GFy>1mS0?vAN zivW}?ow8)6MHg)aBM72ju65ys8Np(Twk0S(-_mqSq89_0Y`-w~*KYAB{2bfbZlil5 zGLAH~lNuF}n2}C`qAV1gXu_rP*~jNo$J3=l+43Wu?9ofFh&6VHHB$U?erf+K#A2M2 z05$B$GzjEsf%UF17O`4=kRSA@!QQs5osQ$TKs*%7m=n4kmvHSi;Te90Da0r5o_w~f 
zAunP{cFMi8Fkm~Is-#;J^0s2oUs((gvda~9KozsYvwCw&i7`p(g^v0oFR=eSHNPHA z>pw8MZ;b%;!9I9VIudeJ+&grT7&Ywg3mwc<#>f-71!*H)r-!p zY&|=|Vm!#Wq`*0AIy9N=>Px# literal 0 HcmV?d00001 diff --git a/androidx_LICENSE-2.0.txt b/androidx_LICENSE-2.0.txt new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/androidx_LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app/build.gradle b/app/build.gradle new file mode 100644 index 0000000..f9e5b88 --- /dev/null +++ b/app/build.gradle @@ -0,0 +1,67 @@ +apply plugin: 'com.android.application' + +android { + compileSdk 35 + compileOptions.encoding = 'UTF-8' + + defaultConfig { + applicationId "net.sourceforge.opencamera" + minSdkVersion 21 + targetSdkVersion 35 + //compileSdkVersion 31 // needed to support appcompat:1.4.0 (which we need for emoji policy support, and not yet ready to target SDK 30) + + testApplicationId "net.sourceforge.opencamera.test" + //testInstrumentationRunner "android.test.InstrumentationTestRunner" + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } + + + // needed to use android.test package (ActivityInstrumentationTestCase2 etc) when targetting sdk 28 (Android 9) - + // see https://developer.android.com/training/testing/set-up-project + useLibrary 'android.test.runner' + useLibrary 'android.test.base' + lint { + abortOnError false + checkReleaseBuilds false + } + namespace 'net.sourceforge.opencamera' + buildFeatures { + } + //useLibrary 'android.test.mock' +} + +dependencies { + androidTestImplementation 'androidx.test.ext:junit:1.3.0' + + //implementation 'androidx.activity:activity:1.9.3' // needed 
for EdgeToEdge.enable(this) + + // appcompat version must be 1.4.0 or later to satisfy emoji policy! + implementation 'androidx.appcompat:appcompat:1.7.1' + + // needed to fix errors since upgrading to appcompat:1.7.0, see https://stackoverflow.com/questions/75263047/duplicate-class-in-kotlin-android + implementation(platform("org.jetbrains.kotlin:kotlin-bom:1.9.0")) + + implementation 'androidx.legacy:legacy-support-v4:1.0.0' + + implementation 'androidx.exifinterface:exifinterface:1.4.1' + + testImplementation 'junit:junit:4.13.2' + + // newer AndroidJUnit4 InstrumentedTest + androidTestImplementation "androidx.test:runner:1.7.0" + androidTestImplementation "androidx.test:rules:1.7.0" + androidTestImplementation "androidx.test.espresso:espresso-core:3.7.0" +} + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(17) + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/AvgInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/AvgInstrumentedTests.java new file mode 100644 index 0000000..4bb4f4b --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/AvgInstrumentedTests.java @@ -0,0 +1,17 @@ +package net.sourceforge.opencamera; + +import org.junit.experimental.categories.Categories; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** Tests for Avg algorithm - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. 
+ */ +@RunWith(Categories.class) +@Categories.IncludeCategory(AvgTests.class) +@Suite.SuiteClasses({InstrumentedTest.class}) +public class AvgInstrumentedTests {} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/HDRInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/HDRInstrumentedTests.java new file mode 100644 index 0000000..402486e --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/HDRInstrumentedTests.java @@ -0,0 +1,17 @@ +package net.sourceforge.opencamera; + +import org.junit.experimental.categories.Categories; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** Tests for HDR algorithm - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. 
+ */ +@RunWith(Categories.class) +@Categories.IncludeCategory(HDRTests.class) +@Suite.SuiteClasses({InstrumentedTest.class}) +public class HDRInstrumentedTests {} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/HDRNInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/HDRNInstrumentedTests.java new file mode 100644 index 0000000..663ce78 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/HDRNInstrumentedTests.java @@ -0,0 +1,17 @@ +package net.sourceforge.opencamera; + +import org.junit.experimental.categories.Categories; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** Tests for HDR algorithm with more than 3 images - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. 
+ */ +@RunWith(Categories.class) +@Categories.IncludeCategory(HDRNTests.class) +@Suite.SuiteClasses({InstrumentedTest.class}) +public class HDRNInstrumentedTests {} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/InstrumentedTest.java b/app/src/androidTest/java/net/sourceforge/opencamera/InstrumentedTest.java new file mode 100644 index 0000000..b66e826 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/InstrumentedTest.java @@ -0,0 +1,7311 @@ +package net.sourceforge.opencamera; + +import static androidx.test.espresso.Espresso.onView; +import static androidx.test.espresso.action.ViewActions.click; +import static androidx.test.espresso.assertion.ViewAssertions.matches; +import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed; +import static androidx.test.espresso.matcher.ViewMatchers.withId; +import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.endsWith; +import static org.junit.Assert.*; + +import android.annotation.TargetApi; +import android.content.Intent; +import android.content.SharedPreferences; +import android.graphics.Bitmap; +import android.graphics.Insets; +import android.hardware.camera2.CameraExtensionCharacteristics; +import android.media.CamcorderProfile; +import android.os.Build; +import android.os.Looper; +import android.preference.PreferenceManager; +import android.util.Log; +import android.view.View; +import android.widget.RelativeLayout; +import android.widget.SeekBar; + +import androidx.test.core.app.ApplicationProvider; +import androidx.test.espresso.matcher.ViewMatchers; +import androidx.test.ext.junit.rules.ActivityScenarioRule; +import androidx.test.ext.junit.runners.AndroidJUnit4; + +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.preview.Preview; +import net.sourceforge.opencamera.ui.DrawPreview; +import 
net.sourceforge.opencamera.ui.PopupView; + +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.atomic.AtomicReference; + +interface MainTests {} + +interface PhotoTests {} + +interface VideoTests {} + +interface HDRTests {} + +interface HDRNTests {} + +interface AvgTests {} + +interface PanoramaTests {} + +// ignore warning about "Call to Thread.sleep in a loop", this is only test code +/** @noinspection BusyWait*/ +@RunWith(AndroidJUnit4.class) +public class InstrumentedTest { + + private static final String TAG = "InstrumentedTest"; + + static final Intent intent; + static { + // used for code to run before the activity is started + intent = new Intent(ApplicationProvider.getApplicationContext(), MainActivity.class); + TestUtils.setDefaultIntent(intent); + intent.putExtra("test_project_junit4", true); + + // need to run this here, not in before(), so it's run before activity is created (otherwise Camera2 won't be enabled) + TestUtils.initTest(ApplicationProvider.getApplicationContext()); + } + + @Rule + //public ActivityTestRule mActivityRule = new ActivityTestRule<>(MainActivity.class); + //public ActivityScenarioRule mActivityRule = new ActivityScenarioRule<>(MainActivity.class); + public final ActivityScenarioRule mActivityRule = new ActivityScenarioRule<>(intent); + + /** This is run before each test, but after the activity is started (unlike MainActivityTest.setUp() which + * is run before the activity is started). 
+ */ + @Before + public void before() throws InterruptedException { + Log.d(TAG, "before"); + + // don't run TestUtils.initTest() here - instead we do it in the static code block, and then + // after each test + + // the following was true for MainActivityTest (using ActivityInstrumentationTestCase2), unclear if it's true for + // InstrumentedTest: + // don't waitUntilCameraOpened() here, as if an assertion fails in setUp(), it can cause later tests to hang in the suite + // instead we now wait for camera to open in setToDefault() + //waitUntilCameraOpened(); + } + + @After + public void after() throws InterruptedException { + Log.d(TAG, "after"); + + // As noted above, we need to call TestUtils.initTest() before the activity starts (so in + // the static code block, and not before()). But we should still call initTest() before every + // subsequent test (so that settings are reset, and test static variables are reset), so + // easiest to do this after each test. This also means the application is left in a default + // state after running tests. + mActivityRule.getScenario().onActivity(activity -> { + Log.d(TAG, "after: init"); + TestUtils.initTest(activity); + }); + + Log.d(TAG, "after done"); + } + + private interface GetActivityValueCallback { + T get(MainActivity activity); + } + + /** This helper method simplifies getting data from the MainActivity. + * We can't call MainActivity classes directly, but instead have to go via + * mActivityRule.getScenario().onActivity(). 
+ */ + private T getActivityValue(GetActivityValueCallback cb) { + AtomicReference resultRef = new AtomicReference<>(); + mActivityRule.getScenario().onActivity(activity -> resultRef.set( cb.get(activity) )); + return resultRef.get(); + } + + private void waitUntilCameraOpened() { + waitUntilCameraOpened(true); + } + + private void waitUntilCameraOpened(boolean wait_for_preview) { + Log.d(TAG, "wait until camera opened"); + long time_s = System.currentTimeMillis(); + + boolean done = false; + while( !done ) { + assertTrue( System.currentTimeMillis() - time_s < 20000 ); + done = getActivityValue(activity -> activity.getPreview().openCameraAttempted()); + } + + Log.d(TAG, "camera is open!"); + + try { + Thread.sleep(100); // sleep a bit just to be safe + } catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + if( wait_for_preview ) { + waitUntilPreviewStarted(); // needed for Camera2 API when starting preview on background thread and not waiting for it to start + } + } + + private void waitUntilPreviewStarted() { + Log.d(TAG, "wait until preview started"); + long time_s = System.currentTimeMillis(); + + boolean done = false; + while( !done ) { + assertTrue( System.currentTimeMillis() - time_s < 20000 ); + done = getActivityValue(activity -> activity.getPreview().isPreviewStarted()); + } + + Log.d(TAG, "preview is started!"); + + try { + Thread.sleep(100); // sleep a bit just to be safe + } catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + } + + private void waitUntilTimer() { + Log.d(TAG, "wait until timer stopped"); + boolean done = false; + while( !done ) { + done = getActivityValue(activity -> !activity.getPreview().isOnTimer()); + } + } + + private void restart() { + restart(true); + } + + private void restart(boolean wait_for_preview) { + Log.d(TAG, "restart"); + mActivityRule.getScenario().recreate(); + waitUntilCameraOpened(wait_for_preview); + Log.d(TAG, "restart done"); + } + + 
private void pauseAndResume() { + Log.d(TAG, "pauseAndResume"); + boolean camera_is_open = getActivityValue(activity -> activity.getPreview().getCameraController() != null); + pauseAndResume(camera_is_open); + } + + private void pauseAndResume(boolean wait_until_camera_opened) { + Log.d(TAG, "pauseAndResume: " + wait_until_camera_opened); + + mActivityRule.getScenario().onActivity(activity -> { + Log.d(TAG, "pause..."); + getInstrumentation().callActivityOnPause(activity); + Log.d(TAG, "resume..."); + getInstrumentation().callActivityOnResume(activity); + }); + if( wait_until_camera_opened ) { + waitUntilCameraOpened(); + } + } + + private void updateForSettings() { + Log.d(TAG, "updateForSettings"); + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(Looper.getMainLooper().getThread(), Thread.currentThread()); // check on UI thread + // updateForSettings has code that must run on UI thread + activity.initLocation(); // initLocation now called via MainActivity.setWindowFlagsForCamera() rather than updateForSettings() + activity.getApplicationInterface().getDrawPreview().updateSettings(); + activity.updateForSettings(true); + }); + + waitUntilCameraOpened(); // may need to wait if camera is reopened, e.g., when changing scene mode - see testSceneMode() + // but we also need to wait for the delay if instead we've stopped and restarted the preview, the latter now only happens after dim_effect_time_c + try { + Thread.sleep(DrawPreview.dim_effect_time_c+50); // wait for updateForSettings + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + } + + /** Used to click when we have View instead of an Id. It should only be called from onActivity() + * (so that we can be sure we're already on the UI thread). 
+ */ + private void clickView(final View view) { + Log.d(TAG, "clickView: "+ view); + assertEquals(Looper.getMainLooper().getThread(), Thread.currentThread()); // check on UI thread + assertEquals(view.getVisibility(), View.VISIBLE); + assertTrue(view.performClick()); + } + + private void openPopupMenu() { + Log.d(TAG, "openPopupMenu"); + assertFalse( getActivityValue(MainActivity::popupIsOpen) ); + onView(withId(R.id.popup)).check(matches(isDisplayed())); + onView(withId(R.id.popup)).perform(click()); + + Log.d(TAG, "wait for popup to open"); + + boolean done = false; + while( !done ) { + done = getActivityValue(MainActivity::popupIsOpen); + } + Log.d(TAG, "popup is now open"); + } + + private void switchToFlashValue(String required_flash_value) { + Log.d(TAG, "switchToFlashValue: "+ required_flash_value); + boolean supports_flash = getActivityValue(activity -> activity.getPreview().supportsFlash()); + if( supports_flash ) { + String flash_value = getActivityValue(activity -> activity.getPreview().getCurrentFlashValue()); + Log.d(TAG, "start flash_value: "+ flash_value); + if( !flash_value.equals(required_flash_value) ) { + + openPopupMenu(); + + String flash_value_f = flash_value; + mActivityRule.getScenario().onActivity(activity -> { + View currentFlashButton = activity.getUIButton("TEST_FLASH_" + flash_value_f); + assertNotNull(currentFlashButton); + assertEquals(currentFlashButton.getAlpha(), PopupView.ALPHA_BUTTON_SELECTED, 1.0e-5); + View flashButton = activity.getUIButton("TEST_FLASH_" + required_flash_value); + assertNotNull(flashButton); + assertEquals(flashButton.getAlpha(), PopupView.ALPHA_BUTTON, 1.0e-5); + clickView(flashButton); + }); + + flash_value = getActivityValue(activity -> activity.getPreview().getCurrentFlashValue()); + Log.d(TAG, "changed flash_value to: "+ flash_value); + } + assertEquals(flash_value, required_flash_value); + String controller_flash_value = getActivityValue(activity -> 
activity.getPreview().getCameraController().getFlashValue()); + Log.d(TAG, "controller_flash_value: "+ controller_flash_value); + if( flash_value.equals("flash_frontscreen_auto") || flash_value.equals("flash_frontscreen_on") ) { + // for frontscreen flash, the controller flash value will be "" (due to real flash not supported) - although on Galaxy Nexus this is "flash_off" due to parameters.getFlashMode() returning Camera.Parameters.FLASH_MODE_OFF + assertTrue(controller_flash_value.isEmpty() || controller_flash_value.equals("flash_off")); + } + else { + Log.d(TAG, "expected_flash_value: "+ flash_value); + assertEquals(flash_value, controller_flash_value); + } + } + } + + private void switchToFocusValue(String required_focus_value) { + Log.d(TAG, "switchToFocusValue: "+ required_focus_value); + boolean supports_focus = getActivityValue(activity -> activity.getPreview().supportsFocus()); + if( supports_focus ) { + String focus_value = getActivityValue(activity -> activity.getPreview().getCurrentFocusValue()); + Log.d(TAG, "start focus_value: "+ focus_value); + if( !focus_value.equals(required_focus_value) ) { + + openPopupMenu(); + + mActivityRule.getScenario().onActivity(activity -> { + View focusButton = activity.getUIButton("TEST_FOCUS_" + required_focus_value); + assertNotNull(focusButton); + clickView(focusButton); + }); + + focus_value = getActivityValue(activity -> activity.getPreview().getCurrentFocusValue()); + Log.d(TAG, "changed focus_value to: "+ focus_value); + } + assertEquals(focus_value, required_focus_value); + String controller_focus_value = getActivityValue(activity -> activity.getPreview().getCameraController().getFocusValue()); + Log.d(TAG, "controller_focus_value: "+ controller_focus_value); + boolean using_camera2 = getActivityValue(activity -> activity.getPreview().usingCamera2API()); + String compare_focus_value = focus_value; + if( compare_focus_value.equals("focus_mode_locked") ) + compare_focus_value = "focus_mode_auto"; + else if( 
compare_focus_value.equals("focus_mode_infinity") && using_camera2 ) + compare_focus_value = "focus_mode_manual2"; + assertEquals(compare_focus_value, controller_focus_value); + } + } + + private void switchToISO(int required_iso) { + Log.d(TAG, "switchToISO: "+ required_iso); + int iso = getActivityValue(activity -> activity.getPreview().getCameraController().getISO()); + Log.d(TAG, "start iso: "+ iso); + if( iso != required_iso ) { + mActivityRule.getScenario().onActivity(activity -> { + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + }); + mActivityRule.getScenario().onActivity(activity -> { + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + View isoButton = activity.getUIButton("TEST_ISO_" + required_iso); + assertNotNull(isoButton); + clickView(isoButton); + }); + try { + Thread.sleep(DrawPreview.dim_effect_time_c+50); // wait for updateForSettings + //this.getInstrumentation().waitForIdleSync(); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + iso = getActivityValue(activity -> activity.getPreview().getCameraController().getISO()); + Log.d(TAG, "changed iso to: "+ iso); + mActivityRule.getScenario().onActivity(activity -> { + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + clickView(exposureButton); + assertEquals(exposureContainer.getVisibility(), View.GONE); + }); + } + assertEquals(iso, required_iso); + } + + /* Sets the camera up to a predictable state: + * - Flash off (if flash supported) + * - Focus mode picture 
continuous (if focus modes supported) + * As a side-effect, the camera and/or camera parameters values may become invalid. + */ + private void setToDefault() { + waitUntilCameraOpened(); + + assertFalse( getActivityValue(activity -> activity.getPreview().isVideo()) ); + + switchToFlashValue("flash_off"); + switchToFocusValue("focus_mode_continuous_picture"); + + // pause for safety - needed for Nokia 8 at least otherwise some tests like testContinuousPictureFocusRepeat, + // testLocationOff result in hang whilst waiting for photo to be taken, and hit the timeout in waitForTakePhoto() + try { + Thread.sleep(200); + } + catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + } + + /*@Test + public void testDummy() { + Log.d(TAG, "testDummy"); + }*/ + + private static void checkHistogramDetails(TestUtils.HistogramDetails hdrHistogramDetails, int exp_min_value, int exp_median_value, int exp_max_value) { + TestUtils.checkHistogramDetails(hdrHistogramDetails, exp_min_value, exp_median_value, exp_max_value); + } + + /** Tests calling the DRO routine with 0.0 factor, and DROALGORITHM_NONE - and that the resultant image is identical. 
+ */ + @Category(HDRTests.class) + @Test + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + public void testDROZero() throws IOException, InterruptedException { + Log.d(TAG, "testDROZero"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + Bitmap bitmap = TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR3/input1.jpg"); + Bitmap bitmap_saved = bitmap.copy(bitmap.getConfig(), false); + + try { + Thread.sleep(1000); // wait for camera to open + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + List<Bitmap> inputs = new ArrayList<>(); + inputs.add(bitmap); + try { + activity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.0f, 4, true, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_NONE); + } + catch(HDRProcessorException e) { + Log.e(TAG, "processHDR failed", e); + throw new RuntimeException(); + } + + TestUtils.saveBitmap(activity, inputs.get(0), "droZerotestHDR3_output.jpg"); + TestUtils.checkHistogram(activity, bitmap); + + // check bitmaps are the same + Log.d(TAG, "compare bitmap " + bitmap); + Log.d(TAG, "with bitmap_saved " + bitmap_saved); + // sameAs doesn't seem to work + //assertTrue( bitmap.sameAs(bitmap_saved) ); + assertEquals(bitmap.getWidth(), bitmap_saved.getWidth()); + assertEquals(bitmap.getHeight(), bitmap_saved.getHeight()); + int [] old_row = new int[bitmap.getWidth()]; + int [] new_row = new int[bitmap.getWidth()]; + for(int y=0;y<bitmap.getHeight();y++) { + bitmap.getPixels(new_row, 0, bitmap.getWidth(), 0, y, bitmap.getWidth(), 1); + bitmap_saved.getPixels(old_row, 0, bitmap.getWidth(), 0, y, bitmap.getWidth(), 1); + for(int x=0;x<bitmap.getWidth();x++) { + assertEquals(old_row[x], new_row[x]); + } + } + }); + } + + /** Tests DRO only on a dark image. + */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + @Category(HDRTests.class) + @Test + public void testDRODark0() throws IOException, InterruptedException { + Log.d(TAG, "testDRODark0"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List<Bitmap> inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.avg_images_path + "testAvg3/input0.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testDRODark0_output.jpg", true, -1, -1); + }); + } + + /** Tests DRO only on a dark image. 
+ */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + @Category(HDRTests.class) + @Test + public void testDRODark1() throws IOException, InterruptedException { + Log.d(TAG, "testDRODark1"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List<Bitmap> inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.avg_images_path + "testAvg8/input0.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testDRODark1_output.jpg", true, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "saintpaul". + */ + @Category(HDRTests.class) + @Test + public void testHDR1() throws IOException, InterruptedException { + Log.d(TAG, "testHDR1"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List<Bitmap> inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input3.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input4.jpg") ); + + // actual ISO unknown, so guessing + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR1_output.jpg", false, 1600, 1000000000L); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 44, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 42, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 24, 254); + checkHistogramDetails(hdrHistogramDetails, 2, 30, 254); + }); + } + + /** Tests HDR algorithm on test samples "saintpaul", but with 5 images. 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR1_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR1_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List<Bitmap> inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input3.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input4.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "saintpaul/input5.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR1_exp5_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 3, 43, 251); + //checkHistogramDetails(hdrHistogramDetails, 6, 42, 251); + checkHistogramDetails(hdrHistogramDetails, 6, 49, 252); + }); + } + + /** Tests HDR algorithm on test samples "stlouis". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "stlouis/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "stlouis/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "stlouis/input3.jpg") ); + + // actual ISO unknown, so guessing + TestUtils.subTestHDR(activity, inputs, "testHDR2_output.jpg", false, 1600, (long)(1000000000L*2.5)); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR3". + */ + @Category(HDRTests.class) + @Test + public void testHDR3() throws IOException, InterruptedException { + Log.d(TAG, "testHDR3"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR3/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR3/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR3/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR3_output.jpg", false, 40, 1000000000L/680); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //TestUtils.checkHistogramDetails(hdrHistogramDetails, 3, 104, 255); + 
//TestUtils.checkHistogramDetails(hdrHistogramDetails, 4, 113, 255); + TestUtils.checkHistogramDetails(hdrHistogramDetails, 8, 113, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR4". + */ + @Category(HDRTests.class) + @Test + public void testHDR4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR4"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR4/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR4/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR4/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR4_output.jpg", true, 102, 1000000000L/60); + + int [] exp_offsets_x = {-2, 0, 2}; + int [] exp_offsets_y = {-1, 0, 1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR5". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR5/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR5/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR5/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR5_output.jpg", false, 40, 1000000000L/398); + + // Nexus 6: + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {-1, 0, 0}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR6". + */ + @Category(HDRTests.class) + @Test + public void testHDR6() throws IOException, InterruptedException { + Log.d(TAG, "testHDR6"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR6/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR6/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR6/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR6_output.jpg", false, 40, 1000000000L/2458); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR7". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR7/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR7/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR7/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR7_output.jpg", false, 40, 1000000000L/538); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR8". + */ + @Category(HDRTests.class) + @Test + public void testHDR8() throws IOException, InterruptedException { + Log.d(TAG, "testHDR8"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR8/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR8/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR8/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR8_output.jpg", false, 40, 1000000000L/148); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR9". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR9() throws IOException, InterruptedException { + Log.d(TAG, "testHDR9"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR9/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR9/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR9/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR9_output.jpg", false, 40, 1000000000L/1313); + + int [] exp_offsets_x = {-1, 0, 1}; + int [] exp_offsets_y = {0, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR10". + */ + @Category(HDRTests.class) + @Test + public void testHDR10() throws IOException, InterruptedException { + Log.d(TAG, "testHDR10"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR10/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR10/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR10/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR10_output.jpg", false, 107, 1000000000L/120); + + int [] exp_offsets_x = {2, 0, 0}; + int [] exp_offsets_y = {5, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR11". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR11() throws IOException, InterruptedException { + Log.d(TAG, "testHDR11"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR11/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR11/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR11/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR11_output.jpg", true, 40, 1000000000L/2662); + + int [] exp_offsets_x = {-2, 0, 1}; + int [] exp_offsets_y = {1, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 0, 48, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 65, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 72, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR12". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR12() throws IOException, InterruptedException { + Log.d(TAG, "testHDR12"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR12/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR12/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR12/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR12_output.jpg", true, 1196, 1000000000L/12); + + int [] exp_offsets_x = {0, 0, 7}; + int [] exp_offsets_y = {0, 0, 8}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR13". + */ + @Category(HDRTests.class) + @Test + public void testHDR13() throws IOException, InterruptedException { + Log.d(TAG, "testHDR13"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR13/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR13/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR13/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR13_output.jpg", false, 323, 1000000000L/24); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR14". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR14() throws IOException, InterruptedException { + Log.d(TAG, "testHDR14"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR14/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR14/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR14/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR14_output.jpg", false, 40, 1000000000L/1229); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR15". + */ + @Category(HDRTests.class) + @Test + public void testHDR15() throws IOException, InterruptedException { + Log.d(TAG, "testHDR15"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR15/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR15/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR15/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR15_output.jpg", false, 40, 1000000000L/767); + + int [] exp_offsets_x = {1, 0, -1}; + int [] exp_offsets_y = {2, 0, -3}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR16". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR16() throws IOException, InterruptedException { + Log.d(TAG, "testHDR16"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR16/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR16/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR16/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR16_output.jpg", false, 52, 1000000000L/120); + + int [] exp_offsets_x = {-1, 0, 2}; + int [] exp_offsets_y = {1, 0, -6}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR17". + */ + @Category(HDRTests.class) + @Test + public void testHDR17() throws IOException, InterruptedException { + Log.d(TAG, "testHDR17"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR17/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR17/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR17/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR17_output.jpg", true, 557, 1000000000L/12); + + // Nexus 6: + //int [] exp_offsets_x = {0, 0, -3}; + //int [] exp_offsets_y = {0, 0, -4}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, -2}; + int [] exp_offsets_y = {0, 0, -3}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples 
"testHDR18". + */ + @Category(HDRTests.class) + @Test + public void testHDR18() throws IOException, InterruptedException { + Log.d(TAG, "testHDR18"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR18/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR18/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR18/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR18_output.jpg", true, 100, 1000000000L/800); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 1, 113, 254); + //checkHistogramDetails(hdrHistogramDetails, 1, 119, 255); + //checkHistogramDetails(hdrHistogramDetails, 5, 120, 255); + checkHistogramDetails(hdrHistogramDetails, 2, 120, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR19". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR19() throws IOException, InterruptedException { + Log.d(TAG, "testHDR19"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR19/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR19/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR19/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR19_output.jpg", true, 100, 1000000000L/160); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR20". + */ + @Category(HDRTests.class) + @Test + public void testHDR20() throws IOException, InterruptedException { + Log.d(TAG, "testHDR20"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR20/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR20/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR20/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR20_output.jpg", true, 100, 1000000000L*2); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {-1, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR21". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR21() throws IOException, InterruptedException { + Log.d(TAG, "testHDR21"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR21/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR21/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR21/input2.jpg") ); + + // ISO and exposure unknown, so guessing + TestUtils.subTestHDR(activity, inputs, "testHDR21_output.jpg", true, 800, 1000000000L/12); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR22". + */ + @Category(HDRTests.class) + @Test + public void testHDR22() throws IOException, InterruptedException { + Log.d(TAG, "testHDR22"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR22/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR22/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR22/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR22_output.jpg", true, 391, 1000000000L/12); + + // Nexus 6: + //int [] exp_offsets_x = {1, 0, -5}; + //int [] exp_offsets_y = {1, 0, -6}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, -5}; + int [] exp_offsets_y = {1, 0, -6}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + 
/** Tests HDR algorithm on test samples "testHDR23", but with 2 images. + */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp2_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0}; + int [] exp_offsets_y = {0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 33, 78, 250); + checkHistogramDetails(hdrHistogramDetails, 17, 75, 250); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 2 images, and greater exposure gap. 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp2b() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp2b"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp2b_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0}; + int [] exp_offsets_y = {0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23". + */ + @Category(HDRTests.class) + @Test + public void testHDR23() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + + // ISO unknown, so guessing + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_output.jpg", false, 1600, 1000000000L); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 32, 74, 
255); + checkHistogramDetails(hdrHistogramDetails, 23, 71, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 4 images. + */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp4"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp4_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 31, 75, 254); + checkHistogramDetails(hdrHistogramDetails, 23, 74, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 5 images. 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp5_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 28, 82, 255); + //checkHistogramDetails(hdrHistogramDetails, 21, 74, 255); + checkHistogramDetails(hdrHistogramDetails, 17, 74, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 6 images. 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp6() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp6"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0072.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0061.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp6_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 32, 76, 254); + checkHistogramDetails(hdrHistogramDetails, 25, 75, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 7 images. 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR23_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0072.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR23/memorial0061.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR23_exp7_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 28, 82, 255); + checkHistogramDetails(hdrHistogramDetails, 20, 72, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR24". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR24() throws IOException, InterruptedException { + Log.d(TAG, "testHDR24"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR24/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR24/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR24/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR24_output.jpg", true, 40, 1000000000L/422); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR25". + */ + @Category(HDRTests.class) + @Test + public void testHDR25() throws IOException, InterruptedException { + Log.d(TAG, "testHDR25"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR25/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR25/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR25/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR25_output.jpg", true, 40, 1000000000L/1917); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR26". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR26() throws IOException, InterruptedException { + Log.d(TAG, "testHDR26"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR26/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR26/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR26/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR26_output.jpg", true, 40, 1000000000L/5325); + + int [] exp_offsets_x = {-1, 0, 1}; + int [] exp_offsets_y = {1, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 0, 104, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 119, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR27". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR27() throws IOException, InterruptedException { + Log.d(TAG, "testHDR27"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR27/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR27/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR27/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR27_output.jpg", true, 40, 1000000000L/949); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR28". + */ + @Category(HDRTests.class) + @Test + public void testHDR28() throws IOException, InterruptedException { + Log.d(TAG, "testHDR28"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR28/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR28/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR28/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR28_output.jpg", true, 294, 1000000000L/20); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR29". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR29() throws IOException, InterruptedException { + Log.d(TAG, "testHDR29"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR29/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR29/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR29/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR29_output.jpg", false, 40, 1000000000L/978); + + int [] exp_offsets_x = {-1, 0, 3}; + int [] exp_offsets_y = {0, 0, -1}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR30". + */ + @Category(HDRTests.class) + @Test + public void testHDR30() throws IOException, InterruptedException { + Log.d(TAG, "testHDR30"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR30/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR30/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR30/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR30_output.jpg", false, 40, 1000000000L/978); + + // offsets for full image + //int [] exp_offsets_x = {-6, 0, -1}; + //int [] exp_offsets_y = {23, 0, -13}; + // offsets using centre quarter image + int [] exp_offsets_x = {-5, 0, 0}; + int [] exp_offsets_y = {22, 0, -13}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + 
/** Tests HDR algorithm on test samples "testHDR31". + */ + @Category(HDRTests.class) + @Test + public void testHDR31() throws IOException, InterruptedException { + Log.d(TAG, "testHDR31"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR31/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR31/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR31/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR31_output.jpg", false, 40, 1000000000L/422); + + // offsets for full image + //int [] exp_offsets_x = {0, 0, 4}; + //int [] exp_offsets_y = {21, 0, -11}; + // offsets using centre quarter image + int [] exp_offsets_x = {0, 0, 3}; + int [] exp_offsets_y = {21, 0, -11}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR32". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR32() throws IOException, InterruptedException { + Log.d(TAG, "testHDR32"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR32/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR32/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR32/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR32_output.jpg", true, 40, 1000000000L/1331); + + int [] exp_offsets_x = {1, 0, 0}; + int [] exp_offsets_y = {13, 0, -10}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 3, 101, 251); + //checkHistogramDetails(hdrHistogramDetails, 3, 109, 251); + //checkHistogramDetails(hdrHistogramDetails, 6, 111, 252); + checkHistogramDetails(hdrHistogramDetails, 2, 111, 252); + }); + } + + /** Tests HDR algorithm on test samples "testHDR33". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR33() throws IOException, InterruptedException { + Log.d(TAG, "testHDR33"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR33/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR33/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR33/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR33_output.jpg", true, 40, 1000000000L/354); + + int [] exp_offsets_x = {13, 0, -10}; + int [] exp_offsets_y = {24, 0, -12}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR34". + */ + @Category(HDRTests.class) + @Test + public void testHDR34() throws IOException, InterruptedException { + Log.d(TAG, "testHDR34"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR34/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR34/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR34/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR34_output.jpg", true, 40, 1000000000L/4792); + + int [] exp_offsets_x = {5, 0, -8}; + int [] exp_offsets_y = {0, 0, -2}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR35". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR35() throws IOException, InterruptedException { + Log.d(TAG, "testHDR35"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR35/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR35/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR35/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR35_output.jpg", true, 40, 1000000000L/792); + + int [] exp_offsets_x = {-10, 0, 3}; + int [] exp_offsets_y = {7, 0, -3}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR36". + */ + @Category(HDRTests.class) + @Test + public void testHDR36() throws IOException, InterruptedException { + Log.d(TAG, "testHDR36"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR36/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR36/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR36/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR36_output.jpg", false, 100, 1000000000L/1148); + + int [] exp_offsets_x = {2, 0, -2}; + int [] exp_offsets_y = {-4, 0, 2}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR37". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR37() throws IOException, InterruptedException { + Log.d(TAG, "testHDR37"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR37/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR37/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR37/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR37_output.jpg", false, 46, 1000000000L/120); + + int [] exp_offsets_x = {0, 0, 3}; + int [] exp_offsets_y = {2, 0, -19}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR38". + * Tests with Filmic tonemapping. + */ + @Category(HDRTests.class) + @Test + public void testHDR38Filmic() throws IOException, InterruptedException { + Log.d(TAG, "testHDR38Filmic"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR38/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR38/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR38/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR38_filmic_output.jpg", false, 125, 1000000000L/2965, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_FU2); + + int [] exp_offsets_x = {-1, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + 
//checkHistogramDetails(hdrHistogramDetails, 0, 92, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 93, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR39". + */ + @Category(HDRTests.class) + @Test + public void testHDR39() throws IOException, InterruptedException { + Log.d(TAG, "testHDR39"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR39/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR39/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR39/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR39_output.jpg", false, 125, 1000000000L/2135); + + int [] exp_offsets_x = {-6, 0, -2}; + int [] exp_offsets_y = {6, 0, -8}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 0, 128, 222); + }); + } + + /** Tests HDR algorithm on test samples "testHDR40". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR40() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR40_output.jpg", false, 50, 1000000000L/262); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 138, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR40" with Exponential tonemapping. 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR40Exponential() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40Exponential"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR40_exponential_output.jpg", false, 50, 1000000000L/262, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_EXPONENTIAL); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 138, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR40" with Filmic tonemapping. 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR40Filmic() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40Filmic"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR40_filmic_output.jpg", false, 50, 1000000000L/262, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_FU2); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 130, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR41". + */ + @Category(HDRTests.class) + @Test + public void testHDR41() throws IOException, InterruptedException { + Log.d(TAG, "testHDR41"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR41/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR41/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR41/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR41_output.jpg", false, 925, 1000000000L/25); + }); + } + + /** Tests HDR algorithm on test samples "testHDR42". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR42() throws IOException, InterruptedException { + Log.d(TAG, "testHDR42"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR42/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR42/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR42/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR42_output.jpg", false, 112, 1000000000L/679); + }); + } + + /** Tests HDR algorithm on test samples "testHDR43". + */ + @Category(HDRTests.class) + @Test + public void testHDR43() throws IOException, InterruptedException { + Log.d(TAG, "testHDR43"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR43/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR43/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR43/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR43_output.jpg", false, 1196, 1000000000L/12); + }); + } + + /** Tests HDR algorithm on test samples "testHDR44". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR44() throws IOException, InterruptedException { + Log.d(TAG, "testHDR44"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR44/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR44/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR44/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR44_output.jpg", false, 100, 1000000000L/1016); + }); + } + + /** Tests HDR algorithm on test samples "testHDR45". + */ + @Category(HDRNTests.class) + @Test + public void testHDR45() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + // ISO 100, exposure time 2s, but pass in -1 since these are HDRNTests + 
TestUtils.subTestHDR(activity, inputs, "testHDR45_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR45". + */ + @Category(HDRNTests.class) + @Test + public void testHDR45_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR45_exp5_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR45". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR45_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45_exp7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR45_exp7_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR46". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR46() throws IOException, InterruptedException { + Log.d(TAG, "testHDR46"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 06.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 04.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 03.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 02.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 01.jpg") ); + + // ISO 100, exposure time 1/60s, but pass in -1 since these are HDRNTests + TestUtils.subTestHDR(activity, inputs, "testHDR46_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR46". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR46_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR46_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 06.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 04.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 03.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 02.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR46_exp5_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR47". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR47_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDR47_exp2_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR47". + */ + @Category(HDRNTests.class) + @Test + public void testHDR47() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + 
"testHDR47/High Five - ppw - 01.jpg") ); + + // ISO 400, exposure time 1/60s, but pass in -1 since these are HDRNTests + TestUtils.subTestHDR(activity, inputs, "testHDR47_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR47". + */ + @Category(HDRNTests.class) + @Test + public void testHDR47_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR47_exp5_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 1, 73, 255); + checkHistogramDetails(hdrHistogramDetails, 1, 80, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR47". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR47_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR47_exp7_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 1, 73, 255); + checkHistogramDetails(hdrHistogramDetails, 1, 80, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR48". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR48() throws IOException, InterruptedException { + Log.d(TAG, "testHDR48"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input3.jpg") ); + //inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input4.jpg") ); + + // ISO 100, exposure time 1/716s, but pass in -1 since these are HDRNTests + TestUtils.subTestHDR(activity, inputs, "testHDR48_output.jpg", false, -1, -1); + }); + } + + /** Tests HDR algorithm on test samples "testHDR48". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR48_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR48_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input3.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR48/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR48_exp5_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 59, 241); + checkHistogramDetails(hdrHistogramDetails, 0, 67, 241); + }); + } + + /** Tests HDR algorithm on test samples "testHDR49". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR49_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR49_exp2_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 12, 120, 251); + checkHistogramDetails(hdrHistogramDetails, 0, 122, 251); + }); + } + + /** Tests HDR algorithm on test samples "testHDR49". + */ + @Category(HDRNTests.class) + @Test + public void testHDR49() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + + // ISO 100, exposure time 1/417s, but pass in -1 since these are HDRNTests + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR49_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 89, 254); + }); + } + + /** Tests HDR algorithm on test samples "testHDR49". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR49_exp4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp4"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR49_exp4_output.jpg", false, -1, -1); + + checkHistogramDetails(hdrHistogramDetails, 19, 109, 244); + }); + } + + /** Tests HDR algorithm on test samples "testHDR49". 
+ */ + @Category(HDRNTests.class) + @Test + public void testHDR49_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input2.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR49/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR49_exp5_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 72, 244); + //checkHistogramDetails(hdrHistogramDetails, 0, 78, 243); + checkHistogramDetails(hdrHistogramDetails, 0, 87, 243); + }); + } + + /** Tests HDR algorithm on test samples "testHDR50". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR50() throws IOException, InterruptedException { + Log.d(TAG, "testHDR50"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR50_output.jpg", false, 867, 1000000000L/14); + + checkHistogramDetails(hdrHistogramDetails, 0, 69, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR51". + */ + @Category(HDRTests.class) + @Test + public void testHDR51() throws IOException, InterruptedException { + Log.d(TAG, "testHDR51"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR51_output.jpg", true, 1600, 1000000000L/11); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR52". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR52() throws IOException, InterruptedException { + Log.d(TAG, "testHDR52"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR52_output.jpg", false, 100, 1000000000L/2105); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR53". + */ + @Category(HDRTests.class) + @Test + public void testHDR53() throws IOException, InterruptedException { + Log.d(TAG, "testHDR53"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR53_output.jpg", false, 103, 1000000000L/5381); + + //checkHistogramDetails(hdrHistogramDetails, 0, 55, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 72, 255); + }); + } + + /** Tests HDR algorithm on test samples 
"testHDR54". + */ + @Category(HDRTests.class) + @Test + public void testHDR54() throws IOException, InterruptedException { + Log.d(TAG, "testHDR54"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR54_output.jpg", false, 752, 1000000000L/14); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR55". + */ + @Category(HDRTests.class) + @Test + public void testHDR55() throws IOException, InterruptedException { + Log.d(TAG, "testHDR55"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR55_output.jpg", false, 1505, 1000000000L/10); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR56". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR56() throws IOException, InterruptedException { + Log.d(TAG, "testHDR56"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR56_output.jpg", false, 50, 1000000000L/40); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR57". + */ + @Category(HDRTests.class) + @Test + public void testHDR57() throws IOException, InterruptedException { + Log.d(TAG, "testHDR57"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR57_output.jpg", true, 100, 1000000000L/204); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR58". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR58() throws IOException, InterruptedException { + Log.d(TAG, "testHDR58"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR58_output.jpg", false, 1250, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR58_output.jpg", false, 1250, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + checkHistogramDetails(hdrHistogramDetails, 11, 119, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR59". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR59() throws IOException, InterruptedException { + Log.d(TAG, "testHDR59"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR59_output.jpg", false, 1250, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR59_output.jpg", false, 1250, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR60". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR60() throws IOException, InterruptedException { + Log.d(TAG, "testHDR60"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR60_output.jpg", false, 491, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR60_output.jpg", false, 491, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests HDR algorithm on test samples "testHDR61". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR61() throws IOException, InterruptedException { + Log.d(TAG, "testHDR61"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR61_output.jpg", false, 50, 1000000000L/5025); + + checkHistogramDetails(hdrHistogramDetails, 0, 93, 255); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, -2}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDR62". 
+ */ + @Category(HDRTests.class) + @Test + public void testHDR62() throws IOException, InterruptedException { + Log.d(TAG, "testHDR62"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR62/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR62/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDR62/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestHDR(activity, inputs, "testHDR62_output.jpg", false, 100, 1000000000L/485); + + checkHistogramDetails(hdrHistogramDetails, 0, 113, 247); + + int [] exp_offsets_x = {0, 0, -3}; + int [] exp_offsets_y = {3, 0, -6}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y); + }); + } + + /** Tests HDR algorithm on test samples "testHDRtemp". + * Used for one-off testing, or to recreate HDR images from the base exposures to test an updated algorithm. + * The test images should be copied to the test device into DCIM/testOpenCamera/testdata/hdrsamples/testHDRtemp/ . 
+ */ + @Test + public void testHDRtemp() throws IOException, InterruptedException { + Log.d(TAG, "testHDRtemp"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDRtemp/input0.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDRtemp/input1.jpg") ); + inputs.add( TestUtils.getBitmapFromFile(activity, TestUtils.hdr_images_path + "testHDRtemp/input2.jpg") ); + + TestUtils.subTestHDR(activity, inputs, "testHDRtemp_output.jpg", true, 100, 1000000000L/100); + }); + } + + /** Tests Avg algorithm on test samples "testAvg1". + */ + @Category(AvgTests.class) + @Test + public void testAvg1() throws IOException, InterruptedException { + Log.d(TAG, "testAvg1"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg1/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg1/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg1/input2.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity, inputs, "testAvg1_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 2, 0}; + //int [] 
exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -4, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg2". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg2() throws IOException, InterruptedException { + Log.d(TAG, "testAvg2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg2/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg2/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg2/input2.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg2_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -11, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -12, 0}; + int [] exp_offsets_x = {0, -16, 0}; + int [] exp_offsets_y = {0, -12, 0}; + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -13, 0}; + //int [] exp_offsets_y = {0, -12, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -14, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, -12, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 
activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg3". + */ + @Category(AvgTests.class) + @Test + public void testAvg3() throws IOException, InterruptedException { + Log.d(TAG, "testAvg3"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg3/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg3_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( index == 1 ) { + //int [] exp_offsets_x = {0, 2, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, -1, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -9, 0}; + //int [] exp_offsets_y = {0, -11, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -8, 0}; + assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 0); + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + 
else if( index == 2 ) { + //int [] exp_offsets_x = {0, -18, 0}; + //int [] exp_offsets_y = {0, 17, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -25, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -9, 0}; + //int [] exp_offsets_y = {0, 14, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, 12, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, -29, 0}; + //int [] exp_offsets_y = {0, -22, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -7, 0}; + //int [] exp_offsets_y = {0, 11, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 14, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 12, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + assertTrue(false); + }*/ + } + }); + + 
//checkHistogramDetails(hdrHistogramDetails, 0, 21, 177); + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 152); + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 166); + checkHistogramDetails(hdrHistogramDetails, 0, 23, 194); + }); + } + + /** Tests Avg algorithm on test samples "testAvg4". + */ + @Category(AvgTests.class) + @Test + public void testAvg4() throws IOException, InterruptedException { + Log.d(TAG, "testAvg4"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg4/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg4_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 5, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + int [] exp_offsets_x = {0, 5, 0}; + int [] exp_offsets_y = {0, 1, 0}; + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 5, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 4, 0}; + 
TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 7, 0}; + //int [] exp_offsets_x = {0, 1, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 7, 0}; + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, 3, 0}; + int [] exp_offsets_y = {0, 9, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg5". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg5() throws IOException, InterruptedException { + Log.d(TAG, "testAvg5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg5/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg5_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( index == 1 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, 5, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 0); + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, -4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 
activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, 9, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, 10, 0}; + //int [] exp_offsets_y = {0, -4, 0}; + int [] exp_offsets_x = {0, 11, 0}; + int [] exp_offsets_y = {0, -3, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + assertTrue(false); + }*/ + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg6". + */ + @Category(AvgTests.class) + @Test + public void testAvg6() throws IOException, InterruptedException { + Log.d(TAG, "testAvg6"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg6/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably 
more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg6_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( true ) + return;*/ + if( index == 1 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 
activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 18, 51, 201); + //checkHistogramDetails(hdrHistogramDetails, 14, 38, 200); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 193); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 199); + //checkHistogramDetails(hdrHistogramDetails, 12, 46, 202); + //checkHistogramDetails(hdrHistogramDetails, 12, 46, 205); + //checkHistogramDetails(hdrHistogramDetails, 12, 44, 209); + //checkHistogramDetails(hdrHistogramDetails, 12, 44, 202); + //checkHistogramDetails(hdrHistogramDetails, 5, 16, 190); + checkHistogramDetails(hdrHistogramDetails, 5, 19, 199); + }); + } + + /** Tests Avg algorithm on test samples "testAvg7". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg7() throws IOException, InterruptedException { + Log.d(TAG, "testAvg7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg7/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg7_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 
activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg8". + */ + @Category(AvgTests.class) + @Test + public void testAvg8() throws IOException, InterruptedException { + Log.d(TAG, "testAvg8"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg8/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg8_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 4, 26, 92); + //checkHistogramDetails(hdrHistogramDetails, 3, 19, 68); + //checkHistogramDetails(hdrHistogramDetails, 0, 10, 60); + //checkHistogramDetails(hdrHistogramDetails, 1, 8, 72); + 
//checkHistogramDetails(hdrHistogramDetails, 1, 6, 64); + //checkHistogramDetails(hdrHistogramDetails, 1, 15, 75); + checkHistogramDetails(hdrHistogramDetails, 1, 16, 78); + }); + } + + /** Tests Avg algorithm on test samples "testAvg9". + */ + @Category(AvgTests.class) + @Test + public void testAvg9() throws IOException, InterruptedException { + Log.d(TAG, "testAvg9"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + final boolean use_auto_photos = true; + + if( use_auto_photos ) { + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto7.jpg"); + } + else { + inputs.add(TestUtils.avg_images_path + "testAvg9/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input7.jpg"); + } + + String out_filename = use_auto_photos ? 
"testAvg9_auto_output.jpg" : "testAvg9_output.jpg"; + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, out_filename, 1600, use_auto_photos ? 1000000000L/16 : 1000000000L/11, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg10". + */ + @Category(AvgTests.class) + @Test + public void testAvg10() throws IOException, InterruptedException { + Log.d(TAG, "testAvg10"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + final boolean use_auto_photos = false; + + if( use_auto_photos ) { + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto7.jpg"); + } + else { + inputs.add(TestUtils.avg_images_path + "testAvg10/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input2.jpg"); + 
inputs.add(TestUtils.avg_images_path + "testAvg10/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input7.jpg"); + } + + String out_filename = use_auto_photos ? "testAvg10_auto_output.jpg" : "testAvg10_output.jpg"; + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, out_filename, 1196, use_auto_photos ? 1000000000L/12 : 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg11". + */ + @Category(AvgTests.class) + @Test + public void testAvg11() throws IOException, InterruptedException { + Log.d(TAG, "testAvg11"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // note, we don't actually use 8 images for a bright scene like this, but it serves as a good test for + // misalignment/ghosting anyway + inputs.add(TestUtils.avg_images_path + "testAvg11/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input7.jpg"); + + TestUtils.HistogramDetails 
hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg11_output.jpg", 100, 1000000000L/338, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + //assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, -5, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, -1, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, -16, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, -12, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, -3, 0}; + //int [] exp_offsets_y = {0, -20, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] 
exp_offsets_y = {0, -12, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -12, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 4, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, 10, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, -6, 0}; + //int [] exp_offsets_x = {0, 2, 0}; + //int [] exp_offsets_y = {0, -6, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 4, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg12". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg12() throws IOException, InterruptedException { + Log.d(TAG, "testAvg12"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg12/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg12/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg12_output.jpg", 100, 1000000000L/1617, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 254); + //checkHistogramDetails(hdrHistogramDetails, 0, 27, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 20, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 17, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 31, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg13". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg13() throws IOException, InterruptedException { + Log.d(TAG, "testAvg13"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg13/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg13/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg13_output.jpg", 100, 1000000000L/2482, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg14". + */ + @Category(AvgTests.class) + @Test + public void testAvg14() throws IOException, InterruptedException { + Log.d(TAG, "testAvg14"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg14/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it 
was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg14_output.jpg", 1600, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -8, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 28, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 40, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 25, 245); + checkHistogramDetails(hdrHistogramDetails, 0, 18, 246); + }); + } + + /** Tests Avg algorithm on test samples "testAvg15". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg15() throws IOException, InterruptedException { + Log.d(TAG, "testAvg15"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg15/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg15/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg15_output.jpg", 100, 1000000000L/1525, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 38, 254); + }); + } + + /** Tests Avg algorithm on test samples "testAvg16". + */ + @Category(AvgTests.class) + @Test + public void testAvg16() throws IOException, InterruptedException { + Log.d(TAG, "testAvg16"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg16/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg16/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg16_output.jpg", 100, 1000000000L/293, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg17". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg17() throws IOException, InterruptedException { + Log.d(TAG, "testAvg17"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg17/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg17_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 12, 0}; + int [] exp_offsets_y = {0, 28, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 100, 233); + 
//checkHistogramDetails(hdrHistogramDetails, 0, 100, 236); + //checkHistogramDetails(hdrHistogramDetails, 0, 92, 234); + //checkHistogramDetails(hdrHistogramDetails, 0, 102, 241); + //checkHistogramDetails(hdrHistogramDetails, 0, 102, 238); + checkHistogramDetails(hdrHistogramDetails, 0, 103, 244); + }); + } + + /** Tests Avg algorithm on test samples "testAvg18". + */ + @Category(AvgTests.class) + @Test + public void testAvg18() throws IOException, InterruptedException { + Log.d(TAG, "testAvg18"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg18/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg18/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg18_output.jpg", 100, 1000000000L/591, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(activity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg19". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg19() throws IOException, InterruptedException { + Log.d(TAG, "testAvg19"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg19/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg19/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg19_output.jpg", 100, 1000000000L/2483, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 88, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 77, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 87, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 74, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 58, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg20". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg20() throws IOException, InterruptedException { + Log.d(TAG, "testAvg20"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg20/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg20/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg20_output.jpg", 100, 1000000000L/3124, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg21". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg21() throws IOException, InterruptedException { + Log.d(TAG, "testAvg21"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg21/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg21/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg21_output.jpg", 102, 1000000000L/6918, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg22". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg22() throws IOException, InterruptedException { + Log.d(TAG, "testAvg22"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg22/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg22/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg22_output.jpg", 100, 1000000000L/3459, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg23". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg23() throws IOException, InterruptedException { + Log.d(TAG, "testAvg23"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_3.jpg"); + // only test 4 images, to reflect latest behaviour that we take 4 images for this ISO + /*inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg23_output.jpg", 1044, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 
activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 81, 251); + //checkHistogramDetails(hdrHistogramDetails, 0, 80, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 83, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg24". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg24() throws IOException, InterruptedException { + Log.d(TAG, "testAvg24"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg24/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg24/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg24_output.jpg", 100, 1000000000L/2421, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 77, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 74, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 80, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 56, 254); + }); + } + + /** Tests Avg algorithm on test samples "testAvg25". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg25() throws IOException, InterruptedException { + Log.d(TAG, "testAvg25"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg25/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg25_output.jpg", 512, 1000000000L/20, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg26". + */ + @Category(AvgTests.class) + @Test + public void testAvg26() throws IOException, InterruptedException { + Log.d(TAG, "testAvg26"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // note we now take only 3 images for bright scenes, but still test with 4 images as this serves as a good test + // against ghosting + inputs.add(TestUtils.avg_images_path + "testAvg26/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg26_output.jpg", 100, 1000000000L/365, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( 
true ) + return;*/ + if( index == 1 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg27". + */ + @Category(AvgTests.class) + @Test + public void testAvg27() throws IOException, InterruptedException { + Log.d(TAG, "testAvg27"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg27/IMG_20180610_205929_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg27/IMG_20180610_205929_1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg27_output.jpg", 100, 1000000000L/482, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg28". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg28() throws IOException, InterruptedException { + Log.d(TAG, "testAvg28"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg28/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input008.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg28_output.jpg", 811, 1000000000L/21, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 18, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 8, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg29". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg29() throws IOException, InterruptedException { + Log.d(TAG, "testAvg29"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg29/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input009.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg29_output.jpg", 40, 1000000000L/2660, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 88, 127, 255); + //checkHistogramDetails(hdrHistogramDetails, 92, 134, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg30". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg30() throws IOException, InterruptedException { + Log.d(TAG, "testAvg30"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg30/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg30/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg30/input003.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg30_output.jpg", 60, 1000000000L/411, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 134, 254); + //checkHistogramDetails(hdrHistogramDetails, 0, 144, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 107, 254); + }); + } + + /** Tests Avg algorithm on test samples "testAvg31". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg31() throws IOException, InterruptedException { + Log.d(TAG, "testAvg31"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg31/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg31_output.jpg", 609, 1000000000L/25, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 24, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg32". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg32() throws IOException, InterruptedException { + Log.d(TAG, "testAvg32"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg32/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input007.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg32_output.jpg", 335, 1000000000L/120, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 34, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 36, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 61, 254); + }); + } + + /** Tests Avg algorithm on test samples "testAvg33". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg33() throws IOException, InterruptedException { + Log.d(TAG, "testAvg33"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg33/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg33_output.jpg", 948, 1000000000L/18, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 81, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 63, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg34". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg34() throws IOException, InterruptedException { + Log.d(TAG, "testAvg34"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg34_output.jpg", 100, 1000000000L/289, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 108, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 114, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 103, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg35". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg35() throws IOException, InterruptedException { + Log.d(TAG, "testAvg35"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg35_output.jpg", 100, 1000000000L/2549, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 165, 247); + checkHistogramDetails(hdrHistogramDetails, 0, 169, 248); + }); + } + + /** Tests Avg algorithm on test samples "testAvg36". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg36() throws IOException, InterruptedException { + Log.d(TAG, "testAvg36"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_3.jpg"); + // only test 4 images, to reflect latest behaviour that we take 4 images for this ISO/exposure time + /*inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg36_output.jpg", 752, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, -28, 0}; + int [] exp_offsets_y = {0, 0, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg37". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg37() throws IOException, InterruptedException { + Log.d(TAG, "testAvg37"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg37_output.jpg", 131, 1000000000L/50, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 12, 109, 255); + //checkHistogramDetails(hdrHistogramDetails, 3, 99, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 99, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 125, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 94, 255); + checkHistogramDetails(hdrHistogramDetails, 6, 94, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg38". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg38() throws IOException, InterruptedException { + Log.d(TAG, "testAvg38"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_7.jpg"); + + // n.b., this was a zoomed in photo, but can't quite remember the exact zoom level! + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg38_output.jpg", 1505, 1000000000L/10, 3.95f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + }); + } + + /** Tests Avg algorithm on test samples "testAvg39". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg39() throws IOException, InterruptedException { + Log.d(TAG, "testAvg39"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg39/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg39_output.jpg", 521, 1000000000L/27, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 64, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 25, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg40". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg40() throws IOException, InterruptedException { + Log.d(TAG, "testAvg40"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg40/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input009.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg40_output.jpg", 199, 1000000000L/120, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 50, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 19, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 50, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 67, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg41". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg41() throws IOException, InterruptedException { + Log.d(TAG, "testAvg41"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg41/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg41_output.jpg", 100, 1000000000L/869, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 49, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 37, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 59, 254); + }); + } + + /** Tests Avg algorithm on test samples "testAvg42". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg42() throws IOException, InterruptedException { + Log.d(TAG, "testAvg42"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg42_output.jpg", 100, 1000000000L/2061, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 67, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 61, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg43". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg43() throws IOException, InterruptedException { + Log.d(TAG, "testAvg43"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg43_output.jpg", 100, 1000000000L/2152, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 69, 253); + }); + } + + /** Tests Avg algorithm on test samples "testAvg44". + */ + @Category(AvgTests.class) + @Test + public void testAvg44() throws IOException, InterruptedException { + Log.d(TAG, "testAvg44"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg44_output.jpg", 40, 1000000000L/2130, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg45". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg45() throws IOException, InterruptedException { + Log.d(TAG, "testAvg45"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg45_output.jpg", 100, 1000000000L/865, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg46". + */ + @Category(AvgTests.class) + @Test + public void testAvg46() throws IOException, InterruptedException { + Log.d(TAG, "testAvg46"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = 
TestUtils.subTestAvg(activity,inputs, "testAvg46_output.jpg", 1505, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg47". + */ + @Category(AvgTests.class) + @Test + public void testAvg47() throws IOException, InterruptedException { + Log.d(TAG, "testAvg47"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg47_output.jpg", 749, 1000000000L/12, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg48". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg48() throws IOException, InterruptedException { + Log.d(TAG, "testAvg48"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg48_output.jpg", 1196, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg49". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg49() throws IOException, InterruptedException { + Log.d(TAG, "testAvg49"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg49_output.jpg", 1505, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg50". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg50() throws IOException, InterruptedException { + Log.d(TAG, "testAvg50"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg50_output.jpg", 114, 1000000000L/33, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg51". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg51() throws IOException, InterruptedException { + Log.d(TAG, "testAvg51"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg51_output.jpg", 1600, 1000000000L/3, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, activity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 60, 0}; + int [] exp_offsets_y = {0, 28, 0}; + TestUtils.checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, activity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvg52". 
+ */ + @Category(AvgTests.class) + @Test + public void testAvg52() throws IOException, InterruptedException { + Log.d(TAG, "testAvg52"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvg52_output.jpg", 100, 1000000000L/297, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + }); + } + + /** Tests Avg algorithm on test samples "testAvgtemp". + * Used for one-off testing, or to recreate NR images from the base exposures to test an updated alorithm. + * The test images should be copied to the test device into DCIM/testOpenCamera/testdata/hdrsamples/testAvgtemp/ . 
+ */ + @Test + public void testAvgtemp() throws IOException, InterruptedException { + Log.d(TAG, "testAvgtemp"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input0.png"); + /*inputs.add(TestUtils.avg_images_path + "testAvgtemp/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input3.jpg");*/ + /*inputs.add(TestUtils.avg_images_path + "testAvgtemp/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = TestUtils.subTestAvg(activity,inputs, "testAvgtemp_output.jpg", 250, 1000000000L/33, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + }); + } + + /** Tests panorama algorithm on test samples "testPanoramaWhite". + * This tests that auto-alignment fails gracefully if we can't find any matches. 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanoramaWhite() throws IOException, InterruptedException { + Log.d(TAG, "testPanoramaWhite"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + inputs.add(TestUtils.panorama_images_path + "testPanoramaWhite/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanoramaWhite/input0.jpg"); + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + float panorama_pics_per_screen = 2.0f; + String output_name = "testPanoramaWhite_output.jpg"; + + TestUtils.subTestPanorama(activity, inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama1". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama1() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama1"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input3.jpg"); + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + float panorama_pics_per_screen = 2.0f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(47.44656/49.56283); + String output_name = "testPanorama1_output.jpg"; + + TestUtils.subTestPanorama(activity, inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + }); + } + + /** Tests panorama 
algorithm on test samples "testPanorama2". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama2() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + /*final float panorama_pics_per_screen = 1.0f; + //inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input2.jpg");*/ + /*final float panorama_pics_per_screen = 2.0f; + //inputs.add(TestUtils.panorama_images_path + "testPanorama1/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input3.jpg"); + String output_name = "testPanorama1_output.jpg";*/ + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input5.jpg"); + String output_name = "testPanorama2_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + }); + } + + /** Tests 
panorama algorithm on test samples "testPanorama3". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama3() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama3"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131249.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131252.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131255.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131258.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131301.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131303.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131305.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131307.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131315.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131317.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131320.jpg"); + String output_name = "testPanorama3_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama3", with panorama_pics_per_screen set + * to 4.0. 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama3_picsperscreen2() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama3_picsperscreen2"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131249.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131252.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131255.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131258.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131301.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131303.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131305.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131307.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131315.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131317.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131320.jpg"); + String output_name = "testPanorama3_picsperscreen2_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama4". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama4() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama4"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_7.jpg"); + String output_name = "testPanorama4_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama5". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama5() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama5"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_7.jpg"); + String output_name = "testPanorama5_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama6". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama6() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama6"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_7.jpg"); + String output_name = "testPanorama6_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama7". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama7() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama7"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_8.jpg"); + String output_name = "testPanorama7_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama8". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama8() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama8"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_3.jpg"); + String output_name = "testPanorama8_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama9". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama9() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama9"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_6.jpg"); + String output_name = "testPanorama9_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + + try { + Thread.sleep(1000); // need to wait for debug images to be saved/broadcast? + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + }); + } + + /** Tests panorama algorithm on test samples "testPanorama10". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama10() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama10"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_10.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_11.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_12.jpg"); + String output_name = "testPanorama10_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + 
TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama11". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama11() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama11"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_6.jpg"); + String output_name = "testPanorama11_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama12". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama12() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama12"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_9.jpg"); + String output_name = "testPanorama12_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama13". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama13() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama13"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_9.jpg"); + String output_name = "testPanorama13_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama14". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama14() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama14"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_9.jpg"); + String output_name = "testPanorama14_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama15". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama15() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama15"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_9.jpg"); + String output_name = "testPanorama15_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama16". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama16() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama16"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_9.jpg"); + String output_name = "testPanorama16_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama17". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama17() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama17"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_9.jpg"); + String output_name = "testPanorama17_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama18". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama18() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama18"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_9.jpg"); + String output_name = "testPanorama18_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama19". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama19() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama19"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_9.jpg"); + String output_name = "testPanorama19_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama20". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama20() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama20"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_9.jpg"); + String output_name = "testPanorama20_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama21". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama21() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama21"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_9.jpg"); + String output_name = "testPanorama21_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama22". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama22() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama22"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_7.jpg"); + String output_name = "testPanorama22_output.jpg"; + String gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama23". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama23() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama23"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_4.jpg"); + String output_name = "testPanorama23_output.jpg"; + String gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama24". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama24() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama24"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_9.jpg"); + String output_name = "testPanorama24_output.jpg"; + String gyro_name = null; + // taken with OnePlus 3T, Camera2 API: + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama25". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama25() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama25"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + //float panorama_pics_per_screen = 3.33333f / 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_6.jpg"); + String output_name = "testPanorama25_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama26". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama26() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama26"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_6.jpg"); + String output_name = "testPanorama26_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama27". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama27() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama27"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_6.jpg"); + String output_name = "testPanorama27_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama28". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama28() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama28"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + /*inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_0.jpg");*/ + // converted from original JPEGs to PNG using Nokia 8: + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_6.png"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_9.png"); + String output_name = "testPanorama28_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama28", but with a near-identical set of + * input images. Instead of converting the original JPEGs to PNG on Nokia 8, this was done on + * the Samsung Galaxy S10e, which gives small differences, but enough to show up potential + * stability issues. + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama28_galaxys10e() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama28_galaxys10e"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + /*inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_3.jpg"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_0.jpg");*/ + // converted from original JPEGs to PNG using Samsung Galaxy S10e: + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_9.png"); + String output_name = "testPanorama28_galaxys10e_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama29". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama29() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama29"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_0.jpg"); + String output_name = "testPanorama29_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, old API: + float camera_angle_x = 66.1062f; + float camera_angle_y = 49.88347f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama30". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama30() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama30"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + /*inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_9.jpg");*/ + // converted from original JPEGs to PNG using Nokia 8: + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_6.png"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_9.png"); + String output_name = "testPanorama30_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama30", but with a near-identical set of + * input images. Instead of converting the original JPEGs to PNG on Nokia 8, this was done on + * the Samsung Galaxy S10e, which gives small differences, but enough to show up potential + * stability issues. 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama30_galaxys10e() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama30_galaxys10e"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + /*inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_9.jpg");*/ + // converted from original JPEGs to PNG using Samsung Galaxy S10e: + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_5.png"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_9.png"); + String output_name = "testPanorama30_galaxys10e_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama31". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama31() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama31"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_6.jpg"); + String output_name = "testPanorama31_output.jpg"; + String gyro_name = null; + // taken 
with OnePlus 3T, Camera2 API: + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama32". + */ + @Category(PanoramaTests.class) + @Test + public void testPanorama32() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama32"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_8.jpg"); + String output_name = "testPanorama32_output.jpg"; + String gyro_name = null; + // taken with OnePlus 3T, old API: + float camera_angle_x = 60.0f; + float camera_angle_y = 45.0f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama33". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama33() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama33"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_5.jpg"); + String output_name = "testPanorama33_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, old API: + float camera_angle_x = 66.1062f; + float camera_angle_y = 49.88347f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama34". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama34() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama34"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_0.jpg"); + String output_name = "testPanorama34_output.jpg"; + String gyro_name = null; + // taken with Nexus 6, old API: + float camera_angle_x = 62.7533f; + float camera_angle_y = 47.298824f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama35". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama35() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama35"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_9.jpg"); + String output_name = "testPanorama35_output.jpg"; + String gyro_name = null; + // taken with Nexus 7, old API: + float camera_angle_x = 55.0f; + float camera_angle_y = 41.401073f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama36". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama36() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama36"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_7.jpg"); + String output_name = "testPanorama36_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, ultra wide rear camera: + float camera_angle_x = 104.00253f; + float camera_angle_y = 81.008804f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama37". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama37() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama37"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_8.jpg"); + String output_name = "testPanorama37_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + /** Tests panorama algorithm on test samples "testPanorama38". 
+ */ + @Category(PanoramaTests.class) + @Test + public void testPanorama38() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama38"); + + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { // for simplicity, run the entire test on the UI thread + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_9.jpg"); + String output_name = "testPanorama38_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + TestUtils.subTestPanorama(activity, inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + }); + } + + private void waitForTakePhoto() { + Log.d(TAG, "wait until finished taking photo"); + long time_s = System.currentTimeMillis(); + while(true) { + boolean waiting = getActivityValue(activity -> (activity.getPreview().isTakingPhoto() || !activity.getApplicationInterface().canTakeNewPhoto())); + if( !waiting ) { + break; + } + 
mActivityRule.getScenario().onActivity(activity -> { + TestUtils.waitForTakePhotoChecks(activity, time_s); + }); + } + + Log.d(TAG, "done taking photo"); + } + + /** Tests behaviour of the MainActivity.OnApplyWindowInsetsListener() for edge-to-edge mode on + * Android 15. + */ + @Category(MainTests.class) + @Test + public void testWindowInsets() throws InterruptedException { + Log.d(TAG, "testWindowInsets"); + setToDefault(); + + if( !getActivityValue(MainActivity::getEdgeToEdgeMode) ) { + Log.d(TAG, "test requires edge-to-edge mode"); + return; + } + + MainActivity.test_force_system_orientation = true; + MainActivity.test_force_window_insets = true; + + // portrait, typical with navigation at bottom, cutout at top + MainActivity.test_system_orientation = MainActivity.SystemOrientation.PORTRAIT; + MainActivity.test_insets = Insets.of(0, 200, 0, 300); + MainActivity.test_cutout_insets = Insets.of(0, 200, 0, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(300, activity.getNavigationGap()); + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + // landscape + MainActivity.test_system_orientation = MainActivity.SystemOrientation.LANDSCAPE; + MainActivity.test_insets = Insets.of(200, 0, 300, 0); + MainActivity.test_cutout_insets = Insets.of(200, 0, 0, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(300, activity.getNavigationGap()); + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + // reverse landscape + MainActivity.test_system_orientation = MainActivity.SystemOrientation.REVERSE_LANDSCAPE; + MainActivity.test_insets = Insets.of(300, 0, 200, 0); + 
MainActivity.test_cutout_insets = Insets.of(0, 0, 200, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(300, activity.getNavigationGap()); + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + + // portrait, navigation at bottom, double cutout at top and bottom + MainActivity.test_system_orientation = MainActivity.SystemOrientation.PORTRAIT; + MainActivity.test_insets = Insets.of(0, 100, 0, 500); + MainActivity.test_cutout_insets = Insets.of(0, 100, 0, 300); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(200, activity.getNavigationGap()); // should only include the gap for the navigation, not the bottom cutout + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + // landscape + MainActivity.test_system_orientation = MainActivity.SystemOrientation.LANDSCAPE; + MainActivity.test_insets = Insets.of(100, 0, 500, 0); + MainActivity.test_cutout_insets = Insets.of(100, 0, 300, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(200, activity.getNavigationGap()); // should only include the gap for the navigation, not the bottom cutout + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + // reverse landscape + MainActivity.test_system_orientation = MainActivity.SystemOrientation.REVERSE_LANDSCAPE; + MainActivity.test_insets = Insets.of(500, 0, 100, 0); + MainActivity.test_cutout_insets = Insets.of(300, 0, 100, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new 
test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(200, activity.getNavigationGap()); // should only include the gap for the navigation, not the bottom cutout + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + + // portrait, no navigation bar, waterfall cutout + MainActivity.test_system_orientation = MainActivity.SystemOrientation.PORTRAIT; + MainActivity.test_insets = Insets.of(50, 100, 50, 100); + MainActivity.test_cutout_insets = Insets.of(50, 100, 50, 100); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + // navigation gaps should be 0, as shouldn't include cutout + assertEquals(0, activity.getNavigationGap()); + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + + // landscape, navigation along landscape edge + MainActivity.test_system_orientation = MainActivity.SystemOrientation.LANDSCAPE; + MainActivity.test_insets = Insets.of(100, 0, 0, 250); + MainActivity.test_cutout_insets = Insets.of(100, 0, 0, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(0, activity.getNavigationGap()); + assertEquals(250, activity.getNavigationGapLandscape()); + assertEquals(0, activity.getNavigationGapReverseLandscape()); + }); + if( getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) { + // also test manual focus seekbar + mActivityRule.getScenario().onActivity(activity -> { + View focus_seekbar = activity.findViewById(R.id.focus_seekbar); + assertEquals(focus_seekbar.getVisibility(), View.GONE); + }); + switchToFocusValue("focus_mode_manual2"); + mActivityRule.getScenario().onActivity(activity -> { + View focus_seekbar = 
activity.findViewById(R.id.focus_seekbar); + assertEquals(focus_seekbar.getVisibility(), View.VISIBLE); + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)focus_seekbar.getLayoutParams(); + assertEquals(0, layoutParams.leftMargin); + assertEquals(0, layoutParams.topMargin); + assertEquals(0, layoutParams.rightMargin); + assertEquals(250, layoutParams.bottomMargin); + }); + } + + // reverse landscape, navigation along landscape edge + MainActivity.test_system_orientation = MainActivity.SystemOrientation.REVERSE_LANDSCAPE; + MainActivity.test_insets = Insets.of(0, 0, 100, 250); + MainActivity.test_cutout_insets = Insets.of(0, 0, 100, 0); + restart(); // restart to force OnApplyWindowInsetsListener() to be called with new test values + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(0, activity.getNavigationGap()); + assertEquals(0, activity.getNavigationGapLandscape()); + assertEquals(250, activity.getNavigationGapReverseLandscape()); + }); + if( getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) { + // also test manual focus seekbar + switchToFocusValue("focus_mode_manual2"); + mActivityRule.getScenario().onActivity(activity -> { + View focus_seekbar = activity.findViewById(R.id.focus_seekbar); + assertEquals(focus_seekbar.getVisibility(), View.VISIBLE); + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)focus_seekbar.getLayoutParams(); + assertEquals(0, layoutParams.leftMargin); + assertEquals(0, layoutParams.topMargin); + assertEquals(0, layoutParams.rightMargin); + assertEquals(0, layoutParams.bottomMargin); + }); + } + } + + private void subTestTouchToFocus(final boolean wait_after_focus, final boolean single_tap_photo, final boolean double_tap_photo, final boolean manual_can_auto_focus, final boolean can_focus_area, final String focus_value, final String focus_value_ui) throws InterruptedException { + // touch to auto-focus with focus area (will also exit immersive mode) + // 
autofocus shouldn't be immediately, but after a delay + // and Galaxy S10e needs a longer delay for some reason, for the subsequent touch of the preview view to register + Thread.sleep(2000); + int saved_count = getActivityValue(activity -> activity.getPreview().count_cameraAutoFocus); + Log.d(TAG, "saved count_cameraAutoFocus: " + saved_count); + Log.d(TAG, "### about to click preview for autofocus"); + + onView(anyOf(ViewMatchers.withClassName(endsWith("MySurfaceView")), ViewMatchers.withClassName(endsWith("MyTextureView")))).perform(click()); + + Log.d(TAG, "### done click preview for autofocus"); + + mActivityRule.getScenario().onActivity(activity -> { + TestUtils.touchToFocusChecks(activity, single_tap_photo, double_tap_photo, manual_can_auto_focus, can_focus_area, focus_value, focus_value_ui, saved_count); + }); + + if( double_tap_photo ) { + Thread.sleep(100); + Log.d(TAG, "about to click preview again for double tap"); + //onView(withId(preview_view_id)).perform(ViewActions.doubleClick()); + mActivityRule.getScenario().onActivity(activity -> { + //onView(anyOf(ViewMatchers.withClassName(endsWith("MySurfaceView")), ViewMatchers.withClassName(endsWith("MyTextureView")))).perform(click()); + activity.getPreview().onDoubleTap(); // calling tapView twice doesn't seem to work consistently, so we call this directly! 
+ }); + } + if( wait_after_focus && !single_tap_photo && !double_tap_photo) { + // don't wait after single or double tap photo taking, as the photo taking operation is already started + Log.d(TAG, "wait after focus..."); + Thread.sleep(3000); + } + } + + private void subTestTakePhoto(boolean locked_focus, boolean immersive_mode, boolean touch_to_focus, boolean wait_after_focus, boolean single_tap_photo, boolean double_tap_photo, boolean is_raw, boolean test_wait_capture_result) throws InterruptedException { + Thread.sleep(500); + + TestUtils.SubTestTakePhotoInfo info = getActivityValue(activity -> TestUtils.getSubTestTakePhotoInfo(activity, immersive_mode, single_tap_photo, double_tap_photo)); + + int saved_count_cameraTakePicture = getActivityValue(activity -> activity.getPreview().count_cameraTakePicture); + + // count initial files in folder + String [] files = getActivityValue(activity -> TestUtils.filesInSaveFolder(activity)); + int n_files = files == null ? 0 : files.length; + Log.d(TAG, "n_files at start: " + n_files); + + int saved_count = getActivityValue(activity -> activity.getPreview().count_cameraAutoFocus); + + int saved_thumbnail_count = getActivityValue(activity -> activity.getApplicationInterface().getDrawPreview().test_thumbnail_anim_count); + Log.d(TAG, "saved_thumbnail_count: " + saved_thumbnail_count); + + if( touch_to_focus ) { + subTestTouchToFocus(wait_after_focus, single_tap_photo, double_tap_photo, info.manual_can_auto_focus, info.can_focus_area, info.focus_value, info.focus_value_ui); + } + Log.d(TAG, "saved count_cameraAutoFocus: " + saved_count); + + if( !single_tap_photo && !double_tap_photo ) { + mActivityRule.getScenario().onActivity(activity -> { + View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + assertFalse( activity.hasThumbnailAnimation() ); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + }); + } + + waitForTakePhoto(); 
+ + int new_count_cameraTakePicture = getActivityValue(activity -> activity.getPreview().count_cameraTakePicture); + Log.d(TAG, "take picture count: " + new_count_cameraTakePicture); + assertEquals(new_count_cameraTakePicture, saved_count_cameraTakePicture + 1); + + /*if( test_wait_capture_result ) { + // if test_wait_capture_result, then we'll have waited too long for thumbnail animation + } + else if( info.is_focus_bracketing ) { + // thumbnail animation may have already occurred (e.g., see testTakePhotoFocusBracketingHeavy() + } + else*/ if( info.has_thumbnail_anim ) { + long time_s = System.currentTimeMillis(); + for(;;) { + //boolean waiting = getActivityValue(activity -> !activity.hasThumbnailAnimation()); + boolean waiting = getActivityValue(activity -> (activity.getApplicationInterface().getDrawPreview().test_thumbnail_anim_count <= saved_thumbnail_count)); + if( !waiting ) { + break; + } + Log.d(TAG, "waiting for thumbnail animation"); + Thread.sleep(10); + int allowed_time_ms = 10000; + if( info.is_hdr || info.is_nr || info.is_expo ) { + // some devices need longer time (especially Nexus 6) + allowed_time_ms = 16000; + } + assertTrue( System.currentTimeMillis() - time_s < allowed_time_ms ); + } + } + else { + boolean has_thumbnail_animation = getActivityValue(activity -> activity.hasThumbnailAnimation()); + assertFalse( has_thumbnail_animation ); + int new_thumbnail_count = getActivityValue(activity -> activity.getApplicationInterface().getDrawPreview().test_thumbnail_anim_count); + assertEquals(saved_thumbnail_count, new_thumbnail_count); + } + + mActivityRule.getScenario().onActivity(activity -> { + activity.waitUntilImageQueueEmpty(); + + TestUtils.checkFocusAfterTakePhoto(activity, info.focus_value, info.focus_value_ui); + + try { + TestUtils.checkFilesAfterTakePhoto(activity, is_raw, test_wait_capture_result, files); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from checkFilesAfterTakePhoto", e); + } + + 
TestUtils.checkFocusAfterTakePhoto2(activity, touch_to_focus, single_tap_photo, double_tap_photo, test_wait_capture_result, locked_focus, info.can_auto_focus, info.can_focus_area, saved_count); + + TestUtils.postTakePhotoChecks(activity, immersive_mode, info.exposureVisibility, info.exposureLockVisibility); + + assertFalse(activity.getApplicationInterface().getImageSaver().test_queue_blocked); + assertTrue( activity.getPreview().getCameraController() == null || activity.getPreview().getCameraController().count_camera_parameters_exception == 0 ); + }); + + } + + /*@Category(PhotoTests.class) + @Test + public void testTakePhoto() throws InterruptedException { + Log.d(TAG, "testTakePhoto"); + setToDefault(); + subTestTakePhoto(false, false, true, true, false, false, false, false); + }*/ + + /** Tests option to remove device exif info. + */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoRemoveExifOn() throws InterruptedException { + Log.d(TAG, "testTakePhotoRemoveExifOn"); + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RemoveDeviceExifPreferenceKey, "preference_remove_device_exif_on"); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + mActivityRule.getScenario().onActivity(activity -> { + try { + TestUtils.testExif(activity, activity.test_last_saved_image, activity.test_last_saved_imageuri, false, false, false); + } + catch(IOException e) { + Log.e(TAG, "testExif failed", e); + fail(); + } + }); + } + + /** Tests option to remove device exif info, but with auto-level to test codepath where we + * resave the bitmap. 
+ */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoRemoveExifOn2() throws InterruptedException { + Log.d(TAG, "testTakePhotoRemoveExifOn2"); + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RemoveDeviceExifPreferenceKey, "preference_remove_device_exif_on"); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + mActivityRule.getScenario().onActivity(activity -> { + try { + TestUtils.testExif(activity, activity.test_last_saved_image, activity.test_last_saved_imageuri, false, false, false); + } + catch(IOException e) { + Log.e(TAG, "testExif failed", e); + fail(); + } + }); + } + + /** Tests option to remove device exif info, but keeping datetime tags. 
+ */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoRemoveExifKeepDatetime() throws InterruptedException { + Log.d(TAG, "testTakePhotoRemoveExifKeepDatetime"); + setToDefault(); + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RemoveDeviceExifPreferenceKey, "preference_remove_device_exif_keep_datetime"); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + mActivityRule.getScenario().onActivity(activity -> { + try { + TestUtils.testExif(activity, activity.test_last_saved_image, activity.test_last_saved_imageuri, false, true, false); + } + catch(IOException e) { + Log.e(TAG, "testExif failed", e); + fail(); + } + }); + } + + @Category(PhotoTests.class) + @Test + public void testTakePhotoVendorExtensions() throws InterruptedException { + Log.d(TAG, "testTakePhotoVendorExtensions"); + setToDefault(); + + List<String> supported_extension_modes = new ArrayList<>(); + mActivityRule.getScenario().onActivity(activity -> { + if( activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_AUTOMATIC) ) + supported_extension_modes.add("preference_photo_mode_x_auto"); + if( activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_HDR) ) + supported_extension_modes.add("preference_photo_mode_x_hdr"); + if( activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_NIGHT) ) + supported_extension_modes.add("preference_photo_mode_x_night"); + if( activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BOKEH) ) + supported_extension_modes.add("preference_photo_mode_x_bokeh"); + if( activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BEAUTY) ) + supported_extension_modes.add("preference_photo_mode_x_beauty"); + }); + + if( 
supported_extension_modes.isEmpty() ) { + Log.d(TAG, "test requires camera extensions"); + return; + } + + boolean check_exif = true; + boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + if( is_samsung && Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU ) { + // Samsung Galaxy S10e Android 12 doesn't store various exif tags with vendor extensions + // unclear if this is Samsung specific or Android version specific + check_exif = false; + } + + for(String photo_mode : supported_extension_modes) { + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, photo_mode); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, false, false, false, false, false, false); + + if( check_exif ) { + mActivityRule.getScenario().onActivity(activity -> { + try { + TestUtils.testExif(activity, activity.test_last_saved_image, activity.test_last_saved_imageuri, true, true, false); + } + catch(IOException e) { + Log.e(TAG, "testExif failed", e); + fail(); + } + }); + } + } + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std"); + editor.apply(); + }); + updateForSettings(); + + if( getActivityValue(activity -> activity.getPreview().getCameraControllerManager().getNumberOfCameras()) > 1 ) { + Log.d(TAG, "test front camera"); + mActivityRule.getScenario().onActivity(activity -> { + Log.d(TAG, "switch camera"); + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + }); + waitUntilCameraOpened(); + + for(String photo_mode : 
supported_extension_modes) { + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, photo_mode); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, false, false, false, false, false, false); + + if( check_exif ) { + mActivityRule.getScenario().onActivity(activity -> { + try { + TestUtils.testExif(activity, activity.test_last_saved_image, activity.test_last_saved_imageuri, true, true, false); + } + catch(IOException e) { + Log.e(TAG, "testExif failed", e); + fail(); + } + }); + } + } + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std"); + editor.apply(); + }); + updateForSettings(); + } + } + + /** Tests preshots. 
+ */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoPreshots() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreshots"); + setToDefault(); + + if( !getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + + mActivityRule.getScenario().onActivity(activity -> { + assertNotNull(activity.getPreview().getPreShotsRingBuffer()); + assertEquals(0, activity.getPreview().getPreShotsRingBuffer().getNBitmaps()); + }); + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PreShotsPreferenceKey, "preference_save_preshots_on"); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + mActivityRule.getScenario().onActivity(activity -> { + assertNotNull(activity.getPreview().getPreShotsRingBuffer()); + assertTrue(activity.getPreview().getPreShotsRingBuffer().getNBitmaps() > 0); + }); + + // test ring buffer flushed on pause + mActivityRule.getScenario().onActivity(activity -> { + Log.d(TAG, "pause..."); + getInstrumentation().callActivityOnPause(activity); + }); + mActivityRule.getScenario().onActivity(activity -> { + assertNotNull(activity.getPreview().getPreShotsRingBuffer()); + assertEquals(0, activity.getPreview().getPreShotsRingBuffer().getNBitmaps()); + }); + } + + /** Tests manual exposure longer than preview exposure rate, with the RequestTagType.RUN_POST_CAPTURE flag for Camera2 API. 
+ */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoManualISOExposurePostCapture() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreshots"); + setToDefault(); + + if( !getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + else if( !getActivityValue(activity -> activity.getPreview().supportsISORange()) ) { + Log.d(TAG, "test requires manual iso range"); + return; + } + + switchToISO(100); + + mActivityRule.getScenario().onActivity(activity -> { + // ensure we test RequestTagType.RUN_POST_CAPTURE even when not testing on a Samsung + activity.getPreview().getCameraController().test_force_run_post_capture = true; + }); + + mActivityRule.getScenario().onActivity(activity -> { + // open exposure UI + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.manual_exposure_container); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + clickView(exposureButton); + }); + AtomicReference chosen_exposureRef = new AtomicReference<>(); + mActivityRule.getScenario().onActivity(activity -> { + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.manual_exposure_container); + SeekBar isoSeekBar = activity.findViewById(net.sourceforge.opencamera.R.id.iso_seekbar); + SeekBar exposureTimeSeekBar = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_time_seekbar); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(isoSeekBar.getVisibility(), View.VISIBLE); + assertEquals(exposureTimeSeekBar.getVisibility(), (activity.getPreview().supportsExposureTime() ? 
View.VISIBLE : View.GONE)); + //subTestISOButtonAvailability(); + + // change exposure time to min of (max, 0.5s) + int progress = exposureTimeSeekBar.getMax(); + while( progress > exposureTimeSeekBar.getMin() && activity.getManualSeekbars().getExposureTime(progress) > 1000000000L/2 ) { + progress--; + } + chosen_exposureRef.set(activity.getManualSeekbars().getExposureTime(progress)); + + Log.d(TAG, "change exposure time to progress " + progress + " time " + chosen_exposureRef.get()); + exposureTimeSeekBar.setProgress(progress); + }); + long chosen_exposure = chosen_exposureRef.get(); + mActivityRule.getScenario().onActivity(activity -> { + assertEquals(activity.getPreview().getCameraController().getISO(), 100); + assertEquals(activity.getPreview().getCameraController().getExposureTime(), chosen_exposure); + + // close the exposure UI + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + clickView(exposureButton); + }); + mActivityRule.getScenario().onActivity(activity -> { + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = activity.findViewById(net.sourceforge.opencamera.R.id.manual_exposure_container); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + }); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + } + + @Category(PhotoTests.class) + @Test + public void testTakePhotoJpegR() throws InterruptedException { + Log.d(TAG, "testTakePhotoJpegR"); + + setToDefault(); + + if( !getActivityValue(activity -> activity.getPreview().supportsJpegR()) ) { + Log.d(TAG, "jpeg_r not supported"); + return; + } + + mActivityRule.getScenario().onActivity(activity -> { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.ImageFormatPreferenceKey, 
"preference_image_format_jpeg_r"); + editor.apply(); + }); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + /** Tests with flag set to force preview to take 6s to start (tests Camera2 behaviour for this happening on background thread, and + * not). + */ + @Category(PhotoTests.class) + @Test + public void testTakePhotoSlowPreviewStart() throws InterruptedException { + Log.d(TAG, "testTakePhotoSlowPreviewStart"); + + setToDefault(); + + if( !getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + // matches the use of wait_until_started in Preview.cameraOpened() + Log.d(TAG, "test requires Android 14+"); + return; + } + + CameraController.test_force_slow_preview_start = true; + long time_s = System.currentTimeMillis(); + restart(false); // restart to force preview to start with test flag + Log.d(TAG, "time to restart: " + (System.currentTimeMillis() - time_s)); + assertTrue( System.currentTimeMillis() - time_s < 3000 ); // test didn't get stuck on UI thread + + // make sure the application isn't stuck on the UI thread + time_s = System.currentTimeMillis(); + boolean done = false; + while( !done ) { + assertTrue( System.currentTimeMillis() - time_s < 500 ); + done = getActivityValue(activity -> activity.getPreview().isPreviewStarting()); + } + + waitUntilPreviewStarted(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + /** Tests with flag set to force preview to take 6s to start (tests Camera2 behaviour for this happening on background thread, and + * not). 
     */
    @Category(PhotoTests.class)
    @Test
    public void testSettingsSlowPreviewStart() throws InterruptedException {
        Log.d(TAG, "testSettingsSlowPreviewStart");

        setToDefault();

        if( !getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) {
            Log.d(TAG, "test requires camera2 api");
            return;
        }
        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) {
            // matches the use of wait_until_started in Preview.cameraOpened()
            Log.d(TAG, "test requires Android 14+");
            return;
        }

        CameraController.test_force_slow_preview_start = true;
        long time_s = System.currentTimeMillis();
        restart(false); // restart to force preview to start with test flag
        Log.d(TAG, "time to restart: " + (System.currentTimeMillis() - time_s));
        assertTrue( System.currentTimeMillis() - time_s < 3000 ); // test didn't get stuck on UI thread

        assertFalse(getActivityValue(activity -> activity.getPreview().isPreviewStarted())); // shouldn't have started preview yet

        // go to settings
        assertFalse(getActivityValue(activity -> activity.isCameraInBackground()));
        mActivityRule.getScenario().onActivity(activity -> {
            View settingsButton = activity.findViewById(net.sourceforge.opencamera.R.id.settings);
            clickView(settingsButton);
        });
        assertTrue(getActivityValue(activity -> activity.isCameraInBackground()));

        // leave settings
        Thread.sleep(500);
        mActivityRule.getScenario().onActivity(activity -> {
            Log.d(TAG, "on back pressed...");
            activity.onBackPressed();
        });
        Thread.sleep(500);
        assertFalse(getActivityValue(activity -> activity.isCameraInBackground()));

        // make sure preview starts up
        waitUntilPreviewStarted();
    }

    /** Returns the number of files currently in the save folder (0 if the folder can't be read).
     */
    private int getNFiles() {
        // count initial files in folder
        String [] files = getActivityValue(activity -> TestUtils.filesInSaveFolder(activity));
        Log.d(TAG, "getNFiles: " + Arrays.toString(files));
        return files == null ? 0 : files.length;
    }

    /**
     * @return The number of resultant video files
     */
    private int subTestTakeVideo(boolean test_exposure_lock, boolean test_focus_area, boolean allow_failure, boolean immersive_mode, TestUtils.VideoTestCallback test_cb, long time_ms, boolean max_filesize, int n_non_video_files) throws InterruptedException {
        boolean supports_exposure_lock = getActivityValue(activity -> activity.getPreview().supportsExposureLock());
        if( test_exposure_lock && !supports_exposure_lock ) {
            return 0;
        }

        Thread.sleep(500); // needed for Pixel 6 Pro with Camera 2 API

        // switch to video mode if not already in it
        mActivityRule.getScenario().onActivity(activity -> {
            TestUtils.preTakeVideoChecks(activity, immersive_mode);

            if( !activity.getPreview().isVideo() ) {
                View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
                clickView(switchVideoButton);
            }
        });

        waitUntilCameraOpened();

        mActivityRule.getScenario().onActivity(activity -> {
            assertTrue(activity.getPreview().isVideo());
            TestUtils.preTakeVideoChecks(activity, immersive_mode);
            // reset:
            activity.getApplicationInterface().test_n_videos_scanned = 0;
        });

        // count initial files in folder
        int n_files = getNFiles();
        Log.d(TAG, "n_files at start: " + n_files);

        // store status to compare with later
        int exposureVisibility = getActivityValue(activity -> {
            View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure);
            return exposureButton.getVisibility();
        });
        int exposureLockVisibility = getActivityValue(activity -> {
            View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock);
            return exposureLockButton.getVisibility();
        });

        // start recording
        mActivityRule.getScenario().onActivity(activity -> {
            Log.d(TAG, "about to click take video");
            View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
            clickView(takePhotoButton);
            Log.d(TAG, "done clicking take video");
        });

        getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");

        mActivityRule.getScenario().onActivity(activity -> {
            Preview preview = activity.getPreview();
            if( preview.usingCamera2API() ) {
                // texture view buffer should have been resized to match the preview size
                assertEquals(preview.getCurrentPreviewSize().width, preview.getCameraController().test_texture_view_buffer_w);
                assertEquals(preview.getCurrentPreviewSize().height, preview.getCameraController().test_texture_view_buffer_h);
            }
        });

        waitUntilTimer();

        int exp_n_new_files = 0;
        boolean failed_to_start = false;
        boolean is_video_recording = getActivityValue(activity -> activity.getPreview().isVideoRecording());
        if( is_video_recording ) {
            mActivityRule.getScenario().onActivity(activity -> {
                TestUtils.takeVideoRecordingChecks(activity, immersive_mode, exposureVisibility, exposureLockVisibility);
            });

            if( test_cb == null ) {
                if( !immersive_mode && time_ms > 500 ) {
                    // test turning torch on/off (if in immersive mode, popup button will be hidden)
                    switchToFlashValue("flash_torch");
                    Thread.sleep(500);
                    switchToFlashValue("flash_off");
                }

                Thread.sleep(time_ms);
                mActivityRule.getScenario().onActivity(activity -> {
                    TestUtils.takeVideoRecordingChecks(activity, immersive_mode, exposureVisibility, exposureLockVisibility);

                    Preview preview = activity.getPreview();
                    assertFalse(preview.hasFocusArea());
                    if( !allow_failure ) {
                        assertNull(preview.getCameraController().getFocusAreas());
                        assertNull(preview.getCameraController().getMeteringAreas());
                    }
                });

                if( test_focus_area ) {
                    // touch to auto-focus with focus area
                    Log.d(TAG, "touch to focus");
                    onView(anyOf(ViewMatchers.withClassName(endsWith("MySurfaceView")), ViewMatchers.withClassName(endsWith("MyTextureView")))).perform(click());
                    Log.d(TAG, "done touch to focus");
                    Thread.sleep(1000); // wait for autofocus
                    mActivityRule.getScenario().onActivity(activity -> {
                        Preview preview = activity.getPreview();
                        if( preview.supportsFocus() ) {
                            assertTrue(preview.hasFocusArea());
                            assertNotNull(preview.getCameraController().getFocusAreas());
                            assertEquals(1, preview.getCameraController().getFocusAreas().size());
                            assertNotNull(preview.getCameraController().getMeteringAreas());
                            assertEquals(1, preview.getCameraController().getMeteringAreas().size());
                        }
                    });

                    // this time, don't wait
                    Log.d(TAG, "touch again to focus");
                    onView(anyOf(ViewMatchers.withClassName(endsWith("MySurfaceView")), ViewMatchers.withClassName(endsWith("MyTextureView")))).perform(click());
                    Log.d(TAG, "done touch to focus");
                }

                if( test_exposure_lock ) {
                    mActivityRule.getScenario().onActivity(activity -> {
                        Log.d(TAG, "test exposure lock");
                        assertFalse(activity.getPreview().getCameraController().getAutoExposureLock());
                        View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock);
                        clickView(exposureLockButton);
                    });
                    getInstrumentation().waitForIdleSync();
                    Log.d(TAG, "after idle sync");
                    mActivityRule.getScenario().onActivity(activity -> {
                        assertTrue( activity.getPreview().getCameraController().getAutoExposureLock() );
                    });
                    Thread.sleep(2000);
                }

                // stop recording
                mActivityRule.getScenario().onActivity(activity -> {
                    TestUtils.takeVideoRecordingChecks(activity, immersive_mode, exposureVisibility, exposureLockVisibility);

                    Log.d(TAG, "about to click stop video");
                    View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
                    clickView(takePhotoButton);
                    Log.d(TAG, "done clicking stop video");
                });
                getInstrumentation().waitForIdleSync();
                Log.d(TAG, "after idle sync");
            }
            else {
                // delegate the mid-recording behaviour to the callback; it returns the expected number of new files
                exp_n_new_files = test_cb.doTest();

                mActivityRule.getScenario().onActivity(activity -> {
                    if( activity.getPreview().isVideoRecording() ) {
                        Log.d(TAG, "about to click stop video");
                        View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
                        clickView(takePhotoButton);
                        Log.d(TAG, "done clicking stop video");
                    }
                });

                getInstrumentation().waitForIdleSync();
                Log.d(TAG, "after idle sync");
            }
        }
        else {
            Log.d(TAG, "didn't start video");
            assertTrue(allow_failure);
            failed_to_start = true;
        }

        int n_new_files = getNFiles() - n_files;
        Log.d(TAG, "n_new_files: " + n_new_files);
        // effectively-final copies for use inside the lambda
        int exp_n_new_files_f = exp_n_new_files;
        boolean failed_to_start_f = failed_to_start;
        mActivityRule.getScenario().onActivity(activity -> {
            TestUtils.checkFilesAfterTakeVideo(activity, allow_failure, test_cb != null, time_ms, n_non_video_files, failed_to_start_f, exp_n_new_files_f, n_new_files);

            TestUtils.postTakeVideoChecks(activity, immersive_mode, max_filesize, exposureVisibility, exposureLockVisibility);
        });

        return n_new_files;
    }

    /*@Category(VideoTests.class)
    @Test
    public void testTakeVideo() throws InterruptedException {
        Log.d(TAG, "testTakeVideo");

        setToDefault();

        int n_new_files = subTestTakeVideo(false, false, false, false, null, 5000, false, 0);

        assertEquals(1, n_new_files);
    }*/

    /** Test for bug fix made on 20221112, to do with Pixel 6 Pro and video resolution larger than
     * FullHD but smaller than 4K. Problem that we selected 60fps because that's supported at
     * FullHD (and is in the CamcorderProfile for FullHD), but the larger non-4K resolution does
     * not support 60fps.
     */
    @Category(VideoTests.class)
    @Test
    public void testTakeVideoAltResolution() throws InterruptedException {
        Log.d(TAG, "testTakeVideoAltResolution");

        setToDefault();

        if( !getActivityValue(activity -> activity.getPreview().usingCamera2API()) ) {
            Log.d(TAG, "test requires camera2 api");
            return;
        }

        // find the smallest supported quality that is strictly between FullHD and 4K
        String chosen_video_quality = getActivityValue(activity -> {
            String return_quality = null;
            CamcorderProfile best_profile = null;
            List<String> supported_video_quality = activity.getPreview().getVideoQualityHander().getSupportedVideoQuality();
            if( supported_video_quality != null ) {
                for(String quality : supported_video_quality) {
                    CamcorderProfile profile = activity.getPreview().getCamcorderProfile(quality);
                    if( profile.videoFrameWidth > 1920 && profile.videoFrameHeight > 1080 && profile.videoFrameWidth < 3840 && profile.videoFrameHeight < 2160 ) {
                        if( best_profile == null || profile.videoFrameWidth*profile.videoFrameHeight < best_profile.videoFrameWidth*best_profile.videoFrameHeight ) {
                            return_quality = quality;
                            best_profile = profile;
                        }
                    }
                }
            }
            if( return_quality != null ) {
                Log.d(TAG, "video_quality: " + return_quality);
                Log.d(TAG, "best_profile: " + best_profile.videoFrameWidth + " x " + best_profile.videoFrameHeight);
            }
            return return_quality;
        });
        if( chosen_video_quality == null ) {
            Log.d(TAG, "can't find desired video resolution");
            return;
        }
        mActivityRule.getScenario().onActivity(activity -> {
            activity.getApplicationInterface().setVideoQualityPref(chosen_video_quality);
        });

        int n_new_files = subTestTakeVideo(false, false, false, false, null, 5000, false, 0);

        assertEquals(1, n_new_files);

        pauseAndResume();

        // quality preference should survive a pause/resume
        mActivityRule.getScenario().onActivity(activity -> {
            String video_quality = activity.getApplicationInterface().getVideoQualityPref();
            Log.d(TAG, "video_quality: " + video_quality);
            assertEquals(chosen_video_quality, video_quality);
        });
    }

    /** Records a video and takes a photo snapshot mid-recording, checking UI state throughout.
     */
    private void subTestTakeVideoSnapshot() throws InterruptedException {
        Log.d(TAG, "subTestTakeVideoSnapshot");

        // snapshot button should be hidden when not recording
        mActivityRule.getScenario().onActivity(activity -> {
            View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        });

        subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() {
            @Override
            public int doTest() {
                Log.d(TAG, "wait before taking photo");
                try {
                    Thread.sleep(3000);
                }
                catch(InterruptedException e) {
                    Log.e(TAG, "InterruptedException from sleep", e);
                    fail();
                }
                mActivityRule.getScenario().onActivity(activity -> {
                    View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
                    View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
                    assertEquals(takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video));
                    assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE);
                    assertTrue( activity.getPreview().isVideoRecording() );
                    assertFalse(activity.getPreview().isVideoRecordingPaused());

                    Log.d(TAG, "about to click take photo snapshot");
                    clickView(takePhotoVideoButton);
                    Log.d(TAG, "done clicking take photo snapshot");
                });

                getInstrumentation().waitForIdleSync();
                Log.d(TAG, "after idle sync");

                waitForTakePhoto();

                // recording should still be in progress after the snapshot
                mActivityRule.getScenario().onActivity(activity -> {
                    View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
                    assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE);
                    assertTrue( activity.getPreview().isVideoRecording() );
                    assertFalse(activity.getPreview().isVideoRecordingPaused());
                });

                Log.d(TAG, "wait before stopping");
                try {
                    Thread.sleep(3000);
                }
                catch(InterruptedException e) {
                    Log.e(TAG, "InterruptedException from sleep", e);
                    fail();
                }
                mActivityRule.getScenario().onActivity(activity -> {
                    View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
                    View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
                    assertEquals(takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video));
                    assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE);
                    assertTrue( activity.getPreview().isVideoRecording() );

                    Log.d(TAG, "about to click stop video");
                    clickView(takePhotoButton);
                    Log.d(TAG, "done clicking stop video");
                });

                getInstrumentation().waitForIdleSync();
                Log.d(TAG, "after idle sync");

                // expect 2 new files: the video plus the snapshot photo
                return 2;
            }
        }, 5000, false, 1);

        mActivityRule.getScenario().onActivity(activity -> {
            activity.waitUntilImageQueueEmpty();
        });
    }

    /** Test taking photo while recording video.
     */
    /*@Category(VideoTests.class)
    @Test
    public void testTakeVideoSnapshot() throws InterruptedException {
        Log.d(TAG, "testTakeVideoSnapshot");

        setToDefault();

        if( !getActivityValue(activity -> activity.getPreview().supportsPhotoVideoRecording()) ) {
            Log.d(TAG, "video snapshot not supported");
            return;
        }

        subTestTakeVideoSnapshot();
    }*/

    /** Test taking photo while recording video, when JPEG_R is set.
     */
    @Category(VideoTests.class)
    @Test
    public void testTakeVideoSnapshotJpegR() throws InterruptedException {
        Log.d(TAG, "testTakeVideoSnapshotJpegR");

        setToDefault();

        if( !getActivityValue(activity -> activity.getPreview().supportsPhotoVideoRecording()) ) {
            Log.d(TAG, "video snapshot not supported");
            return;
        }
        else if( !getActivityValue(activity -> activity.getPreview().supportsJpegR()) ) {
            Log.d(TAG, "jpeg_r not supported");
            return;
        }

        // set image format to JPEG_R (Ultra HDR) before recording
        mActivityRule.getScenario().onActivity(activity -> {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(activity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putString(PreferenceKeys.ImageFormatPreferenceKey, "preference_image_format_jpeg_r");
            editor.apply();
        });
        updateForSettings();

        subTestTakeVideoSnapshot();
    }
}
diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/MainInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/MainInstrumentedTests.java
new file mode 100644
index 0000000..5bd52a2
--- /dev/null
+++ b/app/src/androidTest/java/net/sourceforge/opencamera/MainInstrumentedTests.java
@@ -0,0 +1,13 @@
package net.sourceforge.opencamera;

import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/** Tests that don't fit into another of the Test suites.
 */

@RunWith(Categories.class)
@Categories.IncludeCategory(MainTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class MainInstrumentedTests {}
diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/PanoramaInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/PanoramaInstrumentedTests.java
new file mode 100644
index 0000000..f0155aa
--- /dev/null
+++ b/app/src/androidTest/java/net/sourceforge/opencamera/PanoramaInstrumentedTests.java
@@ -0,0 +1,17 @@
package net.sourceforge.opencamera;

import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/** Tests for Panorama algorithm - only need to run on a single device
 * Should manually look over the images dumped onto DCIM/
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device every time we run the tests.
 * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(PanoramaTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class PanoramaInstrumentedTests {}
diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/PhotoInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/PhotoInstrumentedTests.java
new file mode 100644
index 0000000..d9441d0
--- /dev/null
+++ b/app/src/androidTest/java/net/sourceforge/opencamera/PhotoInstrumentedTests.java
@@ -0,0 +1,13 @@
package net.sourceforge.opencamera;

import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/** Tests related to taking photos; note that tests to do with photo mode that don't take photos are still part of MainInstrumentedTests.
 */

@RunWith(Categories.class)
@Categories.IncludeCategory(PhotoTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class PhotoInstrumentedTests {}
diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/TestUtils.java b/app/src/androidTest/java/net/sourceforge/opencamera/TestUtils.java
new file mode 100644
index 0000000..58b1ac7
--- /dev/null
+++ b/app/src/androidTest/java/net/sourceforge/opencamera/TestUtils.java
@@ -0,0 +1,1681 @@
package net.sourceforge.opencamera;

import static org.junit.Assert.*;

import android.annotation.TargetApi;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.camera2.CameraExtensionCharacteristics;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.preference.PreferenceManager;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;

import androidx.exifinterface.media.ExifInterface;

import net.sourceforge.opencamera.preview.Preview;

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;

/** Helper class for testing. This class should not include any code specific to any test framework
 * (e.g., shouldn't be specific to ActivityInstrumentationTestCase2).
 */
public class TestUtils {
    private static final String TAG = "TestUtils";

    // whether the test run should force the Camera2 API; flip the commented line to enable
    public static final boolean test_camera2 = false;
    //public static final boolean test_camera2 = true;

    // base paths for the sample image test data that must be manually copied onto the device
    final private static String images_base_path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath();
    final public static String hdr_images_path = images_base_path + "/testOpenCamera/testdata/hdrsamples/";
    final public static String avg_images_path = images_base_path + "/testOpenCamera/testdata/avgsamples/";
    final public static String logprofile_images_path = images_base_path + "/testOpenCamera/testdata/logprofilesamples/";
    final public static String panorama_images_path = images_base_path + "/testOpenCamera/testdata/panoramasamples/";

    /** Marks the launch intent so the activity knows it's running under test.
     */
    public static void setDefaultIntent(Intent intent) {
        intent.putExtra("test_project", true);
    }

    /** Code to call before running each test.
     */
    public static void initTest(Context context) {
        Log.d(TAG, "initTest: " + test_camera2);
        // initialise test statics (to avoid the persisting between tests in a test suite run!)
        MainActivity.test_preview_want_no_limits = false;
        MainActivity.test_preview_want_no_limits_value = false;
        ImageSaver.test_small_queue_size = false;

        // reset all preferences, then opt into Camera2 if requested
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(context);
        SharedPreferences.Editor editor = settings.edit();
        editor.clear();
        if( test_camera2 ) {
            MainActivity.test_force_supports_camera2 = true;
            //editor.putBoolean(PreferenceKeys.UseCamera2PreferenceKey, true);
            editor.putString(PreferenceKeys.CameraAPIPreferenceKey, "preference_camera_api_camera2");
        }
        editor.apply();

        Log.d(TAG, "initTest: done");
    }

    /** Heuristic check for running on an emulator, based on the build model string.
     */
    public static boolean isEmulator() {
        return Build.MODEL.contains("Android SDK built for x86");
    }

    /** Converts a path to a Uri for com.android.providers.media.documents.
     */
    private static Uri getDocumentUri(String filename) throws FileNotFoundException {
        Log.d(TAG, "getDocumentUri: " + filename);

        // convert from File path format to Storage Access Framework form
        Uri treeUri = Uri.parse("content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FtestOpenCamera");
        Log.d(TAG, "treeUri: " + treeUri);
        if( !filename.startsWith(images_base_path) ) {
            Log.e(TAG, "unknown base for: " + filename);
            throw new FileNotFoundException();
        }
        String stem = filename.substring(images_base_path.length());
        Uri stemUri = Uri.parse("content://com.android.externalstorage.documents/tree/primary%3ADCIM" + stem.replace("/", "%2F"));
        Log.d(TAG, "stem: " + stem);
        Log.d(TAG, "stemUri: " + stemUri);
        //String docID = "primary:DCIM" + stem;
        String docID = DocumentsContract.getTreeDocumentId(stemUri);
        Log.d(TAG, "docID: " + docID);
        Uri uri = DocumentsContract.buildDocumentUriUsingTree(treeUri, docID);

        if( uri == null ) {
            throw new FileNotFoundException();
        }
        return uri;
    }

    /** Convenience overload of the three-argument version, with no downsampling.
     */
    public static Bitmap getBitmapFromFile(MainActivity activity, String filename) {
        return getBitmapFromFile(activity, filename, 1);
    }

    public static Bitmap
            getBitmapFromFile(MainActivity activity, String filename, int inSampleSize) {
        try {
            return getBitmapFromFileCore(activity, filename, inSampleSize);
        }
        catch(FileNotFoundException e) {
            // fail the test rather than propagating - missing test data is a setup error
            Log.e(TAG, "FileNotFoundException loading: " + filename, e);
            fail("FileNotFoundException loading: " + filename);
            return null;
        }
    }

    /** Loads bitmap from supplied filename.
     * Note that on Android 10+ (with scoped storage), this uses Storage Access Framework, which
     * means Open Camera must have SAF permission to the folder DCIM/testOpenCamera.
     */
    private static Bitmap getBitmapFromFileCore(MainActivity activity, String filename, int inSampleSize) throws FileNotFoundException {
        Log.d(TAG, "getBitmapFromFileCore: " + filename);
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inMutable = true;
        //options.inSampleSize = inSampleSize;
        if( inSampleSize > 1 ) {
            // use inDensity for better quality, as inSampleSize uses nearest neighbour
            // see same code in ImageSaver.setBitmapOptionsSampleSize()
            options.inDensity = inSampleSize;
            options.inTargetDensity = 1;
        }

        Uri uri = null;
        Bitmap bitmap;

        // scoped storage requires going via SAF; otherwise decode directly from the file path
        if( MainActivity.useScopedStorage() ) {
            uri = getDocumentUri(filename);
            Log.d(TAG, "uri: " + uri);
            InputStream is = activity.getContentResolver().openInputStream(uri);
            bitmap = BitmapFactory.decodeStream(is, null, options);
            try {
                is.close();
            }
            catch(IOException e) {
                Log.e(TAG, "failed to close input stream", e);
            }
        }
        else {
            bitmap = BitmapFactory.decodeFile(filename, options);
        }
        if( bitmap == null )
            throw new FileNotFoundException();
        Log.d(TAG, " done: " + bitmap);

        // now need to take exif orientation into account, as some devices or camera apps store the orientation in the exif tag,
        // which getBitmap() doesn't account for
        ParcelFileDescriptor parcelFileDescriptor = null;
        FileDescriptor fileDescriptor;
        try {
            ExifInterface exif = null;
            if( uri != null ) {
                parcelFileDescriptor = activity.getContentResolver().openFileDescriptor(uri, "r");
                if( parcelFileDescriptor != null ) {
                    fileDescriptor = parcelFileDescriptor.getFileDescriptor();
                    exif = new ExifInterface(fileDescriptor);
                }
            }
            else {
                exif = new ExifInterface(filename);
            }
            if( exif != null ) {
                int exif_orientation_s = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
                boolean needs_tf = false;
                int exif_orientation = 0;
                // from http://jpegclub.org/exif_orientation.html
                // and http://stackoverflow.com/questions/20478765/how-to-get-the-correct-orientation-of-the-image-selected-from-the-default-image
                if( exif_orientation_s == ExifInterface.ORIENTATION_UNDEFINED || exif_orientation_s == ExifInterface.ORIENTATION_NORMAL ) {
                    // leave unchanged
                }
                else if( exif_orientation_s == ExifInterface.ORIENTATION_ROTATE_180 ) {
                    needs_tf = true;
                    exif_orientation = 180;
                }
                else if( exif_orientation_s == ExifInterface.ORIENTATION_ROTATE_90 ) {
                    needs_tf = true;
                    exif_orientation = 90;
                }
                else if( exif_orientation_s == ExifInterface.ORIENTATION_ROTATE_270 ) {
                    needs_tf = true;
                    exif_orientation = 270;
                }
                else {
                    // just leave unchanged for now
                    Log.e(TAG, " unsupported exif orientation: " + exif_orientation_s);
                }
                Log.d(TAG, " exif orientation: " + exif_orientation);

                if( needs_tf ) {
                    Log.d(TAG, " need to rotate bitmap due to exif orientation tag");
                    Matrix m = new Matrix();
                    m.setRotate(exif_orientation, bitmap.getWidth() * 0.5f, bitmap.getHeight() * 0.5f);
                    Bitmap rotated_bitmap = Bitmap.createBitmap(bitmap, 0, 0,bitmap.getWidth(), bitmap.getHeight(), m, true);
                    if( rotated_bitmap != bitmap ) {
                        bitmap.recycle();
                        bitmap = rotated_bitmap;
                    }
                }
            }
        }
        catch(IOException e) {
            Log.e(TAG, "failed to load bitmap", e);
        }
        finally {
            if( parcelFileDescriptor != null ) {
                try {
                    parcelFileDescriptor.close();
                }
                catch(IOException e) {
                    Log.e(TAG, "failed to close parcelFileDescriptor", e);
                }
            }
+ } + /*{ + for(int y=0;y= Build.VERSION_CODES.Q ? + MediaStore.Images.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY) : + MediaStore.Images.Media.EXTERNAL_CONTENT_URI; + + // first try to delete pre-existing image + Uri old_uri = getUriFromName(activity, folder, name); + if( old_uri != null ) { + Log.d(TAG, "delete: " + old_uri); + activity.getContentResolver().delete(old_uri, null, null); + } + + contentValues = new ContentValues(); + contentValues.put(MediaStore.Images.Media.DISPLAY_NAME, name); + String extension = name.substring(name.lastIndexOf(".")); + String mime_type = activity.getStorageUtils().getImageMimeType(extension); + Log.d(TAG, "mime_type: " + mime_type); + contentValues.put(MediaStore.Images.Media.MIME_TYPE, mime_type); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + String relative_path = Environment.DIRECTORY_DCIM + File.separator; + Log.d(TAG, "relative_path: " + relative_path); + contentValues.put(MediaStore.Images.Media.RELATIVE_PATH, relative_path); + contentValues.put(MediaStore.Images.Media.IS_PENDING, 1); + } + + uri = activity.getContentResolver().insert(folder, contentValues); + Log.d(TAG, "saveUri: " + uri); + if( uri == null ) { + throw new IOException(); + } + outputStream = activity.getContentResolver().openOutputStream(uri); + } + else { + file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + File.separator + name); + outputStream = new FileOutputStream(file); + } + + bitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream); + outputStream.close(); + + if( MainActivity.useScopedStorage() ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + contentValues.clear(); + contentValues.put(MediaStore.Images.Media.IS_PENDING, 0); + activity.getContentResolver().update(uri, contentValues, null, null); + } + } + else { + activity.getStorageUtils().broadcastFile(file, true, false, true, false, null); + } + } + + public static class HistogramDetails { + public final int 
min_value; + public final int median_value; + public final int max_value; + + HistogramDetails(int min_value, int median_value, int max_value) { + this.min_value = min_value; + this.median_value = median_value; + this.max_value = max_value; + } + } + + /** Checks for the resultant histogram. + * We check that we have a single range of non-zero values. + * @param bitmap The bitmap to compute and check a histogram for. + */ + public static HistogramDetails checkHistogram(MainActivity activity, Bitmap bitmap) { + int [] histogram = activity.getApplicationInterface().getHDRProcessor().computeHistogram(bitmap, HDRProcessor.HistogramType.HISTOGRAM_TYPE_INTENSITY); + assertEquals(256, histogram.length); + int total = 0; + for(int i=0;i= middle && median_value == -1 ) + median_value = i; + } + } + Log.d(TAG, "min_value: " + min_value); + Log.d(TAG, "median_value: " + median_value); + Log.d(TAG, "max_value: " + max_value); + return new HistogramDetails(min_value, median_value, max_value); + } + + public static HistogramDetails subTestHDR(MainActivity activity, List inputs, String output_name, boolean test_dro, int iso, long exposure_time) { + return subTestHDR(activity, inputs, output_name, test_dro, iso, exposure_time, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD); + } + + /** The testHDRX tests test the HDR algorithm on a given set of input images. + * By testing on a fixed sample, this makes it easier to finetune the HDR algorithm for quality and performance. + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device everytime we run the tests. + * @param iso The ISO of the middle image (for testing Open Camera's "smart" contrast enhancement). If set to -1, then use "always" contrast enhancement. 
+ * @param exposure_time The exposure time of the middle image (for testing Open Camera's "smart" contrast enhancement) + */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + public static HistogramDetails subTestHDR(MainActivity activity, List inputs, String output_name, boolean test_dro, int iso, long exposure_time, HDRProcessor.TonemappingAlgorithm tonemapping_algorithm/*, HDRTestCallback test_callback*/) { + Log.d(TAG, "subTestHDR"); + + try { + Thread.sleep(1000); // wait for camera to open + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + Bitmap dro_bitmap_in = null; + if( test_dro ) { + // save copy of input bitmap to also test DRO (since the HDR routine will free the inputs) + int mid = (inputs.size()-1)/2; + dro_bitmap_in = inputs.get(mid); + dro_bitmap_in = dro_bitmap_in.copy(dro_bitmap_in.getConfig(), true); + } + + HistogramDetails hdrHistogramDetails = null; + if( inputs.size() > 1 ) { + String preference_hdr_contrast_enhancement = (iso==-1) ? 
"preference_hdr_contrast_enhancement_always" : "preference_hdr_contrast_enhancement_smart"; + float hdr_alpha = ImageSaver.getHDRAlpha(preference_hdr_contrast_enhancement, exposure_time, inputs.size()); + long time_s = System.currentTimeMillis(); + try { + activity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, hdr_alpha, 4, true, tonemapping_algorithm, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA); + //test_callback.doHDR(inputs, tonemapping_algorithm, hdr_alpha); + } + catch(HDRProcessorException e) { + Log.e(TAG, "processHDR failed", e); + throw new RuntimeException(); + } + Log.d(TAG, "HDR time: " + (System.currentTimeMillis() - time_s)); + + saveBitmap(activity, inputs.get(0), output_name); + hdrHistogramDetails = checkHistogram(activity, inputs.get(0)); + } + inputs.get(0).recycle(); + inputs.clear(); + + if( test_dro ) { + inputs.add(dro_bitmap_in); + long time_s = System.currentTimeMillis(); + try { + activity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.5f, 4, true, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA); + //test_callback.doHDR(inputs, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, 0.5f); + } + catch(HDRProcessorException e) { + Log.e(TAG, "processHDR failed", e); + throw new RuntimeException(); + } + Log.d(TAG, "DRO time: " + (System.currentTimeMillis() - time_s)); + + saveBitmap(activity, inputs.get(0), "dro" + output_name); + checkHistogram(activity, inputs.get(0)); + inputs.get(0).recycle(); + inputs.clear(); + } + try { + Thread.sleep(500); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + return hdrHistogramDetails; + } + + public static void checkHDROffsets(MainActivity activity, int [] exp_offsets_x, int [] exp_offsets_y) { + checkHDROffsets(activity, exp_offsets_x, exp_offsets_y, 1); + } + + /** Checks that 
the HDR offsets used for auto-alignment are as expected. + */ + public static void checkHDROffsets(MainActivity activity, int [] exp_offsets_x, int [] exp_offsets_y, int scale) { + int [] offsets_x = activity.getApplicationInterface().getHDRProcessor().offsets_x; + int [] offsets_y = activity.getApplicationInterface().getHDRProcessor().offsets_y; + for(int i=0;i inputs, String output_name, int iso, long exposure_time, float zoom_factor, TestAvgCallback cb) { + Log.d(TAG, "subTestAvg"); + + try { + Thread.sleep(1000); // wait for camera to open + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + /*Bitmap nr_bitmap = getBitmapFromFile(activity, inputs.get(0)); + long time_s = System.currentTimeMillis(); + try { + for(int i=1;i times = new ArrayList<>(); + long time_s = System.currentTimeMillis(); + HDRProcessor.AvgData avg_data = hdrProcessor.processAvg(bitmap0, bitmap1, avg_factor, iso, exposure_time, zoom_factor); + times.add(System.currentTimeMillis() - time_s); + // processAvg recycles both bitmaps + if( cb != null ) { + cb.doneProcessAvg(1); + } + + for(int i=2;i inputs, String output_name, String gyro_debug_info_filename, float panorama_pics_per_screen, float camera_angle_x, float camera_angle_y, float gyro_tol_degrees) { + Log.d(TAG, "subTestPanorama"); + + // we set panorama_pics_per_screen in the test rather than using MyApplicationInterface.panorama_pics_per_screen, + // in case the latter value is changed + + boolean first = true; + Matrix scale_matrix = null; + int bitmap_width = 0; + int bitmap_height = 0; + List bitmaps = new ArrayList<>(); + for(String input : inputs) { + Bitmap bitmap = getBitmapFromFile(activity, input); + + if( first ) { + bitmap_width = bitmap.getWidth(); + bitmap_height = bitmap.getHeight(); + Log.d(TAG, "bitmap_width: " + bitmap_width); + Log.d(TAG, "bitmap_height: " + bitmap_height); + + final int max_height = 2080; + //final int max_height = 2079; // test non power of 2 + if( 
bitmap_height > max_height ) { + float scale = ((float)max_height) / ((float)bitmap_height); + Log.d(TAG, "scale: " + scale); + scale_matrix = new Matrix(); + scale_matrix.postScale(scale, scale); + } + + first = false; + } + + // downscale + if( scale_matrix != null ) { + Bitmap new_bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap_width, bitmap_height, scale_matrix, true); + bitmap.recycle(); + bitmap = new_bitmap; + } + + bitmaps.add(bitmap); + } + + bitmap_width = bitmaps.get(0).getWidth(); + bitmap_height = bitmaps.get(0).getHeight(); + Log.d(TAG, "bitmap_width is now: " + bitmap_width); + Log.d(TAG, "bitmap_height is now: " + bitmap_height); + + + /*ImageSaver.GyroDebugInfo gyro_debug_info = null; + if( gyro_debug_info_filename != null ) { + InputStream inputStream; + try { + inputStream = new FileInputStream(gyro_debug_info_filename); + } + catch(FileNotFoundException e) { + Log.e(TAG, "failed to load gyro debug info file: " + gyro_debug_info_filename, e); + throw new RuntimeException(); + } + + gyro_debug_info = new ImageSaver.GyroDebugInfo(); + if( !ImageSaver.readGyroDebugXml(inputStream, gyro_debug_info) ) { + Log.e(TAG, "failed to read gyro debug xml"); + throw new RuntimeException(); + } + else if( gyro_debug_info.image_info.size() != bitmaps.size() ) { + Log.e(TAG, "gyro debug xml has unexpected number of images: " + gyro_debug_info.image_info.size()); + throw new RuntimeException(); + } + }*/ + //bitmaps.subList(2,bitmaps.size()).clear(); // test + + Bitmap panorama = null; + try { + final boolean crop = true; + //final boolean crop = false; // test + panorama = activity.getApplicationInterface().getPanoramaProcessor().panorama(bitmaps, panorama_pics_per_screen, camera_angle_y, crop); + } + catch(PanoramaProcessorException e) { + Log.e(TAG, "panorama failed", e); + fail(); + } + + saveBitmap(activity, panorama, output_name); + try { + Thread.sleep(500); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + 
// check we've cropped correctly: + final float black_factor = 0.9f; + // top: + int n_black = 0; + for(int i=0;i> 16) & 0xff) == 0 && ((color >> 8) & 0xff) == 0 && ((color) & 0xff) == 0 ) { + n_black++; + } + } + if( n_black >= panorama.getWidth()*black_factor ) { + Log.e(TAG, "too many black pixels on top border: " + n_black); + fail(); + } + // bottom: + n_black = 0; + for(int i=0;i> 16) & 0xff) == 0 && ((color >> 8) & 0xff) == 0 && ((color) & 0xff) == 0 ) { + n_black++; + } + } + if( n_black >= panorama.getWidth()*black_factor ) { + Log.e(TAG, "too many black pixels on bottom border: " + n_black); + fail(); + } + // left: + n_black = 0; + for(int i=0;i> 16) & 0xff) == 0 && ((color >> 8) & 0xff) == 0 && ((color) & 0xff) == 0 ) { + n_black++; + } + } + if( n_black >= panorama.getHeight()*black_factor ) { + Log.e(TAG, "too many black pixels on left border: " + n_black); + fail(); + } + // right: + n_black = 0; + for(int i=0;i> 16) & 0xff) == 0 && ((color >> 8) & 0xff) == 0 && ((color) & 0xff) == 0 ) { + n_black++; + } + } + if( n_black >= panorama.getHeight()*black_factor ) { + Log.e(TAG, "too many black pixels on right border: " + n_black); + fail(); + } + } + + public static void waitForTakePhotoChecks(MainActivity activity, long time_s) { + Preview preview = activity.getPreview(); + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + //View flashButton = activity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = activity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = 
activity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = activity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = activity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = activity.findViewById(net.sourceforge.opencamera.R.id.share); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity); + boolean is_focus_bracketing = activity.supportsFocusBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_focus_bracketing"); + boolean is_panorama = activity.supportsPanorama() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_panorama"); + + // make sure the test fails rather than hanging, if for some reason we get stuck (note that testTakePhotoManualISOExposure takes over 10s on Nexus 6) + // also see note at end of setToDefault for Nokia 8, need to sleep briefly to avoid hanging here + if( !is_focus_bracketing ) { + assertTrue(System.currentTimeMillis() - time_s < (is_panorama ? 
50000 : 20000)); // need longer for panorama on Nexus 7 for testTakePhotoPanoramaMax + } + assertTrue(!preview.isTakingPhoto() || switchCameraButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || switchMultiCameraButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || switchVideoButton.getVisibility() == View.GONE); + //assertTrue(!preview.isTakingPhoto() || flashButton.getVisibility() == View.GONE); + //assertTrue(!preview.isTakingPhoto() || focusButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || exposureButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || exposureLockButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || audioControlButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || popupButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || trashButton.getVisibility() == View.GONE); + assertTrue(!preview.isTakingPhoto() || shareButton.getVisibility() == View.GONE); + } + + private static void checkFocusInitial(MainActivity activity, final String focus_value, final String focus_value_ui) { + String new_focus_value_ui = activity.getPreview().getCurrentFocusValue(); + //noinspection StringEquality + assertTrue(new_focus_value_ui == focus_value_ui || new_focus_value_ui.equals(focus_value_ui)); // also need to do == check, as strings may be null if focus not supported + assertEquals(activity.getPreview().getCameraController().getFocusValue(), focus_value); + } + + public static void checkFocusAfterTakePhoto(MainActivity activity, final String focus_value, final String focus_value_ui) { + // focus should be back to normal now: + String new_focus_value_ui = activity.getPreview().getCurrentFocusValue(); + Log.d(TAG, "focus_value_ui: " + focus_value_ui); + Log.d(TAG, "new new_focus_value_ui: " + new_focus_value_ui); + //noinspection StringEquality + assertTrue(new_focus_value_ui == focus_value_ui 
|| new_focus_value_ui.equals(focus_value_ui)); // also need to do == check, as strings may be null if focus not supported + String new_focus_value = activity.getPreview().getCameraController().getFocusValue(); + Log.d(TAG, "focus_value: " + focus_value); + Log.d(TAG, "new focus_value: " + new_focus_value); + if( new_focus_value_ui != null && new_focus_value_ui.equals("focus_mode_continuous_picture") && focus_value.equals("focus_mode_auto") && new_focus_value.equals("focus_mode_continuous_picture") ) { + // this is fine, it just means we were temporarily in touch-to-focus mode + } + else { + assertEquals(new_focus_value, focus_value); + } + } + + public static void checkFocusAfterTakePhoto2(MainActivity activity, final boolean touch_to_focus, final boolean single_tap_photo, final boolean double_tap_photo, final boolean test_wait_capture_result, final boolean locked_focus, final boolean can_auto_focus, final boolean can_focus_area, final int saved_count) { + Preview preview = activity.getPreview(); + // in locked focus mode, taking photo should never redo an auto-focus + // if photo mode, we may do a refocus if the previous auto-focus failed, but not if it succeeded + Log.d(TAG, "2 count_cameraAutoFocus: " + preview.count_cameraAutoFocus); + if( locked_focus ) { + assertEquals(preview.count_cameraAutoFocus, (can_auto_focus ? 
saved_count + 1 : saved_count));
        }
        if( test_wait_capture_result ) {
            // if test_wait_capture_result, then we'll have waited too long, so focus settings may have changed
        }
        else if( touch_to_focus ) {
            Log.d(TAG, "can_focus_area?: " + can_focus_area);
            Log.d(TAG, "hasFocusArea?: " + preview.hasFocusArea());
            if( single_tap_photo || double_tap_photo ) {
                // tap went straight to taking a photo, so no focus/metering areas should be set
                assertFalse(preview.hasFocusArea());
                assertNull(preview.getCameraController().getFocusAreas());
                assertNull(preview.getCameraController().getMeteringAreas());
            }
            else if( can_focus_area ) {
                // touch-to-focus should have set exactly one focus area and one metering area
                assertTrue(preview.hasFocusArea());
                assertNotNull(preview.getCameraController().getFocusAreas());
                assertEquals(1, preview.getCameraController().getFocusAreas().size());
                assertNotNull(preview.getCameraController().getMeteringAreas());
                assertEquals(1, preview.getCameraController().getMeteringAreas().size());
            }
            else {
                assertFalse(preview.hasFocusArea());
                assertNull(preview.getCameraController().getFocusAreas());

                if( preview.getCameraController().supportsMetering() ) {
                    // we still set metering areas
                    assertNotNull(preview.getCameraController().getMeteringAreas());
                    assertEquals(1, preview.getCameraController().getMeteringAreas().size());
                }
                else {
                    assertNull(preview.getCameraController().getMeteringAreas());
                }
            }
        }
        else {
            // no touch-to-focus: no areas should be set at all
            assertFalse(preview.hasFocusArea());
            assertNull(preview.getCameraController().getFocusAreas());
            assertNull(preview.getCameraController().getMeteringAreas());
        }
    }

    /** Computes how many new files a single capture is expected to produce, based on the
     *  current photo mode preferences (HDR with saved exposures, expo/focus bracketing,
     *  fast burst, RAW+JPEG, pre-shots).
     *  @param is_raw Whether RAW output is enabled.
     *  @return The expected number of newly saved files.
     */
    private static int getExpNNewFiles(MainActivity activity, final boolean is_raw) {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
        boolean hdr_save_expo = sharedPreferences.getBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, false);
        boolean is_hdr = activity.supportsHDR() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_hdr");
        boolean is_expo = activity.supportsExpoBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_expo_bracketing");
        boolean is_focus_bracketing = activity.supportsFocusBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_focus_bracketing");
        boolean is_fast_burst = activity.supportsFastBurst() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_fast_burst");
        String n_expo_images_s = sharedPreferences.getString(PreferenceKeys.ExpoBracketingNImagesPreferenceKey, "3");
        int n_expo_images = Integer.parseInt(n_expo_images_s);
        String n_focus_bracketing_images_s = sharedPreferences.getString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, "3");
        int n_focus_bracketing_images = Integer.parseInt(n_focus_bracketing_images_s);
        String n_fast_burst_images_s = sharedPreferences.getString(PreferenceKeys.FastBurstNImagesPreferenceKey, "5");
        int n_fast_burst_images = Integer.parseInt(n_fast_burst_images_s);
        boolean is_preshot = activity.getApplicationInterface().getPreShotsPref(activity.getApplicationInterface().getPhotoMode());

        int exp_n_new_files;
        if( is_hdr && hdr_save_expo ) {
            // HDR result plus the 3 saved exposures
            exp_n_new_files = 4;
            if( is_raw && !activity.getApplicationInterface().isRawOnly() ) {
                exp_n_new_files += 3;
            }
        }
        else if( is_expo ) {
            exp_n_new_files = n_expo_images;
            if( is_raw && !activity.getApplicationInterface().isRawOnly() ) {
                exp_n_new_files *= 2; // a DNG alongside each JPEG
            }
        }
        else if( is_focus_bracketing ) {
            exp_n_new_files = n_focus_bracketing_images;
            if( is_raw && !activity.getApplicationInterface().isRawOnly() ) {
                exp_n_new_files *= 2;
            }
        }
        else if( is_fast_burst )
            exp_n_new_files = n_fast_burst_images;
        else {
            exp_n_new_files = 1;
            if( is_raw && !activity.getApplicationInterface().isRawOnly() ) {
                exp_n_new_files *= 2;
            }
        }

        if( is_preshot )
            exp_n_new_files++; // plus the pre-shots video

        Log.d(TAG, "exp_n_new_files: " + exp_n_new_files);
        return exp_n_new_files;
    }

    /** Checks that the newly created files (those in files2 but not in files) have the
     *  names/extensions expected for the current photo mode.
     *  @param files  Filenames present before the capture (may be null).
     *  @param files2 Filenames present after the capture.
     */
    private static void checkFilenames(MainActivity activity, final boolean is_raw, final String [] files, final String [] files2) {
        Log.d(TAG, "checkFilenames");
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity);
        boolean hdr_save_expo = sharedPreferences.getBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, false);
        boolean is_hdr = activity.supportsHDR() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_hdr");
        boolean is_fast_burst = activity.supportsFastBurst() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_fast_burst");
        boolean is_expo = activity.supportsExpoBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_expo_bracketing");
        boolean is_focus_bracketing = activity.supportsFocusBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_focus_bracketing");
        boolean is_preshot = activity.getApplicationInterface().getPreShotsPref(activity.getApplicationInterface().getPhotoMode());

        // check files have names as expected
        String filename_jpeg = null;
        String filename_dng = null;
        String filename_preshot_video = null;
        int n_files = files == null ?
0 : files.length; + for(String file : files2) { + Log.d(TAG, "check file: " + file); + boolean is_new = true; + for(int j=0;j mediaFilesinSaveFolder(MainActivity activity, Uri baseUri, String bucket_id, UriType uri_type) { + List files = new ArrayList<>(); + final int column_name_c = 0; // filename (without path), including extension + + String [] projection; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + projection = new String[] {MediaStore.Images.ImageColumns.DISPLAY_NAME}; + break; + case MEDIASTORE_VIDEOS: + //noinspection DuplicateBranchesInSwitch + projection = new String[] {MediaStore.Video.VideoColumns.DISPLAY_NAME}; + break; + case STORAGE_ACCESS_FRAMEWORK: + projection = new String[] {DocumentsContract.Document.COLUMN_DISPLAY_NAME}; + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + + String selection = ""; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + selection = MediaStore.Images.ImageColumns.BUCKET_ID + " = " + bucket_id; + break; + case MEDIASTORE_VIDEOS: + //noinspection DuplicateBranchesInSwitch + selection = MediaStore.Video.VideoColumns.BUCKET_ID + " = " + bucket_id; + break; + case STORAGE_ACCESS_FRAMEWORK: + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + Log.d(TAG, "selection: " + selection); + + Cursor cursor = activity.getContentResolver().query(baseUri, projection, selection, null, null); + if( cursor != null && cursor.moveToFirst() ) { + Log.d(TAG, "found: " + cursor.getCount()); + + do { + String name = cursor.getString(column_name_c); + files.add(name); + } + while( cursor.moveToNext() ); + } + + if( cursor != null ) { + cursor.close(); + } + + return files; + } + + /** Returns an array of filenames (not including full path) in the current save folder. 
+ */ + public static String [] filesInSaveFolder(MainActivity activity) { + Log.d(TAG, "filesInSaveFolder"); + if( MainActivity.useScopedStorage() ) { + List files = new ArrayList<>(); + if( activity.getStorageUtils().isUsingSAF() ) { + // See documentation for StorageUtils.getLatestMediaSAF() - for some reason with scoped storage when not having READ_EXTERNAL_STORAGE, + // we can't query the mediastore for files saved via SAF! + Uri treeUri = activity.getStorageUtils().getTreeUriSAF(); + Uri baseUri = DocumentsContract.buildChildDocumentsUriUsingTree(treeUri, DocumentsContract.getTreeDocumentId(treeUri)); + files.addAll( mediaFilesinSaveFolder(activity, baseUri, null, UriType.STORAGE_ACCESS_FRAMEWORK) ); + } + else { + String save_folder = activity.getStorageUtils().getImageFolderPath(); + String bucket_id = String.valueOf(save_folder.toLowerCase().hashCode()); + files.addAll( mediaFilesinSaveFolder(activity, MediaStore.Images.Media.EXTERNAL_CONTENT_URI, bucket_id, UriType.MEDIASTORE_IMAGES) ); + files.addAll( mediaFilesinSaveFolder(activity, MediaStore.Video.Media.EXTERNAL_CONTENT_URI, bucket_id, UriType.MEDIASTORE_VIDEOS) ); + } + + if( files.isEmpty() ) { + return null; + } + else { + return files.toArray(new String[0]); + } + } + else { + File folder = activity.getImageFolder(); + File [] files = folder.listFiles(); + if( files == null ) + return null; + String [] filenames = new String[files.length]; + for(int i=0;i 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (activity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + if( !immersive_mode ) { + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + } + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? 
View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + } + } + + public static class SubTestTakePhotoInfo { + public boolean has_thumbnail_anim; + public boolean is_hdr; + public boolean is_nr; + public boolean is_expo; + public int exposureVisibility; + public int exposureLockVisibility; + public String focus_value; + public String focus_value_ui; + public boolean can_auto_focus; + public boolean manual_can_auto_focus; + public boolean can_focus_area; + } + + public static SubTestTakePhotoInfo getSubTestTakePhotoInfo(MainActivity activity, boolean immersive_mode, boolean single_tap_photo, boolean double_tap_photo) { + assertTrue(activity.getPreview().isPreviewStarted()); + assertFalse(activity.getApplicationInterface().getImageSaver().test_queue_blocked); + + SubTestTakePhotoInfo info = new SubTestTakePhotoInfo(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity); + + info.has_thumbnail_anim = sharedPreferences.getBoolean(PreferenceKeys.ThumbnailAnimationPreferenceKey, true); + info.is_hdr = activity.supportsHDR() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_hdr"); + info.is_nr = activity.supportsNoiseReduction() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_noise_reduction"); + info.is_expo = activity.supportsExpoBracketing() && sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std").equals("preference_photo_mode_expo_bracketing"); + + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View 
switchMultiCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + //View flashButton = activity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = activity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = activity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = activity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = activity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = activity.findViewById(net.sourceforge.opencamera.R.id.share); + assertEquals(switchCameraButton.getVisibility(), (immersive_mode ? View.GONE : (activity.getPreview().getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE))); + assertEquals(switchMultiCameraButton.getVisibility(), (immersive_mode ? View.GONE : (activity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE))); + assertEquals(switchVideoButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + info.exposureVisibility = exposureButton.getVisibility(); + info.exposureLockVisibility = exposureLockButton.getVisibility(); + assertEquals(audioControlButton.getVisibility(), ((has_audio_control_button && !immersive_mode) ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), (immersive_mode ? 
View.GONE : View.VISIBLE)); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + info.focus_value = activity.getPreview().getCameraController().getFocusValue(); + info.focus_value_ui = activity.getPreview().getCurrentFocusValue(); + info.can_auto_focus = false; + info.manual_can_auto_focus = false; + info.can_focus_area = false; + if( info.focus_value.equals("focus_mode_auto") || info.focus_value.equals("focus_mode_macro") ) { + info.can_auto_focus = true; + } + + if( info.focus_value.equals("focus_mode_auto") || info.focus_value.equals("focus_mode_macro") ) { + info.manual_can_auto_focus = true; + } + else if( info.focus_value.equals("focus_mode_continuous_picture") && !single_tap_photo && !double_tap_photo ) { + // if single_tap_photo or double_tap_photo, and continuous mode, we go straight to taking a photo rather than doing a touch to focus + info.manual_can_auto_focus = true; + } + + if( activity.getPreview().getMaxNumFocusAreas() != 0 && ( info.focus_value.equals("focus_mode_auto") || info.focus_value.equals("focus_mode_macro") || info.focus_value.equals("focus_mode_continuous_picture") || info.focus_value.equals("focus_mode_continuous_video") || info.focus_value.equals("focus_mode_manual2") ) ) { + info.can_focus_area = true; + } + Log.d(TAG, "focus_value? " + info.focus_value); + Log.d(TAG, "can_auto_focus? " + info.can_auto_focus); + Log.d(TAG, "manual_can_auto_focus? " + info.manual_can_auto_focus); + Log.d(TAG, "can_focus_area? 
" + info.can_focus_area); + + checkFocusInitial(activity, info.focus_value, info.focus_value_ui); + + return info; + } + + public static void touchToFocusChecks(MainActivity activity, final boolean single_tap_photo, final boolean double_tap_photo, final boolean manual_can_auto_focus, final boolean can_focus_area, final String focus_value, final String focus_value_ui, int saved_count) { + Preview preview = activity.getPreview(); + Log.d(TAG, "1 count_cameraAutoFocus: " + preview.count_cameraAutoFocus); + assertEquals((manual_can_auto_focus ? saved_count + 1 : saved_count), preview.count_cameraAutoFocus); + Log.d(TAG, "has focus area?: " + preview.hasFocusArea()); + if( single_tap_photo || double_tap_photo ) { + assertFalse(preview.hasFocusArea()); + assertNull(preview.getCameraController().getFocusAreas()); + assertNull(preview.getCameraController().getMeteringAreas()); + } + else if( can_focus_area ) { + assertTrue(preview.hasFocusArea()); + assertNotNull(preview.getCameraController().getFocusAreas()); + assertEquals(1, preview.getCameraController().getFocusAreas().size()); + assertNotNull(preview.getCameraController().getMeteringAreas()); + assertEquals(1, preview.getCameraController().getMeteringAreas().size()); + } + else { + assertFalse(preview.hasFocusArea()); + assertNull(preview.getCameraController().getFocusAreas()); + if( preview.getCameraController().supportsMetering() ) { + // we still set metering areas + assertNotNull(preview.getCameraController().getMeteringAreas()); + assertEquals(1, preview.getCameraController().getMeteringAreas().size()); + } + else { + assertNull(preview.getCameraController().getMeteringAreas()); + } + } + String new_focus_value_ui = preview.getCurrentFocusValue(); + //noinspection StringEquality + assertTrue(new_focus_value_ui == focus_value_ui || new_focus_value_ui.equals(focus_value_ui)); // also need to do == check, as strings may be null if focus not supported + if( focus_value.equals("focus_mode_continuous_picture") && 
!single_tap_photo && !double_tap_photo && preview.supportsFocus() && preview.getSupportedFocusValues().contains("focus_mode_auto") ) + assertEquals("focus_mode_auto", preview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch (unless single_tap_photo, or auto focus not supported) + else + assertEquals(preview.getCameraController().getFocusValue(), focus_value); + } + + private static boolean gpsIsNull(String gps_string) { + return gps_string == null || gps_string.equals("0/1,0/1,0/100000"); + } + + /** Tests the Exif tags in the resultant file. If the file is null, the uri will be + * used instead to read the Exif tags. + */ + public static void testExif(MainActivity activity, String file, Uri uri, boolean expect_device_tags, boolean expect_datetime, boolean expect_gps) throws IOException { + //final String TAG_GPS_IMG_DIRECTION = "GPSImgDirection"; + //final String TAG_GPS_IMG_DIRECTION_REF = "GPSImgDirectionRef"; + InputStream inputStream = null; + ExifInterface exif; + if( file != null ) { + assertNull(uri); // should only supply one of file or uri + exif = new ExifInterface(file); + } + else { + assertNotNull(uri); + inputStream = activity.getContentResolver().openInputStream(uri); + exif = new ExifInterface(inputStream); + } + + assertNotNull(exif.getAttribute(ExifInterface.TAG_ORIENTATION)); + if( !( isEmulator() && Build.VERSION.SDK_INT <= Build.VERSION_CODES.N_MR1 ) ) { + // older Android emulator versions don't store exif info in photos + if( expect_device_tags ) { + assertNotNull(exif.getAttribute(ExifInterface.TAG_MAKE)); + assertNotNull(exif.getAttribute(ExifInterface.TAG_MODEL)); + } + else { + assertNull(exif.getAttribute(ExifInterface.TAG_MAKE)); + assertNull(exif.getAttribute(ExifInterface.TAG_MODEL)); + + assertNull(exif.getAttribute(ExifInterface.TAG_F_NUMBER)); + assertNull(exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME)); + assertNull(exif.getAttribute(ExifInterface.TAG_FLASH)); + 
assertNull(exif.getAttribute(ExifInterface.TAG_FOCAL_LENGTH)); + assertNull(exif.getAttribute(ExifInterface.TAG_IMAGE_DESCRIPTION)); + assertNull(exif.getAttribute(ExifInterface.TAG_IMAGE_UNIQUE_ID)); + assertNull(exif.getAttribute(ExifInterface.TAG_USER_COMMENT)); + assertNull(exif.getAttribute(ExifInterface.TAG_ARTIST)); + assertNull(exif.getAttribute(ExifInterface.TAG_COPYRIGHT)); + } + + if( expect_datetime ) { + assertNotNull(exif.getAttribute(ExifInterface.TAG_DATETIME)); + assertNotNull(exif.getAttribute(ExifInterface.TAG_DATETIME_ORIGINAL)); + assertNotNull(exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED)); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP ) { + // not available on Galaxy Nexus Android 4.3 at least + assertNotNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME)); + assertNotNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL)); + assertNotNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED)); + // TAG_OFFSET_TIME at least no longer saved on Pixel 6 Pro + //assertNotNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME)); + //assertNotNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL)); + //assertNotNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED)); + } + } + else { + assertNull(exif.getAttribute(ExifInterface.TAG_DATETIME)); + assertNull(exif.getAttribute(ExifInterface.TAG_DATETIME_ORIGINAL)); + assertNull(exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED)); + assertNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME)); + assertNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL)); + assertNull(exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED)); + assertNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME)); + assertNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL)); + assertNull(exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED)); + } + + if( expect_gps ) { + 
assertFalse(gpsIsNull(exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE))); + assertNotNull(exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE_REF)); + assertFalse(gpsIsNull(exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE))); + assertNotNull(exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF)); + // can't read custom tags, even though we can write them?! + //assertTrue(exif.getAttribute(TAG_GPS_IMG_DIRECTION) != null); + //assertTrue(exif.getAttribute(TAG_GPS_IMG_DIRECTION_REF) != null); + } + else { + assertTrue(gpsIsNull(exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE))); + assertTrue(gpsIsNull(exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE))); + // TAG_GPS_LATITUDE_REF, TAG_GPS_LONGITUDE_REF are still non-null on Samsung Galaxy S24+ with Camera2 API + //assertNull(exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE_REF)); + //assertNull(exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF)); + // can't read custom tags, even though we can write them?! + //assertTrue(exif.getAttribute(TAG_GPS_IMG_DIRECTION) == null); + //assertTrue(exif.getAttribute(TAG_GPS_IMG_DIRECTION_REF) == null); + } + } + + if( inputStream != null ) { + inputStream.close(); + } + } + + public static void preTakeVideoChecks(MainActivity activity, boolean immersive_mode) { + Preview preview = activity.getPreview(); + + assertTrue(preview.isPreviewStarted()); + if( preview.usingCamera2API() ) { + assertEquals(preview.getCurrentPreviewSize().width, preview.getCameraController().test_texture_view_buffer_w); + assertEquals(preview.getCurrentPreviewSize().height, preview.getCameraController().test_texture_view_buffer_h); + } + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + View pauseVideoButton = 
activity.findViewById(net.sourceforge.opencamera.R.id.pause_video); + View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording); + View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View audioControlButton = activity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = activity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = activity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = activity.findViewById(net.sourceforge.opencamera.R.id.share); + + if( preview.isVideo() ) { + assertEquals((int) (Integer) takePhotoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_video_selector); + assertEquals((int) (Integer) switchVideoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_photo); + assertEquals(takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.start_video)); + assertEquals(pauseVideoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(switchVideoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_photo)); + } + else { + assertEquals((int) (Integer) takePhotoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_photo_selector); + assertEquals((int) (Integer) switchVideoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_video); + assertEquals(takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.take_photo)); + assertEquals(pauseVideoButton.getContentDescription(), 
activity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(switchVideoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_video)); + } + assertEquals(pauseVideoButton.getVisibility(), View.GONE); + assertEquals(takePhotoVideoButton.getVisibility(), View.GONE); + + assertEquals(switchCameraButton.getVisibility(), (immersive_mode ? View.GONE : (preview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE))); + assertEquals(switchMultiCameraButton.getVisibility(), (immersive_mode ? View.GONE : (activity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE))); + assertEquals(switchVideoButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + assertEquals(audioControlButton.getVisibility(), ((has_audio_control_button && !immersive_mode) ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + } + + public static void takeVideoRecordingChecks(MainActivity activity, boolean immersive_mode, int exposureVisibility, int exposureLockVisibility) { + Preview preview = activity.getPreview(); + + View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + View pauseVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.pause_video); + View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording); + View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View exposureButton = 
activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = activity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = activity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = activity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = activity.findViewById(net.sourceforge.opencamera.R.id.share); + + assertEquals((int) (Integer) takePhotoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_video_recording); + assertEquals((int) (Integer) switchVideoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_photo); + assertEquals(takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(pauseVideoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + else + assertEquals(pauseVideoButton.getVisibility(), View.GONE); + if( preview.supportsPhotoVideoRecording() ) + assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE); + else + assertEquals(takePhotoVideoButton.getVisibility(), View.GONE); + assertEquals(switchCameraButton.getVisibility(), View.GONE); + assertEquals(switchMultiCameraButton.getVisibility(), View.GONE); + //assertTrue(switchVideoButton.getVisibility() == (immersive_mode ? View.GONE : View.VISIBLE)); + assertEquals(switchVideoButton.getVisibility(), View.GONE); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), (!immersive_mode && preview.supportsFlash() ? 
View.VISIBLE : View.GONE)); // popup button only visible when recording video if flash supported + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + } + + public static void checkFilesAfterTakeVideo(MainActivity activity, boolean allow_failure, boolean has_cb, long time_ms, int n_non_video_files, boolean failed_to_start, int exp_n_new_files, int n_new_files) { + if( !has_cb ) { + if( time_ms <= 500 ) { + // if quick, should have deleted corrupt video - but may be device dependent, sometimes we manage to record a video anyway! + assertTrue(n_new_files == 0 || n_new_files == 1); + } + else if( failed_to_start ) { + // if video recording failed to start, we should have deleted any file created! + assertEquals(0, n_new_files); + } + else { + assertEquals(n_non_video_files+1, n_new_files); + } + } + else { + Log.d(TAG, "exp_n_new_files: " + exp_n_new_files); + if( exp_n_new_files >= 0 ) { + assertEquals(exp_n_new_files, n_new_files); + } + } + + Log.d(TAG, "test_n_videos_scanned: " + activity.getApplicationInterface().test_n_videos_scanned); + if( !allow_failure ) { + assertEquals(n_new_files-n_non_video_files, activity.getApplicationInterface().test_n_videos_scanned); + } + } + + public static void postTakeVideoChecks(MainActivity activity, boolean immersive_mode, boolean max_filesize, int exposureVisibility, int exposureLockVisibility) { + Preview preview = activity.getPreview(); + + assertTrue(preview.isPreviewStarted()); // check preview restarted + + if( preview.usingCamera2API() ) { + assertNotNull(preview.getCameraController()); + assertEquals(preview.getCurrentPreviewSize().width, preview.getCameraController().test_texture_view_buffer_w); + assertEquals(preview.getCurrentPreviewSize().height, preview.getCameraController().test_texture_view_buffer_h); + } + + 
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(activity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + View takePhotoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + View pauseVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.pause_video); + View takePhotoVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording); + View switchVideoButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + View switchCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = activity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View exposureButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = activity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = activity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = activity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = activity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = activity.findViewById(net.sourceforge.opencamera.R.id.share); + + if( !max_filesize ) { + // if doing restart on max filesize, we may have already restarted by now (on Camera2 API at least) + Log.d(TAG, "switchCameraButton.getVisibility(): " + switchCameraButton.getVisibility()); + assertEquals(switchCameraButton.getVisibility(), (immersive_mode ? View.GONE : (preview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE))); + assertEquals(switchMultiCameraButton.getVisibility(), (immersive_mode ? View.GONE : (activity.showSwitchMultiCamIcon() ? 
View.VISIBLE : View.GONE))); + assertEquals(audioControlButton.getVisibility(), ((has_audio_control_button && !immersive_mode) ? View.VISIBLE : View.GONE)); + } + assertEquals(switchVideoButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(popupButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + // trash/share only shown when preview is paused after taking a photo + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + assertFalse( preview.isVideoRecording() ); + assertEquals((int) (Integer) takePhotoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_video_selector); + assertEquals((int) (Integer) switchVideoButton.getTag(), net.sourceforge.opencamera.R.drawable.take_photo); + assertEquals( takePhotoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.start_video) ); + assertEquals(pauseVideoButton.getContentDescription(), activity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + Log.d(TAG, "pauseVideoButton.getVisibility(): " + pauseVideoButton.getVisibility()); + assertEquals(pauseVideoButton.getVisibility(), View.GONE); + assertEquals(takePhotoVideoButton.getVisibility(), View.GONE); + + assertTrue( preview.getCameraController() == null || preview.getCameraController().count_camera_parameters_exception == 0 ); + } + + public interface VideoTestCallback { + int doTest(); // return expected number of new files (or -1 to indicate not to check this) + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/VideoInstrumentedTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/VideoInstrumentedTests.java new file mode 100644 index 0000000..292b51d --- /dev/null +++ 
b/app/src/androidTest/java/net/sourceforge/opencamera/VideoInstrumentedTests.java @@ -0,0 +1,13 @@ +package net.sourceforge.opencamera; + +import org.junit.experimental.categories.Categories; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +/** Tests related to video recording; note that tests to do with video mode that don't record are still part of MainTests. + */ + +@RunWith(Categories.class) +@Categories.IncludeCategory(VideoTests.class) +@Suite.SuiteClasses({InstrumentedTest.class}) +public class VideoInstrumentedTests {} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/AvgTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/AvgTests.java new file mode 100644 index 0000000..b66b9ad --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/AvgTests.java @@ -0,0 +1,71 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class AvgTests { + /** Tests for Avg algorithm - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. + * UPDATE: now deprecated, replaced with AvgInstrumentedTests. 
+ */ + public static Test suite() { + TestSuite suite = new TestSuite(MainTests.class.getName()); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg3")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg6")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg7")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg8")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg9")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg10")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg11")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg12")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg13")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg14")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg15")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg16")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg17")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg18")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg19")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg20")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg21")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg22")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg23")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg24")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg25")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg26")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg27")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg28")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg29")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg30")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg31")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg32")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg33")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg34")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg35")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg36")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg37")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg38")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg39")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg40")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg41")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg42")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg43")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg44")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg45")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg46")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg47")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg48")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg49")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg50")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg51")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg52")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRNTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRNTests.java new file mode 100644 index 0000000..72d2367 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRNTests.java @@ -0,0 +1,47 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class HDRNTests { + /** Tests for HDR algorithm with more than 3 images - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. + * UPDATE: now deprecated, replaced with HDRNInstrumentedTests. 
+ */ + public static Test suite() { + TestSuite suite = new TestSuite(MainTests.class.getName()); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp2b")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR47_exp2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR49_exp2")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR45")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR46")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR47")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR48")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR49")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR49_exp4")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR1_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR45_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR46_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR47_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR48_exp5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR49_exp5")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp6")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23_exp7")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR45_exp7")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR47_exp7")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRTests.java 
b/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRTests.java new file mode 100644 index 0000000..105d894 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/HDRTests.java @@ -0,0 +1,85 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class HDRTests { + /** Tests for HDR algorithm - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. + * UPDATE: now deprecated, replaced with HDRInstrumentedTests. + */ + public static Test suite() { + TestSuite suite = new TestSuite(MainTests.class.getName()); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDROZero")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDRODark0")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDRODark1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR3")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR6")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR7")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR8")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR9")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR10")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR11")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR12")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR13")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR14")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR15")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR16")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR17")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR18")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR19")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR20")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR21")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR22")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR23")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR24")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR25")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR26")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR27")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR28")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR29")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR30")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR31")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR32")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR33")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR34")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR35")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR36")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR37")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR38Filmic")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR39")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR40")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR40Exponential")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR40Filmic")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR41")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR42")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR43")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR44")); + // don't include testHDR45, this is tested as part of HDRNTests + // don't include testHDR46, this is tested as part of HDRNTests + // don't include testHDR47, this is tested as part of HDRNTests + // don't include testHDR48, this is tested as part of HDRNTests + // don't include testHDR49, this is tested as part of HDRNTests + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR50")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR51")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR52")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR53")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR54")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR55")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR56")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR57")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR58")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR59")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR60")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR61")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/MainActivityTest.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/MainActivityTest.java new file mode 100644 index 0000000..86d6911 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/MainActivityTest.java @@ -0,0 +1,17623 @@ +package net.sourceforge.opencamera.test; + +import static org.junit.Assert.assertNotEquals; + +import java.io.File; +//import java.io.FileInputStream; +//import java.io.FileNotFoundException; +//import java.io.InputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import net.sourceforge.opencamera.LocationSupplier; +import net.sourceforge.opencamera.MyPreferenceFragment; +import net.sourceforge.opencamera.TestUtils; +import net.sourceforge.opencamera.cameracontroller.CameraController2; +import net.sourceforge.opencamera.HDRProcessor; +import net.sourceforge.opencamera.HDRProcessorException; +import net.sourceforge.opencamera.ImageSaver; +import net.sourceforge.opencamera.MainActivity; +import net.sourceforge.opencamera.MyApplicationInterface; +import net.sourceforge.opencamera.PreferenceKeys; +import net.sourceforge.opencamera.preview.ApplicationInterface; +import net.sourceforge.opencamera.preview.VideoProfile; +import net.sourceforge.opencamera.SaveLocationHistory; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.preview.Preview; +import net.sourceforge.opencamera.ui.DrawPreview; +import net.sourceforge.opencamera.ui.FolderChooserDialog; +import net.sourceforge.opencamera.ui.MainUI; +import net.sourceforge.opencamera.ui.PopupView; + +import 
android.annotation.SuppressLint; +import android.annotation.TargetApi; +import android.content.Intent; +import android.content.SharedPreferences; +//import android.content.res.AssetManager; +import android.graphics.Bitmap; +import android.graphics.Color; +import android.graphics.Point; +import android.graphics.PointF; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.TonemapCurve; +import android.location.Location; +import android.media.CamcorderProfile; +import android.media.MediaScannerConnection; +import android.net.Uri; +import android.os.Build; +import android.preference.PreferenceManager; +import android.provider.MediaStore; +import android.test.ActivityInstrumentationTestCase2; +import android.test.TouchUtils; +import android.util.Log; +import android.view.KeyEvent; +import android.view.View; +import android.view.WindowManager; +import android.widget.SeekBar; +import android.widget.TextView; + +// ignore warning about "Call to Thread.sleep in a loop", this is only test code +@SuppressWarnings("BusyWait") +public class MainActivityTest extends ActivityInstrumentationTestCase2 { + private static final String TAG = "MainActivityTest"; + private MainActivity mActivity = null; + private Preview mPreview = null; + + public MainActivityTest() { + //noinspection deprecation + super("net.sourceforge.opencamera", MainActivity.class); + } + + private static Intent createDefaultIntent() { + Intent intent = new Intent(); + TestUtils.setDefaultIntent(intent); + return intent; + } + + @Override + protected void setUp() throws Exception { + Log.d(TAG, "setUp"); + super.setUp(); + + setActivityInitialTouchMode(false); + + // use getTargetContext() as we haven't started the activity yet (and don't want to, as we want to set prefs before starting) + TestUtils.initTest(this.getInstrumentation().getTargetContext()); + + Intent intent = createDefaultIntent(); + setActivityIntent(intent); + 
Log.d(TAG, "setUp: about to get activity"); + mActivity = getActivity(); + Log.d(TAG, "setUp: activity: " + mActivity); + mPreview = mActivity.getPreview(); + Log.d(TAG, "setUp: preview: " + mPreview); + + // don't waitUntilCameraOpened() here, as if an assertion fails in setUp(), it can cause later tests to hang in the suite + // instead we now wait for camera to open in setToDefault() + //waitUntilCameraOpened(); + + //restart(); // no longer need to restart, as we reset prefs before starting up; not restarting makes tests run faster! + + //Camera camera = mPreview.getCamera(); + /*mSpinner = (Spinner) mActivity.findViewById( + com.android.example.spinner.R.id.Spinner01 + );*/ + + //mPlanetData = mSpinner.getAdapter(); + } + + @Override + protected void tearDown() throws Exception { + Log.d(TAG, "tearDown"); + + // shouldn't have assertions in tearDown, otherwise we'll never cleanup properly - when run as suite, the next test will either fail or hang! + //assertTrue( mPreview.getCameraController() == null || mPreview.getCameraController().count_camera_parameters_exception == 0 ); + //assertTrue( mPreview.getCameraController() == null || mPreview.getCameraController().count_precapture_timeout == 0 ); + + // reset back to defaults (whilst each test will reset the settings anyway via before()->TestUtils.initTest(), it's useful + // to leave the application in a default state after running tests) + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.clear(); + editor.apply(); + + Log.d(TAG, "tearDown done"); + super.tearDown(); + } + + public void testPreConditions() { + assertNotNull(mPreview); + //assertTrue(mPreview.getCamera() != null); + //assertTrue(mCamera != null); + //assertTrue(mSpinner.getOnItemSelectedListener() != null); + //assertTrue(mPlanetData != null); + //assertEquals(mPlanetData.getCount(),ADAPTER_COUNT); + } + + private void waitUntilCameraOpened() { 
+ waitUntilCameraOpened(true); + } + + private void waitUntilCameraOpened(boolean wait_for_preview) { + Log.d(TAG, "wait until camera opened"); + long time_s = System.currentTimeMillis(); + while( !mPreview.openCameraAttempted() ) { + assertTrue( System.currentTimeMillis() - time_s < 20000 ); + } + Log.d(TAG, "camera is open!"); + this.getInstrumentation().waitForIdleSync(); // allow the onPostExecute of open camera task run + Log.d(TAG, "done idle sync"); + try { + Thread.sleep(100); // sleep a bit just to be safe + } catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + + if( wait_for_preview ) { + waitUntilPreviewStarted(); // needed for Camera2 API when starting preview on background thread and not waiting for it to start + } + } + + private void waitUntilPreviewStarted() { + Log.d(TAG, "wait until preview started"); + long time_s = System.currentTimeMillis(); + while( !mPreview.isPreviewStarted() ) { + assertTrue( System.currentTimeMillis() - time_s < 20000 ); + } + Log.d(TAG, "preview is started!"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "done idle sync"); + try { + Thread.sleep(100); // sleep a bit just to be safe + } catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + } + + private void restart() { + restart(true); + } + + /** Restarts Open Camera. + * WARNING: Make sure that any assigned variables related to the activity, e.g., anything + * returned by findViewById(), is updated to the new mActivity after calling this method! 
+ */ + private void restart(boolean wait_for_preview) { + Log.d(TAG, "restart"); + mActivity.finish(); + setActivity(null); + Log.d(TAG, "now starting"); + mActivity = getActivity(); + Log.d(TAG, "mActivity is now: " + mActivity); + mPreview = mActivity.getPreview(); + Log.d(TAG, "mPreview is now: " + mPreview); + waitUntilCameraOpened(wait_for_preview); + Log.d(TAG, "restart done"); + } + + private void pauseAndResume() { + Log.d(TAG, "pauseAndResume"); + boolean camera_is_open = mPreview.getCameraController() != null; + pauseAndResume(camera_is_open); + } + + private void pauseAndResume(boolean wait_until_camera_opened) { + Log.d(TAG, "pauseAndResume: " + wait_until_camera_opened); + // onResume has code that must run on UI thread + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "pause..."); + getInstrumentation().callActivityOnPause(mActivity); + Log.d(TAG, "resume..."); + getInstrumentation().callActivityOnResume(mActivity); + /*Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + public void run() { + Log.d(TAG, "resume..."); + getInstrumentation().callActivityOnResume(mActivity); + } + }, 500);*/ + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + if( wait_until_camera_opened ) { + waitUntilCameraOpened(); + } + } + + private void updateForSettings() { + Log.d(TAG, "updateForSettings"); + // updateForSettings has code that must run on UI thread + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.initLocation(); // initLocation now called via MainActivity.setWindowFlagsForCamera() rather than updateForSettings() + mActivity.getApplicationInterface().getDrawPreview().updateSettings(); + mActivity.updateForSettings(true); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); // may need to wait if camera is reopened, e.g., when changing scene 
mode - see testSceneMode() + // but we also need to wait for the delay if instead we've stopped and restarted the preview, the latter now only happens after dim_effect_time_c + try { + Thread.sleep(DrawPreview.dim_effect_time_c+50); // wait for updateForSettings + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + this.getInstrumentation().waitForIdleSync(); + + } + + private void clickView(final View view) { + // TouchUtils.clickView doesn't work properly if phone held in portrait mode! + //TouchUtils.clickView(MainActivityTest.this, view); + Log.d(TAG, "clickView: "+ view); + assertEquals(view.getVisibility(), View.VISIBLE); + mActivity.runOnUiThread(new Runnable() { + public void run() { + assertTrue(view.performClick()); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + } + + private void switchToCamera(int cameraId) { + int origCameraId = mPreview.getCameraId(); + Log.d(TAG, "switchToCamera: "+ cameraId); + Log.d(TAG, "origCameraId: "+ origCameraId); + int newCameraId = origCameraId; + while( newCameraId != cameraId ) { + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); + newCameraId = mPreview.getCameraId(); + Log.d(TAG, "changed cameraId to: "+ newCameraId); + assertTrue(newCameraId != origCameraId); + } + } + + private void openPopupMenu() { + Log.d(TAG, "openPopupMenu"); + assertFalse( mActivity.popupIsOpen() ); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + clickView(popupButton); + Log.d(TAG, "wait for popup to open"); + while( !mActivity.popupIsOpen() ) { + } + Log.d(TAG, "popup is now open"); + } + + private void closePopupMenu() { + Log.d(TAG, "closePopupMenu"); + assertTrue( mActivity.popupIsOpen() ); + View popupButton = 
mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + clickView(popupButton); + Log.d(TAG, "wait for popup to close"); + while( mActivity.popupIsOpen() ) { + } + Log.d(TAG, "popup is now closed"); + } + + private void switchToFlashValue(String required_flash_value) { + Log.d(TAG, "switchToFlashValue: "+ required_flash_value); + if( mPreview.supportsFlash() ) { + String flash_value = mPreview.getCurrentFlashValue(); + Log.d(TAG, "start flash_value: "+ flash_value); + if( !flash_value.equals(required_flash_value) ) { + openPopupMenu(); + View currentFlashButton = mActivity.getUIButton("TEST_FLASH_" + flash_value); + assertNotNull(currentFlashButton); + assertEquals(currentFlashButton.getAlpha(), PopupView.ALPHA_BUTTON_SELECTED); + View flashButton = mActivity.getUIButton("TEST_FLASH_" + required_flash_value); + assertNotNull(flashButton); + assertEquals(flashButton.getAlpha(), PopupView.ALPHA_BUTTON, 1.0e-5); + clickView(flashButton); + flash_value = mPreview.getCurrentFlashValue(); + Log.d(TAG, "changed flash_value to: "+ flash_value); + } + assertEquals(flash_value, required_flash_value); + String controller_flash_value = mPreview.getCameraController().getFlashValue(); + Log.d(TAG, "controller_flash_value: "+ controller_flash_value); + if( flash_value.equals("flash_frontscreen_auto") || flash_value.equals("flash_frontscreen_on") ) { + // for frontscreen flash, the controller flash value will be "" (due to real flash not supported) - although on Galaxy Nexus this is "flash_off" due to parameters.getFlashMode() returning Camera.Parameters.FLASH_MODE_OFF + assertTrue(controller_flash_value.isEmpty() || controller_flash_value.equals("flash_off")); + } + else { + Log.d(TAG, "expected_flash_value: "+ flash_value); + assertEquals(flash_value, controller_flash_value); + } + } + } + + private void switchToFocusValue(String required_focus_value) { + Log.d(TAG, "switchToFocusValue: "+ required_focus_value); + if( mPreview.supportsFocus() ) { + String focus_value = 
mPreview.getCurrentFocusValue(); + Log.d(TAG, "start focus_value: "+ focus_value); + if( !focus_value.equals(required_focus_value) ) { + openPopupMenu(); + View focusButton = mActivity.getUIButton("TEST_FOCUS_" + required_focus_value); + assertNotNull(focusButton); + clickView(focusButton); + focus_value = mPreview.getCurrentFocusValue(); + Log.d(TAG, "changed focus_value to: "+ focus_value); + } + assertEquals(focus_value, required_focus_value); + String controller_focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "controller_focus_value: "+ controller_focus_value); + String compare_focus_value = focus_value; + if( compare_focus_value.equals("focus_mode_locked") ) + compare_focus_value = "focus_mode_auto"; + else if( compare_focus_value.equals("focus_mode_infinity") && mPreview.usingCamera2API() ) + compare_focus_value = "focus_mode_manual2"; + assertEquals(compare_focus_value, controller_focus_value); + } + } + + private void switchToISO(int required_iso) { + Log.d(TAG, "switchToISO: "+ required_iso); + int iso = mPreview.getCameraController().getISO(); + Log.d(TAG, "start iso: "+ iso); + if( iso != required_iso ) { + /*openPopupMenu(); + View isoButton = mActivity.getUIButton("TEST_ISO_" + required_iso); + assertTrue(isoButton != null); + clickView(isoButton); + iso = mPreview.getCameraController().getISO(); + Log.d(TAG, "changed iso to: "+ iso);*/ + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + View isoButton = mActivity.getUIButton("TEST_ISO_" + required_iso); + assertNotNull(isoButton); + clickView(isoButton); + try { + Thread.sleep(DrawPreview.dim_effect_time_c+50); // wait for updateForSettings + 
this.getInstrumentation().waitForIdleSync(); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + iso = mPreview.getCameraController().getISO(); + Log.d(TAG, "changed iso to: "+ iso); + clickView(exposureButton); + assertEquals(exposureContainer.getVisibility(), View.GONE); + } + assertEquals(iso, required_iso); + } + + /* Sets the camera up to a predictable state: + * - Back camera + * - Photo mode + * - Flash off (if flash supported) + * - Focus mode picture continuous (if focus modes supported) + * As a side-effect, the camera and/or camera parameter values may become invalid. + */ + private void setToDefault() { + waitUntilCameraOpened(); + + if( mPreview.isVideo() ) { + Log.d(TAG, "turn off video mode"); + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + } + assertFalse(mPreview.isVideo()); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + switchToCamera(0); + } + + switchToFlashValue("flash_off"); + switchToFocusValue("focus_mode_continuous_picture"); + + // pause for safety - needed for Nokia 8 at least otherwise some tests like testContinuousPictureFocusRepeat, + // testLocationOff result in hang whilst waiting for photo to be taken, and hit the timeout in waitForTakePhoto() + try { + Thread.sleep(200); + } + catch (InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + } + } + + /* Ensures that we shut down properly when pausing. 
+ */ + public void testPause() throws InterruptedException { + Log.d(TAG, "testPause"); + + setToDefault(); + Thread.sleep(1000); + + // check the ticker is running okay + assertTrue(mPreview.test_ticker_called); + mPreview.test_ticker_called = false; + Thread.sleep(300); + assertTrue(mPreview.test_ticker_called); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "pause..."); + getInstrumentation().callActivityOnPause(mActivity); + } + }); + this.getInstrumentation().waitForIdleSync(); + + // ensure ticker is turned off after certain time + Thread.sleep(3000); + mPreview.test_ticker_called = false; + Thread.sleep(1000); + assertFalse(mPreview.test_ticker_called); + + // resume, and assume we've started the ticker again + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "resume..."); + getInstrumentation().callActivityOnResume(mActivity); + } + }); + Thread.sleep(3000); + waitUntilCameraOpened(); + assertTrue(mPreview.test_ticker_called); + mPreview.test_ticker_called = false; + Thread.sleep(300); + assertTrue(mPreview.test_ticker_called); + } + + /** Tests that we clean up the background task for opening camera properly. + */ + public void testImmediatelyQuit() throws InterruptedException { + Log.d(TAG, "testImmediatelyQuit"); + setToDefault(); + + for(int i=0;i<5;i++) { + // like restart, but don't wait for camera to be opened + Log.d(TAG, "call finish"); + mActivity.finish(); + setActivity(null); + Log.d(TAG, "now starting"); + mActivity = getActivity(); + mPreview = mActivity.getPreview(); + + // now restart straight away + restart(); + + Thread.sleep(1000); + } + } + + /* Ensures that we only start the camera preview once when starting up. 
+ */ + public void testStartCameraPreviewCount() { + Log.d(TAG, "testStartCameraPreviewCount"); + /*Log.d(TAG, "1 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + int init_count_cameraStartPreview = mPreview.count_cameraStartPreview; + mActivity.finish(); + setActivity(null); + mActivity = this.getActivity(); + mPreview = mActivity.getPreview(); + Log.d(TAG, "2 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertTrue(mPreview.count_cameraStartPreview == init_count_cameraStartPreview); + this.getInstrumentation().callActivityOnPause(mActivity); + Log.d(TAG, "3 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertTrue(mPreview.count_cameraStartPreview == init_count_cameraStartPreview); + this.getInstrumentation().callActivityOnResume(mActivity); + Log.d(TAG, "4 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertTrue(mPreview.count_cameraStartPreview == init_count_cameraStartPreview+1);*/ + setToDefault(); + + restart(); + // onResume has code that must run on UI thread + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "1 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertEquals(1, mPreview.count_cameraStartPreview); + getInstrumentation().callActivityOnPause(mActivity); + Log.d(TAG, "2 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertEquals(1, mPreview.count_cameraStartPreview); + getInstrumentation().callActivityOnResume(mActivity); + } + }); + // need to wait for UI code to finish before leaving + Log.d(TAG, "wait for idle sync"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "done waiting for idle sync"); + // waiting for camera to open can't be on the ui thread, as it's on the ui thread that Open Camera sets that we've opened the camera + waitUntilCameraOpened(); + Log.d(TAG, "3 count_cameraStartPreview: " + mPreview.count_cameraStartPreview); + assertEquals(2, 
mPreview.count_cameraStartPreview); + } + + /* Ensures that we save the video mode. + * Also tests the icons and content descriptions of the take photo and switch photo/video buttons are as expected. + */ + private void subTestSaveVideoMode() { + Log.d(TAG, "subTestSaveVideoMode"); + setToDefault(); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + + assertFalse(mPreview.isVideo()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.take_photo)); + assertEquals(switchVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_video)); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.start_video)); + assertEquals(switchVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_photo)); + + restart(); + assertTrue(mPreview.isVideo()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.start_video)); + assertEquals(switchVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_photo)); + + pauseAndResume(); + assertTrue(mPreview.isVideo()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.start_video)); + assertEquals(switchVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_photo)); + } + + /* Returns a focus mode that is supported by the device, but not the default focus mode. 
+ */ + private String getNonDefaultFocus() { + String non_default_focus; + if( mPreview.getSupportedFocusValues().contains("focus_mode_macro") ) { + non_default_focus = "focus_mode_macro"; + } + else if( mPreview.getSupportedFocusValues().contains("focus_mode_infinity") ) { + non_default_focus = "focus_mode_infinity"; + } + else { + non_default_focus = null; + fail("can't choose a non-default focus for this device"); + } + return non_default_focus; + } + + /* Ensures that we save the focus mode for photos when restarting. + * Note that saving the focus mode for video mode is tested in testFocusSwitchVideoResetContinuous. + */ + private void subTestSaveFocusMode() { + Log.d(TAG, "subTestSaveFocusMode"); + setToDefault(); + + if( !mPreview.supportsFocus() ) { + Log.d(TAG, "test requires focus"); + return; + } + + String non_default_focus = getNonDefaultFocus(); + + String focus_value = mPreview.getCameraController().getFocusValue(); + assertNotEquals(focus_value, non_default_focus); + + switchToFocusValue(non_default_focus); + + restart(); + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals(focus_value, non_default_focus); + + pauseAndResume(); + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals(focus_value, non_default_focus); + } + + /* Ensures that we save the flash mode torch when quitting and restarting. 
+ */ + private void subTestSaveFlashTorchQuit() throws InterruptedException { + Log.d(TAG, "subTestSaveFlashTorchQuit"); + + setToDefault(); + + if( !mPreview.supportsFlash() ) { + Log.d(TAG, "doesn't support flash"); + return; + } + + switchToFlashValue("flash_torch"); + + restart(); + Thread.sleep(4000); // needs to be long enough for the autofocus to complete + String controller_flash_value = mPreview.getCameraController().getFlashValue(); + Log.d(TAG, "controller_flash_value: " + controller_flash_value); + assertEquals("flash_torch", controller_flash_value); + String flash_value = mPreview.getCurrentFlashValue(); + Log.d(TAG, "flash_value: " + flash_value); + assertEquals("flash_torch", flash_value); + } + + private void subTestExposureLockNotSaved() { + Log.d(TAG, "subTestExposureLockNotSaved"); + + if( !mPreview.supportsExposureLock() ) { + return; + } + + setToDefault(); + + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + assertEquals(exposureLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.exposure_lock)); + clickView(exposureLockButton); + assertTrue(mPreview.getCameraController().getAutoExposureLock()); + assertEquals(exposureLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.exposure_unlock)); + + this.pauseAndResume(); + assertFalse(mPreview.getCameraController().getAutoExposureLock()); + assertEquals(exposureLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.exposure_lock)); + + // now with restart + + clickView(exposureLockButton); + assertTrue(mPreview.getCameraController().getAutoExposureLock()); + assertEquals(exposureLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.exposure_unlock)); + + restart(); + assertFalse(mPreview.getCameraController().getAutoExposureLock()); + 
exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + assertEquals(exposureLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.exposure_lock)); + } + + private void subTestWhiteBalanceLockNotSaved() { + Log.d(TAG, "subTestWhiteBalanceLockNotSaved"); + + if( !mPreview.supportsWhiteBalanceLock() ) { + return; + } + + setToDefault(); + + View whiteBalanceLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.white_balance_lock); + assertEquals(whiteBalanceLockButton.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.ShowWhiteBalanceLockPreferenceKey, true); + editor.apply(); + updateForSettings(); + assertEquals(whiteBalanceLockButton.getVisibility(), View.VISIBLE); + + assertEquals(whiteBalanceLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.white_balance_lock)); + clickView(whiteBalanceLockButton); + assertTrue(mPreview.getCameraController().getAutoWhiteBalanceLock()); + assertEquals(whiteBalanceLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.white_balance_unlock)); + + this.pauseAndResume(); + assertFalse(mPreview.getCameraController().getAutoWhiteBalanceLock()); + assertEquals(whiteBalanceLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.white_balance_lock)); + + // now with restart + + clickView(whiteBalanceLockButton); + assertTrue(mPreview.getCameraController().getAutoWhiteBalanceLock()); + assertEquals(whiteBalanceLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.white_balance_unlock)); + + restart(); + assertFalse(mPreview.getCameraController().getAutoWhiteBalanceLock()); 
+ whiteBalanceLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.white_balance_lock); + assertEquals(whiteBalanceLockButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.white_balance_lock)); + } + + /** Tests for things which should (or shouldn't) be saved. + */ + public void testSaveModes() throws InterruptedException { + Log.d(TAG, "testSaveModes"); + subTestSaveVideoMode(); + subTestSaveFocusMode(); + subTestSaveFlashTorchQuit(); + subTestExposureLockNotSaved(); + subTestWhiteBalanceLockNotSaved(); + } + + /* Ensures that the flash mode changes as expected when switching between photo and video modes. + */ + public void testFlashVideoMode() { + Log.d(TAG, "testFlashVideoMode"); + setToDefault(); + + if( !mPreview.supportsFlash() ) { + return; + } + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + assertFalse(mPreview.isVideo()); + + switchToFlashValue("flash_auto"); + assertEquals("flash_auto", mPreview.getCurrentFlashValue()); + + Log.d(TAG, "switch to video"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + + // flash should turn off when in video mode, so that flash doesn't fire for photo snapshot while recording video + assertEquals("flash_off", mPreview.getCurrentFlashValue()); + + restart(); + switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + assertTrue(mPreview.isVideo()); + assertEquals("flash_off", mPreview.getCurrentFlashValue()); + + // switch back to photo mode, should return to flash auto + Log.d(TAG, "switch to photo"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + assertEquals("flash_auto", mPreview.getCurrentFlashValue()); + + // turn on torch, check it remains on for video + switchToFlashValue("flash_torch"); + assertEquals("flash_torch", mPreview.getCurrentFlashValue()); + + Log.d(TAG, 
"switch to video"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + assertEquals("flash_torch", mPreview.getCurrentFlashValue()); + + restart(); + switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + assertTrue(mPreview.isVideo()); + assertEquals("flash_torch", mPreview.getCurrentFlashValue()); + + // switch back to photo mode, should remain in flash torch + Log.d(TAG, "switch to photo"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + assertEquals("flash_torch", mPreview.getCurrentFlashValue()); + } + + /* Ensures that we save the flash mode torch when switching to front camera and then to back + * Note that this sometimes fail on Galaxy Nexus, because flash turns off after autofocus (and other camera apps do this too), but this only seems to happen some of the time! + * And Nexus 7 has no flash anyway. + * So commented out test for now. + */ + /*public void testSaveFlashTorchSwitchCamera() { + Log.d(TAG, "testSaveFlashTorchSwitchCamera"); + + setToDefault(); + + if( !mPreview.supportsFlash() ) { + return; + } + else if( Camera.getNumberOfCameras() <= 1 ) { + return; + } + + switchToFlashValue("flash_torch"); + + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId != new_cameraId); + + clickView(switchCameraButton); + waitUntilCameraOpened(); + new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId == new_cameraId); + + Camera camera = mPreview.getCamera(); + Camera.Parameters parameters = camera.getParameters(); + Log.d(TAG, "parameters flash mode: " + parameters.getFlashMode()); + assertTrue(parameters.getFlashMode().equals(Camera.Parameters.FLASH_MODE_TORCH)); + String flash_value = mPreview.getCurrentFlashValue(); 
+ Log.d(TAG, "flash_value: " + flash_value); + assertTrue(flash_value.equals("flash_torch")); + }*/ + + public void testFlashStartup() throws InterruptedException { + Log.d(TAG, "testFlashStartup"); + setToDefault(); + + if( !mPreview.supportsFlash() ) { + return; + } + + Log.d(TAG, "# switch to flash on"); + switchToFlashValue("flash_on"); + Log.d(TAG, "# restart"); + restart(); + + Log.d(TAG, "# switch flash mode"); + // now switch to torch - the idea is that this is done while the camera is starting up + // though note that sometimes we might not be quick enough here! + // don't use switchToFlashValue here, it'll get confused due to the autofocus changing the parameters flash mode + // update: now okay to use it, now we have the popup UI + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + //clickView(flashButton); + switchToFlashValue("flash_torch"); + + //Camera camera = mPreview.getCamera(); + //Camera.Parameters parameters = camera.getParameters(); + //String flash_mode = mPreview.getCurrentFlashMode(); + String flash_value = mPreview.getCurrentFlashValue(); + Log.d(TAG, "# flash value is now: " + flash_value); + Log.d(TAG, "# sleep"); + Thread.sleep(4000); // needs to be long enough for the autofocus to complete + /*parameters = camera.getParameters(); + Log.d(TAG, "# parameters flash mode: " + parameters.getFlashMode()); + assertTrue(parameters.getFlashMode().equals(flash_mode));*/ + String camera_flash_value = mPreview.getCameraController().getFlashValue(); + Log.d(TAG, "# camera flash value: " + camera_flash_value); + assertEquals(camera_flash_value, flash_value); + } + + /** Tests that flash remains on, with the startup focus flash hack. 
*/
    public void testFlashStartup2() throws InterruptedException {
        Log.d(TAG, "testFlashStartup2");
        setToDefault();

        // nothing to test on devices without flash
        if( !mPreview.supportsFlash() ) {
            return;
        }

        Log.d(TAG, "# switch to flash on");
        switchToFlashValue("flash_on");
        Log.d(TAG, "# restart");
        restart();
        Thread.sleep(3000); // allow startup (including any startup focus flash behaviour) to settle
        String flash_value = mPreview.getCameraController().getFlashValue();
        Log.d(TAG, "1 flash value is now: " + flash_value);
        assertEquals("flash_on", flash_value);

        // repeat with continuous focus mode, which exercises a different startup path
        switchToFocusValue("focus_mode_continuous_picture");
        restart();
        Thread.sleep(3000);
        flash_value = mPreview.getCameraController().getFlashValue();
        Log.d(TAG, "2 flash value is now: " + flash_value);
        assertEquals("flash_on", flash_value);
    }

    /** Asserts that the preview size actually in use equals the size that
     *  Preview.getOptimalPreviewSize() computes from the supported preview sizes.
     */
    private void checkOptimalPreviewSize() {
        Log.d(TAG, "preview size: " + mPreview.getCameraController().getPreviewSize().width + ", " + mPreview.getCameraController().getPreviewSize().height);
        List<CameraController.Size> sizes = mPreview.getSupportedPreviewSizes();
        CameraController.Size best_size = mPreview.getOptimalPreviewSize(sizes);
        Log.d(TAG, "best size: " + best_size.width + ", " + best_size.height);
        assertEquals(best_size.width, mPreview.getCameraController().getPreviewSize().width);
        assertEquals(best_size.height, mPreview.getCameraController().getPreviewSize().height);
    }

    /** Asserts that the picture size in use for video equals the size computed by
     *  Preview.getOptimalVideoPictureSize() for the given target aspect ratio.
     */
    private void checkOptimalVideoPictureSize(double targetRatio) {
        // even the picture resolution should have same aspect ratio for video - otherwise have problems on Nexus 7 with Android 4.4.3
        Log.d(TAG, "video picture size: " + mPreview.getCameraController().getPictureSize().width + ", " + mPreview.getCameraController().getPictureSize().height);
        List<CameraController.Size> sizes = mPreview.getSupportedPictureSizes(false);
        CameraController.Size best_size = mPreview.getOptimalVideoPictureSize(sizes, targetRatio);
        Log.d(TAG, "best size: " + best_size.width + ", " + best_size.height);
        assertEquals(best_size.width, mPreview.getCameraController().getPictureSize().width);
// (continuation of checkOptimalVideoPictureSize)
        assertEquals(best_size.height, mPreview.getCameraController().getPictureSize().height);
    }

    /** Asserts that the preview view's aspect ratio matches the camera preview's aspect
     *  ratio, within a tolerance derived from the smallest dimension (rounding error bound).
     */
    private void checkSquareAspectRatio() {
        Log.d(TAG, "preview size: " + mPreview.getCameraController().getPreviewSize().width + ", " + mPreview.getCameraController().getPreviewSize().height);
        Log.d(TAG, "frame size: " + mPreview.getView().getWidth() + ", " + mPreview.getView().getHeight());
        double frame_aspect_ratio = ((double)mPreview.getView().getWidth()) / (double)mPreview.getView().getHeight();
        double preview_aspect_ratio = ((double)mPreview.getCameraController().getPreviewSize().width) / (double)mPreview.getCameraController().getPreviewSize().height;
        // in portrait the view dimensions are swapped relative to the camera's landscape sizes
        if( mActivity.getSystemOrientation() == MainActivity.SystemOrientation.PORTRAIT ) {
            frame_aspect_ratio = 1.0f / frame_aspect_ratio;
        }
        Log.d(TAG, "frame_aspect_ratio: " + frame_aspect_ratio);
        Log.d(TAG, "preview_aspect_ratio: " + preview_aspect_ratio);
        // we calculate etol like this, due to errors from rounding
        //double etol = 1.0f / Math.min((double)mPreview.getWidth(), (double)mPreview.getHeight()) + 1.0e-5;
        double min_dim = Math.min(mPreview.getView().getWidth(), (double)mPreview.getView().getHeight());
        min_dim = Math.min(min_dim, mPreview.getCameraController().getPreviewSize().width);
        min_dim = Math.min(min_dim, mPreview.getCameraController().getPreviewSize().height);
        double etol = 1.0f / min_dim + 1.0e-5;
        //double etol = (double)mPreview.getView().getWidth() / (double)(mPreview.getView().getHeight() * (mPreview.getView().getHeight()-1) ) + 1.0e-5;
        Log.d(TAG, "min_dim: " + min_dim);
        Log.d(TAG, "etol: " + etol);
        assertTrue( Math.abs(frame_aspect_ratio - preview_aspect_ratio) <= etol );
    }

    /* Ensures that preview resolution is set as expected in non-WYSIWYG mode
     */
    public void testPreviewSize() throws InterruptedException {
        Log.d(TAG, "testPreviewSize");

        setToDefault();
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
// (continuation of testPreviewSize)
        SharedPreferences.Editor editor = settings.edit();
        // non-WYSIWYG: preview should match the display's aspect ratio
        editor.putString(PreferenceKeys.PreviewSizePreferenceKey, "preference_preview_size_display");
        editor.apply();
        updateForSettings();

        Point display_size = new Point();
        {
            mActivity.getApplicationInterface().getDisplaySize(display_size, false);
            Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y);
        }
        //double targetRatio = mPreview.getTargetRatioForPreview(display_size);
        double targetRatio = mPreview.getTargetRatio();
        double expTargetRatio = ((double)display_size.x) / (double)display_size.y;
        // display_size is landscape-relative, so invert the expected ratio in portrait
        if( mActivity.getSystemOrientation() == MainActivity.SystemOrientation.PORTRAIT ) {
            expTargetRatio = 1.0f / expTargetRatio;
        }
        Log.d(TAG, "targetRatio: " + targetRatio);
        Log.d(TAG, "expTargetRatio: " + expTargetRatio);
        assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
        checkOptimalPreviewSize();
        checkSquareAspectRatio();

        // repeat the checks after switching camera, if the device has more than one
        if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) {
            Log.d(TAG, "switch camera");
            View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
            clickView(switchCameraButton);
            waitUntilCameraOpened();

            //targetRatio = mPreview.getTargetRatioForPreview(display_size);
            targetRatio = mPreview.getTargetRatio();
            Log.d(TAG, "targetRatio: " + targetRatio);
            Log.d(TAG, "expTargetRatio: " + expTargetRatio);
            assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
            checkOptimalPreviewSize();
            checkSquareAspectRatio();
        }
    }

    /* Ensures that preview resolution is set as expected in WYSIWYG mode
     */
    public void testPreviewSizeWYSIWYG() {
        Log.d(TAG, "testPreviewSizeWYSIWYG");

        setToDefault();
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        // WYSIWYG: preview should match the picture (or video) aspect ratio
        editor.putString(PreferenceKeys.PreviewSizePreferenceKey, "preference_preview_size_wysiwyg");
        editor.apply();
        updateForSettings();
// (continuation of testPreviewSizeWYSIWYG)
        Point display_size = new Point();
        {
            mActivity.getApplicationInterface().getDisplaySize(display_size, false);
            Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y);
        }
        CameraController.Size picture_size = mPreview.getCameraController().getPictureSize();
        CameraController.Size preview_size = mPreview.getCameraController().getPreviewSize();
        //double targetRatio = mPreview.getTargetRatioForPreview(display_size);
        double targetRatio = mPreview.getTargetRatio();
        double expTargetRatio = ((double)picture_size.width) / (double)picture_size.height;
        double previewRatio = ((double)preview_size.width) / (double)preview_size.height;
        Log.d(TAG, "picture_size: " + picture_size.width + " x " + picture_size.height);
        Log.d(TAG, "preview_size: " + preview_size.width + " x " + preview_size.height);
        Log.d(TAG, "expTargetRatio: " + expTargetRatio);
        Log.d(TAG, "targetRatio: " + targetRatio);
        Log.d(TAG, "previewRatio: " + previewRatio);
        // need larger tolerance for Pixel 6 Pro, as default resolution 4080x3072 has aspect ratio 1.328125,
        // but closest preview aspect ratio is 4:3
        assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
        assertTrue( Math.abs(previewRatio - expTargetRatio) <= 0.01 );
        checkOptimalPreviewSize();
        checkSquareAspectRatio();

        // in video mode, preview should instead match the video profile's aspect ratio
        Log.d(TAG, "switch to video");
        View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
        clickView(switchVideoButton);
        waitUntilCameraOpened();
        assertTrue(mPreview.isVideo());
        VideoProfile profile = mPreview.getVideoProfile();
        CameraController.Size video_preview_size = mPreview.getCameraController().getPreviewSize();
        //targetRatio = mPreview.getTargetRatioForPreview(display_size);
        targetRatio = mPreview.getTargetRatio();
        expTargetRatio = ((double)profile.videoFrameWidth) / (double)profile.videoFrameHeight;
        previewRatio = ((double)video_preview_size.width) / (double)video_preview_size.height;
        assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
        assertTrue( Math.abs(previewRatio - expTargetRatio) <= 0.01 );
        checkOptimalPreviewSize();
        checkSquareAspectRatio();
        checkOptimalVideoPictureSize(expTargetRatio);

        // switch back to photo mode: sizes should be restored to their earlier values
        clickView(switchVideoButton);
        waitUntilCameraOpened();
        assertFalse(mPreview.isVideo());
        CameraController.Size new_picture_size = mPreview.getCameraController().getPictureSize();
        CameraController.Size new_preview_size = mPreview.getCameraController().getPreviewSize();
        Log.d(TAG, "picture_size: " + picture_size.width + " x " + picture_size.height);
        Log.d(TAG, "new_picture_size: " + new_picture_size.width + " x " + new_picture_size.height);
        Log.d(TAG, "preview_size: " + preview_size.width + " x " + preview_size.height);
        Log.d(TAG, "new_preview_size: " + new_preview_size.width + " x " + new_preview_size.height);
        assertEquals(new_picture_size, picture_size);
        assertEquals(new_preview_size, preview_size);

        // repeat the whole photo/video round trip on the other camera, if available
        if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) {
            Log.d(TAG, "switch camera");
            View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
            clickView(switchCameraButton);
            waitUntilCameraOpened();

            picture_size = mPreview.getCameraController().getPictureSize();
            preview_size = mPreview.getCameraController().getPreviewSize();
            //targetRatio = mPreview.getTargetRatioForPreview(display_size);
            targetRatio = mPreview.getTargetRatio();
            expTargetRatio = ((double)picture_size.width) / (double)picture_size.height;
            previewRatio = ((double)preview_size.width) / (double)preview_size.height;
            assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
            assertTrue( Math.abs(previewRatio - expTargetRatio) <= 0.01 );
            checkOptimalPreviewSize();
            checkSquareAspectRatio();

            Log.d(TAG, "switch to video again");
            clickView(switchVideoButton);
            waitUntilCameraOpened();
            assertTrue(mPreview.isVideo());
            profile = mPreview.getVideoProfile();
            video_preview_size = mPreview.getCameraController().getPreviewSize();
            //targetRatio = mPreview.getTargetRatioForPreview(display_size);
            targetRatio = mPreview.getTargetRatio();
            expTargetRatio = ((double)profile.videoFrameWidth) / (double)profile.videoFrameHeight;
            previewRatio = ((double)video_preview_size.width) / (double)video_preview_size.height;
            assertTrue( Math.abs(targetRatio - expTargetRatio) <= 1.0e-5 );
            assertTrue( Math.abs(previewRatio - expTargetRatio) <= 0.01 );
            checkOptimalPreviewSize();
            checkSquareAspectRatio();
            checkOptimalVideoPictureSize(expTargetRatio);

            clickView(switchVideoButton);
            waitUntilCameraOpened();
            assertFalse(mPreview.isVideo());
            new_picture_size = mPreview.getCameraController().getPictureSize();
            new_preview_size = mPreview.getCameraController().getPreviewSize();
            assertEquals(new_picture_size, picture_size);
            assertEquals(new_preview_size, preview_size);
        }
    }

    /** Shared helper: switches to the supplied photo mode and verifies resolution filtering.
     * @param photo_mode_preference  preference value to store for PreferenceKeys.PhotoModePreferenceKey
     * @param photo_mode             expected resulting PhotoMode
     * @param max_mp                 maximum pixel count allowed in the mode (when expect_reduce_resolution)
     * @param test_change_resolution also test changing resolution whilst in the mode
     * @param expect_reduce_resolution whether the mode should force a lower resolution
     * @param expect_supports_burst  whether all filtered resolutions must support burst
     */
    private void subTestResolutionMaxMP(String photo_mode_preference, MyApplicationInterface.PhotoMode photo_mode, int max_mp, boolean test_change_resolution, boolean expect_reduce_resolution, boolean expect_supports_burst) {
        Log.d(TAG, "subTestResolutionMaxMP");
        Log.d(TAG, " photo_mode_preference: " + photo_mode_preference);
        Log.d(TAG, " photo_mode: " + photo_mode);
        Log.d(TAG, " max_mp: " + max_mp);
        Log.d(TAG, " test_change_resolution: " + test_change_resolution);

        // caller must invoke this whilst in Standard photo mode
        assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Standard);

        CameraController.Size std_size = mPreview.getCurrentPictureSize();
        assertNotNull(std_size);
        Log.d(TAG, "std_size: " + std_size.width + " x " + std_size.height);
        final List<CameraController.Size> all_picture_sizes = new ArrayList<>(mPreview.getSupportedPictureSizes(false));
        final List<CameraController.Size> std_picture_sizes = new ArrayList<>(mPreview.getSupportedPictureSizes(true));
        assertEquals(all_picture_sizes, std_picture_sizes);
        assertTrue(all_picture_sizes.contains(std_size));

        // switch to
// (continuation of subTestResolutionMaxMP)
        // the photo mode and check we reduce the resolution
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putString(PreferenceKeys.PhotoModePreferenceKey, photo_mode_preference);
        editor.apply();
        updateForSettings();
        assertSame(mActivity.getApplicationInterface().getPhotoMode(), photo_mode);

        CameraController.Size new_size = mPreview.getCurrentPictureSize();
        Log.d(TAG, "new_size: " + new_size.width + " x " + new_size.height);
        if( expect_reduce_resolution ) {
            assertNotEquals(new_size, std_size);
            assertTrue(new_size.width*new_size.height <= max_mp);
        }
        else {
            assertEquals(new_size, std_size);
        }
        if( expect_supports_burst ) {
            assertTrue(new_size.supports_burst);
        }
        final List<CameraController.Size> all_picture_sizes_new = new ArrayList<>(mPreview.getSupportedPictureSizes(false));
        final List<CameraController.Size> picture_sizes_new = new ArrayList<>(mPreview.getSupportedPictureSizes(true));
        assertEquals(all_picture_sizes, all_picture_sizes_new);
        if( expect_reduce_resolution ) {
            assertTrue(picture_sizes_new.size() < all_picture_sizes.size());
            // check the filtered modes are a subset of all of them
            assertTrue(all_picture_sizes.containsAll(picture_sizes_new));
            // check all of the filtered modes satisfy the max_mp
            for(CameraController.Size size : picture_sizes_new) {
                assertTrue(size.width*size.height <= max_mp);
            }
        }
        else {
            assertEquals(all_picture_sizes, picture_sizes_new);
        }
        if( expect_supports_burst ) {
            // check all of the filtered modes support burst
            for(CameraController.Size size : picture_sizes_new) {
                assertTrue(size.supports_burst);
            }
        }
        // check the filtered modes include the chosen mode
        assertTrue(picture_sizes_new.contains(new_size));

        // pause and resume, check resolutions unchanged
        pauseAndResume();
        assertSame(mActivity.getApplicationInterface().getPhotoMode(), photo_mode);
        CameraController.Size new_size2 = mPreview.getCurrentPictureSize();
        assertEquals(new_size, new_size2);
        final List<CameraController.Size> all_picture_sizes_new2 = new ArrayList<>(mPreview.getSupportedPictureSizes(false));
        final List<CameraController.Size> picture_sizes_new2 = new ArrayList<>(mPreview.getSupportedPictureSizes(true));
        assertEquals(all_picture_sizes_new, all_picture_sizes_new2);
        assertEquals(picture_sizes_new, picture_sizes_new2);

        CameraController.Size change_to_size = null;
        String settings_size = "";
        if( test_change_resolution ) {
            // test changing the resolution in the new mode

            // save old resolution
            settings_size = settings.getString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), "");

            // find a different resolution
            for(CameraController.Size size : picture_sizes_new) {
                if( !size.equals(new_size) ) {
                    change_to_size = size;
                    break;
                }
            }
            assertNotNull(change_to_size);
            Log.d(TAG, "set size to " + change_to_size.width + " x " + change_to_size.height);
            settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            editor = settings.edit();
            editor.putString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), change_to_size.width + " " + change_to_size.height);
            editor.apply();
            updateForSettings();

            CameraController.Size new_size3 = mPreview.getCurrentPictureSize();
            assertEquals(change_to_size, new_size3);
            assertNotEquals(new_size, new_size3);
        }

        // switch back to STD, and check we return to the original resolution (or not, if test_change_resolution==true)
        settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        editor = settings.edit();
        editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std");
        editor.apply();
        updateForSettings();
        assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Standard);

        new_size = mPreview.getCurrentPictureSize();
        if( test_change_resolution ) {
            assertEquals(change_to_size, new_size);
        }
        else {
            assertEquals(std_size, new_size);
        }
        final List<CameraController.Size> all_picture_sizes2 = new ArrayList<>(mPreview.getSupportedPictureSizes(false));
        final List<CameraController.Size> std_picture_sizes2 = new ArrayList<>(mPreview.getSupportedPictureSizes(true));
        assertEquals(all_picture_sizes, all_picture_sizes2);
        assertEquals(all_picture_sizes, std_picture_sizes2);
        assertTrue(std_picture_sizes2.contains(new_size));

        if( test_change_resolution ) {
            // set back, so we don't confuse later parts of the test
            settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            editor = settings.edit();
            editor.putString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), settings_size);
            editor.apply();
            updateForSettings();
        }
    }

    /* Ensures that we enforce a maximum resolution correctly in some photo modes.
     */
    public void testResolutionMaxMP() {
        Log.d(TAG, "testResolutionMaxMP");

        setToDefault();

        CameraController.Size std_size = mPreview.getCurrentPictureSize();
        assertNotNull(std_size);
        Log.d(TAG, "std_size: " + std_size.width + " x " + std_size.height);

        // force a test maximum just below the current resolution, so HDR/NR must reduce it
        int max_mp = (std_size.width*std_size.height-100);
        mActivity.getApplicationInterface().test_max_mp = max_mp;
        Log.d(TAG, "test_max_mp: " + mActivity.getApplicationInterface().test_max_mp);

        if( mActivity.supportsHDR() ) {
            subTestResolutionMaxMP("preference_photo_mode_hdr", MyApplicationInterface.PhotoMode.HDR, max_mp, false, true, true);
            subTestResolutionMaxMP("preference_photo_mode_hdr", MyApplicationInterface.PhotoMode.HDR, max_mp, true, true, true);
        }
        if( mActivity.supportsNoiseReduction() ) {
            subTestResolutionMaxMP("preference_photo_mode_noise_reduction", MyApplicationInterface.PhotoMode.NoiseReduction, max_mp, false, true, true);
            subTestResolutionMaxMP("preference_photo_mode_noise_reduction",
// (continuation of testResolutionMaxMP)
            MyApplicationInterface.PhotoMode.NoiseReduction, max_mp, true, true, true);
        }
        if( mActivity.supportsDRO() ) {
            // DRO takes a single shot, so expects neither reduced resolution nor burst support
            subTestResolutionMaxMP("preference_photo_mode_dro", MyApplicationInterface.PhotoMode.DRO, max_mp, false, false, false);
            subTestResolutionMaxMP("preference_photo_mode_dro", MyApplicationInterface.PhotoMode.DRO, max_mp, true, false, false);
        }
    }

    /* Ensures that we handle correctly when the largest resolution doesn't support burst.
     */
    public void testResolutionBurst() {
        Log.d(TAG, "testResolutionBurst");

        if( !mPreview.usingCamera2API() ) {
            Log.d(TAG, "test requires camera2 api");
            return;
        }

        setToDefault();

        mPreview.test_burst_resolution = true;
        pauseAndResume(); // needed for test_burst_resolution to take effect

        CameraController.Size std_size = mPreview.getCurrentPictureSize();
        // check the test_burst_resolution flag took effect:
        assertFalse(std_size.supports_burst);

        // now find the maximum mp that supports burst
        final List<CameraController.Size> all_picture_sizes = new ArrayList<>(mPreview.getSupportedPictureSizes(false));
        int max_mp = 0;
        for(CameraController.Size size : all_picture_sizes) {
            if( size.supports_burst ) {
                int mp = size.width*size.height;
                max_mp = Math.max(max_mp, mp);
            }
        }
        Log.d(TAG, "max_mp: " + max_mp);
        assertTrue(max_mp < std_size.width*std_size.height);

        if( mActivity.supportsHDR() ) {
            subTestResolutionMaxMP("preference_photo_mode_hdr", MyApplicationInterface.PhotoMode.HDR, max_mp, false, true, true);
            subTestResolutionMaxMP("preference_photo_mode_hdr", MyApplicationInterface.PhotoMode.HDR, max_mp, true, true, true);
        }
        if( mActivity.supportsNoiseReduction() ) {
            subTestResolutionMaxMP("preference_photo_mode_noise_reduction", MyApplicationInterface.PhotoMode.NoiseReduction, max_mp, false, true, true);
            subTestResolutionMaxMP("preference_photo_mode_noise_reduction", MyApplicationInterface.PhotoMode.NoiseReduction, max_mp, true, true, true);
        }
        if( mActivity.supportsDRO() ) {
            subTestResolutionMaxMP("preference_photo_mode_dro", MyApplicationInterface.PhotoMode.DRO, max_mp, false, false, false);
            subTestResolutionMaxMP("preference_photo_mode_dro", MyApplicationInterface.PhotoMode.DRO, max_mp, true, false, false);
        }
        if( mActivity.supportsFastBurst() ) {
            subTestResolutionMaxMP("preference_photo_mode_fast_burst", MyApplicationInterface.PhotoMode.FastBurst, max_mp, false, true, true);
            subTestResolutionMaxMP("preference_photo_mode_fast_burst", MyApplicationInterface.PhotoMode.FastBurst, max_mp, true, true, true);
        }
    }

    /* Tests camera error handling.
     */
    public void testOnError() {
        Log.d(TAG, "testOnError");
        setToDefault();

        // simulate a camera error on the UI thread; the preview should release the controller
        mActivity.runOnUiThread(new Runnable() {
            public void run() {
                Log.d(TAG, "onError...");
                mPreview.getCameraController().onError();
            }
        });
        this.getInstrumentation().waitForIdleSync();
        assertNull(mPreview.getCameraController());
    }

    /* Various tests for auto-focus.
     */
    public void testAutoFocus() throws InterruptedException {
        Log.d(TAG, "testAutoFocus");
        setToDefault();

        if( !mPreview.supportsFocus() ) {
            return;
        }
        //int saved_count = mPreview.count_cameraAutoFocus;
        int saved_count = 0; // set to 0 rather than count_cameraAutoFocus, as on Galaxy Nexus, it can happen that startup autofocus has already occurred by the time we reach here
        Log.d(TAG, "saved_count: " + saved_count);
        switchToFocusValue("focus_mode_auto");

        assertFalse(mPreview.hasFocusArea());
        assertNull(mPreview.getCameraController().getFocusAreas());
        assertNull(mPreview.getCameraController().getMeteringAreas());

        Thread.sleep(2000); // wait until autofocus startup (and for toasts to clear, for Android 10+ toast behaviour)
        Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
        assertEquals(saved_count + 1, mPreview.count_cameraAutoFocus);
        assertFalse(mPreview.hasFocusArea());
// (continuation of testAutoFocus)
        assertNull(mPreview.getCameraController().getFocusAreas());
        assertNull(mPreview.getCameraController().getMeteringAreas());

        // touch to auto-focus with focus area
        saved_count = mPreview.count_cameraAutoFocus;
        Log.d(TAG, "about to touch preview to auto-focus");
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        Log.d(TAG, "done touch preview to auto-focus");
        Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
        assertEquals(saved_count + 1, mPreview.count_cameraAutoFocus);
        assertTrue(mPreview.hasFocusArea());
        assertNotNull(mPreview.getCameraController().getFocusAreas());
        assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
        assertNotNull(mPreview.getCameraController().getMeteringAreas());
        assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());

        saved_count = mPreview.count_cameraAutoFocus;
        // test selecting same mode doesn't set off an autofocus or reset the focus area
        switchToFocusValue("focus_mode_auto");
        Log.d(TAG, "3 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count);
        assertTrue(mPreview.hasFocusArea());
        assertNotNull(mPreview.getCameraController().getFocusAreas());
        assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
        assertNotNull(mPreview.getCameraController().getMeteringAreas());
        assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());

        if( mPreview.getSupportedFocusValues().contains("focus_mode_macro") ) {
            saved_count = mPreview.count_cameraAutoFocus;
            // test switching mode sets off an autofocus, and resets the focus area
            switchToFocusValue("focus_mode_macro");
            Log.d(TAG, "4 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
            assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());
        }

        saved_count = mPreview.count_cameraAutoFocus;
        // switching to focus locked shouldn't set off an autofocus
        switchToFocusValue("focus_mode_locked");
        Log.d(TAG, "5 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count);

        saved_count = mPreview.count_cameraAutoFocus;
        // touch to focus should autofocus
        Thread.sleep(2000);
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        Log.d(TAG, "6 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);

        saved_count = mPreview.count_cameraAutoFocus;
        // switching to focus continuous shouldn't set off an autofocus
        switchToFocusValue("focus_mode_continuous_picture");
        Log.d(TAG, "7 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertFalse(mPreview.isFocusWaiting());
        assertEquals(mPreview.count_cameraAutoFocus, saved_count);

        // but touch to focus should
        Thread.sleep(2000);
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        Log.d(TAG, "8 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
        assertTrue(mPreview.hasFocusArea());
        assertNotNull(mPreview.getCameraController().getFocusAreas());
        assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
        assertNotNull(mPreview.getCameraController().getMeteringAreas());
        assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());

        switchToFocusValue("focus_mode_locked"); // change to a mode that isn't auto (so that the first iteration of the next loop will set off an autofocus, due to changing the focus mode)
        List<String> supported_focus_values = mPreview.getSupportedFocusValues();
        assertNotNull(supported_focus_values);
        assertTrue( supported_focus_values.size() > 1 );
        for(String supported_focus_value : supported_focus_values) {
            Log.d(TAG, "supported_focus_value: " + supported_focus_value);
            saved_count = mPreview.count_cameraAutoFocus;
            Log.d(TAG, "saved autofocus count: " + saved_count);
            //View focusModeButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode);
            //clickView(focusModeButton);
            switchToFocusValue(supported_focus_value);
            // test that switching focus mode resets the focus area
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());
            // test that switching focus mode sets off an autofocus in focus auto or macro mode
            String focus_value = mPreview.getCameraController().getFocusValue();
            Log.d(TAG, "changed focus_value to: "+ focus_value);
            Log.d(TAG, "count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
            if( focus_value.equals("focus_mode_auto") || focus_value.equals("focus_mode_macro") ) {
                assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
            }
            else {
                assertFalse(mPreview.isFocusWaiting());
                assertEquals(mPreview.count_cameraAutoFocus, saved_count);
            }

            // test that touch to auto-focus region only works in focus auto, macro or continuous mode, and that we set off an autofocus for focus auto and macro
            // test that touch to set metering area works in any focus mode
            saved_count = mPreview.count_cameraAutoFocus;
            Thread.sleep(2000);
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            Log.d(TAG, "count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
            if( focus_value.equals("focus_mode_auto") || focus_value.equals("focus_mode_macro") || focus_value.equals("focus_mode_continuous_picture") || focus_value.equals("focus_mode_continuous_video") ) {
                if( focus_value.equals("focus_mode_continuous_picture") || focus_value.equals("focus_mode_continuous_video") ) {
                    assertFalse(mPreview.isFocusWaiting());
                    assertEquals(mPreview.count_cameraAutoFocus, saved_count);
                }
                else {
                    assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
                }
                assertTrue(mPreview.hasFocusArea());
                assertNotNull(mPreview.getCameraController().getFocusAreas());
                assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
                assertNotNull(mPreview.getCameraController().getMeteringAreas());
                assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());
            }
            else {
                assertEquals(mPreview.count_cameraAutoFocus, saved_count);
                assertFalse(mPreview.hasFocusArea());
                assertNull(mPreview.getCameraController().getFocusAreas());
                assertNotNull(mPreview.getCameraController().getMeteringAreas());
                assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());
            }
            // also check that focus mode is unchanged
            assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);
            if( focus_value.equals("focus_mode_auto") ) {
                break;
            }
        }
    }

    /* Test we do startup autofocus as expected depending on focus mode.
*/
    public void testStartupAutoFocus() throws InterruptedException {
        Log.d(TAG, "testStartupAutoFocus");
        setToDefault();

        if( !mPreview.supportsFocus() ) {
            return;
        }
        //int saved_count = mPreview.count_cameraAutoFocus;
        int saved_count = 0; // set to 0 rather than count_cameraAutoFocus, as on Galaxy Nexus, it can happen that startup autofocus has already occurred by the time we reach here
        Log.d(TAG, "saved_count: " + saved_count);
        switchToFocusValue("focus_mode_auto");

        Thread.sleep(1000);
        Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);

        // auto mode: restart should trigger a startup autofocus
        restart();
        //saved_count = mPreview.count_cameraAutoFocus;
        saved_count = 0;
        Log.d(TAG, "saved_count: " + saved_count);
        Thread.sleep(1000);
        Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);

        // infinity mode: no startup autofocus expected
        if( mPreview.getSupportedFocusValues().contains("focus_mode_infinity") ) {
            switchToFocusValue("focus_mode_infinity");
            restart();
            //saved_count = mPreview.count_cameraAutoFocus;
            saved_count = 0;
            Log.d(TAG, "saved_count: " + saved_count);
            Thread.sleep(1000);
            Log.d(TAG, "3 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
            assertEquals(mPreview.count_cameraAutoFocus, saved_count);
        }

        // macro mode: startup autofocus expected
        if( mPreview.getSupportedFocusValues().contains("focus_mode_macro") ) {
            switchToFocusValue("focus_mode_macro");
            restart();
            //saved_count = mPreview.count_cameraAutoFocus;
            saved_count = 0;
            Log.d(TAG, "saved_count: " + saved_count);
            Thread.sleep(1000);
            Log.d(TAG, "4 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
            assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
        }

        // locked mode: startup autofocus expected
        if( mPreview.getSupportedFocusValues().contains("focus_mode_locked") ) {
            switchToFocusValue("focus_mode_locked");
            restart();
            //saved_count = mPreview.count_cameraAutoFocus;
            saved_count = 0;
            Log.d(TAG, "saved_count: " + saved_count);
            Thread.sleep(1000);
            Log.d(TAG, "5 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
            assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
        }

        // continuous mode: no startup autofocus expected
        if( mPreview.getSupportedFocusValues().contains("focus_mode_continuous_picture") ) {
            switchToFocusValue("focus_mode_continuous_picture");
            restart();
            //saved_count = mPreview.count_cameraAutoFocus;
            saved_count = 0;
            Log.d(TAG, "saved_count: " + saved_count);
            Thread.sleep(1000);
            Log.d(TAG, "6 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus + " compare to saved_count: " + saved_count);
            assertEquals(mPreview.count_cameraAutoFocus, saved_count);
        }
    }

    /* Test doing touch to auto-focus region by swiping to all four corners works okay.
     * Update: now only do one corner, due to complications with different orientations and devices.
+ */ + public void testAutoFocusCorners() { + Log.d(TAG, "testAutoFocusCorners"); + setToDefault(); + { + // icons along top mode interferes with doing the touch at corners (e.g., on Galaxy Nexus) + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.UIPlacementPreferenceKey, "ui_right"); + editor.apply(); + updateForSettings(); + } + + if( !mPreview.supportsFocus() ) { + return; + } + + int [] gui_location = new int[2]; + mPreview.getView().getLocationOnScreen(gui_location); + Log.d(TAG, "gui_location: " + Arrays.toString(gui_location)); + final int step_dist_c = 2; + final float scale = mActivity.getResources().getDisplayMetrics().density; + final int offset_dist_c = (int) (80 * scale + 0.5f); // convert dps to pixels + final int large_step_dist_c = (int) (80 * scale + 0.5f); // convert dps to pixels + final int step_count_c = 10; + int width = mPreview.getView().getWidth(); + int height = mPreview.getView().getHeight(); + Log.d(TAG, "preview size: " + width + " x " + height); + + assertFalse(mPreview.hasFocusArea()); + assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + + Log.d(TAG, "top-left"); + TouchUtils.drag(MainActivityTest.this, gui_location[0] + offset_dist_c + step_dist_c, gui_location[0] + offset_dist_c, gui_location[1] + offset_dist_c + step_dist_c, gui_location[1] + offset_dist_c, step_count_c); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + + mPreview.clearFocusAreas(); + assertFalse(mPreview.hasFocusArea()); + assertNull(mPreview.getCameraController().getFocusAreas()); + 
assertNull(mPreview.getCameraController().getMeteringAreas()); + + // skip top-right, as in portrait orientation we'd conflict with settings button + /*Log.d(TAG, "top-right"); + TouchUtils.drag(MainActivityTest.this, gui_location[0]+width-1-large_step_dist_c, gui_location[0]+width-1, gui_location[1]+large_step_dist_c, gui_location[1], step_count_c); + assertFalse(mPreview.hasFocusArea()); + assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas());*/ + + // skip bottom right, conflicts with zoom on various devices + // but note in portrait mode, this is bottom-left that we need to skip + // update: and in portrait mode, that now conflicts with settings, so we just skip this too + + /*if( mActivity.getSystemOrientation() == MainActivity.SystemOrientation.PORTRAIT ) { + Log.d(TAG, "bottom-right"); + TouchUtils.drag(MainActivityTest.this, gui_location[0]+width-1-step_dist_c, gui_location[0]+width-1, gui_location[1]+height-1-step_dist_c, gui_location[1]+height-1, step_count_c); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + } + else { + Log.d(TAG, "bottom-left"); + TouchUtils.drag(MainActivityTest.this, gui_location[0]+step_dist_c, gui_location[0], gui_location[1]+height-1-step_dist_c, gui_location[1]+height-1, step_count_c); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + }*/ + + mPreview.clearFocusAreas(); + assertFalse(mPreview.hasFocusArea()); + 
assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + + } + + /* Test face detection, and that we don't get the focus/metering areas set. + */ + public void testFaceDetection() throws InterruptedException { + Log.d(TAG, "testFaceDetection"); + setToDefault(); + + View faceDetectionButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.face_detection); + assertEquals(faceDetectionButton.getVisibility(), View.GONE); + + if( !mPreview.supportsFaceDetection() ) { + Log.d(TAG, "face detection not supported"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.FaceDetectionPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertEquals(faceDetectionButton.getVisibility(), View.GONE); + + int saved_count; + Log.d(TAG, "0 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + /* + // autofocus shouldn't be immediately, but after a delay + saved_count = mPreview.count_cameraAutoFocus; + Thread.sleep(1000); + Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertTrue(mPreview.count_cameraAutoFocus == saved_count+1); + */ + Thread.sleep(2000); + assertFalse(mPreview.hasFocusArea()); + assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + // check face detection already started + assertFalse( mPreview.getCameraController().startFaceDetection() ); + + // touch to auto-focus with focus area + saved_count = mPreview.count_cameraAutoFocus; + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); // for autofocus + assertFalse(mPreview.hasFocusArea()); + 
assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + // check face detection already started + assertFalse( mPreview.getCameraController().startFaceDetection() ); + + // return to back camera + switchToCamera(cameraId); + } + + // test show face detection icon + + editor.putBoolean(PreferenceKeys.ShowFaceDetectionPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertEquals(faceDetectionButton.getVisibility(), View.VISIBLE); + assertEquals(faceDetectionButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.face_detection_disable)); + + // check face detection already started + assertFalse( mPreview.getCameraController().startFaceDetection() ); + + // restart and check still enabled + restart(); + faceDetectionButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.face_detection); + + assertEquals(faceDetectionButton.getVisibility(), View.VISIBLE); + assertEquals(faceDetectionButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.face_detection_disable)); + + clickView(faceDetectionButton); + waitUntilCameraOpened(); + assertFalse( settings.getBoolean(PreferenceKeys.FaceDetectionPreferenceKey, false) ); + assertEquals(faceDetectionButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.face_detection_enable)); + + // check face detection not already started + assertTrue( mPreview.getCameraController().startFaceDetection() ); + + assertTrue( mPreview.getCameraController() == null || mPreview.getCameraController().count_camera_parameters_exception == 0 ); + } 
+ + private void subTestPopupButtonAvailability(String test_key, String option, boolean expected) { + Log.d(TAG, "test_key: "+ test_key); + Log.d(TAG, "option: "+ option); + Log.d(TAG, "expected?: "+ expected); + View button = mActivity.getUIButton(test_key + "_" + option); + if( expected ) { + boolean is_video = mPreview.isVideo(); + if( option.equals("focus_mode_continuous_picture") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else if( option.equals("focus_mode_continuous_video") && !is_video ) { + // not allowed in picture mode + assertNull(button); + } + else if( option.equals("flash_auto") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else if( option.equals("flash_on") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else if( option.equals("flash_red_eye") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else if( option.equals("flash_frontscreen_auto") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else if( option.equals("flash_frontscreen_on") && is_video ) { + // not allowed in video mode + assertNull(button); + } + else { + assertNotNull(button); + } + } + else { + Log.d(TAG, "option? "+ option); + Log.d(TAG, "button? 
"+ button); + assertNull(button); + } + } + + private void subTestPopupButtonAvailability(String test_key, String option, List options) { + subTestPopupButtonAvailability(test_key, option, options != null && options.contains(option)); + } + + private void subTestPopupButtonAvailability(String option, boolean expected) { + View button = mActivity.getUIButton(option); + if( expected ) { + assertNotNull(button); + } + else { + assertNull(button); + } + } + + private void subTestPopupButtonAvailability() { + List supported_flash_values = mPreview.getSupportedFlashValues(); + Log.d(TAG, "supported_flash_values: "+ supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_off", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_auto", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_on", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_torch", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_red_eye", supported_flash_values); + List supported_focus_values = mPreview.getSupportedFocusValues(); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_auto", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_locked", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_infinity", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_macro", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_fixed", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_edof", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_continuous_picture", supported_focus_values); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_continuous_video", supported_focus_values); + subTestPopupButtonAvailability("TEST_WHITE_BALANCE", mPreview.getSupportedWhiteBalances() != 
null); + subTestPopupButtonAvailability("TEST_SCENE_MODE", mPreview.getSupportedSceneModes() != null); + subTestPopupButtonAvailability("TEST_COLOR_EFFECT", mPreview.getSupportedColorEffects() != null); + } + + private void subTestFocusFlashAvailability() { + //View focusModeButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + /*boolean focus_visible = focusModeButton.getVisibility() == View.VISIBLE; + Log.d(TAG, "focus_visible? "+ focus_visible); + boolean flash_visible = flashButton.getVisibility() == View.VISIBLE; + Log.d(TAG, "flash_visible? "+ flash_visible);*/ + boolean exposure_visible = exposureButton.getVisibility() == View.VISIBLE; + Log.d(TAG, "exposure_visible? "+ exposure_visible); + boolean exposure_lock_visible = exposureLockButton.getVisibility() == View.VISIBLE; + Log.d(TAG, "exposure_lock_visible? "+ exposure_lock_visible); + boolean popup_visible = popupButton.getVisibility() == View.VISIBLE; + Log.d(TAG, "popup_visible? "+ popup_visible); + boolean has_focus = mPreview.supportsFocus(); + Log.d(TAG, "has_focus? "+ has_focus); + boolean has_flash = mPreview.supportsFlash(); + Log.d(TAG, "has_flash? "+ has_flash); + boolean has_exposure = mPreview.supportsExposures(); + Log.d(TAG, "has_exposure? "+ has_exposure); + boolean has_exposure_lock = mPreview.supportsExposureLock(); + Log.d(TAG, "has_exposure_lock? 
"+ has_exposure_lock); + //assertTrue(has_focus == focus_visible); + //assertTrue(has_flash == flash_visible); + assertEquals(has_exposure, exposure_visible); + assertEquals(has_exposure_lock, exposure_lock_visible); + assertTrue(popup_visible); + + openPopupMenu(); + subTestPopupButtonAvailability(); + } + + /* + * For each camera, test that visibility of flash and focus etc buttons matches the availability of those camera parameters. + * Added to guard against a bug where on Nexus 7, the flash and focus buttons were made visible by showGUI, even though they aren't supported by Nexus 7 front camera. + */ + public void testFocusFlashAvailability() { + Log.d(TAG, "testFocusFlashAvailability"); + setToDefault(); + + subTestFocusFlashAvailability(); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + int cameraId = mPreview.getCameraId(); + Log.d(TAG, "cameraId? "+ cameraId); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + //mActivity.clickedSwitchCamera(switchCameraButton); + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + Log.d(TAG, "new_cameraId? "+ new_cameraId); + assertTrue(cameraId != new_cameraId); + + subTestFocusFlashAvailability(); + } + } + + private void subTestPopupButtonContentDescription(int title_id, String test_key, boolean next, boolean expected) { + Log.d(TAG, "subTestPopupButtonContentDescription"); + String title = mActivity.getResources().getString(title_id); + Log.d(TAG, "title: " + title); + Log.d(TAG, "test_key: " + test_key); + Log.d(TAG, "next: " + next); + Log.d(TAG, "expected: " + expected); + View main_button = mActivity.getUIButton(test_key); + assertNotNull(main_button); + View button = mActivity.getUIButton(test_key + (next ? 
"_NEXT" : "_PREV")); + if( expected ) { + assertNotNull(button); + } + if( button != null ) { + assertNotNull(button.getContentDescription()); + String content_description = button.getContentDescription().toString(); + assertFalse(content_description.isEmpty()); + String next_string = mActivity.getResources().getString(next ? net.sourceforge.opencamera.R.string.next : net.sourceforge.opencamera.R.string.previous); + assertFalse(next_string.isEmpty()); + assertTrue(content_description.startsWith(next_string + " " + title)); + } + else { + Log.d(TAG, "no button found"); + } + } + + /* Tests switching to/from video mode, for front and back cameras, and tests the focus mode changes as expected. + * If this test fails with nullpointerexception on preview.getCameraController() after switching to video mode, check + * that record audio permission is granted! + * Also tests content descriptions of <> buttons on the popup menu. + */ + public void testSwitchVideo() { + Log.d(TAG, "testSwitchVideo"); + + setToDefault(); + assertFalse(mPreview.isVideo()); + String photo_focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "picture photo_focus_value: "+ photo_focus_value); + + openPopupMenu(); + + // test popup buttons for photo mode: + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_resolution, "PHOTO_RESOLUTIONS", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_resolution, "PHOTO_RESOLUTIONS", true, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, 
"REPEAT_MODE", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", true, true); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + String focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "video focus_value: "+ focus_value); + if( mPreview.supportsFocus() ) { + assertEquals("focus_mode_continuous_video", focus_value); + } + + // test popup buttons for video mode: + openPopupMenu(); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.video_quality, "VIDEO_RESOLUTIONS", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.video_quality, "VIDEO_RESOLUTIONS", true, false); + if( mActivity.getApplicationInterface().getSupportedVideoCaptureRates().size() > 1 ) { + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_video_capture_rate, "VIDEOCAPTURERATE", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_video_capture_rate, "VIDEOCAPTURERATE", true, true); + } + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, 
"GRID", true, true); + + int saved_count = mPreview.count_cameraAutoFocus; + Log.d(TAG, "0 count_cameraAutoFocus: " + saved_count); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "picture focus_value: "+ focus_value); + if( mPreview.supportsFocus() ) { + assertEquals(focus_value, photo_focus_value); + // check that this doesn't cause an autofocus + assertFalse(mPreview.isFocusWaiting()); + Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count); + } + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId != new_cameraId); + // n.b., front camera default photo focus value not necessarily same as back camera, if they have different focus modes + photo_focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "front picture photo_focus_value: "+ photo_focus_value); + + // test popup buttons for photo mode: + openPopupMenu(); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_resolution, "PHOTO_RESOLUTIONS", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_resolution, "PHOTO_RESOLUTIONS", true, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", false, false); + 
subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", true, true); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "front video focus_value: "+ focus_value); + if( mPreview.supportsFocus() ) { + assertEquals("focus_mode_continuous_video", focus_value); + } + + // test popup buttons for video mode: + openPopupMenu(); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.video_quality, "VIDEO_RESOLUTIONS", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.video_quality, "VIDEO_RESOLUTIONS", true, false); + if( mActivity.getApplicationInterface().getSupportedVideoCaptureRates().size() > 1 ) { + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_video_capture_rate, "VIDEOCAPTURERATE", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_video_capture_rate, "VIDEOCAPTURERATE", true, true); + } + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_timer, "TIMER", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", false, false); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.preference_burst_mode, "REPEAT_MODE", true, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, "GRID", false, true); + subTestPopupButtonContentDescription(net.sourceforge.opencamera.R.string.grid, 
"GRID", true, true); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "front picture focus_value: "+ focus_value); + if( mPreview.supportsFocus() ) { + assertEquals(focus_value, photo_focus_value); + } + + // now switch back + switchToCamera(cameraId); + } + + if( mPreview.supportsFocus() ) { + // now test we remember the focus mode for photo and video + + switchToFocusValue("focus_mode_continuous_picture"); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "video focus_value: "+ focus_value); + assertEquals("focus_mode_continuous_video", focus_value); + + String non_default_focus = getNonDefaultFocus(); + switchToFocusValue(non_default_focus); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "picture focus_value: "+ focus_value); + assertEquals("focus_mode_continuous_picture", focus_value); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + focus_value = mPreview.getCameraController().getFocusValue(); + if( non_default_focus.equals("focus_mode_infinity") && focus_value.equals("focus_mode_manual2") ) { + // for Camera2, focus_mode_infinity is represented as focus_mode_manual2 + focus_value = "focus_mode_infinity"; + } + Log.d(TAG, "video focus_value: "+ focus_value); + assertEquals(non_default_focus, focus_value); + } + } + + /* Tests continuous picture focus, including switching to video and back. + * Tends to fail on Galaxy Nexus, where the continuous picture focusing doesn't happen too often. 
+ */ + public void testContinuousPictureFocus() throws InterruptedException { + Log.d(TAG, "testContinuousPictureFocus"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + // first switch to auto-focus (if we're already in continuous picture mode, we might have already done the continuous focus moving - although also see note below) + switchToFocusValue("focus_mode_auto"); + pauseAndResume(); + switchToFocusValue("focus_mode_continuous_picture"); + + // check continuous focus is working + int saved_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + Thread.sleep(2000); // n.b., Galaxy S10e seems to need longer delay than other devices for continuous focus to occur + int new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + Log.d(TAG, "count_cameraContinuousFocusMoving compare saved: "+ saved_count_cameraContinuousFocusMoving + " to new: " + new_count_cameraContinuousFocusMoving); + assertEquals(0, mPreview.getCameraController().test_af_state_null_focus); + // allow for new_count_cameraContinuousFocusMoving > 0 as some devices like OnePlus Pad won't repeat the continuous focus (even when changing focus modes), unless necessary due to scene changing + assertTrue( new_count_cameraContinuousFocusMoving > saved_count_cameraContinuousFocusMoving || new_count_cameraContinuousFocusMoving > 0 ); + + // switch to video + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + String focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "video focus_value: "+ focus_value); + assertEquals("focus_mode_continuous_video", focus_value); + + saved_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + + // switch to photo + clickView(switchVideoButton); + Log.d(TAG, "count_cameraContinuousFocusMoving after clicking to switch to video: "+ 
mPreview.count_cameraContinuousFocusMoving); + waitUntilCameraOpened(); + Log.d(TAG, "count_cameraContinuousFocusMoving after waiting for camera to open: "+ mPreview.count_cameraContinuousFocusMoving); + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "video focus_value: "+ focus_value); + assertEquals("focus_mode_continuous_picture", focus_value); + + // check continuous focus is working + Thread.sleep(3000); + new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + Log.d(TAG, "count_cameraContinuousFocusMoving compare saved: "+ saved_count_cameraContinuousFocusMoving + " to new: " + new_count_cameraContinuousFocusMoving); + assertEquals(0, mPreview.getCameraController().test_af_state_null_focus); + assertTrue( new_count_cameraContinuousFocusMoving > saved_count_cameraContinuousFocusMoving ); + } + + /* Tests everything works okay if starting in continuous video focus mode when in photo mode, including opening popup, and switching to video and back. + * This shouldn't be possible normal, but could happen if a user is upgrading from version 1.28 or earlier, to version 1.29 or later. + */ + public void testContinuousVideoFocusForPhoto() throws InterruptedException { + Log.d(TAG, "testContinuousVideoFocusForPhoto"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getFocusPreferenceKey(mPreview.getCameraId(), false), "focus_mode_continuous_video"); + editor.apply(); + restart(); + + Thread.sleep(1000); + + openPopupMenu(); + + Thread.sleep(1000); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + } + + /** Return the number of files in the save folder. 0 will be returned if the folder doesn't + * exist. 
+ */ + private int getNFiles() { + //File folder = mActivity.getImageFolder(); + //File [] files = folder.listFiles(); + String [] files = filesInSaveFolder(); + Log.d(TAG, "getNFiles: " + Arrays.toString(files)); + return files == null ? 0 : files.length; + } + + private void subTestContinuousPictureFocusRepeat() throws InterruptedException { + Log.d(TAG, "subTestContinuousPictureFocusRepeat"); + + if( !mPreview.supportsFocus() ) { + return; + } + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RepeatModePreferenceKey, "3"); + editor.apply(); + } + switchToFocusValue("focus_mode_continuous_picture"); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + assertEquals(0, mPreview.count_cameraTakePicture); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + assertFalse(mPreview.isOnTimer()); + + // wait until photos taken + // wait, and test that we've taken the photos by then + long time_s = System.currentTimeMillis(); + while( mPreview.count_cameraTakePicture < 3 ) { + assertTrue( System.currentTimeMillis() - time_s < 20000 ); + } + Thread.sleep(2000); // allow pictures to save + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + Log.d(TAG, "count_cameraTakePicture: " + mPreview.count_cameraTakePicture); + assertEquals(3, mPreview.count_cameraTakePicture); + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(3, n_new_files); + } + + /* Tests continuous picture focus with repeat mode. 
 */
// Driver: runs the repeat-mode continuous-focus test with default settings.
public void testContinuousPictureFocusRepeat() throws InterruptedException {
    Log.d(TAG, "testContinuousPictureFocusRepeat");

    setToDefault();

    subTestContinuousPictureFocusRepeat();
}

/* As testContinuousPictureFocusRepeat, but with test_wait_capture_result flag set.
 */
public void testContinuousPictureFocusRepeatWaitCaptureResult() throws InterruptedException {
    Log.d(TAG, "testContinuousPictureFocusRepeatWaitCaptureResult");

    setToDefault();

    // flag makes the camera controller wait for capture results before completing a photo
    mPreview.getCameraController().test_wait_capture_result = true;
    subTestContinuousPictureFocusRepeat();
}

/* Test for continuous picture photo mode.
 * Touch, wait 8s, check that continuous focus mode has resumed, then take photo.
 */
public void testContinuousPicture1() throws InterruptedException {
    Log.d(TAG, "testContinuousPicture1");

    setToDefault();

    if( !mPreview.supportsFocus() ) {
        return;
    }

    switchToFocusValue("focus_mode_continuous_picture");

    String focus_value = "focus_mode_continuous_picture";
    String focus_value_ui = "focus_mode_continuous_picture";

    // count initial files in folder
    int n_files = getNFiles();
    Log.d(TAG, "n_files at start: " + n_files);

    Thread.sleep(1000);
    assertEquals(0, mPreview.count_cameraTakePicture);
    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);

    Log.d(TAG, "about to click preview for autofocus");
    int saved_count = mPreview.count_cameraAutoFocus;
    Thread.sleep(1000); // needed for Galaxy S10e for the touch to register
    TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
    Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
    // touching the preview should trigger exactly one autofocus
    assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
    assertEquals("focus_mode_continuous_picture", mPreview.getCurrentFocusValue());
    assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue());

    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    if( focus_value.equals("focus_mode_continuous_picture") )
        assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch
    else
        assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);

    // after 8s the controller should have dropped back to continuous focus
    Thread.sleep(8000);
    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);

    View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
    Log.d(TAG, "about to click take photo");
    clickView(takePhotoButton);
    Log.d(TAG, "done clicking take photo");

    Log.d(TAG, "wait until finished taking photo");
    waitForTakePhoto();
    Log.d(TAG, "done taking photo");
    this.getInstrumentation().waitForIdleSync();
    Log.d(TAG, "after idle sync");

    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);
    assertEquals(1, mPreview.count_cameraTakePicture);
    mActivity.waitUntilImageQueueEmpty();

    // exactly one new file should have been saved
    int n_new_files = getNFiles() - n_files;
    Log.d(TAG, "n_new_files: " + n_new_files);
    assertEquals(1, n_new_files);
}

/* Test for continuous picture photo mode.
 * Touch, wait 1s, check that continuous focus mode hasn't resumed, then take photo, then check continuous focus mode has resumed.
+ */ + public void testContinuousPicture2() throws InterruptedException { + Log.d(TAG, "testContinuousPicture1"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + switchToFocusValue("focus_mode_continuous_picture"); + + String focus_value = "focus_mode_continuous_picture"; + String focus_value_ui = "focus_mode_continuous_picture"; + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + Thread.sleep(1000); + assertEquals(0, mPreview.count_cameraTakePicture); + assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui); + assertEquals(mPreview.getCameraController().getFocusValue(), focus_value); + + Log.d(TAG, "about to click preview for autofocus"); + int saved_count = mPreview.count_cameraAutoFocus; + Thread.sleep(1000); // needed for Galaxy S10e for the touch to register + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + assertEquals("focus_mode_continuous_picture", mPreview.getCurrentFocusValue()); + assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); + assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui); + if( focus_value.equals("focus_mode_continuous_picture") ) + assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch + else + assertEquals(mPreview.getCameraController().getFocusValue(), focus_value); + + int saved_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + + Thread.sleep(1000); + assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui); + if( focus_value.equals("focus_mode_continuous_picture") ) + assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode 
switches to auto focus on touch + else + assertEquals(mPreview.getCameraController().getFocusValue(), focus_value); + int new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving; + assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving); + Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui); + assertEquals(mPreview.getCameraController().getFocusValue(), focus_value); + assertEquals(1, mPreview.count_cameraTakePicture); + Log.d(TAG, "3 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + mActivity.waitUntilImageQueueEmpty(); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(1, n_new_files); + } + + /* Test for continuous picture photo mode. + * Touch repeatedly with 1s delays for 8 times, make sure continuous focus mode hasn't resumed. + * Then wait 5s, and check continuous focus mode has resumed. 
 */
public void testContinuousPictureRepeatTouch() throws InterruptedException {
    Log.d(TAG, "testContinuousPictureRepeatTouch");

    setToDefault();

    if( !mPreview.supportsFocus() ) {
        return;
    }

    switchToFocusValue("focus_mode_continuous_picture");

    String focus_value = "focus_mode_continuous_picture";
    String focus_value_ui = "focus_mode_continuous_picture";

    Thread.sleep(1000);
    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);

    // touch the preview 8 times with ~1s between touches; each touch must trigger exactly
    // one autofocus, and continuous focus must stay suspended throughout
    for(int i=0;i<8;i++) {
        Log.d(TAG, "about to click preview for autofocus: " + i);
        int saved_count = mPreview.count_cameraAutoFocus;
        Thread.sleep(1000); // needed for Galaxy S10e for the touch to register
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
        assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
        int saved_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
        Thread.sleep(1000);

        assertEquals("focus_mode_continuous_picture", mPreview.getCurrentFocusValue());
        assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue());
        assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
        if( focus_value.equals("focus_mode_continuous_picture") )
            assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch
        else
            assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);
        // no continuous-focus-moving callbacks should have occurred whilst suspended
        int new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
        assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving);
    }

    // after leaving the preview untouched, continuous focus mode should resume,
    // without any further autofocus being triggered
    int saved_count = mPreview.count_cameraAutoFocus;
    Thread.sleep(6000);
    assertEquals(focus_value_ui, mPreview.getCurrentFocusValue());
    assertEquals(focus_value, mPreview.getCameraController().getFocusValue());
    Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
    assertEquals(mPreview.count_cameraAutoFocus, saved_count);
}

/* Test for continuous picture photo mode.
 * Touch, then after 1s switch to focus auto in UI, wait 8s, ensure still in autofocus mode.
 */
public void testContinuousPictureSwitchAuto() throws InterruptedException {
    Log.d(TAG, "testContinuousPictureSwitchAuto");

    setToDefault();

    if( !mPreview.supportsFocus() ) {
        return;
    }

    switchToFocusValue("focus_mode_continuous_picture");

    String focus_value = "focus_mode_continuous_picture";
    String focus_value_ui = "focus_mode_continuous_picture";

    Thread.sleep(1000);
    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);

    Log.d(TAG, "about to click preview for autofocus");
    int saved_count = mPreview.count_cameraAutoFocus;
    Thread.sleep(2000); // needed for Galaxy S10e for the touch to register
    TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
    this.getInstrumentation().waitForIdleSync();
    Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus);
    assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1);
    int saved_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
    Thread.sleep(1000);

    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    if( focus_value.equals("focus_mode_continuous_picture") )
        assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch
    else
        assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);
    int new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
    assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving);

    Thread.sleep(1000);
    assertEquals(mPreview.getCurrentFocusValue(), focus_value_ui);
    if( focus_value.equals("focus_mode_continuous_picture") )
        assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue()); // continuous focus mode switches to auto focus on touch
    else
        assertEquals(mPreview.getCameraController().getFocusValue(), focus_value);
    new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
    assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving);

    // explicitly switch to auto focus in the UI whilst continuous focus is suspended
    switchToFocusValue("focus_mode_auto");
    assertEquals("focus_mode_auto", mPreview.getCurrentFocusValue());
    assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue());
    new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
    assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving);

    // even after the 8s resume window, we must remain in auto focus (not revert to continuous)
    Thread.sleep(8000);
    assertEquals("focus_mode_auto", mPreview.getCurrentFocusValue());
    assertEquals("focus_mode_auto", mPreview.getCameraController().getFocusValue());
    new_count_cameraContinuousFocusMoving = mPreview.count_cameraContinuousFocusMoving;
    assertEquals(new_count_cameraContinuousFocusMoving, saved_count_cameraContinuousFocusMoving);
}

/* Test for taking HDR photo then going to background[, also tests notifications].
 * [Note test is unstable on Android emulator when testing for the notification, unclear why.]
 */
public void testPhotoBackgroundHDR() throws InterruptedException {
    Log.d(TAG, "testPhotoBackgroundHDR");

    setToDefault();

    if( !mActivity.supportsHDR() ) {
        return;
    }

    SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
    SharedPreferences.Editor editor = settings.edit();
    editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr");
    editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); // also set auto-stabilise so we have a photo that takes longer to process
    editor.apply();
    updateForSettings();

    assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR);

    // count initial files in folder
    int n_files = getNFiles();
    Log.d(TAG, "n_files at start: " + n_files);

    Thread.sleep(1000);
    assertEquals(0, mPreview.count_cameraTakePicture);

    View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
    Log.d(TAG, "about to click take photo");
    clickView(takePhotoButton);
    Log.d(TAG, "done clicking take photo");

    Log.d(TAG, "wait until finished taking photo");
    waitForTakePhoto();
    Log.d(TAG, "done taking photo");
    this.getInstrumentation().waitForIdleSync();
    Log.d(TAG, "after idle sync");

    // go to background after a short pause
    Thread.sleep(500);
    //assertFalse(mActivity.testHasNotification());
    mActivity.runOnUiThread(new Runnable() {
        public void run() {
            Log.d(TAG, "pause...");
            getInstrumentation().callActivityOnPause(mActivity);
            Log.d(TAG, "done pause");
        }
    });
    this.getInstrumentation().waitForIdleSync();

    assertEquals(1, mPreview.count_cameraTakePicture);
    /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
        assertTrue(mActivity.testHasNotification());
    }*/
    // image saving should still complete even though the activity is paused
    mActivity.waitUntilImageQueueEmpty();
    this.getInstrumentation().waitForIdleSync();
    //assertFalse(mActivity.testHasNotification());

    int n_new_files = getNFiles() - n_files;
    Log.d(TAG, "n_new_files: " + n_new_files);
    assertEquals(1, n_new_files);
}

/* Start in photo mode with auto focus:
 * - go to video mode
 * - then switch to front camera
 * - then switch back to photo mode
 * - then go to back camera
 * Check focus mode has returned to auto.
 * This test is important when front camera doesn't support focus modes, but back camera does - we won't be able to reset to auto focus for the front camera, but need to do so when returning to back camera
 */
public void testFocusSwitchVideoSwitchCameras() {
    Log.d(TAG, "testFocusSwitchVideoSwitchCameras");

    setToDefault();

    if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) {
        return;
    }

    if( !mPreview.supportsFocus() ) {
        return;
    }

    int cameraId = mPreview.getCameraId();

    switchToFocusValue("focus_mode_auto");

    View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
    clickView(switchVideoButton);
    waitUntilCameraOpened();
    String focus_value = mPreview.getCameraController().getFocusValue();
    Log.d(TAG, "video focus_value: "+ focus_value);
    assertEquals("focus_mode_continuous_video", focus_value);

    View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
    clickView(switchCameraButton);
    waitUntilCameraOpened();
    // camera becomes invalid when switching cameras
    focus_value = mPreview.getCameraController().getFocusValue();
    Log.d(TAG, "front video focus_value: "+ focus_value);
    // don't care when focus mode is for front camera (focus may not be supported for front camera)

    clickView(switchVideoButton);
    waitUntilCameraOpened();
    focus_value = mPreview.getCameraController().getFocusValue();
    Log.d(TAG, "front focus_value: "+ focus_value);
    // don't care when focus mode is for front camera (focus may not be supported for front camera)

    switchToCamera(cameraId);

    // back on the original camera, focus must have been restored to auto
    focus_value = mPreview.getCameraController().getFocusValue();
    Log.d(TAG, "end focus_value: "+ focus_value);
    assertEquals("focus_mode_auto", focus_value);
}

/* Start in photo mode with non-default focus mode:
 * - switch to front camera
 * - switch to back camera
 * Check focus mode is still what we set.
 * This test is important when front camera doesn't support focus modes, but back camera does - need to remain in same focus mode for the back camera.
 */
public void testFocusRemainMacroSwitchCamera() {
    Log.d(TAG, "testFocusRemainMacroSwitchCamera");

    setToDefault();

    if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) {
        return;
    }

    if( !mPreview.supportsFocus() ) {
        return;
    }

    String non_default_focus_mode = getNonDefaultFocus();
    switchToFocusValue(non_default_focus_mode);

    View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
    // n.b., switch to front then to back
    int cameraId = mPreview.getCameraId();
    clickView(switchCameraButton);
    waitUntilCameraOpened();
    switchToCamera(cameraId);

    // focus mode set before the round-trip must have been preserved
    String focus_value = mPreview.getCameraController().getFocusValue();
    Log.d(TAG, "focus_value: "+ focus_value);
    assertEquals(focus_value, non_default_focus_mode);
}

/* Start in photo mode with focus auto:
 * - switch to video mode
 * - switch to non-default focus mode
 * - switch to picture mode
 * Check focus mode is now auto.
 * As of 1.26, we now remember the focus mode for photos.
+ */ + public void testFocusRemainMacroSwitchPhoto() { + Log.d(TAG, "testFocusRemainMacroSwitchPhoto"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + switchToFocusValue("focus_mode_auto"); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + String focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "focus_value after switching to video mode: "+ focus_value); + assertEquals("focus_mode_continuous_video", focus_value); + + String non_default_focus_mode = getNonDefaultFocus(); + switchToFocusValue(non_default_focus_mode); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "focus_value after switching to picture mode: " + focus_value); + assertEquals("focus_mode_auto", focus_value); + } + + /* Start in photo mode with focus auto: + * - switch to non-default focus mode + * - switch to video mode + * - switch to picture mode + * Check focus mode is still what we set. + * As of 1.26, we now remember the focus mode for photos. 
+ */ + public void testFocusSaveMacroSwitchPhoto() { + Log.d(TAG, "testFocusSaveMacroSwitchPhoto"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + String non_default_focus_mode = getNonDefaultFocus(); + switchToFocusValue(non_default_focus_mode); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + String focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "focus_value after switching to video mode: "+ focus_value); + assertEquals("focus_mode_continuous_video", focus_value); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + + focus_value = mPreview.getCameraController().getFocusValue(); + Log.d(TAG, "focus_value after switching to picture mode: " + focus_value); + assertEquals(focus_value, non_default_focus_mode); + } + + /* Start in photo mode with auto focus: + * - go to video mode + * - check in continuous focus mode + * - switch to auto focus mode + * - then pause and resume + * - then check still in video mode, still in auto focus mode + * - then repeat with restarting instead + * (Note the name is a bit misleading - it used to be that we reset to continuous mode, now we don't.) 
+ */ + public void testFocusSwitchVideoResetContinuous() { + Log.d(TAG, "testFocusSwitchVideoResetContinuous"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + switchToFocusValue("focus_mode_auto"); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + String focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals("focus_mode_continuous_video", focus_value); + + switchToFocusValue("focus_mode_auto"); + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals("focus_mode_auto", focus_value); + + this.pauseAndResume(); + assertTrue(mPreview.isVideo()); + + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals("focus_mode_auto", focus_value); + + // now with restart + + switchToFocusValue("focus_mode_auto"); + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals("focus_mode_auto", focus_value); + + restart(); + assertTrue(mPreview.isVideo()); + + focus_value = mPreview.getCameraController().getFocusValue(); + assertEquals("focus_mode_auto", focus_value); + } + + private void subTestISOButtonAvailability() { + if( mPreview.isVideoRecording() ) { + // shouldn't show ISO buttons when video recording + subTestPopupButtonAvailability("TEST_ISO", "auto", false); + subTestPopupButtonAvailability("TEST_ISO", "100", false); + subTestPopupButtonAvailability("TEST_ISO", "200", false); + subTestPopupButtonAvailability("TEST_ISO", "400", false); + subTestPopupButtonAvailability("TEST_ISO", "800", false); + subTestPopupButtonAvailability("TEST_ISO", "1600", false); + } + else if( mPreview.supportsISORange() ) { + subTestPopupButtonAvailability("TEST_ISO", "auto", true); + int [] test_isos = {0, 50, 100, 200, 400, 800, 1600, 3200, 6400}; + int min_iso = mPreview.getMinimumISO(); + int max_iso = mPreview.getMaximumISO(); + for(int test_iso : test_isos) { + 
subTestPopupButtonAvailability("TEST_ISO", String.valueOf(test_iso), test_iso >= min_iso && test_iso <= max_iso); + } + subTestPopupButtonAvailability("TEST_ISO", String.valueOf(min_iso - 1), false); + subTestPopupButtonAvailability("TEST_ISO", String.valueOf(min_iso), true); + subTestPopupButtonAvailability("TEST_ISO", String.valueOf(max_iso), true); + subTestPopupButtonAvailability("TEST_ISO", String.valueOf(max_iso + 1), false); + } + else { + List supported_iso_values = mPreview.getSupportedISOs(); + subTestPopupButtonAvailability("TEST_ISO", "auto", supported_iso_values); + subTestPopupButtonAvailability("TEST_ISO", "100", supported_iso_values); + subTestPopupButtonAvailability("TEST_ISO", "200", supported_iso_values); + subTestPopupButtonAvailability("TEST_ISO", "400", supported_iso_values); + subTestPopupButtonAvailability("TEST_ISO", "800", supported_iso_values); + subTestPopupButtonAvailability("TEST_ISO", "1600", supported_iso_values); + } + } + + /* Tests enabling and disabling the preview bitmap. 
+ */ + public void testPreviewBitmap() throws InterruptedException { + Log.d(TAG, "testPreviewBitmap"); + + if( !mActivity.supportsPreviewBitmaps() ) { + Log.d(TAG, "preview bitmaps not supported"); + return; + } + + setToDefault(); + Thread.sleep(1000); + + long [] delays = {20, 50, 100, 1000}; + int [] n_iters = {50, 30, 30, 3}; + if( TestUtils.isEmulator() ) { + // this takes much longer to run on emulator, due to taking ~15s for the first waitForIdleSync() in the loop + n_iters = new int[]{1, 1, 1, 1}; + } + assertEquals(delays.length, n_iters.length); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + + assertFalse(mPreview.isPreviewBitmapEnabled()); + assertFalse(mPreview.refreshPreviewBitmapTaskIsRunning()); + + for(int i=0;i>> i = " + i + " delay: " + delays[i]); + for(int j=0;j>> j = " + j + " / " + n_iters[i]); + SharedPreferences.Editor editor = settings.edit(); + + editor.putString(PreferenceKeys.HistogramPreferenceKey, "preference_histogram_rgb"); + editor.apply(); + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.getApplicationInterface().getDrawPreview().updateSettings(); + } + }); + Log.d(TAG, " wait for idle sync"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, " about to sleep for: " + delays[i]); + Thread.sleep(delays[i]); + Log.d(TAG, " done sleep"); + if (delays[i] >= 1000) { + assertTrue(mPreview.isPreviewBitmapEnabled()); + } + + editor.putString(PreferenceKeys.HistogramPreferenceKey, "preference_histogram_off"); + editor.apply(); + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.getApplicationInterface().getDrawPreview().updateSettings(); + } + }); + Log.d(TAG, " wait for idle sync again"); + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(delays[i]); + Log.d(TAG, " done wait for idle sync"); + if (delays[i] >= 1000) { + assertFalse(mPreview.isPreviewBitmapEnabled()); + 
assertFalse(mPreview.refreshPreviewBitmapTaskIsRunning()); + } + } + } + + Thread.sleep(500); + assertFalse(mPreview.isPreviewBitmapEnabled()); + assertFalse(mPreview.refreshPreviewBitmapTaskIsRunning()); + } + + public void testTakePhotoExposureCompensation() throws InterruptedException { + Log.d(TAG, "testTakePhotoExposureCompensation"); + setToDefault(); + + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + SeekBar seekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_seekbar); + assertEquals(exposureButton.getVisibility(), (mPreview.supportsExposures() ? View.VISIBLE : View.GONE)); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + if( !mPreview.supportsExposures() ) { + return; + } + + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + + subTestISOButtonAvailability(); + + //assertEquals(mPreview.getMaximumExposure() - mPreview.getMinimumExposure(), seekBar.getMax()); + //assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); + // need to allow for repeated zero values in seekbar: + assertTrue(seekBar.getMax() > mPreview.getMaximumExposure() - mPreview.getMinimumExposure()); + assertEquals(mActivity.getExposureSeekbarProgressZero(), seekBar.getProgress()); + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + Log.d(TAG, "change exposure to 1"); + mActivity.changeExposure(1); + this.getInstrumentation().waitForIdleSync(); + assertEquals(1, mPreview.getCurrentExposure()); + //assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + 
Log.d(TAG, "set exposure to min"); + seekBar.setProgress(0); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "actual exposure is now " + mPreview.getCurrentExposure()); + Log.d(TAG, "expected exposure to be " + mPreview.getMinimumExposure()); + assertEquals(mPreview.getCurrentExposure(), mPreview.getMinimumExposure()); + //assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); + assertEquals(0, seekBar.getProgress()); + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + + // test volume keys + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VolumeKeysPreferenceKey, "volume_exposure"); + editor.apply(); + // volume up + while( mPreview.getCurrentExposure() < mPreview.getMaximumExposure() ) { + Log.d(TAG, "use volume key to increase exposure"); + int exposure = mPreview.getCurrentExposure(); + this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_UP); + assertEquals(exposure+1, mPreview.getCurrentExposure()); + } + // one more shouldn't change exposure + this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_UP); + assertEquals(mPreview.getMaximumExposure(), mPreview.getCurrentExposure()); + // volume down + while( mPreview.getCurrentExposure() > mPreview.getMinimumExposure() ) { + Log.d(TAG, "use volume key to decrease exposure"); + int exposure = mPreview.getCurrentExposure(); + this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_DOWN); + assertEquals(exposure-1, mPreview.getCurrentExposure()); + } + // one more shouldn't change exposure + this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_DOWN); + assertEquals(mPreview.getMinimumExposure(), mPreview.getCurrentExposure()); + + // test the exposure button clears and reopens without changing exposure level + 
clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(mPreview.getCurrentExposure(), mPreview.getMinimumExposure()); + //assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); + assertEquals(0, seekBar.getProgress()); + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + + // test touch to focus clears the exposure controls + int [] gui_location = new int[2]; + mPreview.getView().getLocationOnScreen(gui_location); + final float scale = mActivity.getResources().getDisplayMetrics().density; + final int large_step_dist_c = (int) (80 * scale + 0.5f); // convert dps to pixels + final int step_count_c = 10; + TouchUtils.drag(MainActivityTest.this, gui_location[0]+large_step_dist_c, gui_location[0], gui_location[1]+large_step_dist_c, gui_location[1], step_count_c); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(mPreview.getCurrentExposure(), mPreview.getMinimumExposure()); + //assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); + assertEquals(0, seekBar.getProgress()); + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + + Log.d(TAG, "set exposure to -1"); + seekBar.setProgress(-1 - mPreview.getMinimumExposure()); + this.getInstrumentation().waitForIdleSync(); + assertEquals(mPreview.getCurrentExposure(), -1); + assertEquals(mPreview.getCurrentExposure() - 
mPreview.getMinimumExposure(), seekBar.getProgress()); // fine as -1 is below the repeated zeroes + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + + // clear again so as to not interfere with take photo routine + TouchUtils.drag(MainActivityTest.this, gui_location[0]+large_step_dist_c, gui_location[0], gui_location[1]+large_step_dist_c, gui_location[1], step_count_c); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + // test that switching to video mode removes the ISO buttons + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + assertFalse(mPreview.isVideo()); + Log.d(TAG, "switch to video mode"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + subTestISOButtonAvailability(); // check that ISO buttons are shown + + assertFalse(mPreview.isVideoRecording()); + Log.d(TAG, "about to click take video"); + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertTrue(mPreview.isVideoRecording()); + + Thread.sleep(100); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + subTestISOButtonAvailability(); // check that ISO buttons are not shown + + Thread.sleep(3000); + + assertTrue(mPreview.isVideoRecording()); + Log.d(TAG, "about 
to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + this.getInstrumentation().waitForIdleSync(); + assertFalse(mPreview.isVideoRecording()); + + assertTrue(mPreview.isVideo()); + Log.d(TAG, "switch to photo mode"); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertFalse(mPreview.isVideo()); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + Log.d(TAG, "switch camera"); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + + assertEquals(exposureButton.getVisibility(), (mPreview.supportsExposures() ? View.VISIBLE : View.GONE)); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + if( mPreview.supportsExposures() ) { + assertEquals(mPreview.getCurrentExposure(), -1); + assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); // fine as -1 is below the repeated zeroes + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(mPreview.getCurrentExposure(), -1); + assertEquals(mPreview.getCurrentExposure() - mPreview.getMinimumExposure(), seekBar.getProgress()); // fine as -1 is below the repeated zeroes + assertEquals(mPreview.getCurrentExposure(), mActivity.getExposureSeekbarValue(seekBar.getProgress())); + } + } + } + + public void testTakePhotoManualISOExposure() throws InterruptedException { + Log.d(TAG, "testTakePhotoManualISOExposure"); + setToDefault(); + + if( !mPreview.usingCamera2API() ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + else if( 
!mPreview.supportsISORange() ) { + Log.d(TAG, "test requires manual iso range"); + return; + } + + switchToISO(100); + + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = mActivity.findViewById(net.sourceforge.opencamera.R.id.manual_exposure_container); + SeekBar isoSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.iso_seekbar); + SeekBar exposureTimeSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_time_seekbar); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(isoSeekBar.getVisibility(), View.VISIBLE); + assertEquals(exposureTimeSeekBar.getVisibility(), (mPreview.supportsExposureTime() ? View.VISIBLE : View.GONE)); + subTestISOButtonAvailability(); + + /*final int manual_n = 1000; // should match MainActivity.manual_n + assertTrue( isoSeekBar.getMax() == manual_n ); + if( mPreview.supportsExposureTime() ) + assertTrue( exposureTimeSeekBar.getMax() == manual_n );*/ + + Log.d(TAG, "change ISO to min"); + isoSeekBar.setProgress(0); + this.getInstrumentation().waitForIdleSync(); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMinimumISO()); + + if( mPreview.supportsExposureTime() ) { + Log.d(TAG, "change exposure time to min"); + exposureTimeSeekBar.setProgress(0); + this.getInstrumentation().waitForIdleSync(); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMinimumISO()); + assertEquals(mPreview.getCameraController().getExposureTime(), mPreview.getMinimumExposureTime()); + } + + Log.d(TAG, "camera_controller ISO: " + mPreview.getCameraController().getISO()); + Log.d(TAG, "change ISO to max"); + isoSeekBar.setProgress(isoSeekBar.getMax()); + 
this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "camera_controller ISO: " + mPreview.getCameraController().getISO()); + Log.d(TAG, "reported max ISO: " + mPreview.getMaximumISO()); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMaximumISO()); + + // n.b., currently don't test this on devices with long shutter times (e.g., OnePlus 3T) + if( mPreview.supportsExposureTime() && mPreview.getMaximumExposureTime() < 1000000000 ) { + Log.d(TAG, "change exposure time to max"); + exposureTimeSeekBar.setProgress(exposureTimeSeekBar.getMax()); + this.getInstrumentation().waitForIdleSync(); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMaximumISO()); + assertEquals(mPreview.getCameraController().getExposureTime(), mPreview.getMaximumExposureTime()); + } + else { + Log.d(TAG, "change exposure time to middle"); + //mActivity.setProgressSeekbarExponential(exposureTimeSeekBar, mPreview.getMinimumExposureTime(), mPreview.getMaximumExposureTime(), 1000000000); + exposureTimeSeekBar.setProgress(exposureTimeSeekBar.getMax()/2); + this.getInstrumentation().waitForIdleSync(); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMaximumISO()); + assertTrue( mPreview.getCameraController().getExposureTime() != mPreview.getMaximumExposureTime() ); + } + long saved_exposure_time = mPreview.getCameraController().getExposureTime(); + + // test the exposure button clears and reopens without changing exposure level + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(isoSeekBar.getVisibility(), View.VISIBLE); + assertEquals(exposureTimeSeekBar.getVisibility(), (mPreview.supportsExposureTime() ? 
View.VISIBLE : View.GONE)); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMaximumISO()); + if( mPreview.supportsExposureTime() ) + assertEquals(mPreview.getCameraController().getExposureTime(), saved_exposure_time); + + // test touch to focus clears the exposure controls + int [] gui_location = new int[2]; + mPreview.getView().getLocationOnScreen(gui_location); + final int step_dist_c = 2; + final float scale = mActivity.getResources().getDisplayMetrics().density; + final int offset_dist_c = (int) (80 * scale + 0.5f); // convert dps to pixels + final int step_count_c = 10; + TouchUtils.drag(MainActivityTest.this, gui_location[0] + offset_dist_c + step_dist_c, gui_location[0] + offset_dist_c, gui_location[1] + offset_dist_c + step_dist_c, gui_location[1] + offset_dist_c, step_count_c); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(isoSeekBar.getVisibility(), View.VISIBLE); + assertEquals(exposureTimeSeekBar.getVisibility(), (mPreview.supportsExposureTime() ? 
View.VISIBLE : View.GONE)); + assertEquals(mPreview.getCameraController().getISO(), mPreview.getMaximumISO()); + if( mPreview.supportsExposureTime() ) + assertEquals(mPreview.getCameraController().getExposureTime(), saved_exposure_time); + + // clear again so as to not interfere with take photo routine + TouchUtils.drag(MainActivityTest.this, gui_location[0] + offset_dist_c + step_dist_c, gui_location[0] + offset_dist_c, gui_location[1] + offset_dist_c + step_dist_c, gui_location[1] + offset_dist_c, step_count_c); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + Log.d(TAG, "switch camera"); + int old_max = mPreview.getMaximumISO(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + // we use same ISO for all cameras, but if new camera has lower max, it should automatically reduce + assertEquals(Math.min(old_max, mPreview.getMaximumISO()), mPreview.getCameraController().getISO()); + if( mPreview.supportsExposureTime() ) { + Log.d(TAG, "exposure time: " + mPreview.getCameraController().getExposureTime()); + Log.d(TAG, "min exposure time: " + mPreview.getMinimumExposureTime()); + Log.d(TAG, "max exposure time: " + mPreview.getMaximumExposureTime()); + if( saved_exposure_time < mPreview.getMinimumExposureTime() ) + saved_exposure_time = mPreview.getMinimumExposureTime(); + if( saved_exposure_time > mPreview.getMaximumExposureTime() ) + saved_exposure_time = mPreview.getMaximumExposureTime(); + assertEquals(mPreview.getCameraController().getExposureTime(), saved_exposure_time); + } + + clickView(exposureButton); + 
assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(isoSeekBar.getVisibility(), View.VISIBLE); + assertEquals(exposureTimeSeekBar.getVisibility(), (mPreview.supportsExposureTime() ? View.VISIBLE : View.GONE)); + assertEquals(Math.min(old_max, mPreview.getMaximumISO()), mPreview.getCameraController().getISO()); + if( mPreview.supportsExposureTime() ) + assertEquals(mPreview.getCameraController().getExposureTime(), saved_exposure_time); + } + } + + public void testTakePhotoManualWB() throws InterruptedException { + Log.d(TAG, "testTakePhotoManualWB"); + setToDefault(); + + if( !mPreview.usingCamera2API() ) { + return; + } + if( !mPreview.supportsWhiteBalanceTemperature() ) { + return; + } + + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureContainer = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_container); + View manualWBContainer = mActivity.findViewById(net.sourceforge.opencamera.R.id.manual_white_balance_container); + // check manual exposure icon is available + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + // check exposure UI starts off closed + assertEquals(exposureContainer.getVisibility(), View.GONE); + assertEquals(manualWBContainer.getVisibility(), View.GONE); + + assertEquals("auto", mPreview.getCameraController().getWhiteBalance()); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + int initial_temperature = mPreview.getCameraController().getWhiteBalanceTemperature(); + int initial_temperature_setting = settings.getInt(PreferenceKeys.WhiteBalanceTemperaturePreferenceKey, 5000); + assertEquals(initial_temperature, initial_temperature_setting); + SeekBar white_balance_seek_bar = mActivity.findViewById(net.sourceforge.opencamera.R.id.white_balance_seekbar); + int initial_white_balance_seek_bar_pos = white_balance_seek_bar.getProgress(); + + 
/*SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getWhiteBalancePreferenceKey(), "manual"); + editor.apply(); + updateForSettings();*/ + + // simulate having changed this through popup view: + openPopupMenu(); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.WhiteBalancePreferenceKey, "manual"); + editor.apply(); + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.getMainUI().getPopupView().switchToWhiteBalance("manual"); + } + }); + this.getInstrumentation().waitForIdleSync(); + + /*openPopupMenu(); + // first need to open the white balance sub-menu + View wbButton = mActivity.getUIButton("TEST_WHITE_BALANCE"); + assertTrue(wbButton != null); + ScrollView popupContainer = (ScrollView)mActivity.findViewById(net.sourceforge.opencamera.R.id.popup_container); + popupContainer.scrollTo(0, wbButton.getBottom()); + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(1000); + + clickView(wbButton); + Log.d(TAG, "clicked wb button"); + // check popup still opened + assertTrue( mActivity.popupIsOpen() ); + + RadioButton manualWBButton = (RadioButton)mActivity.getUIButton("TEST_WHITE_BALANCE_manual"); + assertTrue(manualWBButton != null); + assertTrue(!manualWBButton.isChecked()); + clickView(manualWBButton); + Log.d(TAG, "clicked manual wb button"); + // check popup still opened + assertTrue( mActivity.popupIsOpen() ); + // check now selected + assertTrue(manualWBButton.isChecked()); + */ + + // check we switched to manual mode + assertEquals("manual", mPreview.getCameraController().getWhiteBalance()); + + // check exposure UI automatically opened + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(manualWBContainer.getVisibility(), View.VISIBLE); + + // check that the wb temperature has 
been updated, both in preferences, and the camera controller + int new_temperature = mPreview.getCameraController().getWhiteBalanceTemperature(); + int new_temperature_setting = settings.getInt(PreferenceKeys.WhiteBalanceTemperaturePreferenceKey, 5000); + assertEquals(new_temperature, new_temperature_setting); + Log.d(TAG, "initial_temperature: " + initial_temperature); + Log.d(TAG, "new_temperature: " + new_temperature); + assertTrue(new_temperature != initial_temperature); + // check we moved the wb slider too + int new_white_balance_seek_bar_pos = white_balance_seek_bar.getProgress(); + Log.d(TAG, "initial_white_balance_seek_bar_pos: " + initial_white_balance_seek_bar_pos); + Log.d(TAG, "new_white_balance_seek_bar_pos: " + new_white_balance_seek_bar_pos); + assertTrue(new_white_balance_seek_bar_pos != initial_white_balance_seek_bar_pos); + + // close exposure UI + clickView(exposureButton); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.GONE); + assertEquals(manualWBContainer.getVisibility(), View.GONE); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + SeekBar seekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_seekbar); + SeekBar seekBarWB = mActivity.findViewById(net.sourceforge.opencamera.R.id.white_balance_seekbar); + + assertEquals(exposureButton.getVisibility(), (mPreview.supportsExposures() ? 
View.VISIBLE : View.GONE)); + assertEquals(exposureContainer.getVisibility(), View.GONE); + assertEquals(manualWBContainer.getVisibility(), View.GONE); + + if( !mPreview.supportsExposures() ) { + return; + } + + // reopen exposure UI + clickView(exposureButton); + subTestISOButtonAvailability(); + + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureContainer.getVisibility(), View.VISIBLE); + assertEquals(seekBar.getVisibility(), View.VISIBLE); + assertEquals(manualWBContainer.getVisibility(), View.VISIBLE); + assertEquals(seekBarWB.getVisibility(), View.VISIBLE); + } + + /** Tests that the audio control icon is visible or not as expect (guards against bug fixed in 1.30) + */ + public void testAudioControlIcon() { + Log.d(TAG, "testAudioControlIcon"); + + setToDefault(); + + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + assertEquals(audioControlButton.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "noise"); + editor.apply(); + updateForSettings(); + assertEquals(audioControlButton.getVisibility(), View.VISIBLE); + + restart(); + // reset due to restarting! 
+ settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + editor = settings.edit(); + audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + + assertEquals(audioControlButton.getVisibility(), View.VISIBLE); + + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "none"); + editor.apply(); + updateForSettings(); + Log.d(TAG, "visibility is now: " + audioControlButton.getVisibility()); + assertEquals(audioControlButton.getVisibility(), View.GONE); + + /*editor.putString(PreferenceKeys.AudioControlPreferenceKey, "voice"); + editor.apply(); + updateForSettings(); + assertEquals(audioControlButton.getVisibility(), View.VISIBLE);*/ + + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "none"); + editor.apply(); + updateForSettings(); + Log.d(TAG, "visibility is now: " + audioControlButton.getVisibility()); + assertEquals(audioControlButton.getVisibility(), View.GONE); + } + + /** Test for on-screen icon. Cycles through cameras and checks that the visibility of + * the icons matches whether available for that camera - currently tests for flash and RAW. + * For multi-camera devices, this tests the behaviour with + * PreferenceKeys.MultiCamButtonPreferenceKey devices, so the switch camera icon still cycles + * through all cameras. 
+ */ + public void testIconsAgainstCameras() { + Log.d(TAG, "testIconsAgainstCameras"); + setToDefault(); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.ShowCycleFlashPreferenceKey, true); + editor.putBoolean(PreferenceKeys.ShowCycleRawPreferenceKey, true); + if( mActivity.isMultiCamEnabled() ) { + editor.putBoolean(PreferenceKeys.MultiCamButtonPreferenceKey, false); + } + editor.apply(); + updateForSettings(); + + for(int i=0;i visited_camera_ids) throws InterruptedException { + if( mActivity.showSwitchMultiCamIcon() ) { + final int cameraId = mPreview.getCameraId(); + CameraController.Facing facing = mPreview.getCameraControllerManager().getFacing(cameraId); + + List logical_camera_ids = mActivity.getSameFacingLogicalCameras(cameraId); + Set physical_cameras = mPreview.getPhysicalCameras(); // physical cameras for logical cameraId + assertEquals(cameraId, (int)logical_camera_ids.get(0)); + + // test all logical cameras with same-facing + for(int id : logical_camera_ids) { + Log.d(TAG, "testing multi id: " + id); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.userSwitchToCamera(id, null); + } + }); + // need to wait for UI code to finish + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + Log.d(TAG, "multi cam button switched to " + new_cameraId); + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "visited_camera_ids was: " + visited_camera_ids); + assertEquals(id, new_cameraId); + if( id != cameraId ) { + assertFalse(visited_camera_ids.contains(new_cameraId)); + visited_camera_ids.add(new_cameraId); + } + + CameraController.Facing new_facing = mPreview.getCameraControllerManager().getFacing(new_cameraId); + assertEquals(facing, new_facing); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + if( 
physical_cameras != null ) { + // test all physical cameras for cameraId + for(String physical_id : physical_cameras) { + Log.d(TAG, "testing physical id: " + physical_id); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.userSwitchToCamera(cameraId, physical_id); + } + }); + // need to wait for UI code to finish + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + assertEquals(cameraId, new_cameraId); + + CameraController.Facing new_facing = mPreview.getCameraControllerManager().getFacing(new_cameraId); + assertEquals(facing, new_facing); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + } + + // old code for multi-cam button: + /*do { + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + clickView(switchMultiCameraButton); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + Log.d(TAG, "multi cam button switched to " + new_cameraId); + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "visited_camera_ids was: " + visited_camera_ids); + assertTrue(new_cameraId != cameraId); + assertFalse(visited_camera_ids.contains(new_cameraId)); + visited_camera_ids.add(new_cameraId); + + CameraController.Facing new_facing = mPreview.getCameraControllerManager().getFacing(new_cameraId); + assertEquals(facing, new_facing); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + while( mActivity.testGetNextMultiCameraId() != cameraId );*/ + + /*do { + int next_multi_cameraId = mActivity.testGetNextMultiCameraId(); + assertTrue(next_multi_cameraId != cameraId); + mActivity.runOnUiThread(new Runnable() { + public void run() { + mActivity.userSwitchToCamera(next_multi_cameraId, null); + } + }); + // need to wait for UI code to finish + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); + + int new_cameraId = 
mPreview.getCameraId(); + Log.d(TAG, "multi cam button switched to " + new_cameraId); + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "visited_camera_ids was: " + visited_camera_ids); + assertTrue(new_cameraId != cameraId); + assertTrue(new_cameraId == next_multi_cameraId); + assertFalse(visited_camera_ids.contains(new_cameraId)); + visited_camera_ids.add(new_cameraId); + + CameraController.Facing new_facing = mPreview.getCameraControllerManager().getFacing(new_cameraId); + assertEquals(facing, new_facing); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + while( mActivity.testGetNextMultiCameraId() != cameraId );*/ + } + } + + /** Tests taking a photo with multiple cameras. + * Also tests the content descriptions for switch camera button. + * And tests that we save the current camera when pausing and resuming. + * @param cycle_all_cameras If true, expect that the Switch Camera icon cycles through all + * cameras. + * @param test_multi_cam If true, also test cycling through cameras using the switch multi + * camera icon. If true, then cycle_all_cameras must be false. Should + * only be true on multi-camera devices. + */ + private void subTestTakePhotoMultiCameras(boolean cycle_all_cameras, boolean test_multi_cam) throws InterruptedException { + Log.d(TAG, "subTestTakePhotoMultiCameras"); + + int n_cameras = mPreview.getCameraControllerManager().getNumberOfCameras(); + if( n_cameras <= 1 ) { + return; + } + + if( test_multi_cam ) { + assertFalse(cycle_all_cameras); + } + + int orig_cameraId = mPreview.getCameraId(); + Set visited_camera_ids = new HashSet<>(); + visited_camera_ids.add(orig_cameraId); + + boolean done_front_test = false; + for(int i=0;i<(cycle_all_cameras ? 
n_cameras-1 : 1);i++) { + Log.d(TAG, "i: " + i); + int cameraId = mPreview.getCameraId(); + + CameraController.Facing facing = mPreview.getCameraControllerManager().getFacing(cameraId); + if( i == 0 ) { + assertEquals(CameraController.Facing.FACING_BACK, facing); + } + + if( test_multi_cam ) { + // first test cycling through the cameras with this facing + subTestCycleMultiCameras(visited_camera_ids); + } + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + CharSequence contentDescription = switchCameraButton.getContentDescription(); + clickView(switchCameraButton); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + assertTrue(new_cameraId != cameraId); + if( cycle_all_cameras ) { + // in this mode, we should just be iterating over the camera IDs + assertEquals((cameraId + 1) % n_cameras, new_cameraId); + } + assertFalse(visited_camera_ids.contains(new_cameraId)); + visited_camera_ids.add(new_cameraId); + + CameraController.Facing new_facing = mPreview.getCameraControllerManager().getFacing(new_cameraId); + CharSequence new_contentDescription = switchCameraButton.getContentDescription(); + if( n_cameras == 2 || !cycle_all_cameras ) { + assertEquals(facing==CameraController.Facing.FACING_BACK ? 
CameraController.Facing.FACING_FRONT : CameraController.Facing.FACING_BACK, new_facing); + } + + //int next_cameraId = (new_cameraId+1) % n_cameras; + int next_cameraId = mActivity.getNextCameraId(); + assertTrue(next_cameraId != new_cameraId); + if( cycle_all_cameras ) { + // in this mode, we should just be iterating over the camera IDs + assertEquals((new_cameraId + 1) % n_cameras, next_cameraId); + } + if( i==n_cameras-1 || !cycle_all_cameras ) { + // should have returned to the start + assertEquals(cameraId, next_cameraId); + } + CameraController.Facing next_facing = mPreview.getCameraControllerManager().getFacing(next_cameraId); + if( n_cameras == 2 || !cycle_all_cameras ) { + assertEquals(facing, next_facing); + } + + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "facing: " + facing); + Log.d(TAG, "contentDescription: " + contentDescription); + Log.d(TAG, "new_cameraId: " + new_cameraId); + Log.d(TAG, "new_facing: " + new_facing); + Log.d(TAG, "new_contentDescription: " + new_contentDescription); + Log.d(TAG, "next_cameraId: " + next_cameraId); + Log.d(TAG, "next_facing: " + next_facing); + + switch( new_facing ) { + case FACING_FRONT: + assertEquals(contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_front_camera)); + break; + case FACING_BACK: + assertEquals(contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_back_camera)); + break; + case FACING_EXTERNAL: + assertEquals(contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_external_camera)); + break; + default: + fail(); + } + switch( next_facing ) { + case FACING_FRONT: + assertEquals(new_contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_front_camera)); + break; + case FACING_BACK: + assertEquals(new_contentDescription, 
mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_back_camera)); + break; + case FACING_EXTERNAL: + assertEquals(new_contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_external_camera)); + break; + default: + fail(); + } + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + if( !done_front_test && new_facing == CameraController.Facing.FACING_FRONT ) { + done_front_test = true; + + // check still front camera after pause/resume + pauseAndResume(); + + int restart_cameraId = mPreview.getCameraId(); + CharSequence restart_contentDescription = switchCameraButton.getContentDescription(); + Log.d(TAG, "restart_contentDescription: " + restart_contentDescription); + assertEquals(restart_cameraId, new_cameraId); + switch( next_facing ) { + case FACING_FRONT: + assertEquals(restart_contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_front_camera)); + break; + case FACING_BACK: + assertEquals(restart_contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_back_camera)); + break; + case FACING_EXTERNAL: + assertEquals(restart_contentDescription, mActivity.getResources().getString(net.sourceforge.opencamera.R.string.switch_to_external_camera)); + break; + default: + fail(); + } + + // now test mirror mode + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.FrontCameraMirrorKey, "preference_front_camera_mirror_photo"); + editor.apply(); + updateForSettings(); + subTestTakePhoto(false, false, true, true, false, false, false, false); + // disable mirror mode again + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + editor = settings.edit(); + editor.putString(PreferenceKeys.FrontCameraMirrorKey, "preference_front_camera_mirror_no"); + 
editor.apply(); + updateForSettings(); + } + } + + if( test_multi_cam ) { + subTestCycleMultiCameras(visited_camera_ids); + } + + if( cycle_all_cameras || test_multi_cam ) { + // test we visited all cameras + assertEquals(n_cameras, visited_camera_ids.size()); + } + + // now check we really do return to the first camera + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + + int final_cameraId = mPreview.getCameraId(); + assertEquals(orig_cameraId, final_cameraId); + } + + /* Tests taking a photo with all non-default cameras. + * For multi-camera devices, this tests the behaviour with + * PreferenceKeys.MultiCamButtonPreferenceKey devices, so the switch camera icon still cycles + * through all cameras. + * Can be unstable on Android emulator if the time taken to focus means we've already switched + * back from auto to continuous focus (after touch to focus). + */ + public void testTakePhotoFrontCameraAll() throws InterruptedException { + Log.d(TAG, "testTakePhotoFrontCameraAll"); + setToDefault(); + + if( mActivity.isMultiCamEnabled() ) { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.MultiCamButtonPreferenceKey, false); + editor.apply(); + updateForSettings(); + } + + subTestTakePhotoMultiCameras(true, false); + } + + /* Tests taking a photo on multi-camera devices with front and back cameras. + */ + public void testTakePhotoFrontCamera() throws InterruptedException { + Log.d(TAG, "testTakePhotoFrontCamera"); + setToDefault(); + + if( !mActivity.isMultiCamEnabled() ) { + return; // no point running, as will be same as testTakePhotoFrontCameraAll + } + + subTestTakePhotoMultiCameras(false, false); + } + + /* Tests taking a photo on multi-camera devices, using both icons to switch between cameras. 
+ */ + public void testTakePhotoFrontCameraMulti() throws InterruptedException { + Log.d(TAG, "testTakePhotoFrontCameraMulti"); + setToDefault(); + + /*if( !mActivity.isMultiCamEnabled() ) { + return; + }*/ + + subTestTakePhotoMultiCameras(false, true); + } + + /** Tests taking a photo with front camera and screen flash. + * Note this test fails on Android emulator with old camera API, because on front camera when + * we switch from continuous to auto focus from touch to focus, we're still in continuous focus + * mode, despite both focus modes being supported for front camera - I confirmed that we do + * switch to auto focus, and haven't reset to continuous! Could be a threading/synchronization + * issue from trying to read the camera parameters from the test thread? + */ + public void testTakePhotoFrontCameraScreenFlash() throws InterruptedException { + Log.d(TAG, "testTakePhotoFrontCameraScreenFlash"); + setToDefault(); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) { + return; + } + + int cameraId = mPreview.getCameraId(); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + this.getInstrumentation().waitForIdleSync(); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "new_cameraId: " + new_cameraId); + + assertTrue(cameraId != new_cameraId); + + switchToFlashValue("flash_frontscreen_on"); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + /** Take a photo in auto focus mode. 
+ */ + public void testTakePhotoAutoFocus() throws InterruptedException { + Log.d(TAG, "testTakePhotoAutoFocus"); + setToDefault(); + switchToFocusValue("focus_mode_auto"); + subTestTakePhoto(false, false, true, true, false, false, false, false); + + assertEquals(0, mPreview.getCameraController().test_af_state_null_focus); + } + + /** Take a photo for Camera2 API when camera is released on UI thread whilst photo is taken on background thread (via + * autofocus callback). + */ + public void testTakePhotoAutoFocusReleaseDuringPhoto() throws InterruptedException { + Log.d(TAG, "testTakePhotoAutoFocusReleaseDuringPhoto"); + + if( !mPreview.usingCamera2API() ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + // if no focus, then the photo will be taken on the UI thread + Log.d(TAG, "test requires focus"); + return; + } + + switchToFocusValue("focus_mode_auto"); + + mPreview.getCameraController().test_release_during_photo = true; + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + assertFalse( mActivity.hasThumbnailAnimation() ); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Thread.sleep(5000); + } + + public void testTakePhotoLockedFocus() throws InterruptedException { + Log.d(TAG, "testTakePhotoLockedFocus"); + setToDefault(); + switchToFocusValue("focus_mode_locked"); + subTestTakePhoto(true, false, true, true, false, false, false, false); + } + + public void testTakePhotoManualFocus() throws InterruptedException { + Log.d(TAG, "testTakePhotoManualFocus"); + setToDefault(); + + if( !mPreview.supportsFocus() || !mPreview.getSupportedFocusValues().contains("focus_mode_manual2") ) { + return; + } + SeekBar seekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(seekBar.getVisibility(), View.GONE); + switchToFocusValue("focus_mode_manual2"); + 
assertEquals(seekBar.getVisibility(), View.VISIBLE); + seekBar.setProgress( (int)(0.25*(seekBar.getMax()-1)) ); + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + public void testTakePhotoLockedLandscape() throws InterruptedException { + Log.d(TAG, "testTakePhotoLockedLandscape"); + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.LockOrientationPreferenceKey, "landscape"); + editor.apply(); + updateForSettings(); + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + public void testTakePhotoLockedPortrait() throws InterruptedException { + Log.d(TAG, "testTakePhotoLockedPortrait"); + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.LockOrientationPreferenceKey, "portrait"); + editor.apply(); + updateForSettings(); + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + // If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework). 
+ public void testTakePhotoSAF() throws InterruptedException { + Log.d(TAG, "testTakePhotoSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + public void testTakePhotoAudioButton() throws InterruptedException { + Log.d(TAG, "testTakePhotoAudioButton"); + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "noise"); + editor.apply(); + updateForSettings(); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + } + + // If this fails with a SecurityException about needing INJECT_EVENTS permission, this seems to be due to the "help popup" that Android shows - can be fixed by clearing that manually, then rerunning the test. 
    /** Tests "immersive_mode_gui" immersive mode: after ~5s of inactivity the GUI controls
     * (everything except the take-photo button) should hide; taking a photo/video or touching
     * the preview should bring them back. Also checks the manual focus seekbar, the focus
     * bracketing seekbars, and the exposure button behave correctly when immersive mode kicks
     * in while they are shown.
     * NOTE(review): the Thread.sleep(6000) waits assume immersive mode triggers within 6s —
     * this is timing-sensitive by design.
     */
    public void testImmersiveMode() throws InterruptedException {
        Log.d(TAG, "testImmersiveMode");

        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT ) {
            Log.d(TAG, "immersive mode requires Android Kitkat or better");
            return;
        }

        setToDefault();
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_gui");
        // enable audio control so we can also check its button hides/shows
        editor.putString(PreferenceKeys.AudioControlPreferenceKey, "noise");
        editor.apply();
        updateForSettings();

        boolean has_audio_control_button = true;
        boolean has_zoom = mPreview.supportsZoom();

        View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
        View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera);
        View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
        View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure);
        View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock);
        View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control);
        View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup);
        View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash);
        View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share);
        View zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar);
        View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
        View pauseVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.pause_video);
        View takePhotoVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
        SeekBar seekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar);
        SeekBar targetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar);

        // initial state: standard GUI visible
        // (NOTE: assertEquals arguments are (actual, expected) throughout this test, reversed
        // from the JUnit convention — kept as-is for consistency with the rest of the file)
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        // exposure button visibility is device-dependent, so save it to compare against later
        int exposureVisibility = exposureButton.getVisibility();
        int exposureLockVisibility = exposureLockButton.getVisibility();
        assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        // now wait for immersive mode to kick in
        Thread.sleep(6000);
        assertEquals(switchCameraButton.getVisibility(), View.GONE);
        assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
        assertEquals(switchVideoButton.getVisibility(), View.GONE);
        assertEquals(exposureButton.getVisibility(), View.GONE);
        assertEquals(exposureLockButton.getVisibility(), View.GONE);
        assertEquals(audioControlButton.getVisibility(), View.GONE);
        assertEquals(popupButton.getVisibility(), View.GONE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
        // in "immersive_mode_gui" the take photo button stays visible
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        subTestTakePhoto(false, true, true, true, false, false, false, false);

        // test now exited immersive mode
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        assertEquals(exposureButton.getVisibility(), exposureVisibility);
        assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
        assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        // wait for immersive mode to kick in again
        Thread.sleep(6000);
        assertEquals(switchCameraButton.getVisibility(), View.GONE);
        assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
        assertEquals(switchVideoButton.getVisibility(), View.GONE);
        assertEquals(exposureButton.getVisibility(), View.GONE);
        assertEquals(exposureLockButton.getVisibility(), View.GONE);
        assertEquals(audioControlButton.getVisibility(), View.GONE);
        assertEquals(popupButton.getVisibility(), View.GONE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        subTestTakePhotoPreviewPaused(true, false);

        // test now exited immersive mode
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        assertEquals(exposureButton.getVisibility(), exposureVisibility);
        assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
        assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        // need to switch video before going back to immersive mode
        if( !mPreview.isVideo() ) {
            clickView(switchVideoButton);
            waitUntilCameraOpened();
        }
        // test now exited immersive mode
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        assertEquals(exposureButton.getVisibility(), exposureVisibility);
        assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
        assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        // wait for immersive mode to kick in again
        Thread.sleep(6000);
        assertEquals(switchCameraButton.getVisibility(), View.GONE);
        assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
        assertEquals(switchVideoButton.getVisibility(), View.GONE);
        assertEquals(exposureButton.getVisibility(), View.GONE);
        assertEquals(exposureLockButton.getVisibility(), View.GONE);
        assertEquals(audioControlButton.getVisibility(), View.GONE);
        assertEquals(popupButton.getVisibility(), View.GONE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        subTestTakeVideo(false, false, false, true, null, 5000, false, 0);

        // test touch exits immersive mode
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        assertEquals(exposureButton.getVisibility(), exposureVisibility);
        assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
        assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
        assertEquals(seekBar.getVisibility(), View.GONE);
        assertEquals(targetSeekBar.getVisibility(), View.GONE);

        // switch back to photo mode
        if( mPreview.isVideo() ) {
            clickView(switchVideoButton);
            waitUntilCameraOpened();
        }

        if( mPreview.usingCamera2API() && mPreview.supportsFocus() && mPreview.getSupportedFocusValues().contains("focus_mode_manual2") ) {
            // now test manual focus seekbar disappears
            assertEquals(seekBar.getVisibility(), View.GONE);
            switchToFocusValue("focus_mode_manual2");
            assertEquals(seekBar.getVisibility(), View.VISIBLE);

            // wait for immersive mode to kick in again
            Thread.sleep(6000);
            assertEquals(switchCameraButton.getVisibility(), View.GONE);
            assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
            assertEquals(switchVideoButton.getVisibility(), View.GONE);
            assertEquals(exposureButton.getVisibility(), View.GONE);
            assertEquals(exposureLockButton.getVisibility(), View.GONE);
            assertEquals(audioControlButton.getVisibility(), View.GONE);
            assertEquals(popupButton.getVisibility(), View.GONE);
            assertEquals(trashButton.getVisibility(), View.GONE);
            assertEquals(shareButton.getVisibility(), View.GONE);
            assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
            assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
            assertEquals(pauseVideoButton.getVisibility(), View.GONE);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
            assertEquals(seekBar.getVisibility(), View.GONE);
            assertEquals(targetSeekBar.getVisibility(), View.GONE);

            // test touch exits immersive mode
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
            assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
            assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
            assertEquals(exposureButton.getVisibility(), exposureVisibility);
            assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
            assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
            assertEquals(popupButton.getVisibility(), View.VISIBLE);
            assertEquals(trashButton.getVisibility(), View.GONE);
            assertEquals(shareButton.getVisibility(), View.GONE);
            assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
            assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
            assertEquals(pauseVideoButton.getVisibility(), View.GONE);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
            // manual focus seekbar reappears after exiting immersive mode
            assertEquals(seekBar.getVisibility(), View.VISIBLE);
            assertEquals(targetSeekBar.getVisibility(), View.GONE);

            switchToFocusValue("focus_mode_continuous_picture");
        }

        if( mPreview.usingCamera2API() && mActivity.supportsFocusBracketing() ) {
            // now test focus bracketing seekbars disappear
            assertEquals(seekBar.getVisibility(), View.GONE);
            assertEquals(targetSeekBar.getVisibility(), View.GONE);

            settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            editor = settings.edit();
            editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing");
            editor.apply();
            updateForSettings();

            assertEquals(seekBar.getVisibility(), View.VISIBLE);
            assertEquals(targetSeekBar.getVisibility(), View.VISIBLE);

            // wait for immersive mode to kick in again
            Thread.sleep(6000);
            assertEquals(switchCameraButton.getVisibility(), View.GONE);
            assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
            assertEquals(switchVideoButton.getVisibility(), View.GONE);
            assertEquals(exposureButton.getVisibility(), View.GONE);
            assertEquals(exposureLockButton.getVisibility(), View.GONE);
            assertEquals(audioControlButton.getVisibility(), View.GONE);
            assertEquals(popupButton.getVisibility(), View.GONE);
            assertEquals(trashButton.getVisibility(), View.GONE);
            assertEquals(shareButton.getVisibility(), View.GONE);
            assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
            assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
            assertEquals(pauseVideoButton.getVisibility(), View.GONE);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
            assertEquals(seekBar.getVisibility(), View.GONE);
            assertEquals(targetSeekBar.getVisibility(), View.GONE);

            // test touch exits immersive mode
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
            assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
            assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
            assertEquals(exposureButton.getVisibility(), exposureVisibility);
            assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
            assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE));
            assertEquals(popupButton.getVisibility(), View.VISIBLE);
            assertEquals(trashButton.getVisibility(), View.GONE);
            assertEquals(shareButton.getVisibility(), View.GONE);
            assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
            assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
            assertEquals(pauseVideoButton.getVisibility(), View.GONE);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
            // both focus bracketing seekbars reappear after exiting immersive mode
            assertEquals(seekBar.getVisibility(), View.VISIBLE);
            assertEquals(targetSeekBar.getVisibility(), View.VISIBLE);

            settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            editor = settings.edit();
            editor.putString(PreferenceKeys.PhotoModePreferenceKey, "photo_mode_std");
            editor.apply();
            updateForSettings();
        }

        if( mPreview.usingCamera2API() && mPreview.supportsISORange() ) {
            // now test exposure button disappears when in manual ISO mode
            switchToISO(100);

            // wait for immersive mode to kick in again
            Thread.sleep(6000);
            assertEquals(switchCameraButton.getVisibility(), View.GONE);
            assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
            assertEquals(switchVideoButton.getVisibility(), View.GONE);
            assertEquals(exposureButton.getVisibility(), View.GONE);
            assertEquals(exposureLockButton.getVisibility(), View.GONE);
            assertEquals(audioControlButton.getVisibility(), View.GONE);
            assertEquals(popupButton.getVisibility(), View.GONE);
            assertEquals(trashButton.getVisibility(), View.GONE);
            assertEquals(shareButton.getVisibility(), View.GONE);
            assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
            assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
            assertEquals(pauseVideoButton.getVisibility(), View.GONE);
            assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
            assertEquals(seekBar.getVisibility(), View.GONE);
            assertEquals(targetSeekBar.getVisibility(), View.GONE);
        }
    }

    // See note under testImmersiveMode() if this fails with a SecurityException about needing INJECT_EVENTS permission.
    /** Tests "immersive_mode_everything": unlike "immersive_mode_gui" (see testImmersiveMode),
     * here even the take-photo button should hide once immersive mode kicks in, and a touch on
     * the preview should restore the full GUI.
     */
    public void testImmersiveModeEverything() throws InterruptedException {
        Log.d(TAG, "testImmersiveModeEverything");

        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT ) {
            Log.d(TAG, "immersive mode requires Android Kitkat or better");
            return;
        }

        setToDefault();
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_everything");
        editor.apply();
        updateForSettings();

        boolean has_zoom = mPreview.supportsZoom();

        View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
        View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera);
        View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
        View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure);
        View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock);
        View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup);
        View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash);
        View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share);
        View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
        View pauseVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.pause_video);
        View takePhotoVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording);
        View zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar);
        // initial state: standard GUI visible
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        // exposure button visibility is device-dependent, so save it to compare against later
        int exposureVisibility = exposureButton.getVisibility();
        int exposureLockVisibility = exposureLockButton.getVisibility();
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);

        // now wait for immersive mode to kick in
        Thread.sleep(6000);
        assertEquals(switchCameraButton.getVisibility(), View.GONE);
        assertEquals(switchMultiCameraButton.getVisibility(), View.GONE);
        assertEquals(switchVideoButton.getVisibility(), View.GONE);
        assertEquals(exposureButton.getVisibility(), View.GONE);
        assertEquals(exposureLockButton.getVisibility(), View.GONE);
        assertEquals(popupButton.getVisibility(), View.GONE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.GONE : View.INVISIBLE);
        // in "everything" mode even the take photo button hides
        assertEquals(takePhotoButton.getVisibility(), View.GONE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);

        // now touch to exit immersive mode
        TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        Thread.sleep(500);

        // test now exited immersive mode
        assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE));
        assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE));
        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);
        assertEquals(exposureButton.getVisibility(), exposureVisibility);
        assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility);
        assertEquals(popupButton.getVisibility(), View.VISIBLE);
        assertEquals(trashButton.getVisibility(), View.GONE);
        assertEquals(shareButton.getVisibility(), View.GONE);
        assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE);
        assertEquals(takePhotoButton.getVisibility(), View.VISIBLE);
        assertEquals(pauseVideoButton.getVisibility(), View.GONE);
        assertEquals(takePhotoVideoButton.getVisibility(), View.GONE);
    }

    /** Tests the use of the FLAG_LAYOUT_NO_LIMITS flag introduced in 1.48.
     * In 1.49 this was replaced with View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION.
     * In 1.54 this was replaced with WindowCompat.setDecorFitsSystemWindows().
     * In 1.54, on Android 15+ we now support always running in edge-to-edge mode (MainActivity.edge_to_edge_mode).
     */
    public void testLayoutNoLimits() throws InterruptedException {
        Log.d(TAG, "testLayoutNoLimits");

        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
            // we don't support FLAG_LAYOUT_NO_LIMITS
            return;
        }

        // force the preview's "want no limits" test hooks on, starting with the value off
        MainActivity.test_preview_want_no_limits = true;
        MainActivity.test_preview_want_no_limits_value = false;
        // need to restart for test_preview_want_no_limits static to take effect
        restart();

        setToDefault();

        boolean edge_to_edge_mode = mActivity.getEdgeToEdgeMode();

        Thread.sleep(1000);
        // baseline: no-limits flag off, not showing under navigation, no hide-navigation flag
        assertEquals(0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        assertFalse(mActivity.test_set_show_under_navigation);
        assertEquals(0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap);
        assertEquals(mActivity.getNavigationGapLandscape(), mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(mActivity.getNavigationGapReverseLandscape(), mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        int initial_navigation_gap = mActivity.getMainUI().test_navigation_gap;
        if( !edge_to_edge_mode ) {
            assertEquals(0, mActivity.getMainUI().test_navigation_gap);
        }
        int initial_navigation_gap_landscape = mActivity.getMainUI().test_navigation_gap_landscape;
        int initial_navigation_gap_reversed_landscape = mActivity.getMainUI().test_navigation_gap_reversed_landscape;
        if( edge_to_edge_mode ) {
            // exactly one navigation gap should be non-zero
            int count = 0;
            if( initial_navigation_gap > 0 )
                count++;
            if( initial_navigation_gap_landscape > 0 )
                count++;
            if( initial_navigation_gap_reversed_landscape > 0 )
                count++;
            assertEquals(1, count);
        }
        else {
            assertEquals(0, initial_navigation_gap_landscape);
            assertEquals(0, initial_navigation_gap_reversed_landscape);
        }

        // test changing resolution
        MainActivity.test_preview_want_no_limits_value = true;
        updateForSettings();
        Thread.sleep(1000);
        //final boolean supports_no_limits = mActivity.getNavigationGap() != 0;
        // NOTE(review): FLAG_LAYOUT_NO_LIMITS is deliberately expected to be unset on all paths
        // (the flag-based approach was superseded - see the javadoc above), hence the constant false
        final boolean supports_no_limits = false;
        // on Android 11+, WindowCompat.setDecorFitsSystemWindows() uses Window.setDecorFitsSystemWindows() instead of SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION, and
        // doesn't seem possible to read back that value, so instead on Android 11+ we rely on mActivity.test_set_show_under_navigation)
        final boolean supports_hide_navigation = Build.VERSION.SDK_INT < Build.VERSION_CODES.R && mActivity.getNavigationGap() != 0;
        Log.d(TAG, "supports_no_limits: " + supports_no_limits);
        Log.d(TAG, "supports_hide_navigation: " + supports_hide_navigation);

        assertEquals(supports_no_limits ? WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS : 0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        if( edge_to_edge_mode ) {
            assertFalse(mActivity.test_set_show_under_navigation);
        }
        else {
            assertTrue(mActivity.test_set_show_under_navigation);
        }
        assertEquals(supports_hide_navigation ? View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION : 0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap);
        assertEquals(mActivity.getNavigationGapLandscape(), mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(mActivity.getNavigationGapReverseLandscape(), mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        if( edge_to_edge_mode ) {
            assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
        }
        assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        MainActivity.test_preview_want_no_limits_value = false;
        updateForSettings();
        Thread.sleep(1000);
        assertEquals(0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        assertFalse(mActivity.test_set_show_under_navigation);
        assertEquals(0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        if( edge_to_edge_mode ) {
            assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
        }
        else {
            assertEquals(0, mActivity.getMainUI().test_navigation_gap);
        }
        assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);

        if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) {
            // test switching camera
            MainActivity.test_preview_want_no_limits_value = true;
            switchToCamera(1);
            Thread.sleep(1000);
            assertEquals(supports_no_limits ? WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS : 0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
            if( edge_to_edge_mode ) {
                assertFalse(mActivity.test_set_show_under_navigation);
            }
            else {
                assertTrue(mActivity.test_set_show_under_navigation);
            }
            assertEquals(supports_hide_navigation ? View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION : 0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
            assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap);
            assertEquals(mActivity.getNavigationGapLandscape(), mActivity.getMainUI().test_navigation_gap_landscape);
            assertEquals(mActivity.getNavigationGapReverseLandscape(), mActivity.getMainUI().test_navigation_gap_reversed_landscape);
            if( edge_to_edge_mode ) {
                assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
            }
            assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
            assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);
            MainActivity.test_preview_want_no_limits_value = false;
            switchToCamera(0);
            Thread.sleep(1000);
            assertEquals(0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
            assertFalse(mActivity.test_set_show_under_navigation);
            assertEquals(0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
            if( edge_to_edge_mode ) {
                assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
            }
            else {
                assertEquals(0, mActivity.getMainUI().test_navigation_gap);
            }
            assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
            assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        }

        // test switching to video and back
        View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
        MainActivity.test_preview_want_no_limits_value = true;
        clickView(switchVideoButton);
        waitUntilCameraOpened();
        assertTrue(mPreview.isVideo());
        Thread.sleep(1000);
        assertEquals(supports_no_limits ? WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS : 0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        if( edge_to_edge_mode ) {
            assertFalse(mActivity.test_set_show_under_navigation);
        }
        else {
            assertTrue(mActivity.test_set_show_under_navigation);
        }
        assertEquals(supports_hide_navigation ? View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION : 0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap);
        assertEquals(mActivity.getNavigationGapLandscape(), mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(mActivity.getNavigationGapReverseLandscape(), mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        if( edge_to_edge_mode ) {
            assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
        }
        assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        MainActivity.test_preview_want_no_limits_value = false;
        clickView(switchVideoButton);
        waitUntilCameraOpened();
        assertFalse(mPreview.isVideo());
        Thread.sleep(1000);
        assertEquals(0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        assertFalse(mActivity.test_set_show_under_navigation);
        assertEquals(0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        if( edge_to_edge_mode ) {
            assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
        }
        else {
            assertEquals(0, mActivity.getMainUI().test_navigation_gap);
        }
        assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);

        // test after restart
        MainActivity.test_preview_want_no_limits_value = true;
        restart();
        Thread.sleep(1000);
        assertEquals(supports_no_limits ? WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS : 0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
        if( edge_to_edge_mode ) {
            assertFalse(mActivity.test_set_show_under_navigation);
        }
        else {
            assertTrue(mActivity.test_set_show_under_navigation);
        }
        assertEquals(supports_hide_navigation ? View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION : 0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION);
        assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap);
        assertEquals(mActivity.getNavigationGapLandscape(), mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(mActivity.getNavigationGapReverseLandscape(), mActivity.getMainUI().test_navigation_gap_reversed_landscape);
        if( edge_to_edge_mode ) {
            assertEquals(initial_navigation_gap, mActivity.getMainUI().test_navigation_gap);
        }
        assertEquals(initial_navigation_gap_landscape, mActivity.getMainUI().test_navigation_gap_landscape);
        assertEquals(initial_navigation_gap_reversed_landscape, mActivity.getMainUI().test_navigation_gap_reversed_landscape);
    }

    /** Tests the use of the FLAG_LAYOUT_NO_LIMITS flag introduced in 1.48, with the mode set from startup.
     * In 1.49 this was replaced with View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION.
     * In 1.54 this was replaced with WindowCompat.setDecorFitsSystemWindows().
+ */ + public void testLayoutNoLimitsStartup() throws InterruptedException { + Log.d(TAG, "testLayoutNoLimitsStartup"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + // we don't support FLAG_LAYOUT_NO_LIMITS + return; + } + + MainActivity.test_preview_want_no_limits = true; + MainActivity.test_preview_want_no_limits_value = true; + // need to restart for test_preview_want_no_limits static to take effect + restart(); + + setToDefault(); + + boolean edge_to_edge_mode = mActivity.getEdgeToEdgeMode(); + + Thread.sleep(1000); + //boolean supports_no_limits = mActivity.getNavigationGap() != 0; + final boolean supports_no_limits = false; + // on Android 11+, WindowCompat.setDecorFitsSystemWindows() uses Window.setDecorFitsSystemWindows() instead of SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION, and + // doesn't seem possible to read back that value, so instead on Android 11+ we rely on mActivity.test_set_show_under_navigation) + final boolean supports_hide_navigation = Build.VERSION.SDK_INT < Build.VERSION_CODES.R && mActivity.getNavigationGap() != 0; + Log.d(TAG, "supports_no_limits: " + supports_no_limits); + Log.d(TAG, "supports_hide_navigation: " + supports_hide_navigation); + + Log.d(TAG, "check FLAG_LAYOUT_NO_LIMITS"); + Log.d(TAG, "test_navigation_gap: " + mActivity.getMainUI().test_navigation_gap); + assertEquals(supports_no_limits ? WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS : 0, mActivity.getWindow().getAttributes().flags & WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS); + if( edge_to_edge_mode ) { + assertFalse(mActivity.test_set_show_under_navigation); + } + else { + assertTrue(mActivity.test_set_show_under_navigation); + } + assertEquals(supports_hide_navigation ? 
View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION : 0, mActivity.getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION); + assertEquals(mActivity.getNavigationGap(), mActivity.getMainUI().test_navigation_gap); + } + + private void subTestTakePhotoPreviewPaused(boolean immersive_mode, boolean is_raw) throws InterruptedException { + mPreview.count_cameraTakePicture = 0; + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, true); + editor.apply(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + Log.d(TAG, "check if preview is started"); + Thread.sleep(500); // needed for Pixel 6 Pro with Camera 2 API + assertTrue(mPreview.isPreviewStarted()); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View 
trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + assertEquals(switchCameraButton.getVisibility(), (immersive_mode ? View.GONE : (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE))); + assertEquals(switchMultiCameraButton.getVisibility(), (immersive_mode ? View.GONE : (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE))); + assertEquals(switchVideoButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + // store status to compare with later + int exposureVisibility = exposureButton.getVisibility(); + int exposureLockVisibility = exposureLockButton.getVisibility(); + assertEquals(audioControlButton.getVisibility(), ((has_audio_control_button && !immersive_mode) ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), (immersive_mode ? View.GONE : View.VISIBLE)); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + // don't need to wait until image queue empty, as Open Camera shouldn't use background thread for preview pause option + + Bitmap thumbnail = mActivity.gallery_bitmap; + assertNotNull(thumbnail); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + int exp_n_new_files = is_raw ? 
2 : 1; + Log.d(TAG, "exp_n_new_files: " + exp_n_new_files); + assertEquals(n_new_files, exp_n_new_files); + + // now preview should be paused + assertFalse(mPreview.isPreviewStarted()); // check preview paused + assertEquals(switchCameraButton.getVisibility(), View.GONE); + assertEquals(switchMultiCameraButton.getVisibility(), View.GONE); + assertEquals(switchVideoButton.getVisibility(), View.GONE); + assertEquals(exposureButton.getVisibility(), View.GONE); + assertEquals(exposureLockButton.getVisibility(), View.GONE); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), View.GONE); + assertEquals(trashButton.getVisibility(), View.VISIBLE); + assertEquals(shareButton.getVisibility(), View.VISIBLE); + + Thread.sleep(1000); // needed for Galaxy S10e + Log.d(TAG, "about to click preview"); + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + Log.d(TAG, "done click preview"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync 3"); + + // check photo not deleted + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + Log.d(TAG, "exp_n_new_files: " + exp_n_new_files); + assertEquals(n_new_files, exp_n_new_files); + + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? 
View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + //assertTrue(flashButton.getVisibility() == flashVisibility); + //assertTrue(focusButton.getVisibility() == focusVisibility); + if( !immersive_mode ) { + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + } + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + // check still same icon even after a delay + /* the gallery thumbnail must remain the exact same Bitmap instance after resuming the preview — no background re-save should replace it */ + Log.d(TAG, "thumbnail:" + thumbnail); + Log.d(TAG, "mActivity.gallery_bitmap: " + mActivity.gallery_bitmap); + assertSame(mActivity.gallery_bitmap, thumbnail); + Thread.sleep(1000); + Log.d(TAG, "thumbnail:" + thumbnail); + Log.d(TAG, "mActivity.gallery_bitmap: " + mActivity.gallery_bitmap); + assertSame(mActivity.gallery_bitmap, thumbnail); + + mActivity.waitUntilImageQueueEmpty(); + } + + /* Takes a photo with the pause-preview option enabled, default settings otherwise (no immersive mode, no RAW). */ + public void testTakePhotoPreviewPaused() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPaused"); + setToDefault(); + subTestTakePhotoPreviewPaused(false, false); + } + + /* As testTakePhotoPreviewPaused(), but first enables the "noise" audio-control trigger so the audio-control button is visible during the test. */ + public void testTakePhotoPreviewPausedAudioButton() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedAudioButton"); + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "noise"); + editor.apply(); + updateForSettings(); + + subTestTakePhotoPreviewPaused(false, false); + } + + // If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework).
+ public void testTakePhotoPreviewPausedSAF() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + + subTestTakePhotoPreviewPaused(false, false); + } + + /** Tests pause preview option. + * @param share If true, share the image; else, trash it. A test with share==true should be the + * last test if run in a suite, as sharing the image may sometimes cause later + * tests to hang. + */ + private void subTestTakePhotoPreviewPausedShareTrash(boolean is_raw, boolean share) throws InterruptedException { + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, true); + editor.apply(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + Thread.sleep(500); // needed for Pixel 6 Pro with Camera 2 API + assertTrue(mPreview.isPreviewStarted()); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View 
switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + // flash and focus etc default visibility tested in another test + // but store status to compare with later + //int flashVisibility = flashButton.getVisibility(); + //int focusVisibility = focusButton.getVisibility(); + int exposureVisibility = exposureButton.getVisibility(); + int exposureLockVisibility = exposureLockButton.getVisibility(); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? 
View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + Log.d(TAG, "count_cameraTakePicture: " + mPreview.count_cameraTakePicture); + assertEquals(1, mPreview.count_cameraTakePicture); + + // don't need to wait until image queue empty, as Open Camera shouldn't use background thread for preview pause option + + Bitmap thumbnail = mActivity.gallery_bitmap; + assertNotNull(thumbnail); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + int exp_n_new_files = is_raw ? 2 : 1; + Log.d(TAG, "exp_n_new_files: " + exp_n_new_files); + assertEquals(n_new_files, exp_n_new_files); + + // now preview should be paused + assertFalse(mPreview.isPreviewStarted()); // check preview restarted + assertEquals(switchCameraButton.getVisibility(), View.GONE); + assertEquals(switchMultiCameraButton.getVisibility(), View.GONE); + assertEquals(switchVideoButton.getVisibility(), View.GONE); + //assertTrue(flashButton.getVisibility() == View.GONE); + //assertTrue(focusButton.getVisibility() == View.GONE); + assertEquals(exposureButton.getVisibility(), View.GONE); + assertEquals(exposureLockButton.getVisibility(), View.GONE); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), View.GONE); + assertEquals(trashButton.getVisibility(), View.VISIBLE); + assertEquals(shareButton.getVisibility(), View.VISIBLE); + + if( share ) { + Log.d(TAG, "about to click share"); + clickView(shareButton); + Log.d(TAG, "done click share"); + + // check photo(s) not deleted + 
n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(n_new_files, exp_n_new_files); + } + else { + Log.d(TAG, "about to click trash"); + clickView(trashButton); + Log.d(TAG, "done click trash"); + + // check photo(s) deleted + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(0, n_new_files); + + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + //assertTrue(flashButton.getVisibility() == flashVisibility); + //assertTrue(focusButton.getVisibility() == focusVisibility); + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + // icon may be null, or have been set to another image - only changed after a delay + Thread.sleep(2000); + Log.d(TAG, "gallery_bitmap: " + mActivity.gallery_bitmap); + Log.d(TAG, "thumbnail: " + thumbnail); + assertNotSame(mActivity.gallery_bitmap, thumbnail); + } + mActivity.waitUntilImageQueueEmpty(); + } + + public void testTakePhotoPreviewPausedTrash() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedTrash"); + setToDefault(); + subTestTakePhotoPreviewPausedShareTrash(false, false); + } + + /** Equivalent of testTakePhotoPreviewPausedTrash(), but for Storage Access Framework. 
+ * If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework). + */ + public void testTakePhotoPreviewPausedTrashSAF() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedTrashSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + /* hard-coded SAF tree URI for DCIM/OpenCamera on primary storage; the tester must have granted access to this folder manually beforehand */ + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + + subTestTakePhotoPreviewPausedShareTrash(false, false); + } + + /** Like testTakePhotoPreviewPausedTrash() but taking 2 photos, only deleting the most recent - make + * sure we don't delete both images! + */ + public void testTakePhotoPreviewPausedTrash2() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedTrash2"); + setToDefault(); + + /* first photo: taken and kept (preview resumed by tapping, photo not trashed) */ + subTestTakePhotoPreviewPaused(false, false); + + mPreview.count_cameraTakePicture = 0; // need to reset + + /* second photo: trashed — the share/trash helper asserts only this one is deleted */ + subTestTakePhotoPreviewPausedShareTrash(false, false); + } + + /** Equivalent of testTakePhotoPreviewPausedTrash(), but with Raw enabled.
+ */ + public void testTakePhotoPreviewPausedTrashRaw() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedTrashRaw"); + setToDefault(); + + if( !mPreview.supportsRaw() ) { + return; + } + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_yes"); + editor.apply(); + updateForSettings(); + + subTestTakePhotoPreviewPausedShareTrash(true, false); + } + + /** Take a photo with RAW that we keep, then take a photo without RAW that we delete, and ensure we + * don't delete the previous RAW image! + */ + public void testTakePhotoPreviewPausedTrashRaw2() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedTrashRaw2"); + setToDefault(); + + if( !mPreview.supportsRaw() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_yes"); + editor.apply(); + updateForSettings(); + + subTestTakePhotoPreviewPaused(false, true); + + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_no"); + editor.apply(); + updateForSettings(); + mPreview.count_cameraTakePicture = 0; // need to reset + + subTestTakePhotoPreviewPausedShareTrash(false, false); + } + + /** Tests sharing an image. If run in a suite, this test should be last, as sharing the image + * may sometimes cause later tests to hang. + */ + public void testTakePhotoPreviewPausedShare() throws InterruptedException { + Log.d(TAG, "testTakePhotoPreviewPausedShare"); + setToDefault(); + subTestTakePhotoPreviewPausedShareTrash(false, true); + } + + /* Tests that we don't do an extra autofocus when taking a photo, if recently touch-focused. 
+ */ + public void testTakePhotoQuickFocus() throws InterruptedException { + Log.d(TAG, "testTakePhotoQuickFocus"); + setToDefault(); + + assertTrue(mPreview.isPreviewStarted()); + + // touch to auto-focus with focus area + // autofocus shouldn't be immediately, but after a delay + // and Galaxy S10e needs a longer delay for some reason, for the subsequent touch of the preview view to register + Thread.sleep(2000); + int saved_count = mPreview.count_cameraAutoFocus; + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + + if( mPreview.supportsFocus() ) { + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + } + else { + assertEquals(mPreview.count_cameraAutoFocus, saved_count); + } + + // wait 3s for auto-focus to complete + Thread.sleep(3000); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + // taking photo shouldn't have done an auto-focus, and still have focus areas + Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + if( mPreview.supportsFocus() ) { + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + 
assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + } + else { + assertEquals(mPreview.count_cameraAutoFocus, saved_count); + } + + mActivity.waitUntilImageQueueEmpty(); + } + + private void takePhotoRepeatFocus(boolean locked) throws InterruptedException { + Log.d(TAG, "takePhotoRepeatFocus"); + setToDefault(); + + if( !mPreview.supportsFocus() ) { + Log.d(TAG, "test requires focus"); + return; + } + + if( locked ) { + switchToFocusValue("focus_mode_locked"); + } + else { + switchToFocusValue("focus_mode_auto"); + } + + assertTrue(mPreview.isPreviewStarted()); + + // touch to auto-focus with focus area + // autofocus shouldn't be immediately, but after a delay + // and Galaxy S10e needs a longer delay for some reason, for the subsequent touch of the preview view to register + Thread.sleep(2000); + int saved_count = mPreview.count_cameraAutoFocus; + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + Log.d(TAG, "1 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals(mPreview.count_cameraAutoFocus, saved_count + 1); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + + // wait 3s for auto-focus to complete, and 5s to require additional auto-focus when taking a photo + // need a bit longer on Galaxy S10e + Thread.sleep(9000); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Log.d(TAG, "wait until finished taking photo"); + 
waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + // taking photo should have done an auto-focus iff in automatic mode, and still have focus areas + /* saved_count+1 = the one touch-to-focus autofocus; in auto (non-locked) mode taking the photo triggers one more because the touch focus is considered stale after the long sleep above */ + Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + assertEquals((locked ? saved_count + 1 : saved_count + 2), mPreview.count_cameraAutoFocus); + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + + /* make sure background image saving has completed before the test ends */ + mActivity.waitUntilImageQueueEmpty(); + } + + /* Tests that we do an extra autofocus when taking a photo, if too long since last touch-focused. + */ + public void testTakePhotoRepeatFocus() throws InterruptedException { + Log.d(TAG, "testTakePhotoRepeatFocus"); + takePhotoRepeatFocus(false); + } + + /* Tests that we don't do an extra autofocus when taking a photo, if too long since last touch-focused, when in locked focus mode.
+ */ + public void testTakePhotoRepeatFocusLocked() throws InterruptedException { + Log.d(TAG, "testTakePhotoRepeatFocusLocked"); + takePhotoRepeatFocus(true); + } + + /* Tests taking a photo with animation and shutter disabled, and not setting focus areas + */ + public void testTakePhotoAlt() throws InterruptedException { + Log.d(TAG, "testTakePhotoAlt"); + setToDefault(); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.ThumbnailAnimationPreferenceKey, false); + editor.putBoolean(PreferenceKeys.ShutterSoundPreferenceKey, false); + editor.apply(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + assertTrue(mPreview.isPreviewStarted()); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = 
mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + // flash and focus etc default visibility tested in another test + // but store status to compare with later + //int flashVisibility = flashButton.getVisibility(); + //int focusVisibility = focusButton.getVisibility(); + int exposureVisibility = exposureButton.getVisibility(); + int exposureLockVisibility = exposureLockButton.getVisibility(); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + // autofocus shouldn't be immediately, but after a delay + Thread.sleep(2000); + int saved_count = mPreview.count_cameraAutoFocus; + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + mActivity.waitUntilImageQueueEmpty(); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(1, n_new_files); + + Log.d(TAG, "2 count_cameraAutoFocus: " + mPreview.count_cameraAutoFocus); + Log.d(TAG, "saved_count: " + saved_count); + /* + // taking photo should have done an auto-focus, and no focus areas [focus auto] + 
assertTrue(mPreview.count_cameraAutoFocus == saved_count+1); + */ + // taking photo shouldn't have done an auto-focus, and no focus areas [focus continuous] + assertEquals(mPreview.count_cameraAutoFocus, saved_count); + assertFalse(mPreview.hasFocusArea()); + assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + + // trash/share only shown when preview is paused after taking a photo + + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + //assertTrue(flashButton.getVisibility() == flashVisibility); + //assertTrue(focusButton.getVisibility() == focusVisibility); + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? 
View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + } + + private void takePhotoLoop(int count) { + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + int start_count = mPreview.count_cameraTakePicture; + for(int i=0;i 1 ) { + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + while( switchCameraButton.getVisibility() != View.VISIBLE ) { + // wait until photo is taken and button is visible again + } + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId != new_cameraId); + takePhotoLoop(n_photos_c); + while( switchCameraButton.getVisibility() != View.VISIBLE ) { + // wait until photo is taken and button is visible again + } + + // return to back camera + switchToCamera(cameraId); + } + } + + /* Tests taking photos repeatedly with auto-stabilise enabled. + * Tests with front and back. + */ + public void testTakePhotoAutoLevel() { + Log.d(TAG, "testTakePhotoAutoLevel"); + + subTestTakePhotoAutoLevel(); + } + + /* As testTakePhotoAutoLevel(), but with test_low_memory set. 
+ */ + public void testTakePhotoAutoLevelLowMemory() { + Log.d(TAG, "testTakePhotoAutoLevelLowMemory"); + + mActivity.test_low_memory = true; + + subTestTakePhotoAutoLevel(); + } + + private void takePhotoLoopAngles(int [] angles) { + // count initial files in folder + mActivity.test_have_angle = true; + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + int start_count = mPreview.count_cameraTakePicture; + for(int i=0;i 1 ) { + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + while( switchCameraButton.getVisibility() != View.VISIBLE ) { + // wait until photo is taken and button is visible again + } + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId != new_cameraId); + takePhotoLoopAngles(angles); + while( switchCameraButton.getVisibility() != View.VISIBLE ) { + // wait until photo is taken and button is visible again + } + + // return to back camera + switchToCamera(cameraId); + } + } + + /* Tests taking photos repeatedly with auto-stabilise enabled, at various angles. + * Tests with front and back. + */ + public void testTakePhotoAutoLevelAngles() throws InterruptedException { + Log.d(TAG, "testTakePhotoAutoLevel"); + + subTestTakePhotoAutoLevelAngles(); + } + + /* As testTakePhotoAutoLevelAngles(), but with test_low_memory set. 
+ */ + public void testTakePhotoAutoLevelAnglesLowMemory() throws InterruptedException { + Log.d(TAG, "testTakePhotoAutoLevelAnglesLowMemory"); + + mActivity.test_low_memory = true; + + subTestTakePhotoAutoLevelAngles(); + } + + /** + * @return The number of resultant video files + */ + private int subTestTakeVideo(boolean test_exposure_lock, boolean test_focus_area, boolean allow_failure, boolean immersive_mode, TestUtils.VideoTestCallback test_cb, long time_ms, boolean max_filesize, int n_non_video_files) throws InterruptedException { + if( test_exposure_lock && !mPreview.supportsExposureLock() ) { + return 0; + } + + Thread.sleep(500); // needed for Pixel 6 Pro with Camera 2 API + + TestUtils.preTakeVideoChecks(mActivity, immersive_mode); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + + if( !mPreview.isVideo() ) { + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + } + assertTrue(mPreview.isVideo()); + TestUtils.preTakeVideoChecks(mActivity, immersive_mode); + + // reset: + mActivity.getApplicationInterface().test_n_videos_scanned = 0; + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + // store status to compare with later + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + int exposureVisibility = exposureButton.getVisibility(); + int exposureLockVisibility = exposureLockButton.getVisibility(); + + Log.d(TAG, "about to click take video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + if( mPreview.usingCamera2API() ) { + assertEquals(mPreview.getCurrentPreviewSize().width, 
mPreview.getCameraController().test_texture_view_buffer_w); + assertEquals(mPreview.getCurrentPreviewSize().height, mPreview.getCameraController().test_texture_view_buffer_h); + } + + if( mPreview.isOnTimer() ) { + Log.d(TAG, "wait for timer"); + while( mPreview.isOnTimer() ) { + } + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + } + + int exp_n_new_files = 0; + boolean failed_to_start = false; + if( mPreview.isVideoRecording() ) { + TestUtils.takeVideoRecordingChecks(mActivity, immersive_mode, exposureVisibility, exposureLockVisibility); + + if( test_cb == null ) { + if( !immersive_mode && time_ms > 500 ) { + // test turning torch on/off (if in immersive mode, popup button will be hidden) + switchToFlashValue("flash_torch"); + Thread.sleep(500); + switchToFlashValue("flash_off"); + } + + Thread.sleep(time_ms); + TestUtils.takeVideoRecordingChecks(mActivity, immersive_mode, exposureVisibility, exposureLockVisibility); + + assertFalse(mPreview.hasFocusArea()); + if( !allow_failure ) { + assertNull(mPreview.getCameraController().getFocusAreas()); + assertNull(mPreview.getCameraController().getMeteringAreas()); + } + + if( test_focus_area ) { + // touch to auto-focus with focus area + Log.d(TAG, "touch to focus"); + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + Thread.sleep(1000); // wait for autofocus + if( mPreview.supportsFocus() ) { + assertTrue(mPreview.hasFocusArea()); + assertNotNull(mPreview.getCameraController().getFocusAreas()); + assertEquals(1, mPreview.getCameraController().getFocusAreas().size()); + assertNotNull(mPreview.getCameraController().getMeteringAreas()); + assertEquals(1, mPreview.getCameraController().getMeteringAreas().size()); + } + Log.d(TAG, "done touch to focus"); + + // this time, don't wait + Log.d(TAG, "touch again to focus"); + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + } + + if( test_exposure_lock ) { + Log.d(TAG, "test exposure lock"); + 
assertFalse(mPreview.getCameraController().getAutoExposureLock()); + clickView(exposureLockButton); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertTrue( mPreview.getCameraController().getAutoExposureLock() ); + Thread.sleep(2000); + } + + TestUtils.takeVideoRecordingChecks(mActivity, immersive_mode, exposureVisibility, exposureLockVisibility); + + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + } + else { + exp_n_new_files = test_cb.doTest(); + + if( mPreview.isVideoRecording() ) { + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + } + } + } + else { + Log.d(TAG, "didn't start video"); + assertTrue(allow_failure); + failed_to_start = true; + } + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + TestUtils.checkFilesAfterTakeVideo(mActivity, allow_failure, test_cb != null, time_ms, n_non_video_files, failed_to_start, exp_n_new_files, n_new_files); + + TestUtils.postTakeVideoChecks(mActivity, immersive_mode, max_filesize, exposureVisibility, exposureLockVisibility); + + return n_new_files; + } + + public void testTakeVideo() throws InterruptedException { + Log.d(TAG, "testTakeVideo"); + + setToDefault(); + + int n_new_files = subTestTakeVideo(false, false, false, false, null, 5000, false, 0); + + assertEquals(1, n_new_files); + } + + public void testTakeVideoAudioControl() throws InterruptedException { + Log.d(TAG, "testTakeVideoAudioControl"); + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.AudioControlPreferenceKey, "noise"); + editor.apply(); + 
updateForSettings(); + + subTestTakeVideo(false, false, false, false, null, 5000, false, 0); + } + + // If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework). + public void testTakeVideoSAF() throws InterruptedException { + Log.d(TAG, "testTakeVideoSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + + int n_new_files = subTestTakeVideo(false, false, false, false, null, 5000, false, 0); + + assertEquals(1, n_new_files); + } + + /** Tests video subtitles option. + */ + public void testTakeVideoSubtitles() throws InterruptedException { + Log.d(TAG, "testTakeVideoSubtitles"); + + setToDefault(); + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VideoSubtitlePref, "preference_video_subtitle_yes"); + editor.apply(); + updateForSettings(); + } + + subTestTakeVideo(false, false, false, false, null, 5000, false, + Build.VERSION.SDK_INT >= Build.VERSION_CODES.R ? 0 : 1 // boo, Android 11 doesn't allow video subtitles to be saved with mediastore API! + ); + } + + /** Tests video subtitles option, when using Storage Access Framework. 
+ */ + public void testTakeVideoSubtitlesSAF() throws InterruptedException { + Log.d(TAG, "testTakeVideoSubtitlesSAF"); + + setToDefault(); + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VideoSubtitlePref, "preference_video_subtitle_yes"); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + } + + subTestTakeVideo(false, false, false, false, null, 5000, false, 1); + } + + /** Tests video subtitles option, including GPS - also tests losing the connection. + * Also test with Storage Access Framework, so this can run on Android 11+. + */ + public void testTakeVideoSubtitlesGPSSAF() throws InterruptedException { + Log.d(TAG, "testTakeVideoSubtitlesGPSSAF"); + + setToDefault(); + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VideoSubtitlePref, "preference_video_subtitle_yes"); + editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + } + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + // wait for location + long start_t = System.currentTimeMillis(); + while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) { + getInstrumentation().waitForIdleSync(); + if( System.currentTimeMillis() - start_t > 20000 ) { + // need to allow long time for testing devices without mobile 
network; will likely fail altogether if don't even have wifi + fail(); + } + } + getInstrumentation().waitForIdleSync(); + assertNotNull(mActivity.getLocationSupplier().getLocation()); + + Log.d(TAG, "have location"); + try { + Thread.sleep(2000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + + // now test losing gps + Log.d(TAG, "test losing location"); + mActivity.getLocationSupplier().setForceNoLocation(true); + + try { + Thread.sleep(2000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + + return 2; + } + }, 5000, false, 1); + } + + /** Test pausing and resuming video. + */ + public void testTakeVideoPause() throws InterruptedException { + Log.d(TAG, "testTakeVideoPause"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) { + Log.d(TAG, "pause video requires Android N or better"); + return; + } + + setToDefault(); + + final View pauseVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.pause_video); + assertEquals(pauseVideoButton.getVisibility(), View.GONE); + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + final long time_tol_ms = 1000; + + Log.d(TAG, "wait before pausing"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + long video_time 
= mPreview.getVideoTime(false); + Log.d(TAG, "video time: " + video_time); + assertTrue( video_time >= 3000 - time_tol_ms ); + assertTrue( video_time <= 3000 + time_tol_ms ); + + Log.d(TAG, "about to click pause video"); + clickView(pauseVideoButton); + Log.d(TAG, "done clicking pause video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.resume_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertTrue( mPreview.isVideoRecordingPaused() ); + + Log.d(TAG, "wait before resuming"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.resume_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertTrue( mPreview.isVideoRecordingPaused() ); + video_time = mPreview.getVideoTime(false); + Log.d(TAG, "video time: " + video_time); + assertTrue( video_time >= 3000 - time_tol_ms ); + assertTrue( video_time <= 3000 + time_tol_ms ); + + Log.d(TAG, "about to click resume video"); + clickView(pauseVideoButton); + Log.d(TAG, "done clicking resume video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + + Log.d(TAG, "wait before stopping"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", 
e); + fail(); + } + Log.d(TAG, "takePhotoButton description: " + takePhotoButton.getContentDescription()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + video_time = mPreview.getVideoTime(false); + Log.d(TAG, "video time: " + video_time); + assertTrue( video_time >= 6000 - time_tol_ms ); + assertTrue( video_time <= 6000 + time_tol_ms ); + + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + return 1; + } + }, 5000, false, 0); + } + + /** Test pausing and stopping video. 
+ */ + public void testTakeVideoPauseStop() throws InterruptedException { + Log.d(TAG, "testTakeVideoPauseStop"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) { + Log.d(TAG, "pause video requires Android N or better"); + return; + } + + setToDefault(); + + final View pauseVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.pause_video); + assertEquals(pauseVideoButton.getVisibility(), View.GONE); + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + final long time_tol_ms = 1000; + + Log.d(TAG, "wait before pausing"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.pause_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + long video_time = mPreview.getVideoTime(false); + Log.d(TAG, "video time: " + video_time); + assertTrue( video_time >= 3000 - time_tol_ms ); + assertTrue( video_time <= 3000 + time_tol_ms ); + + Log.d(TAG, "about to click pause video"); + clickView(pauseVideoButton); + Log.d(TAG, "done clicking pause video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.resume_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertTrue( mPreview.isVideoRecordingPaused() ); + + Log.d(TAG, 
"wait before stopping"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + Log.d(TAG, "takePhotoButton description: " + takePhotoButton.getContentDescription()); + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(pauseVideoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.resume_video)); + assertEquals(pauseVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertTrue( mPreview.isVideoRecordingPaused() ); + video_time = mPreview.getVideoTime(false); + Log.d(TAG, "video time: " + video_time); + assertTrue( video_time >= 3000 - time_tol_ms ); + assertTrue( video_time <= 3000 + time_tol_ms ); + + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + return 1; + } + }, 5000, false, 0); + } + + private void subTestTakeVideoSnapshot() throws InterruptedException { + Log.d(TAG, "subTestTakeVideoSnapshot"); + + final View takePhotoVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo_when_video_recording); + assertEquals(takePhotoVideoButton.getVisibility(), View.GONE); + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + + Log.d(TAG, "wait before taking photo"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + 
assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + + Log.d(TAG, "about to click take photo snapshot"); + clickView(takePhotoVideoButton); + Log.d(TAG, "done clicking take photo snapshot"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + waitForTakePhoto(); + + assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mPreview.isVideoRecordingPaused()); + + Log.d(TAG, "wait before stopping"); + try { + Thread.sleep(3000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + assertEquals(takePhotoButton.getContentDescription(), mActivity.getResources().getString(net.sourceforge.opencamera.R.string.stop_video)); + assertEquals(takePhotoVideoButton.getVisibility(), View.VISIBLE); + assertTrue( mPreview.isVideoRecording() ); + + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + return 2; + } + }, 5000, false, 1); + + mActivity.waitUntilImageQueueEmpty(); + } + + /** Test taking photo while recording video. + */ + public void testTakeVideoSnapshot() throws InterruptedException { + Log.d(TAG, "testTakeVideoSnapshot"); + + setToDefault(); + + if( !mPreview.supportsPhotoVideoRecording() ) { + Log.d(TAG, "video snapshot not supported"); + return; + } + + subTestTakeVideoSnapshot(); + } + + /** Test taking photo while recording video, with timer. 
+ */ + public void testTakeVideoSnapshotTimer() throws InterruptedException { + Log.d(TAG, "testTakeVideoSnapshotTimer"); + + setToDefault(); + + if( !mPreview.supportsPhotoVideoRecording() ) { + Log.d(TAG, "video snapshot not supported"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TimerPreferenceKey, "5"); + editor.putBoolean(PreferenceKeys.TimerBeepPreferenceKey, false); + editor.apply(); + + subTestTakeVideoSnapshot(); + } + + /** Test taking photo while recording video, with pause preview. + */ + public void testTakeVideoSnapshotPausePreview() throws InterruptedException { + Log.d(TAG, "testTakeVideoSnapshotPausePreview"); + + setToDefault(); + + if( !mPreview.supportsPhotoVideoRecording() ) { + Log.d(TAG, "video snapshot not supported"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, true); + editor.apply(); + + subTestTakeVideoSnapshot(); + } + + /** Test taking photo while recording video at max video quality. 
+ */ + public void testTakeVideoSnapshotMax() throws InterruptedException { + Log.d(TAG, "testTakeVideoSnapshotMax"); + + setToDefault(); + + if( !mPreview.supportsPhotoVideoRecording() ) { + Log.d(TAG, "video snapshot not supported"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6 or OnePlus 3T) + editor.apply(); + updateForSettings(); + + subTestTakeVideoSnapshot(); + } + + /** Set available memory to make sure that we stop before running out of memory. + * This test is fine-tuned to Nexus 6, OnePlus 3T, Nokia 8, Galaxy S10e as we measure hitting max filesize based on time. + */ + public void testTakeVideoAvailableMemory() throws InterruptedException { + Log.d(TAG, "testTakeVideoAvailableMemory"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + // as not fine-tuned to pre-Android 5 devices + return; + } + setToDefault(); + + mActivity.getApplicationInterface().test_set_available_memory = true; + mActivity.getApplicationInterface().test_available_memory = 50000000; + boolean is_nokia = Build.MANUFACTURER.toLowerCase(Locale.US).contains("hmd global"); + boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + if( is_nokia || is_samsung ) + { + // Nokia 8 has much smaller video sizes, at least when recording with phone face down, so we both set + // 4K, and lower test_available_memory. 
+ mActivity.getApplicationInterface().test_available_memory = 21000000; // must be at least MyApplicationInterface.getVideoMaxFileSizePref().min_free_filesize + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6 or OnePlus 3T) + editor.apply(); + updateForSettings(); + } + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + // wait until automatically stops + Log.d(TAG, "wait until video recording stops"); + long time_s = System.currentTimeMillis(); + long video_time_s = mPreview.getVideoTime(false); + // simulate remaining memory now being reduced, so we don't keep trying to restart + mActivity.getApplicationInterface().test_available_memory = 10000000; + while( mPreview.isVideoRecording() ) { + assertTrue( System.currentTimeMillis() - time_s <= 35000 ); + long video_time = mPreview.getVideoTime(false); + assertTrue( video_time >= video_time_s ); + } + Log.d(TAG, "video recording now stopped"); + // now allow time for video recording to properly shut down + try { + Thread.sleep(1000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + Log.d(TAG, "done waiting"); + + return 1; + } + }, 5000, true, 0); + } + + /** Set available memory small enough to make sure we don't even attempt to record video. 
+ */ + public void testTakeVideoAvailableMemory2() throws InterruptedException { + Log.d(TAG, "testTakeVideoAvailableMemory2"); + + setToDefault(); + + mActivity.getApplicationInterface().test_set_available_memory = true; + mActivity.getApplicationInterface().test_available_memory = 5000000; + + subTestTakeVideo(false, false, true, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + // wait until automatically stops + Log.d(TAG, "wait until video recording stops"); + assertFalse( mPreview.isVideoRecording() ); + Log.d(TAG, "video recording now stopped"); + return 0; + } + }, 5000, true, 0); + } + + /** Set maximum filesize so that we get approx 3s of video time. Check that recording stops and restarts within 10s. + * Then check recording stops again within 10s. + * On Android 8+, we use MediaRecorder.setNextOutputFile() (see Preview.onVideoInfo()), so instead we just wait 10s and + * check video is still recording, then expect at least 2 resultant video files. If this fails on Android 8+, ensure + * that the video lengths aren't too short (if less than 3s, we sometimes seem to fall back to the pre-Android 8 + * behaviour, presumably because setNextOutputFile() can't take effect in time). + * This test is fine-tuned to Nexus 6, OnePlus 3T, Nokia 8, Galaxy S10e, as we measure hitting max filesize based on time. 
+ */
+ public void testTakeVideoMaxFileSize1() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoMaxFileSize1");
+
+ if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
+ // as not fine-tuned to pre-Android 5 devices
+ return;
+ }
+ setToDefault();
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ // device-specific tuning: the max-filesize value is chosen so that each file lasts only a few seconds
+ boolean is_nokia = Build.MANUFACTURER.toLowerCase(Locale.US).contains("hmd global");
+ boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung");
+ if( is_nokia || is_samsung ) {
+ // Nokia 8 has much smaller video sizes, at least when recording with phone face down, so we also set 4K
+ editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6)
+ //editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "2000000"); // approx 3s on Nokia 8 at 4K
+ editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "10000000"); // approx 3s on Nokia 8 at 4K
+ }
+ else {
+ //editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId()), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6)
+ //editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "15728640"); // approx 3-4s on Nexus 6 at 4K
+ editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "9437184"); // approx 3-4s on Nexus 6 at FullHD
+ }
+ editor.apply();
+ updateForSettings();
+
+ int n_new_files = subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() {
+ @Override
+ public int doTest() {
+ if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
+ // Android 8+ path: restarts on hitting max filesize are seamless, so recording should
+ // remain in progress the whole time (verified again after the 10s sleep below)
+ assertTrue(mPreview.isVideoRecording());
+
+ long video_time = mPreview.getVideoTime(false);
+ long video_time_this_file = mPreview.getVideoTime(true);
+
+ // no restart has happened yet, so total time should equal this file's time
+ assertEquals(video_time, video_time_this_file);
+
+ Log.d(TAG, "wait");
+ try {
+ Thread.sleep(10000);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+ Log.d(TAG, "check still recording");
+ assertTrue(mPreview.isVideoRecording());
+ return -1; // the number of videos recorded can vary, as the max duration corresponding to max filesize can vary wildly, so we check the number of files afterwards (below)
+ }
+
+ // pre-Android 8 code:
+
+ // wait until automatically stops
+ // NOTE(review): these polling loops spin without sleeping — acceptable in a test, but CPU-heavy
+ Log.d(TAG, "wait until video recording stops");
+ long time_s = System.currentTimeMillis();
+ long video_time_s = mPreview.getVideoTime(false);
+ while( mPreview.isVideoRecording() ) {
+ assertTrue( System.currentTimeMillis() - time_s <= 8000 );
+ long video_time = mPreview.getVideoTime(false);
+ assertTrue( video_time >= video_time_s );
+ }
+ Log.d(TAG, "video recording now stopped - wait for restart");
+ video_time_s = mPreview.getVideoAccumulatedTime();
+ Log.d(TAG, "video_time_s: " + video_time_s);
+ // now ensure we'll restart within a reasonable time
+ time_s = System.currentTimeMillis();
+ while( !mPreview.isVideoRecording() ) {
+ long c_time = System.currentTimeMillis();
+ if( c_time - time_s > 10000 ) {
+ Log.e(TAG, "time: " + (c_time - time_s));
+ }
+ assertTrue( c_time - time_s <= 10000 );
+ }
+ // wait for stop again
+ time_s = System.currentTimeMillis();
+ while( mPreview.isVideoRecording() ) {
+ long c_time = System.currentTimeMillis();
+ if( c_time - time_s > 10000 ) {
+ Log.e(TAG, "time: " + (c_time - time_s));
+ }
+ assertTrue( c_time - time_s <= 10000 );
+ long video_time = mPreview.getVideoTime(false);
+ if( video_time < video_time_s )
+ Log.d(TAG, "compare: " + video_time_s + " to " + video_time);
+ // accumulated time must not go backwards (1ms tolerance)
+ assertTrue( video_time + 1 >= video_time_s );
+ }
+ Log.d(TAG, "video recording now stopped again");
+
+ // now start again
+ time_s = System.currentTimeMillis();
+ while( !mPreview.isVideoRecording() ) {
+ long c_time = System.currentTimeMillis();
+ if( c_time - time_s > 10000 ) {
+ Log.e(TAG, "time: " + (c_time - time_s));
+ }
+ assertTrue( c_time - time_s <= 10000 );
+ }
+ try {
+ Thread.sleep(1000);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+
+ // now properly stop - need to wait first so that stopping video isn't ignored (due to too quick after video start)
+ try {
+ Thread.sleep(1000);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+ View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
+ Log.d(TAG, "about to click stop video");
+ clickView(takePhotoButton);
+ Log.d(TAG, "done clicking stop video");
+ getInstrumentation().waitForIdleSync();
+ Log.d(TAG, "after idle sync");
+ Log.d(TAG, "wait for stop");
+ try {
+ Thread.sleep(1000);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+ Log.d(TAG, "done wait for stop");
+ return -1; // the number of videos recorded can vary, as the max duration corresponding to max filesize can vary wildly
+ }
+ }, 5000, true, 0);
+
+ if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
+ // seamless restart must have produced at least two output files
+ assertTrue( n_new_files >= 2 );
+ }
+
+ // if we've restarted, the total video time should be longer than the video time for the most recent file
+ long video_time = mPreview.getVideoTime(false);
+ long video_time_this_file = mPreview.getVideoTime(true);
+ Log.d(TAG, "video_time: " + video_time);
+ Log.d(TAG, "video_time_this_file: " + video_time_this_file);
+ assertTrue(video_time > video_time_this_file + 1000);
+ }
+
+ /** Max filesize is for ~4.5s, and max duration is 5s, check we only get 1 video.
+ * This test is fine-tuned to OnePlus 3T, as we measure hitting max filesize based on time.
+ */ + public void testTakeVideoMaxFileSize2() throws InterruptedException { + Log.d(TAG, "testTakeVideoMaxFileSize2"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + // as not fine-tuned to pre-Android 5 devices + return; + } + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6 or OnePlus 3T) + //editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "23592960"); // approx 4.5s on Nexus 6 at 4K + editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "35389440"); // approx 4.5s on OnePlus 3T at 4K + editor.putString(PreferenceKeys.VideoMaxDurationPreferenceKey, "5"); + editor.apply(); + updateForSettings(); + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + // wait until automatically stops + Log.d(TAG, "wait until video recording stops"); + long time_s = System.currentTimeMillis(); + long video_time_s = mPreview.getVideoTime(false); + while( mPreview.isVideoRecording() ) { + assertTrue( System.currentTimeMillis() - time_s <= 8000 ); + long video_time = mPreview.getVideoTime(false); + assertTrue( video_time >= video_time_s ); + } + Log.d(TAG, "video recording now stopped - check we don't restart"); + video_time_s = mPreview.getVideoAccumulatedTime(); + Log.d(TAG, "video_time_s: " + video_time_s); + // now ensure we don't restart + time_s = System.currentTimeMillis(); + while( System.currentTimeMillis() - time_s <= 5000 ) { + assertFalse( mPreview.isVideoRecording() ); + } + return 1; + } + }, 5000, true, 0); + } + + /* Max filesize for ~5s, max duration 7s, max n_repeats 1 - to ensure we're not repeating indefinitely. 
+ * This test is fine-tuned to OnePlus 3T, as we measure hitting max filesize based on time. + */ + public void testTakeVideoMaxFileSize3() throws InterruptedException { + Log.d(TAG, "testTakeVideoMaxFileSize3"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + // as not fine-tuned to pre-Android 5 devices + return; + } + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Nexus 6) + //editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "26214400"); // approx 5s on Nexus 6 at 4K + //editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "15728640"); // approx 5s on Nexus 6 at 4K + editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "26214400"); // approx 5s on OnePlus 3T at 4K + editor.putString(PreferenceKeys.VideoMaxDurationPreferenceKey, "7"); + editor.putString(PreferenceKeys.VideoRestartPreferenceKey, "1"); + editor.apply(); + updateForSettings(); + + subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() { + @Override + public int doTest() { + // wait until we should have stopped - 2x7s, but add 6s for each of 4 restarts + Log.d(TAG, "wait until video recording completely stopped"); + try { + Thread.sleep(38000); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + Log.d(TAG, "ensure we've really stopped"); + long time_s = System.currentTimeMillis(); + while( System.currentTimeMillis() - time_s <= 5000 ) { + assertFalse( mPreview.isVideoRecording() ); + } + return -1; // the number of videos recorded can very, as the max duration corresponding to max filesize can vary widly + } + }, 5000, true, 0); + } + + + 
+ /** Shared implementation for testTakeVideoMaxFileSize4() and its SAF variant.
+ * Records at highest quality with a small max filesize, then checks that when the seamless
+ * restart is signalled (Preview.test_called_next_output_file set) we are still recording, and
+ * that recording on the next output file has not yet actually begun
+ * (Preview.test_started_next_output_file stays false) - see testTakeVideoMaxFileSize4() javadoc.
+ */
+ private void subTestTakeVideoMaxFileSize4() throws InterruptedException {
+ if( Build.VERSION.SDK_INT < Build.VERSION_CODES.O ) {
+ // as this tests Android 8+'s seamless restart
+ return;
+ }
+ if( TestUtils.isEmulator() ) {
+ // as test takes about 6.5 minutes on emulator (possibly due to unusual video file sizes), even though it does eventually pass
+ return;
+ }
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), false), "" + CamcorderProfile.QUALITY_HIGH); // set to highest quality (4K on Galaxy S10e)
+ editor.putString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "30000000"); // about 19s on Galaxy S10e at 4K
+ editor.apply();
+ updateForSettings();
+
+ subTestTakeVideo(false, false, false, false, new TestUtils.VideoTestCallback() {
+ @Override
+ public int doTest() {
+ assertTrue(mPreview.isVideoRecording());
+ assertFalse(mPreview.test_started_next_output_file);
+
+ // poll until the max-filesize-approaching handling has requested the next output file
+ while( !mPreview.test_called_next_output_file ) {
+ Log.d(TAG, "waiting for test_called_next_output_file");
+ try {
+ Thread.sleep(100);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+ }
+
+ Log.d(TAG, "test_called_next_output_file is now set");
+ assertTrue(mPreview.isVideoRecording());
+
+ // If this fails, it means we already started recording on the next output test, so
+ // can't test what we wanted to test (i.e., that we don't create a zero-length video
+ // file):
+ assertFalse(mPreview.test_started_next_output_file);
+
+ // wait a little bit longer... but needs to be before seamless restart actually occurs!
+ try {
+ Thread.sleep(100);
+ }
+ catch(InterruptedException e) {
+ Log.e(TAG, "InterruptedException from sleep", e);
+ fail();
+ }
+
+ // If this fails, it means we already started recording on the next output test, so
+ // can't test what we wanted to test (i.e., that we don't create a zero-length video
+ // file):
+ assertFalse(mPreview.test_started_next_output_file);
+
+ return 1; // expect a single video file, since we stop before the next output file starts
+ }
+ }, 5000, true, 0);
+ }
+
+ /** Tests stopping video when MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING has been received, but before
+ * we receive MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED. Tests that we delete the leftover zero-length file
+ * that would have been created.
+ */
+ public void testTakeVideoMaxFileSize4() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoMaxFileSize4");
+
+ setToDefault();
+
+ subTestTakeVideoMaxFileSize4();
+ }
+
+ /** As testTakeVideoMaxFileSize4(), but using Storage Access Framework.
+ * N.B., failing on Android 12+ (e.g., Galaxy S10e, Pixel 6 Pro) due to receiving MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
+ * shortly after MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING. Seems to be issue specific to using SAF with too small
+ * maximum file sizes.
+ */
+ public void testTakeVideoMaxFileSize4SAF() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoMaxFileSize4SAF");
+
+ if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
+ Log.d(TAG, "SAF requires Android Lollipop or better");
+ return;
+ }
+
+ setToDefault();
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true);
+ editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera");
+ editor.apply();
+ updateForSettings();
+
+ subTestTakeVideoMaxFileSize4();
+ }
+
+ /** Tests the video stabilization preference: digital stabilization should only be enabled
+ * whilst in video mode, and optical stabilization (OIS) should be disabled whenever digital
+ * video stabilization is active.
+ */
+ public void testTakeVideoStabilization() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoStabilization");
+
+ setToDefault();
+
+ if( !mPreview.supportsVideoStabilization() ) {
+ Log.d(TAG, "video stabilization not supported");
+ return;
+ }
+ boolean supports_ois = mPreview.supportsOpticalStabilization();
+
+ assertFalse(mPreview.getCameraController().getVideoStabilization());
+ assertEquals(supports_ois, mPreview.getOpticalStabilization()); // OIS should be on if supported
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putBoolean(PreferenceKeys.VideoStabilizationPreferenceKey, true);
+ editor.apply();
+ updateForSettings();
+
+ // video stabilization should only actually be enabled when in video mode
+ assertFalse(mPreview.isVideo());
+ assertFalse(mPreview.getCameraController().getVideoStabilization());
+ assertEquals(supports_ois, mPreview.getOpticalStabilization()); // OIS should be on if supported
+
+ // now switch to video mode
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertTrue(mPreview.isVideo());
+ assertTrue(mPreview.getCameraController().getVideoStabilization());
+ // NOTE(review): OIS is queried via mPreview.getOpticalStabilization() above but via
+ // getCameraController().getOpticalStabilization() below - confirm both accessors are equivalent
+ assertFalse(mPreview.getCameraController().getOpticalStabilization()); // OIS should always be disabled when using digital video stabilization
+
+ subTestTakeVideo(false, false, false, false, null, 5000, false, 0);
+
+ assertTrue(mPreview.isVideo());
+ assertTrue(mPreview.getCameraController().getVideoStabilization());
+ assertFalse(mPreview.getCameraController().getOpticalStabilization()); // OIS should always be disabled when using digital video stabilization
+
+ // restart when in video mode, and ensure still as expected
+ restart();
+ switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ Thread.sleep(1000);
+ assertTrue(mPreview.isVideo());
+ assertTrue(mPreview.getCameraController().getVideoStabilization());
+ assertFalse(mPreview.getCameraController().getOpticalStabilization()); // OIS should always be disabled when using digital video stabilization
+
+ // now switch back to photo mode
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertFalse(mPreview.isVideo());
+ assertFalse(mPreview.getCameraController().getVideoStabilization());
+ assertEquals(supports_ois, mPreview.getOpticalStabilization()); // OIS should be on if supported
+ }
+
+ /** Takes a video with the exposure-lock option of subTestTakeVideo enabled (first flag). */
+ public void testTakeVideoExposureLock() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoExposureLock");
+
+ setToDefault();
+
+ subTestTakeVideo(true, false, false, false, null, 5000, false, 0);
+ }
+
+ /** Takes a video with the focus-area option of subTestTakeVideo enabled (second flag). */
+ public void testTakeVideoFocusArea() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoFocusArea");
+
+ setToDefault();
+
+ subTestTakeVideo(false, true, false, false, null, 5000, false, 0);
+ }
+
+ /** Tests starting and stopping video quickly, to simulate failing to create a video (but needs Open Camera to delete
+ * the corrupt resultant video).
+ */
+ public void testTakeVideoQuick() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoQuick");
+
+ setToDefault();
+
+ mPreview.test_runtime_on_video_stop = true; // as RuntimeException on short delay doesn't seem to occur on Galaxy S10e at least, for 500ms delay
+
+ // still need a short delay (at least 500ms, otherwise Open Camera will ignore the repeated stop)
+ subTestTakeVideo(false, false, false, false, null, 500, false, 0);
+ }
+
+ // If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework).
+ public void testTakeVideoQuickSAF() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoQuickSAF");
+
+ if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
+ Log.d(TAG, "SAF requires Android Lollipop or better");
+ return;
+ }
+
+ setToDefault();
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true);
+ editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera");
+ editor.apply();
+ updateForSettings();
+
+ mPreview.test_runtime_on_video_stop = true; // as RuntimeException on short delay doesn't seem to occur on Galaxy S10e at least, for 500ms delay
+
+ // still need a short delay (at least 500ms, otherwise Open Camera will ignore the repeated stop)
+ subTestTakeVideo(false, false, false, false, null, 500, false, 0);
+ }
+
+ /** Forces a simulated video recording failure via the Preview test hook, and checks recording
+ * fails gracefully (third subTestTakeVideo argument is the allow-failure flag). */
+ public void testTakeVideoForceFailure() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoForceFailure");
+
+ setToDefault();
+
+ mActivity.getPreview().test_video_failure = true;
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+ }
+
+ /** As testTakeVideoForceFailure(), but saving via the Storage Access Framework. */
+ public void testTakeVideoForceFailureSAF() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoForceFailureSAF");
+
+ if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
+ Log.d(TAG, "SAF requires Android Lollipop or better");
+ return;
+ }
+
+ setToDefault();
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true);
+ editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera");
+ editor.apply();
+ updateForSettings();
+
+ mActivity.getPreview().test_video_failure = true;
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+ }
+
+ /** Forces an IOException when setting up the video recorder, via the Preview test hook. */
+ public void testTakeVideoForceIOException() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoForceIOException");
+
+ setToDefault();
+
+ mActivity.getPreview().test_video_ioexception = true;
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+ }
+
+ /** Forces a CameraControllerException when recording video, via the Preview test hook. */
+ public void testTakeVideoForceCameraControllerException() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoForceCameraControllerException");
+
+ setToDefault();
+
+ mActivity.getPreview().test_video_cameracontrollerexception = true;
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+ }
+
+ /* Test can be unreliable on some devices, test no longer run as part of test suites.
+ */
+ public void testTakeVideo4K() throws InterruptedException {
+ Log.d(TAG, "testTakeVideo4K");
+
+ setToDefault();
+
+ if( !mActivity.supportsForceVideo4K() ) {
+ return;
+ }
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putBoolean(PreferenceKeys.ForceVideo4KPreferenceKey, true);
+ editor.apply();
+ updateForSettings();
+
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+ }
+
+ /** Will likely be unreliable on OnePlus 3T and Galaxy S10e with Camera2.
+ * Also hangs with old camera API on Pixel 6 Pro.
+ */ + public void testTakeVideoFPS() throws InterruptedException { + Log.d(TAG, "testTakeVideoFPS"); + + setToDefault(); + // different frame rates only reliable for Camera2, but at least make sure we don't crash on old api + final int [] fps_values = mPreview.usingCamera2API() ? new int[]{15, 25, 30, 60, 120, 240} : new int[]{30}; + for(int fps_value : fps_values) { + if( mPreview.usingCamera2API() ) { + if( mPreview.getVideoQualityHander().videoSupportsFrameRate(fps_value) ) { + Log.d(TAG, "fps supported at normal speed: " + fps_value); + } + else if( mPreview.getVideoQualityHander().videoSupportsFrameRateHighSpeed(fps_value) ) { + Log.d(TAG, "fps supported at HIGH SPEED: " + fps_value); + } + else { + Log.d(TAG, "fps is NOT supported: " + fps_value); + continue; + } + boolean expect_high_speed; + boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + boolean is_google = Build.MANUFACTURER.toLowerCase(Locale.US).contains("google"); + if( is_samsung || is_google ) { + // tested on Galaxy S10e, Galaxy S24+, Pixel 6 Pro at least + expect_high_speed = (fps_value > 60); + } + else { + expect_high_speed = (fps_value >= 60); + } + assertEquals(expect_high_speed, mPreview.fpsIsHighSpeed(String.valueOf(fps_value))); + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getVideoFPSPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), String.valueOf(fps_value)); + editor.apply(); + updateForSettings(); + + Log.d(TAG, "test video with fps: " + fps_value); + //boolean allow_failure = fps_value.equals("24") || fps_value.equals("25") || fps_value.equals("60"); + boolean allow_failure = false; + subTestTakeVideo(false, false, allow_failure, false, null, 5000, false, 0); + } + } + + /** Will likely be unreliable on OnePlus 3T, Galaxy S10e. 
+ * Manual mode should be ignored by high speed video, but check this doesn't crash at least!
+ */
+ public void testTakeVideoFPSHighSpeedManual() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoFPSHighSpeedManual");
+
+ setToDefault();
+
+ if( !mPreview.usingCamera2API() ) {
+ return;
+ }
+ else if( !mPreview.supportsISORange() ) {
+ return;
+ }
+
+ // require 120fps to be available as high speed only, otherwise the test isn't applicable
+ int fps_value = 120;
+ if( mPreview.getVideoQualityHander().videoSupportsFrameRate(fps_value) ) {
+ Log.d(TAG, "fps supported at normal speed: " + fps_value);
+ return;
+ }
+ else if( !mPreview.getVideoQualityHander().videoSupportsFrameRateHighSpeed(fps_value) ) {
+ Log.d(TAG, "fps is NOT supported: " + fps_value);
+ return;
+ }
+
+ assertTrue( mPreview.fpsIsHighSpeed(String.valueOf(fps_value)) );
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoFPSPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), String.valueOf(fps_value));
+ editor.apply();
+ updateForSettings();
+
+ Log.d(TAG, "test video with fps: " + fps_value);
+
+ // enable manual ISO, which high speed video should ignore
+ switchToISO(100);
+
+ View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure);
+ assertEquals(exposureButton.getVisibility(), View.VISIBLE);
+
+ // switch to video mode, ensure that exposure button disappears due to high speed video
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertTrue(mPreview.isVideo());
+ assertEquals(exposureButton.getVisibility(), View.GONE);
+
+ // test recording video
+ subTestTakeVideo(false, false, false, false, null, 5000, false, 0);
+
+ // switch to photo mode, ensure that exposure button re-appears
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertFalse(mPreview.isVideo());
+ assertEquals(exposureButton.getVisibility(), View.VISIBLE);
+ }
+
+ /** Tests that video resolutions are stored separately for high speed fps, for Camera2.
+ */
+ public void testVideoFPSHighSpeed() {
+ Log.d(TAG, "testVideoFPSHighSpeed");
+
+ setToDefault();
+
+ if( !mPreview.usingCamera2API() ) {
+ return;
+ }
+
+ int fps_value = 120;
+ if( mPreview.getVideoQualityHander().videoSupportsFrameRate(fps_value) ) {
+ Log.d(TAG, "fps supported at normal speed: " + fps_value);
+ return;
+ }
+ else if( !mPreview.getVideoQualityHander().videoSupportsFrameRateHighSpeed(fps_value) ) {
+ Log.d(TAG, "fps is NOT supported: " + fps_value);
+ return;
+ }
+
+ assertTrue( mPreview.fpsIsHighSpeed(String.valueOf(fps_value)) );
+
+ // switch to video mode
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertTrue(mPreview.isVideo());
+
+ // get initial video resolution, for non-high-speed
+ String saved_quality = mActivity.getApplicationInterface().getVideoQualityPref();
+ VideoProfile profile = mPreview.getVideoProfile();
+ int saved_video_width = profile.videoFrameWidth;
+ int saved_video_height = profile.videoFrameHeight;
+ Log.d(TAG, "saved_quality: " + saved_quality);
+ Log.d(TAG, "saved_video_width: " + saved_video_width);
+ Log.d(TAG, "saved_video_height: " + saved_video_height);
+
+ // switch to high speed fps
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoFPSPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), String.valueOf(fps_value));
+ editor.apply();
+ updateForSettings();
+
+ Log.d(TAG, "test video with fps: " + fps_value);
+
+ // change video resolution
+ // NOTE(review): the generic type parameter (likely List<String>) and the body of the loop
+ // below appear to have been lost in patch transcription (text between '<' and '>' eaten);
+ // restore from the upstream source before applying this patch
+ List video_sizes = mPreview.getSupportedVideoQuality(mActivity.getApplicationInterface().getVideoFPSPref());
+ // find current index
+ int video_size_index = -1;
+ for(int i=0;i video_size_index+1);
+ video_size_index++;
+ String quality = video_sizes.get(video_size_index);
+ settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref(), mActivity.getApplicationInterface().fpsIsHighSpeed()), quality);
+ editor.apply();
+ updateForSettings();
+
+ quality = mActivity.getApplicationInterface().getVideoQualityPref();
+ profile = mPreview.getVideoProfile();
+ int video_width = profile.videoFrameWidth;
+ int video_height = profile.videoFrameHeight;
+ Log.d(TAG, "quality: " + quality);
+ Log.d(TAG, "video_width: " + video_width);
+ Log.d(TAG, "video_height: " + video_height);
+ assertNotEquals(saved_quality, quality);
+ assertFalse(video_width == saved_video_width && video_height == saved_video_height);
+ String high_speed_quality = quality;
+ int high_speed_video_width = video_width;
+ int high_speed_video_height = video_height;
+
+ // switch to normal fps
+ Log.d(TAG, "switch to normal fps");
+ settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoFPSPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), "30");
+ editor.apply();
+ updateForSettings();
+
+ // check resolution reverts to original
+ quality = mActivity.getApplicationInterface().getVideoQualityPref();
+ profile = mPreview.getVideoProfile();
+ video_width = profile.videoFrameWidth;
+ video_height = profile.videoFrameHeight;
+ Log.d(TAG, "quality: " + quality);
+ Log.d(TAG, "video_width: " + video_width);
+ Log.d(TAG, "video_height: " + video_height);
+ assertEquals(saved_quality, quality);
+ assertTrue(video_width == saved_video_width && video_height == saved_video_height);
+
+ // switch to high speed fps again
+ Log.d(TAG, "switch to high speed fps again");
+ settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ editor = settings.edit();
+ editor.putString(PreferenceKeys.getVideoFPSPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), String.valueOf(fps_value));
+ editor.apply();
+ updateForSettings();
+
+ // check resolution reverts to high speed
+ quality = mActivity.getApplicationInterface().getVideoQualityPref();
+ profile = mPreview.getVideoProfile();
+ video_width = profile.videoFrameWidth;
+ video_height = profile.videoFrameHeight;
+ Log.d(TAG, "quality: " + quality);
+ Log.d(TAG, "video_width: " + video_width);
+ Log.d(TAG, "video_height: " + video_height);
+ assertEquals(high_speed_quality, quality);
+ assertTrue(video_width == high_speed_video_width && video_height == high_speed_video_height);
+ }
+
+ /** Will likely be unreliable on OnePlus 3T, Galaxy S10e.
+ */
+ public void testTakeVideoSlowMotion() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoSlowMotion");
+
+ setToDefault();
+
+ if( !mPreview.usingCamera2API() ) {
+ return;
+ }
+
+ // NOTE(review): generic type parameter (likely List<Float>) appears lost in patch transcription
+ List supported_capture_rates = mActivity.getApplicationInterface().getSupportedVideoCaptureRates();
+ if( supported_capture_rates.size() <= 1 ) {
+ Log.d(TAG, "slow motion not supported");
+ return;
+ }
+
+ // slow motion rates (< 1x) come first in the supported capture rates
+ float capture_rate = supported_capture_rates.get(0);
+ if( capture_rate > 1.0f-1.0e-5f ) {
+ Log.d(TAG, "slow motion not supported");
+ return;
+ }
+ Log.d(TAG, "capture_rate: " + capture_rate);
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putFloat(PreferenceKeys.getVideoCaptureRatePreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), capture_rate);
+ editor.apply();
+ updateForSettings();
+
+ // switch to video, and check we've set a high speed fps
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertTrue(mPreview.isVideo());
+
+ String fps_value = mActivity.getApplicationInterface().getVideoFPSPref();
+ int fps = Integer.parseInt(fps_value);
+ Log.d(TAG, "fps: " + fps);
+ assertTrue(fps >= 60);
+ assertTrue(mPreview.isVideoHighSpeed());
+
+ // check video profile
+ VideoProfile profile = mPreview.getVideoProfile();
+ assertEquals(profile.videoCaptureRate, fps, 1.0e-5);
+ // frame rate should be capture rate scaled by the slow motion factor
+ assertEquals((float)profile.videoFrameRate, (float)(profile.videoCaptureRate*capture_rate), 1.0e-5);
+
+ boolean allow_failure = false;
+ subTestTakeVideo(false, false, allow_failure, false, null, 5000, false, 0);
+ }
+
+ /** Take video with timelapse mode.
+ * Fails on Pixel 6 Pro with old camera API.
+ */
+ public void testTakeVideoTimeLapse() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoTimeLapse");
+
+ setToDefault();
+
+ if( TestUtils.isEmulator() ) {
+ // fails on Android emulator (at least for Android 7.1 on VirtualScene camera) - and can also leave camera in a state that can't be opened for subsequent tests
+ return;
+ }
+
+ // NOTE(review): generic type parameter (likely List<Float>) appears lost in patch transcription
+ List supported_capture_rates = mActivity.getApplicationInterface().getSupportedVideoCaptureRates();
+ if( supported_capture_rates.size() <= 1 ) {
+ Log.d(TAG, "timelapse not supported");
+ return;
+ }
+
+ float capture_rate = -1.0f;
+ // find the first timelapse rate
+ for(float this_capture_rate : supported_capture_rates) {
+ if( this_capture_rate > 1.0f+1.0e-5f ) {
+ capture_rate = this_capture_rate;
+ break;
+ }
+ }
+ if( capture_rate < 0.0f ) {
+ Log.d(TAG, "timelapse not supported");
+ return;
+ }
+ Log.d(TAG, "capture_rate: " + capture_rate);
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putFloat(PreferenceKeys.getVideoCaptureRatePreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), capture_rate);
+ editor.apply();
+ updateForSettings();
+
+ // switch to video, and check we've set a non-high speed fps
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ assertTrue(mPreview.isVideo());
+
+ String fps_value = mActivity.getApplicationInterface().getVideoFPSPref();
+ Log.d(TAG, "fps_value: " + fps_value);
+ assertEquals("default", fps_value);
+ assertFalse(mPreview.isVideoHighSpeed());
+
+ // check video profile
+ VideoProfile profile = mPreview.getVideoProfile();
+ // note, need to allow a larger delta, due to the fudge factor applied for 2x timelapse
+ assertEquals((float)profile.videoFrameRate, (float)(profile.videoCaptureRate*capture_rate), 5.0e-3);
+
+ boolean allow_failure = false;
+ subTestTakeVideo(false, false, allow_failure, false, null, 5000, false, 0);
+ }
+
+ /* Test can be unreliable on some devices, test no longer run as part of test suites.
+ */
+ public void testTakeVideoBitrate() throws InterruptedException {
+ Log.d(TAG, "testTakeVideoBitrate");
+
+ setToDefault();
+ final String [] bitrate_values = new String[]{"1000000", "10000000", "20000000", "50000000"};
+ //final String [] bitrate_values = new String[]{"1000000", "10000000", "20000000", "30000000"};
+ for(String bitrate_value : bitrate_values) {
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.VideoBitratePreferenceKey, bitrate_value);
+ editor.apply();
+ updateForSettings();
+
+ Log.d(TAG, "test video with bitrate: " + bitrate_value);
+ // the highest bitrates may not be supported on all devices, so allow failure for those
+ boolean allow_failure = bitrate_value.equals("30000000") || bitrate_value.equals("50000000");
+ subTestTakeVideo(false, false, allow_failure, false, null, 5000, false, 0);
+ }
+ }
+
+ /* Test recording video with a flat (log) profile.
+ */ + public void testVideoLogProfile() throws InterruptedException { + Log.d(TAG, "testVideoLogProfile"); + + setToDefault(); + + if( !mPreview.supportsTonemapCurve() ) { + Log.d(TAG, "test requires tonemap curve"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VideoLogPreferenceKey, "strong"); + editor.apply(); + updateForSettings(); + + subTestTakeVideo(false, false, true, false, null, 5000, false, 0); + + assertTrue( mPreview.getCameraController().test_used_tonemap_curve ); + } + + /* Test recording video with a flat (jtlog) profile. + */ + public void testVideoJTLogProfile() throws InterruptedException { + Log.d(TAG, "testVideoJTLogProfile"); + + setToDefault(); + + if( !mPreview.supportsTonemapCurve() ) { + Log.d(TAG, "test requires tonemap curve"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VideoLogPreferenceKey, "jtlog"); + editor.apply(); + updateForSettings(); + + subTestTakeVideo(false, false, true, false, null, 5000, false, 0); + + assertTrue( mPreview.getCameraController().test_used_tonemap_curve ); + } + + /* Test recording video with custom gamma profile. 
+ */
+ public void testVideoGammaProfile() throws InterruptedException {
+ Log.d(TAG, "testVideoGammaProfile");
+
+ setToDefault();
+
+ if( !mPreview.supportsTonemapCurve() ) {
+ Log.d(TAG, "test requires tonemap curve");
+ return;
+ }
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.VideoLogPreferenceKey, "gamma");
+ editor.putString(PreferenceKeys.VideoProfileGammaPreferenceKey, "3.0");
+ editor.apply();
+ updateForSettings();
+
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+
+ // verify the camera controller actually applied a tonemap curve for the gamma profile
+ assertTrue( mPreview.getCameraController().test_used_tonemap_curve );
+ }
+
+ /* Test recording video with non-default edge and noise reduction modes.
+ */
+ public void testVideoEdgeModeNoiseReductionMode() throws InterruptedException {
+ Log.d(TAG, "testVideoEdgeModeNoiseReductionMode");
+
+ setToDefault();
+
+ if( !mPreview.usingCamera2API() ) {
+ Log.d(TAG, "test requires camera2 api");
+ return;
+ }
+
+ // capture the defaults so we can check they are restored at the end
+ CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController();
+ CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder();
+ Integer default_edge_mode = previewBuilder.get(CaptureRequest.EDGE_MODE);
+ Integer default_noise_reduction_mode = previewBuilder.get(CaptureRequest.NOISE_REDUCTION_MODE);
+
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.EdgeModePreferenceKey, "off");
+ editor.putString(PreferenceKeys.CameraNoiseReductionModePreferenceKey, "fast");
+ editor.apply();
+ updateForSettings();
+
+ if( mPreview.getSupportedEdgeModes() != null ) {
+ Integer new_edge_mode = previewBuilder.get(CaptureRequest.EDGE_MODE);
+ assertEquals(CameraMetadata.EDGE_MODE_OFF, new_edge_mode.intValue());
+ }
+ if( mPreview.getSupportedNoiseReductionModes() != null ) {
+ Integer new_noise_reduction_mode = previewBuilder.get(CaptureRequest.NOISE_REDUCTION_MODE);
+ assertEquals(CameraMetadata.NOISE_REDUCTION_MODE_FAST, new_noise_reduction_mode.intValue());
+ }
+
+ subTestTakeVideo(false, false, true, false, null, 5000, false, 0);
+
+ editor = settings.edit();
+ editor.putString(PreferenceKeys.EdgeModePreferenceKey, "default");
+ editor.putString(PreferenceKeys.CameraNoiseReductionModePreferenceKey, "default");
+ editor.apply();
+ updateForSettings();
+
+ // re-fetch the controller/builder, since updateForSettings() may have reopened the camera
+ camera_controller2 = (CameraController2)mPreview.getCameraController();
+ previewBuilder = camera_controller2.testGetPreviewBuilder();
+ if( mPreview.getSupportedEdgeModes() != null ) {
+ Integer new_edge_mode = previewBuilder.get(CaptureRequest.EDGE_MODE);
+ assertEquals(default_edge_mode, new_edge_mode);
+ }
+ if( mPreview.getSupportedNoiseReductionModes() != null ) {
+ Integer new_noise_reduction_mode = previewBuilder.get(CaptureRequest.NOISE_REDUCTION_MODE);
+ assertEquals(default_noise_reduction_mode, new_noise_reduction_mode);
+ }
+ }
+
+ // NOTE(review): shared implementation for the max-duration video tests; continues beyond this
+ // patch hunk, so only the visible portion is documented here
+ private void subTestTakeVideoMaxDuration(boolean restart, boolean interrupt) throws InterruptedException {
+ {
+ SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = settings.edit();
+ editor.putString(PreferenceKeys.VideoMaxDurationPreferenceKey, "15");
+ if( restart ) {
+ editor.putString(PreferenceKeys.VideoRestartPreferenceKey, "1");
+ }
+ editor.apply();
+ }
+
+ assertTrue(mPreview.isPreviewStarted());
+
+ View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
+ if( !mPreview.isVideo() ) {
+ clickView(switchVideoButton);
+ waitUntilCameraOpened();
+ }
+ assertTrue(mPreview.isVideo());
+ assertTrue(mPreview.isPreviewStarted());
+
+ // count initial files in folder
+ int n_files = getNFiles();
+ Log.d(TAG, "n_files at start: " + n_files);
+
+ SharedPreferences sharedPreferences =
PreferenceManager.getDefaultSharedPreferences(mActivity); + boolean has_audio_control_button = !sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("none"); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + //View flashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.flash); + //View focusButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_mode); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + // flash and focus etc default visibility tested in another test + // but store status to compare with later + //int flashVisibility = flashButton.getVisibility(); + //int focusVisibility = focusButton.getVisibility(); + int exposureVisibility = exposureButton.getVisibility(); + int exposureLockVisibility = exposureLockButton.getVisibility(); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? 
View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + // workaround for Android 7.1 bug at https://stackoverflow.com/questions/47548317/what-belong-is-badtokenexception-at-classes-of-project + // without this, we get a crash due to that problem on Nexus (old API at least) in testTakeVideoMaxDuration + Thread.sleep(1000); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertTrue( mPreview.isVideoRecording() ); + + assertEquals(switchCameraButton.getVisibility(), View.GONE); + assertEquals(switchMultiCameraButton.getVisibility(), View.GONE); + assertEquals(switchVideoButton.getVisibility(), View.GONE); + //assertTrue(flashButton.getVisibility() == flashVisibility); + //assertTrue(focusButton.getVisibility() == View.GONE); + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), (mPreview.supportsFlash() ? 
View.VISIBLE : View.GONE)); // popup button only visible when recording video if flash supported + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + Thread.sleep(10000); + Log.d(TAG, "check still taking video"); + assertTrue( mPreview.isVideoRecording() ); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + // note, if using scoped storage without SAF (i.e., mediastore API), then the video file won't show up until after we've finished recording (IS_PENDING is set to 0) + assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 0 : 1, n_new_files); + + if( restart ) { + if( interrupt ) { + Thread.sleep(5100); + restart(); + Log.d(TAG, "done restart"); + // now wait, and check we don't crash + Thread.sleep(5000); + return; + } + else { + Thread.sleep(10000); + Log.d(TAG, "check restarted video"); + assertTrue( mPreview.isVideoRecording() ); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + // as noted above, with mediastore API then the latest file won't be visible yet + assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 1 : 2, n_new_files); + + Thread.sleep(15000); + } + } + else { + Thread.sleep(8000); + } + Log.d(TAG, "check stopped taking video"); + assertFalse(mPreview.isVideoRecording()); + + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(n_new_files, (restart ? 2 : 1)); + + // trash/share only shown when preview is paused after taking a photo + + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? 
View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + //assertTrue(flashButton.getVisibility() == flashVisibility); + //assertTrue(focusButton.getVisibility() == focusVisibility); + assertEquals(exposureButton.getVisibility(), exposureVisibility); + assertEquals(exposureLockButton.getVisibility(), exposureLockVisibility); + assertEquals(audioControlButton.getVisibility(), (has_audio_control_button ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + } + + /** + * Fails on Android emulator, for some reason EXTRA_DURATION_LIMIT makes the video stop due to + * hitting max duration immediately. + */ + public void testTakeVideoMaxDuration() throws InterruptedException { + Log.d(TAG, "testTakeVideoMaxDuration"); + + setToDefault(); + + subTestTakeVideoMaxDuration(false, false); + } + + /** + * Fails on Android emulator, for some reason EXTRA_DURATION_LIMIT makes the video stop due to + * hitting max duration immediately. + */ + public void testTakeVideoMaxDurationRestart() throws InterruptedException { + Log.d(TAG, "testTakeVideoMaxDurationRestart"); + + setToDefault(); + + subTestTakeVideoMaxDuration(true, false); + } + + /** + * Fails on Android emulator, for some reason EXTRA_DURATION_LIMIT makes the video stop due to + * hitting max duration immediately. 
     */
    public void testTakeVideoMaxDurationRestartInterrupt() throws InterruptedException {
        Log.d(TAG, "testTakeVideoMaxDurationRestartInterrupt");

        setToDefault();

        subTestTakeVideoMaxDuration(true, true);
    }

    /** Starts video recording, opens the settings screen (which should stop the recording),
     *  returns, then records again — verifying the expected number of files at each step.
     */
    public void testTakeVideoSettings() throws InterruptedException {
        Log.d(TAG, "testTakeVideoSettings");

        setToDefault();

        assertTrue(mPreview.isPreviewStarted());

        View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video);
        if( !mPreview.isVideo() ) {
            clickView(switchVideoButton);
            waitUntilCameraOpened();
        }
        assertTrue(mPreview.isVideo());
        assertTrue(mPreview.isPreviewStarted());

        // count initial files in folder
        int n_files = getNFiles();
        Log.d(TAG, "n_files at start: " + n_files);

        assertEquals(switchVideoButton.getVisibility(), View.VISIBLE);

        View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
        Log.d(TAG, "about to click take video");
        clickView(takePhotoButton);
        Log.d(TAG, "done clicking take video");
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");

        assertTrue( mPreview.isVideoRecording() );

        Thread.sleep(2000);
        Log.d(TAG, "check still taking video");
        assertTrue( mPreview.isVideoRecording() );

        int n_new_files = getNFiles() - n_files;
        Log.d(TAG, "n_new_files: " + n_new_files);
        // note, if using scoped storage without SAF (i.e., mediastore API), then the video file won't show up until after we've finished recording (IS_PENDING is set to 0)
        assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 0 : 1, n_new_files);

        // now go to settings; this should stop the recording
        View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings);
        Log.d(TAG, "about to click settings");
        clickView(settingsButton);
        Log.d(TAG, "done clicking settings");
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");
        assertFalse(mPreview.isVideoRecording());

        // recording stopped, so the file should now be visible regardless of storage mode
        n_new_files = getNFiles() - n_files;
        Log.d(TAG, "n_new_files: " + n_new_files);
        assertEquals(1, n_new_files);

        Thread.sleep(500);
        mActivity.runOnUiThread(new Runnable() {
            public void run() {
                Log.d(TAG, "on back pressed...");
                mActivity.onBackPressed();
            }
        });
        // need to wait for UI code to finish before leaving
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500);
        assertFalse(mPreview.isVideoRecording());

        // record a second video after returning from settings
        Log.d(TAG, "about to click take video");
        clickView(takePhotoButton);
        Log.d(TAG, "done clicking take video");
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");

        assertTrue( mPreview.isVideoRecording() );

        n_new_files = getNFiles() - n_files;
        Log.d(TAG, "n_new_files: " + n_new_files);
        // see note above about mediastore API
        assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 1 : 2, n_new_files);

    }

    /** Switch to non-default focus, go to settings, check still in focus mode that we set, then test recording.
+ */ + public void testTakeVideoMacro() throws InterruptedException { + Log.d(TAG, "testTakeVideoMacro"); + + setToDefault(); + + if( !mPreview.supportsFocus() ) { + return; + } + + assertTrue(mPreview.isPreviewStarted()); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + if( !mPreview.isVideo() ) { + clickView(switchVideoButton); + waitUntilCameraOpened(); + } + assertTrue(mPreview.isVideo()); + assertTrue(mPreview.isPreviewStarted()); + + String non_default_focus_mode = getNonDefaultFocus(); + switchToFocusValue(non_default_focus_mode); + + // now go to settings + View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + Log.d(TAG, "about to click settings"); + clickView(settingsButton); + Log.d(TAG, "done clicking settings"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertFalse(mPreview.isVideoRecording()); + + Thread.sleep(500); + + // camera should be closed in settings + assertNull(mPreview.getCameraController()); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "on back pressed..."); + mActivity.onBackPressed(); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(500); + + assertEquals(mPreview.getCurrentFocusValue(), non_default_focus_mode); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertTrue( mPreview.isVideoRecording() ); + + Thread.sleep(2000); + Log.d(TAG, "check still taking video"); + assertTrue( 
mPreview.isVideoRecording() ); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + // note, if using scoped storage without SAF (i.e., mediastore API), then the video file won't show up until after we've finished recording (IS_PENDING is set to 0) + assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 0 : 1, n_new_files); + } + + public void testTakeVideoFlashVideo() throws InterruptedException { + Log.d(TAG, "testTakeVideoFlashVideo"); + + setToDefault(); + + if( !mPreview.supportsFlash() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.VideoFlashPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertTrue(mPreview.isPreviewStarted()); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + if( !mPreview.isVideo() ) { + clickView(switchVideoButton); + waitUntilCameraOpened(); + } + assertTrue(mPreview.isVideo()); + assertTrue(mPreview.isPreviewStarted()); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertTrue( mPreview.isVideoRecording() ); + + Thread.sleep(1500); + Log.d(TAG, "check still taking video"); + assertTrue( mPreview.isVideoRecording() ); + + // wait until flash off + long time_s = System.currentTimeMillis(); + while (mPreview.getCameraController().getFlashValue().equals("flash_torch")) { + assertTrue(System.currentTimeMillis() - time_s <= 200); + } + + // wait until flash on + time_s = System.currentTimeMillis(); + while (!mPreview.getCameraController().getFlashValue().equals("flash_torch")) { + 
assertTrue(System.currentTimeMillis() - time_s <= 1100); + } + + // wait until flash off + time_s = System.currentTimeMillis(); + while (mPreview.getCameraController().getFlashValue().equals("flash_torch")) { + assertTrue(System.currentTimeMillis() - time_s <= 200); + } + + // wait until flash on + time_s = System.currentTimeMillis(); + while (!mPreview.getCameraController().getFlashValue().equals("flash_torch")) { + assertTrue(System.currentTimeMillis() - time_s <= 1100); + } + + Log.d(TAG, "about to click stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + // test flash now off + assertNotEquals("flash_torch", mPreview.getCameraController().getFlashValue()); + } + + // type: 0 - go to background; 1 - go to settings; 2 - go to popup + private void subTestTimer(int type) { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TimerPreferenceKey, "10"); + editor.putBoolean(PreferenceKeys.TimerBeepPreferenceKey, false); + editor.apply(); + + assertFalse(mPreview.isOnTimer()); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + assertTrue(mPreview.isOnTimer()); + assertEquals(0, mPreview.count_cameraTakePicture); + + try { + // wait 2s, and check we are still on timer, and not yet taken a photo + Thread.sleep(2000); + assertTrue(mPreview.isOnTimer()); + assertEquals(0, mPreview.count_cameraTakePicture); + // quit and resume + if( type == 0 ) + restart(); + else if( type == 1 ) { + View settingsButton = 
mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + Log.d(TAG, "about to click settings"); + clickView(settingsButton); + Log.d(TAG, "done clicking settings"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "on back pressed..."); + mActivity.onBackPressed(); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(1000); // need at least 1000ms for Nexus 7 + } + else { + openPopupMenu(); + } + takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + // check timer cancelled, and not yet taken a photo + assertFalse(mPreview.isOnTimer()); + assertEquals(0, mPreview.count_cameraTakePicture); + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(0, n_new_files); + + // start timer again + Log.d(TAG, "about to click take photo"); + assertNotNull(mPreview.getCameraController()); + clickView(takePhotoButton); + assertNotNull(mPreview.getCameraController()); + Log.d(TAG, "done clicking take photo"); + assertTrue(mPreview.isOnTimer()); + assertEquals(0, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(0, n_new_files); + + // wait 15s, and ensure we took a photo + Thread.sleep(15000); + Log.d(TAG, "waited, count now " + mPreview.count_cameraTakePicture); + assertFalse(mPreview.isOnTimer()); + assertEquals(1, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(1, n_new_files); + + // now set timer to 5s, and turn on pause_preview + editor.putString(PreferenceKeys.TimerPreferenceKey, "5"); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, true); + editor.apply(); + + Log.d(TAG, "about to click take photo"); + 
assertNotNull(mPreview.getCameraController()); + clickView(takePhotoButton); + assertNotNull(mPreview.getCameraController()); + Log.d(TAG, "done clicking take photo"); + assertTrue(mPreview.isOnTimer()); + assertEquals(1, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(1, n_new_files); + + // wait 10s, and ensure we took a photo + Thread.sleep(10000); + Log.d(TAG, "waited, count now " + mPreview.count_cameraTakePicture); + assertFalse(mPreview.isOnTimer()); + assertEquals(2, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(2, n_new_files); + + // now test cancelling + Log.d(TAG, "about to click take photo"); + assertNotNull(mPreview.getCameraController()); + clickView(takePhotoButton); + assertNotNull(mPreview.getCameraController()); + Log.d(TAG, "done clicking take photo"); + assertTrue(mPreview.isOnTimer()); + assertEquals(2, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(2, n_new_files); + + // wait 2s, and cancel + Thread.sleep(2000); + Log.d(TAG, "about to click take photo to cance"); + assertNotNull(mPreview.getCameraController()); + clickView(takePhotoButton); + assertNotNull(mPreview.getCameraController()); + Log.d(TAG, "done clicking take photo to cancel"); + assertFalse(mPreview.isOnTimer()); + assertEquals(2, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(2, n_new_files); + + // wait 8s, and ensure we didn't take a photo + Thread.sleep(8000); + Log.d(TAG, "waited, count now " + mPreview.count_cameraTakePicture); + assertFalse(mPreview.isOnTimer()); + assertEquals(2, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(2, n_new_files); + 
} + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + } + + /* Test with 10s timer, start a photo, go to background, then back, then take another photo. We should only take 1 photo - the original countdown should not be active (nor should we crash)! + */ + public void testTimerBackground() { + Log.d(TAG, "testTimerBackground"); + setToDefault(); + + subTestTimer(0); + } + + /* Test and going to settings. + */ + public void testTimerSettings() { + Log.d(TAG, "testTimerSettings"); + setToDefault(); + + subTestTimer(1); + } + + /* Test and going to popup. + */ + public void testTimerPopup() { + Log.d(TAG, "testTimerPopup"); + setToDefault(); + + subTestTimer(2); + } + + /* Takes video on a timer, but interrupts with restart. + */ + public void testVideoTimerInterrupt() { + Log.d(TAG, "testVideoTimerInterrupt"); + setToDefault(); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TimerPreferenceKey, "5"); + editor.putBoolean(PreferenceKeys.TimerBeepPreferenceKey, false); + editor.apply(); + + assertFalse(mPreview.isOnTimer()); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + assertTrue(mPreview.isOnTimer()); + assertEquals(0, mPreview.count_cameraTakePicture); + + try { + // wait a moment after 5s, then restart + Thread.sleep(5100); + assertEquals(0, mPreview.count_cameraTakePicture); + // quit and resume + restart(); + Log.d(TAG, "done 
restart"); + + // check timer cancelled; may or may not have managed to take a photo + assertFalse(mPreview.isOnTimer()); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + } + + /* Tests that selecting a new flash and focus option, then reopening the popup menu, still has the correct option highlighted. + */ + public void testPopup() { + Log.d(TAG, "testPopup"); + setToDefault(); + + switchToFlashValue("flash_off"); + switchToFlashValue("flash_on"); + + if( mPreview.supportsFocus() ) { + if( mPreview.getSupportedFocusValues().contains("focus_mode_macro") ) { + switchToFocusValue("focus_mode_macro"); + } + else if( mPreview.getSupportedFocusValues().contains("focus_mode_infinity") ) { + switchToFocusValue("focus_mode_infinity"); + } + + if( mPreview.getSupportedFocusValues().contains("focus_mode_auto") ) { + switchToFocusValue("focus_mode_auto"); + } + } + + // now open popup, pause and resume, then reopen popup + // this tests against a crash, if we don't remove the popup from the popup container in MainUI.destroyPopup() + openPopupMenu(); + + pauseAndResume(); + openPopupMenu(); + } + + /* Tests against a bug where popup wouldn't show with left UI placement, due to 0 popup view height. 
+ */ + public void testPopupLeftLayout() { + Log.d(TAG, "testPopupLeftLayout"); + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.UIPlacementPreferenceKey, "ui_left"); + editor.apply(); + updateForSettings(); + + View popup_view = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup_container); + + openPopupMenu(); + + int popup_width = popup_view.getWidth(); + int popup_height = popup_view.getHeight(); + int test_popup_width = mActivity.getMainUI().test_saved_popup_width; + int test_popup_height = mActivity.getMainUI().test_saved_popup_height; + Log.d(TAG, "popup_width: " + popup_width); + Log.d(TAG, "popup_height: " + popup_height); + Log.d(TAG, "test_popup_width: " + test_popup_width); + Log.d(TAG, "test_popup_height: " + test_popup_height); + assertTrue(popup_width > 0); + assertTrue(popup_height > 0); + assertEquals(popup_width, test_popup_width); + assertEquals(popup_height, test_popup_height); + + // now reopen popup view, and check the same dimensions + closePopupMenu(); + + openPopupMenu(); + + int new_popup_width = popup_view.getWidth(); + int new_popup_height = popup_view.getHeight(); + test_popup_width = mActivity.getMainUI().test_saved_popup_width; + test_popup_height = mActivity.getMainUI().test_saved_popup_height; + Log.d(TAG, "new_popup_width: " + new_popup_width); + Log.d(TAG, "new_popup_height: " + new_popup_height); + Log.d(TAG, "test_popup_width: " + test_popup_width); + Log.d(TAG, "test_popup_height: " + test_popup_height); + assertEquals(popup_width, new_popup_width); + assertEquals(popup_height, new_popup_height); + assertEquals(popup_width, test_popup_width); + assertEquals(popup_height, test_popup_height); + } + + /* Tests with ui_right vs ui_top layout. 
     */
    public void testRightLayout() {
        Log.d(TAG, "testRightLayout");

        setToDefault();
        {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putString(PreferenceKeys.UIPlacementPreferenceKey, "ui_right");
            editor.apply();
            updateForSettings();
        }

        openPopupMenu();

        Point display_size = new Point();
        {
            // call with exclude_insets==true, as in this test we're measuring things about the UI
            mActivity.getApplicationInterface().getDisplaySize(display_size, true);
            Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y);
        }
        View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings);
        View galleryButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.gallery);

        Log.d(TAG, "settings right: " + settingsButton.getRight());
        Log.d(TAG, "settings top: " + settingsButton.getTop());
        Log.d(TAG, "gallery right: " + galleryButton.getRight());
        Log.d(TAG, "gallery top: " + galleryButton.getTop());

        final float scale = mActivity.getResources().getDisplayMetrics().density;
        int expected_gap = (int) (MainUI.privacy_indicator_gap_dp * scale + 0.5f); // convert dps to pixels
        // with ui_right: settings should sit near the bottom (portrait) or right (landscape)
        // edge, and the gallery button should be flush against the display edge minus the
        // privacy-indicator gap
        if( mActivity.getSystemOrientation() == MainActivity.SystemOrientation.PORTRAIT ) {
            assertTrue(settingsButton.getBottom() > (int)(0.8*display_size.y));
            assertEquals(display_size.x, settingsButton.getRight());
            // position may be 1 coordinate different on some devices, e.g., Galaxy Nexus
            // have 14 pixel gap on Pixel 6 Pro
            assertEquals(display_size.y-1-expected_gap, galleryButton.getBottom(), 14.0+1.0e-5);
            assertEquals(display_size.x-expected_gap, galleryButton.getRight());
        }
        else {
            assertTrue(settingsButton.getRight() > (int)(0.8*display_size.x));
            assertEquals(0, settingsButton.getTop());
            assertEquals(display_size.x-expected_gap, galleryButton.getRight());
            assertEquals(expected_gap, galleryButton.getTop());
        }

        // switch to ui_top and re-check the layout
        {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putString(PreferenceKeys.UIPlacementPreferenceKey, "ui_top");
            editor.apply();
            updateForSettings();
        }

        Log.d(TAG, "settings right: " + settingsButton.getRight());
        Log.d(TAG, "settings top: " + settingsButton.getTop());
        Log.d(TAG, "gallery right: " + galleryButton.getRight());
        Log.d(TAG, "gallery top: " + galleryButton.getTop());

        // with ui_top: settings moves to the top (portrait) or left (landscape) region,
        // gallery stays in the same corner as before
        if( mActivity.getSystemOrientation() == MainActivity.SystemOrientation.PORTRAIT ) {
            assertTrue(settingsButton.getBottom() < (int)(0.2*display_size.y));
            assertEquals(display_size.x, settingsButton.getRight());
            // position may be 1 coordinate different on some devices, e.g., Galaxy Nexus
            // have 14 pixel gap on Pixel 6 Pro
            assertEquals(display_size.y-1-expected_gap, galleryButton.getBottom(), 14.0+1.0e-5);
            assertEquals(display_size.x-expected_gap, galleryButton.getRight());
        }
        else {
            assertTrue(settingsButton.getRight() < (int)(0.2*display_size.x));
            assertEquals(0, settingsButton.getTop());
            assertEquals(display_size.x-expected_gap, galleryButton.getRight());
            assertEquals(expected_gap, galleryButton.getTop());
        }

        openPopupMenu();
    }

    /* Tests layout bug with popup menu.
     * Note, in practice this doesn't seem to reproduce the problem, but keep the test anyway.
     * Currently not autotested as the problem isn't fixed, and this would just be a test that
     * occasionally fails (instead we work round the problem but not caching the popup when the
     * bug occurs).
+ */ + public void testPopupLayout() throws InterruptedException { + Log.d(TAG, "testPopupLayout"); + setToDefault(); + + for(int i=0;i<50;i++) { + View popup_container = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup_container); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + final float scale = mActivity.getResources().getDisplayMetrics().density; + int max_width = (int) (280 * scale + 0.5f); // convert dps to pixels; + + Thread.sleep(400); + + // open popup + openPopupMenu(); + + // check popup width is not larger than expected + int popup_container_width = popup_container.getWidth(); + Log.d(TAG, "i = : " + i); + Log.d(TAG, " popup_container_width: " + popup_container_width); + Log.d(TAG, " max_width: " + max_width); + assertTrue(popup_container_width <= max_width); + + /*View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + Log.d(TAG, "about to click settings"); + clickView(settingsButton); + Log.d(TAG, "done clicking settings"); + this.getInstrumentation().waitForIdleSync();*/ + + if( i % 10 == 0 ) { + restart(); + } + else { + pauseAndResume(); + } + } + } + + /* Tests to do with video and popup menu. 
+ */ + private void subTestVideoPopup(boolean on_timer) { + Log.d(TAG, "subTestVideoPopup"); + + assertFalse(mPreview.isOnTimer()); + assertFalse(mActivity.popupIsOpen()); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + + if( !mPreview.isVideo() ) { + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + } + + if( !on_timer ) { + // open popup now + openPopupMenu(); + } + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + if( on_timer ) { + assertTrue(mPreview.isOnTimer()); + } + + try { + if( on_timer ) { + Thread.sleep(2000); + + // now open popup + openPopupMenu(); + + // check timer is cancelled + assertFalse(mPreview.isOnTimer()); + + // wait for timer (if it was still going) + Thread.sleep(4000); + + // now check we still aren't recording, and that popup is still open + assertTrue( mPreview.isVideo() ); + assertFalse(mPreview.isVideoRecording()); + assertFalse(mPreview.isOnTimer()); + assertTrue( mActivity.popupIsOpen() ); + } + else { + Thread.sleep(1000); + + // now check we are recording video, and that popup is closed + assertTrue( mPreview.isVideo() ); + assertTrue( mPreview.isVideoRecording() ); + assertFalse(mActivity.popupIsOpen()); + } + + if( !on_timer ) { + // (if on timer, the video will have stopped) + List supported_flash_values = mPreview.getSupportedFlashValues(); + if( supported_flash_values == null ) { + // button shouldn't show at all + assertEquals(popupButton.getVisibility(), View.GONE); + } + else { + // now open popup again + openPopupMenu(); + subTestPopupButtonAvailability("TEST_FLASH", "flash_off", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_auto", supported_flash_values); 
+ subTestPopupButtonAvailability("TEST_FLASH", "flash_on", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_torch", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_red_eye", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_frontscreen_auto", supported_flash_values); + subTestPopupButtonAvailability("TEST_FLASH", "flash_frontscreen_on", supported_flash_values); + // only flash should be available + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_auto", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_locked", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_infinity", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_macro", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_fixed", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_edof", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_continuous_picture", null); + subTestPopupButtonAvailability("TEST_FOCUS", "focus_mode_continuous_video", null); + subTestPopupButtonAvailability("TEST_ISO", "auto", null); + subTestPopupButtonAvailability("TEST_ISO", "100", null); + subTestPopupButtonAvailability("TEST_ISO", "200", null); + subTestPopupButtonAvailability("TEST_ISO", "400", null); + subTestPopupButtonAvailability("TEST_ISO", "800", null); + subTestPopupButtonAvailability("TEST_ISO", "1600", null); + subTestPopupButtonAvailability("TEST_WHITE_BALANCE", false); + subTestPopupButtonAvailability("TEST_SCENE_MODE", false); + subTestPopupButtonAvailability("TEST_COLOR_EFFECT", false); + } + } + + Log.d(TAG, "now stop video"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking stop video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertFalse(mPreview.isVideoRecording()); + assertFalse(mActivity.popupIsOpen()); + + } + catch(InterruptedException e) { + Log.e(TAG, 
"InterruptedException from sleep", e); + fail(); + } + + // now open popup again + openPopupMenu(); + subTestPopupButtonAvailability(); + } + + /* Tests that popup menu closes when we record video; then tests behaviour of popup. + */ + public void testVideoPopup() { + Log.d(TAG, "testVideoPopup"); + setToDefault(); + + subTestVideoPopup(false); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + Log.d(TAG, "switch camera"); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + subTestVideoPopup(false); + } + } + + /* Takes video on a timer, but checks that the popup menu stops video timer; then tests behaviour of popup. + */ + public void testVideoTimerPopup() { + Log.d(TAG, "testVideoTimerPopup"); + setToDefault(); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TimerPreferenceKey, "5"); + editor.putBoolean(PreferenceKeys.TimerBeepPreferenceKey, false); + editor.apply(); + + subTestVideoPopup(true); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + Log.d(TAG, "switch camera"); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + subTestVideoPopup(true); + } + } + + /* Tests for USB/bluetooth keyboard controls. 
+ */ + public void testKeyboardControls() throws InterruptedException { + Log.d(TAG, "testKeyboardControls"); + + setToDefault(); + + if( !mPreview.supportsFlash() ) { + Log.d(TAG, "doesn't support flash"); + return; + } + else if( !mPreview.supportsFocus() ) { + Log.d(TAG, "doesn't support focus"); + return; + } + + switchToFlashValue("flash_auto"); + + // open popup + assertFalse( mActivity.popupIsOpen() ); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_FUNCTION); + getInstrumentation().waitForIdleSync(); + assertTrue( mActivity.popupIsOpen() ); + + // arrow down + assertFalse(mActivity.getMainUI().testGetRemoteControlMode()); + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + //Thread.sleep(3000); // test + + // arrow down again + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_2); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + //Thread.sleep(3000); // test + assertEquals(1, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // arrow down again + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_2); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertEquals(3, 
mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // arrow up + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_UP); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertEquals(1, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // arrow up again + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_8); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // select + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_5); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertTrue(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // arrow down + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertTrue(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(1, mActivity.getMainUI().testGetPopupIcon()); + + // arrow down again + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_2); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + 
assertTrue(mActivity.getMainUI().selectingLines()); + assertTrue(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(2, mActivity.getMainUI().testGetPopupIcon()); + + // arrow up + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_UP); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertTrue(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(1, mActivity.getMainUI().testGetPopupIcon()); + + // arrow up again + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_8); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertTrue(mActivity.getMainUI().selectingLines()); + assertTrue(mActivity.getMainUI().selectingIcons()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + + // select + assertEquals("flash_auto", mPreview.getCurrentFlashValue()); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_5); + getInstrumentation().waitForIdleSync(); + Thread.sleep(500); + assertFalse( mActivity.popupIsOpen() ); + assertEquals("flash_off", mPreview.getCurrentFlashValue()); + assertFalse(mActivity.getMainUI().testGetRemoteControlMode()); + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + + Thread.sleep(500); + + // open exposure panel + assertFalse( mActivity.getMainUI().isExposureUIOpen() ); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_SLASH); + getInstrumentation().waitForIdleSync(); + assertTrue( mActivity.getMainUI().isExposureUIOpen() ); + + assertFalse(mActivity.getMainUI().testGetRemoteControlMode()); + if( mPreview.supportsISORange() || mPreview.getSupportedISOs() != null ) { + // need to skip 
past the ISO line + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_2); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertFalse(mActivity.getMainUI().isSelectingExposureUIElement()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + assertEquals(0, mActivity.getMainUI().testGetExposureLine()); + } + + // arrow down + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertFalse(mActivity.getMainUI().isSelectingExposureUIElement()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + assertEquals(3, mActivity.getMainUI().testGetExposureLine()); + + // select + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_5); + getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.getMainUI().testGetRemoteControlMode()); + assertFalse(mActivity.getMainUI().selectingLines()); + assertFalse(mActivity.getMainUI().selectingIcons()); + assertTrue(mActivity.getMainUI().isSelectingExposureUIElement()); + assertEquals(0, mActivity.getMainUI().testGetPopupLine()); + assertEquals(0, mActivity.getMainUI().testGetPopupIcon()); + assertEquals(3, mActivity.getMainUI().testGetExposureLine()); + + // arrow down + for(int i=0;i<6;i++) { + assertEquals(-i, mPreview.getCurrentExposure()); + 
getInstrumentation().sendKeyDownUpSync((i%2==0) ? KeyEvent.KEYCODE_NUMPAD_2 : KeyEvent.KEYCODE_DPAD_DOWN); + getInstrumentation().waitForIdleSync(); + assertEquals(-(i+1), mPreview.getCurrentExposure()); + } + + // arrow up + for(int i=0;i<6;i++) { + assertEquals(-6+i, mPreview.getCurrentExposure()); + getInstrumentation().sendKeyDownUpSync((i%2==0) ? KeyEvent.KEYCODE_NUMPAD_8 : KeyEvent.KEYCODE_DPAD_UP); + getInstrumentation().waitForIdleSync(); + assertEquals(-6+(i+1), mPreview.getCurrentExposure()); + } + + // close exposure panel + assertTrue( mActivity.getMainUI().isExposureUIOpen() ); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_SLASH); + getInstrumentation().waitForIdleSync(); + assertFalse( mActivity.getMainUI().isExposureUIOpen() ); + + // take photo + assertEquals(0, mPreview.count_cameraTakePicture); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_NUMPAD_5); + getInstrumentation().waitForIdleSync(); + waitForTakePhoto(); + assertEquals(1, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + + // open settings + assertFalse(mActivity.isCameraInBackground()); + getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_MENU); + this.getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.isCameraInBackground()); + + //Thread.sleep(3000); + Thread.sleep(500); + } + + /* Tests taking photos repeatedly with auto-repeat method. 
+ */ + public void testTakePhotoRepeat() { + Log.d(TAG, "testTakePhotoRepeat"); + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RepeatModePreferenceKey, "3"); + editor.apply(); + } + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + assertEquals(0, mPreview.count_cameraTakePicture); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + assertFalse(mPreview.isOnTimer()); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + + try { + // wait 7s, and test that we've taken the photos by then + Thread.sleep(7000); + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + Log.d(TAG, "count_cameraTakePicture: " + mPreview.count_cameraTakePicture); + assertEquals(3, mPreview.count_cameraTakePicture); + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(3, n_new_files); + + // now test pausing and resuming + pauseAndResume(); + // wait 5s, and test that we 
haven't taken any photos + Thread.sleep(5000); + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + Log.d(TAG, "mPreview.count_cameraTakePicture: " + mPreview.count_cameraTakePicture); + assertEquals(3, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(3, n_new_files); + + // test with preview paused + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, true); + editor.apply(); + } + clickView(takePhotoButton); + Thread.sleep(7000); + assertEquals(6, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(6, n_new_files); + assertFalse(mPreview.isPreviewStarted()); // check preview paused + + TouchUtils.clickView(MainActivityTest.this, mPreview.getView()); + this.getInstrumentation().waitForIdleSync(); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(6, n_new_files); + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.PausePreviewPreferenceKey, false); + editor.apply(); + } + + // now test repeat interval + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RepeatModePreferenceKey, "2"); + editor.putString(PreferenceKeys.RepeatIntervalPreferenceKey, "3"); + editor.putBoolean(PreferenceKeys.TimerBeepPreferenceKey, false); + editor.apply(); + } + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? 
View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + assertEquals(exposureButton.getVisibility(), (mPreview.supportsExposures() ? View.VISIBLE : View.GONE)); + assertEquals(exposureLockButton.getVisibility(), (mPreview.supportsExposureLock() ? View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + clickView(takePhotoButton); + waitForTakePhoto(); + Log.d(TAG, "done taking 1st photo"); + this.getInstrumentation().waitForIdleSync(); + assertEquals(7, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(7, n_new_files); + + // wait 2s, should still not have taken another photo + Thread.sleep(2000); + assertEquals(7, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(7, n_new_files); + // check GUI has returned to correct state + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + assertEquals(exposureButton.getVisibility(), (mPreview.supportsExposures() ? View.VISIBLE : View.GONE)); + assertEquals(exposureLockButton.getVisibility(), (mPreview.supportsExposureLock() ? 
View.VISIBLE : View.GONE)); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + + // wait another 5s, should have taken another photo (need to allow time for the extra auto-focus) + Thread.sleep(5000); + assertEquals(8, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(8, n_new_files); + // wait 4s, should not have taken any more photos + Thread.sleep(4000); + assertEquals(8, mPreview.count_cameraTakePicture); + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(8, n_new_files); + } + catch(InterruptedException e) { + Log.e(TAG, "InterruptedException from sleep", e); + fail(); + } + } + + /* Tests that saving quality (i.e., resolution) settings can be done per-camera. Also checks that the supported picture sizes is as expected. + */ + public void testSaveQuality() { + Log.d(TAG, "testSaveQuality"); + + setToDefault(); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) { + return; + } + + List picture_sizes = mPreview.getSupportedPictureSizes(true); + + // change back camera to the last size + CameraController.Size size = picture_sizes.get(picture_sizes.size()-1); + { + Log.d(TAG, "set size to " + size.width + " x " + size.height); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), size.width + " " + size.height); + editor.apply(); + } + + // need to resume activity for it to take effect (for camera to be reopened) + pauseAndResume(); + CameraController.Size new_size = mPreview.getCameraController().getPictureSize(); + Log.d(TAG, "size is now " + new_size.width 
+ " x " + new_size.height); + assertEquals(size, new_size); + + // switch camera to front + int cameraId = mPreview.getCameraId(); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + int new_cameraId = mPreview.getCameraId(); + assertTrue(cameraId != new_cameraId); + + List front_picture_sizes = mPreview.getSupportedPictureSizes(true); + + // change front camera to the last size + CameraController.Size front_size = front_picture_sizes.get(front_picture_sizes.size()-1); + { + Log.d(TAG, "set front_size to " + front_size.width + " x " + front_size.height); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), front_size.width + " " + front_size.height); + editor.apply(); + } + + // need to resume activity for it to take effect (for camera to be reopened) + pauseAndResume(); + // check still on front camera + Log.d(TAG, "camera id " + mPreview.getCameraId()); + assertEquals(mPreview.getCameraId(), new_cameraId); + CameraController.Size front_new_size = mPreview.getCameraController().getPictureSize(); + Log.d(TAG, "front size is now " + front_new_size.width + " x " + front_new_size.height); + assertEquals(front_size, front_new_size); + + // change front camera to the first size + front_size = front_picture_sizes.get(0); + { + Log.d(TAG, "set front_size to " + front_size.width + " x " + front_size.height); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), front_size.width + " " + 
front_size.height); + editor.apply(); + } + + // need to resume activity for it to take effect (for camera to be reopened) + pauseAndResume(); + front_new_size = mPreview.getCameraController().getPictureSize(); + Log.d(TAG, "front size is now " + front_new_size.width + " x " + front_new_size.height); + assertEquals(front_size, front_new_size); + + // return to back camera + switchToCamera(cameraId); + + // now back camera size should still be what it was + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + String settings_size = settings.getString(PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref()), ""); + Log.d(TAG, "settings key is " + PreferenceKeys.getResolutionPreferenceKey(mPreview.getCameraId(), mActivity.getApplicationInterface().getCameraIdSPhysicalPref())); + Log.d(TAG, "settings size is " + settings_size); + } + new_size = mPreview.getCameraController().getPictureSize(); + Log.d(TAG, "size is now " + new_size.width + " x " + new_size.height); + assertEquals(size, new_size); + } + + private void testExif(String file, Uri uri, boolean expect_device_tags, boolean expect_datetime, boolean expect_gps) throws IOException { + TestUtils.testExif(getActivity(), file, uri, expect_device_tags, expect_datetime, expect_gps); + } + + private void subTestLocationOn(boolean gps_direction) throws IOException { + Log.d(TAG, "subTestLocationOn"); + + assertTrue(mActivity.getLocationSupplier().noLocationListeners()); + assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation()); + assertNull(mActivity.getLocationSupplier().getLocation()); + Log.d(TAG, "turn on location"); + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true); + if( gps_direction ) { + 
editor.putBoolean(PreferenceKeys.GPSDirectionPreferenceKey, true); + } + editor.apply(); + Log.d(TAG, "update settings after turning on location"); + updateForSettings(); + Log.d(TAG, "location should now be on"); + } + + assertTrue(mActivity.getLocationSupplier().hasLocationListeners()); + Log.d(TAG, "wait until received location"); + + long start_t = System.currentTimeMillis(); + while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) { + this.getInstrumentation().waitForIdleSync(); + if( System.currentTimeMillis() - start_t > 20000 ) { + // need to allow long time for testing devices without mobile network; will likely fail altogether if don't even have wifi + fail(); + } + } + Log.d(TAG, "have received location"); + this.getInstrumentation().waitForIdleSync(); + assertNotNull(mActivity.getLocationSupplier().getLocation()); + assertEquals(0, mPreview.count_cameraTakePicture); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + mActivity.test_last_saved_image = null; + mActivity.test_last_saved_imageuri = null; + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + assertEquals(1, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, true); + + // now test with auto-stabilise + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); + editor.apply(); + updateForSettings(); + } + mActivity.test_last_saved_image = null; + mActivity.test_last_saved_imageuri = null; + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + assertEquals(2, 
mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, true); + + // switch to front camera + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + assertTrue(mActivity.getLocationSupplier().hasLocationListeners()); + // shouldn't need to wait for test_has_received_location to be true, as should remember from before switching camera + assertNotNull(mActivity.getLocationSupplier().getLocation()); + } + } + + /* Tests we save location data; also tests that we save other exif data. + * May fail on devices without mobile network, especially if we don't even have wifi. + */ + public void testLocationOn() throws IOException { + Log.d(TAG, "testLocationOn"); + setToDefault(); + + subTestLocationOn(false); + } + + /* Tests we save location and gps direction. + * May fail on devices without mobile network, especially if we don't even have wifi. + */ + public void testLocationDirectionOn() throws IOException { + Log.d(TAG, "testLocationDirectionOn"); + setToDefault(); + + subTestLocationOn(true); + } + + /* As testLocationOn, but with SAF enabled. + * Important for Camera2 API at least to test the codepath for when + * ImageSaver.needGPSTimestampHack() returns true, when using SAF. + * May fail on devices without mobile network, especially if we don't even have wifi. 
     */
    public void testLocationOnSAF() throws IOException {
        Log.d(TAG, "testLocationOnSAF");

        // Storage Access Framework is only available on Android 5.0+
        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
            Log.d(TAG, "SAF requires Android Lollipop or better");
            return;
        }

        setToDefault();
        // enable SAF mode, pointing at the (pre-authorised) DCIM/OpenCamera tree URI
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true);
        editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera");
        editor.apply();
        updateForSettings();

        subTestLocationOn(false);
    }

    /* Tests we don't save location data; also tests that we save other exif data.
     * Takes two photos (the second with auto-stabilise), checking no location listener is
     * ever registered; optionally enables GPS-direction stamping; finally re-enables
     * location and waits for a real fix.
     */
    private void subTestLocationOff(boolean gps_direction) throws IOException {
        setToDefault();

        if( gps_direction ) {
            // enable compass-direction stamping, but leave location itself off
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putBoolean(PreferenceKeys.GPSDirectionPreferenceKey, true);
            editor.apply();
            updateForSettings();
        }
        this.getInstrumentation().waitForIdleSync();
        // location is off by default, so no listeners should exist and no location is known
        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());
        assertEquals(0, mPreview.count_cameraTakePicture);

        View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo);
        mActivity.test_last_saved_image = null;
        mActivity.test_last_saved_imageuri = null;
        clickView(takePhotoButton);

        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        Log.d(TAG, "wait until finished taking photo");
        waitForTakePhoto();
        this.getInstrumentation().waitForIdleSync();
        assertEquals(1, mPreview.count_cameraTakePicture);

        // taking a photo must not have started any location listening
        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        mActivity.waitUntilImageQueueEmpty();
        // exif should have date/time tags but no GPS tags (last arg false = no location expected)
        testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, false);

        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        // now test with auto-stabilise
        {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true);
            editor.apply();
            updateForSettings();
        }
        mActivity.test_last_saved_image = null;
        mActivity.test_last_saved_imageuri = null;
        clickView(takePhotoButton);

        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        Log.d(TAG, "wait until finished taking photo");
        waitForTakePhoto();
        this.getInstrumentation().waitForIdleSync();
        assertEquals(2, mPreview.count_cameraTakePicture);

        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        mActivity.waitUntilImageQueueEmpty();
        testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, false);

        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());

        // switch to front camera
        if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) {
            int cameraId = mPreview.getCameraId();
            View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
            clickView(switchCameraButton);
            waitUntilCameraOpened();
            this.getInstrumentation().waitForIdleSync();

            // switching camera must not start location listening either
            assertTrue(mActivity.getLocationSupplier().noLocationListeners());
            assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
            assertNull(mActivity.getLocationSupplier().getLocation());

            // return to back camera
            switchToCamera(cameraId);
            assertTrue(mActivity.getLocationSupplier().noLocationListeners());
            assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
            assertNull(mActivity.getLocationSupplier().getLocation());
        }

        // now switch location back on
        Log.d(TAG, "now switch location back on");
        {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true);
            editor.apply();
            updateForSettings();
        }

        long start_t = System.currentTimeMillis();
        while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) {
            this.getInstrumentation().waitForIdleSync();
            if( System.currentTimeMillis() - start_t > 20000 ) {
                // need to allow long time for testing devices without mobile network; will likely fail altogether if don't even have wifi
                fail();
            }
        }
        this.getInstrumentation().waitForIdleSync();
        assertNotNull(mActivity.getLocationSupplier().getLocation());

        // switch to front camera
        if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) {
            View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
            clickView(switchCameraButton);
            waitUntilCameraOpened();
            // shouldn't need to wait for test_has_received_location to be true, as should remember from before switching camera
            assertNotNull(mActivity.getLocationSupplier().getLocation());
        }
    }

    /* Tests we don't save location data; also tests that we save other exif data.
     * May fail on devices without mobile network, especially if we don't even have wifi.
     */
    public void testLocationOff() throws IOException {
        Log.d(TAG, "testLocationOff");
        subTestLocationOff(false);
    }

    /* Tests we save gps direction.
     * May fail on devices without mobile network, especially if we don't even have wifi.
     */
    public void testDirectionOn() throws IOException {
        Log.d(TAG, "testDirectionOn");
        subTestLocationOff(true);
    }

    /* As testDirectionOn() but for SAF.
     * May fail on devices without mobile network, especially if we don't even have wifi.
     * If this test fails, make sure we've manually selected that folder (as permission can't be given through the test
     * framework).
     */
    public void testDirectionOnSAF() throws IOException {
        Log.d(TAG, "testDirectionOnSAF");

        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) {
            Log.d(TAG, "SAF requires Android Lollipop or better");
            return;
        }

        setToDefault();
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true);
        editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera");
        editor.apply();
        updateForSettings();

        subTestLocationOff(true);
    }

    /* Tests we disable location when going to settings, but re-enable it when returning to camera.
     * Also tests camera is turned off when going to settings.
     * Fails on Android emulator because we immediately get location again after returning from settings.
     */
    public void testLocationSettings() throws InterruptedException {
        Log.d(TAG, "testLocationSettings");
        setToDefault();

        // baseline: camera open, location off (no listeners, no fix)
        assertTrue(mPreview.openCameraAttempted());
        assertNotNull(mPreview.getCameraController());
        assertTrue(mActivity.getLocationSupplier().noLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNull(mActivity.getLocationSupplier().getLocation());
        Log.d(TAG, "turn on location");
        {
            SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
            SharedPreferences.Editor editor = settings.edit();
            editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true);
            editor.apply();
            Log.d(TAG, "update settings after turning on location");
            updateForSettings();
            Log.d(TAG, "location should now be on");
        }

        assertTrue(mActivity.getLocationSupplier().hasLocationListeners());
        Log.d(TAG, "wait until received location");

        long start_t = System.currentTimeMillis();
        while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) {
            this.getInstrumentation().waitForIdleSync();
            if( System.currentTimeMillis() - start_t > 20000 ) {
                // need to allow long time for testing devices without mobile network; will likely fail altogether if don't even have wifi
                fail();
            }
        }
        Log.d(TAG, "have received location");
        this.getInstrumentation().waitForIdleSync();
        assertNotNull(mActivity.getLocationSupplier().getLocation());
        // check wasn't cached
        LocationSupplier.LocationInfo locationInfo = new LocationSupplier.LocationInfo();
        mActivity.getLocationSupplier().getLocation(locationInfo);
        assertFalse(locationInfo.LocationWasCached());

        assertTrue(mPreview.openCameraAttempted());
        assertNotNull(mPreview.getCameraController());

        // now go to settings
        assertFalse(mActivity.isCameraInBackground());
        View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings);
        Log.d(TAG, "about to click settings");
        clickView(settingsButton);
        Log.d(TAG, "done clicking settings");
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");
        assertTrue(mActivity.isCameraInBackground());

        // now check we're not listening for location, or opening camera
        // poll for 15s to catch a listener/camera being (incorrectly) re-created late
        start_t = System.currentTimeMillis();
        int count = 0;
        while( System.currentTimeMillis() - start_t <= 15000 ) {
            assertFalse(mPreview.isOpeningCamera());
            assertFalse(mPreview.openCameraAttempted());
            assertNull(mPreview.getCameraController());

            assertTrue(mActivity.getLocationSupplier().noLocationListeners());
            assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
            assertNull(mActivity.getLocationSupplier().getLocation());
            Thread.sleep(10);
            if( count++ == 5 ) {
                pauseAndResume(); // check we still don't listen for location after pause and resume
            }
        }

        // now go back
        assertTrue(mActivity.isCameraInBackground());
        Log.d(TAG, "go back");
        mActivity.runOnUiThread(new Runnable() {
            public void run() {
                mActivity.onBackPressed();
            }
        });
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "after idle sync");
        assertFalse(mActivity.isCameraInBackground());

        // check camera is reopening
        assertTrue(mPreview.isOpeningCamera() || mPreview.openCameraAttempted());

        // check we start listening again
        // first should have a cached location
        assertTrue(mActivity.getLocationSupplier().hasLocationListeners());
        assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
        assertNotNull(mActivity.getLocationSupplier().getLocation());
        locationInfo = new LocationSupplier.LocationInfo();
        mActivity.getLocationSupplier().getLocation(locationInfo);
        assertTrue(locationInfo.LocationWasCached());

        // check camera is opened after a pause
        Thread.sleep(1000);
        assertTrue(mPreview.openCameraAttempted());
        assertNotNull(mPreview.getCameraController());

        // check we get a non-cached location
        // NOTE(review): start_t still dates from before the 15s settings poll above, so the
        // effective extra wait here is well under 25s — confirm this timeout is intended.
        while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) {
            this.getInstrumentation().waitForIdleSync();
            if( System.currentTimeMillis() - start_t > 25000 ) {
                // need to allow long time for testing devices without mobile network; will likely fail altogether if don't even have wifi
                fail();
            }
        }
        Log.d(TAG, "have received location");
        this.getInstrumentation().waitForIdleSync();
        assertNotNull(mActivity.getLocationSupplier().getLocation());
        // check wasn't cached
        locationInfo = new LocationSupplier.LocationInfo();
        mActivity.getLocationSupplier().getLocation(locationInfo);
        assertFalse(locationInfo.LocationWasCached());

        // now test repeatedly going to settings and back - guard against crash we had where onLocationChanged got called one more time after
        // location listeners had been freed
        for(int i=0;i<20;i++) {
            assertTrue(mActivity.getLocationSupplier().hasLocationListeners());
            Thread.sleep((i % 5) * 100); // vary the delay to vary the timing window we exercise

            // go to settings
            assertFalse(mActivity.isCameraInBackground());
            Log.d(TAG, "about to click settings");
            clickView(settingsButton);
            Log.d(TAG, "done clicking settings");
            this.getInstrumentation().waitForIdleSync();
            Log.d(TAG, "after idle sync");
            assertTrue(mActivity.isCameraInBackground());

            Thread.sleep(100);
            assertTrue(mActivity.getLocationSupplier().noLocationListeners());
            assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
            assertNull(mActivity.getLocationSupplier().getLocation());

            Thread.sleep(200);
            assertTrue(mActivity.getLocationSupplier().noLocationListeners());
            assertFalse(mActivity.getLocationSupplier().testHasReceivedLocation());
            assertNull(mActivity.getLocationSupplier().getLocation());

            // go back
            assertTrue(mActivity.isCameraInBackground());
            Log.d(TAG, "go back");
            mActivity.runOnUiThread(new Runnable() {
                public void run() {
                    mActivity.onBackPressed();
                }
            });
            this.getInstrumentation().waitForIdleSync();
            Log.d(TAG, "after idle sync");
assertFalse(mActivity.isCameraInBackground()); + } + + // check camera is opened after a pause + Thread.sleep(1000); + assertTrue(mPreview.openCameraAttempted()); + assertNotNull(mPreview.getCameraController()); + } + + private void subTestPhotoStamp() throws IOException { + { + assertFalse(mActivity.getApplicationInterface().getDrawPreview().getStoredHasStampPref()); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.StampPreferenceKey, "preference_stamp_yes"); + editor.apply(); + updateForSettings(); + assertTrue(mActivity.getApplicationInterface().getDrawPreview().getStoredHasStampPref()); + } + + assertEquals(0, mPreview.count_cameraTakePicture); + + int n_old_files = getNFiles(); + Log.d(TAG, "n_old_files: " + n_old_files); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(1, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + int n_files = getNFiles() - n_old_files; + Log.d(TAG, "n_files: " + n_files); + assertEquals(1, n_files); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, false); + + // now again with location + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true); + editor.apply(); + updateForSettings(); + } + + assertTrue( mActivity.getLocationSupplier().hasLocationListeners() ); + long start_t = System.currentTimeMillis(); + while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) { + 
this.getInstrumentation().waitForIdleSync(); + if( System.currentTimeMillis() - start_t > 20000 ) { + // need to allow long time for testing devices without mobile network; will likely fail altogether if don't even have wifi + fail(); + } + } + this.getInstrumentation().waitForIdleSync(); + assertNotNull(mActivity.getLocationSupplier().getLocation()); + + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(2, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + n_files = getNFiles() - n_old_files; + Log.d(TAG, "n_files: " + n_files); + assertEquals(2, n_files); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, true); + + // now again with location and custom text + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.LocationPreferenceKey, true); + editor.putString(PreferenceKeys.TextStampPreferenceKey, "Test stamp!£$"); + editor.apply(); + updateForSettings(); + } + + assertTrue( mActivity.getLocationSupplier().hasLocationListeners() ); + while( !mActivity.getLocationSupplier().testHasReceivedLocation() ) { + } + this.getInstrumentation().waitForIdleSync(); + assertNotNull(mActivity.getLocationSupplier().getLocation()); + + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(3, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + n_files = getNFiles() - n_old_files; + Log.d(TAG, "n_files: " + n_files); + assertEquals(3, n_files); + testExif(mActivity.test_last_saved_image, 
mActivity.test_last_saved_imageuri, true, true, true); + + // now test with auto-stabilise + { + assertFalse(mActivity.getApplicationInterface().getDrawPreview().getStoredAutoStabilisePref()); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TextStampPreferenceKey, ""); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); + editor.apply(); + updateForSettings(); + assertTrue(mActivity.getApplicationInterface().getDrawPreview().getStoredAutoStabilisePref()); + } + + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(4, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + n_files = getNFiles() - n_old_files; + Log.d(TAG, "n_files: " + n_files); + assertEquals(4, n_files); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, true); + + // now again with auto-stabilise angle 0 + + mActivity.test_have_angle = true; + mActivity.test_angle = 0.0f; + + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(5, mPreview.count_cameraTakePicture); + mActivity.waitUntilImageQueueEmpty(); + n_files = getNFiles() - n_old_files; + Log.d(TAG, "n_files: " + n_files); + assertEquals(5, n_files); + testExif(mActivity.test_last_saved_image, mActivity.test_last_saved_imageuri, true, true, true); + + mActivity.test_have_angle = false; + } + + /* Tests we can stamp date/time and location to photo. + * May fail on devices without mobile network, especially if we don't even have wifi. 
+ */ + public void testPhotoStamp() throws IOException { + Log.d(TAG, "testPhotoStamp"); + + setToDefault(); + + subTestPhotoStamp(); + } + + /** As testPhotoStamp() but with SAF. + * If this test fails, make sure we've manually selected that folder (as permission can't be given through the test + * framework). + */ + public void testPhotoStampSAF() throws IOException { + Log.d(TAG, "testPhotoStampSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + setToDefault(); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + editor.apply(); + updateForSettings(); + + subTestPhotoStamp(); + } + + /* Tests we can stamp custom text to photo. 
+ */ + public void testCustomTextStamp() { + Log.d(TAG, "testCustomTextStamp"); + + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TextStampPreferenceKey, "Test stamp!£$"); + editor.apply(); + updateForSettings(); + } + + assertEquals(0, mPreview.count_cameraTakePicture); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(1, mPreview.count_cameraTakePicture); + + // now test with auto-stabilise + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); + editor.apply(); + updateForSettings(); + } + + clickView(takePhotoButton); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "photo count: " + mPreview.count_cameraTakePicture); + assertEquals(2, mPreview.count_cameraTakePicture); + + mActivity.waitUntilImageQueueEmpty(); + } + + /* Tests zoom. 
     */
    public void testZoom() throws InterruptedException {
        Log.d(TAG, "testZoom");
        setToDefault();

        if( !mPreview.supportsZoom() ) {
            Log.d(TAG, "zoom not supported");
            return;
        }

        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity);
        SharedPreferences.Editor editor = settings.edit();

        final SeekBar zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar);
        assertEquals(zoomSeekBar.getVisibility(), View.VISIBLE);
        int max_zoom = mPreview.getMaxZoom();
        assertEquals(zoomSeekBar.getMax(), max_zoom);
        Log.d(TAG, "zoomSeekBar progress = " + zoomSeekBar.getProgress());
        Log.d(TAG, "actual zoom = " + mPreview.getCameraController().getZoom());
        // the seekbar is inverted: progress 0 corresponds to max zoom
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        // test we can find 1x zoom (which won't be the first entry for devices that exposure ultra-wide
        // cameras via the zoom)
        boolean found_1xzoom = false;
        // NOTE(review): the two loops below are corrupted in this patch dump — the loop
        // bounds/bodies appear to have been stripped (likely everything between a '<' and a
        // later '>' was eaten during extraction). Restore them from the upstream
        // MainActivityTest.java before applying; do not compile as-is.
        for(int i=0;i 1.0f);

        // check zoom values are non-decreasing
        for(int i=0;i " + zoom1);
        assertTrue(zoom1 >= zoom0);
        }

        if( mPreview.supportsFocus() ) {
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());

            // touch to auto-focus with focus area
            Thread.sleep(2000); // needed for Galaxy S10e for the touch to register
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            assertTrue(mPreview.hasFocusArea());
            assertNotNull(mPreview.getCameraController().getFocusAreas());
            assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
            assertNotNull(mPreview.getCameraController().getMeteringAreas());
            assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());
        }

        int zoom = mPreview.getCameraController().getZoom();

        // now test multitouch zoom
        mPreview.scaleZoom(2.0f);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertTrue(mPreview.getCameraController().getZoom() > zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        mPreview.scaleZoom(0.5f);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        // here test the zoom ratio values themselves rather than the indices, as we may have repeated zoom_ratios entries for 1x zoom
        assertEquals( mPreview.getZoomRatio(mPreview.getCameraController().getZoom()), mPreview.getZoomRatio(zoom) );
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        // test to max/min
        mPreview.scaleZoom(10000.0f);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to max_zoom " + max_zoom);
        assertEquals(mPreview.getCameraController().getZoom(), max_zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        mPreview.scaleZoom(1.0f/10000.0f);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zero");
        assertEquals(0, mPreview.getCameraController().getZoom());
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        // use seekbar to zoom
        Log.d(TAG, "zoom to max");
        Log.d(TAG, "progress was: " + zoomSeekBar.getProgress());
        zoomSeekBar.setProgress(0);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to max_zoom " + max_zoom);
        assertEquals(mPreview.getCameraController().getZoom(), max_zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
        if( mPreview.supportsFocus() ) {
            // check that focus areas cleared
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());
        }

        Log.d(TAG, "zoom to min");
        Log.d(TAG, "progress was: " + zoomSeekBar.getProgress());
        zoomSeekBar.setProgress(zoomSeekBar.getMax());
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 0);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        // use volume keys to zoom in/out
        editor.putString(PreferenceKeys.VolumeKeysPreferenceKey, "volume_zoom");
        editor.apply();

        Log.d(TAG, "zoom in with volume keys");
        this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_UP);
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 1);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        Log.d(TAG, "zoom out with volume keys");
        this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_DOWN);
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 0);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        // now test with -/+ control

        Log.d(TAG, "zoom in");
        mActivity.zoomIn();
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 1);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
        if( mPreview.supportsFocus() ) {
            // check that focus areas cleared
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());

            // touch to auto-focus with focus area
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            assertTrue(mPreview.hasFocusArea());
            assertNotNull(mPreview.getCameraController().getFocusAreas());
            assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
            assertNotNull(mPreview.getCameraController().getMeteringAreas());
            assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());
        }

        Log.d(TAG, "zoom out");
        mActivity.zoomOut();
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 0);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
        if( mPreview.supportsFocus() ) {
            // check that focus areas cleared
            assertFalse(mPreview.hasFocusArea());
            assertNull(mPreview.getCameraController().getFocusAreas());
            assertNull(mPreview.getCameraController().getMeteringAreas());

            // touch to auto-focus with focus area
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
            assertTrue(mPreview.hasFocusArea());
            assertNotNull(mPreview.getCameraController().getFocusAreas());
            assertEquals(1, mPreview.getCameraController().getFocusAreas().size());
            assertNotNull(mPreview.getCameraController().getMeteringAreas());
            assertEquals(1, mPreview.getCameraController().getMeteringAreas().size());
        }

        // now test with slider invisible

        editor.putBoolean(PreferenceKeys.ShowZoomSliderControlsPreferenceKey, false);
        editor.apply();
        updateForSettings();

        assertEquals(zoomSeekBar.getVisibility(), View.INVISIBLE);

        Log.d(TAG, "zoom in");
        mActivity.zoomIn();
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 1);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        Log.d(TAG, "zoom out");
        mActivity.zoomOut();
        this.getInstrumentation().waitForIdleSync();
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + zoom);
        assertEquals(mPreview.getCameraController().getZoom(), 0);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
    }

    /** Tests that zoom is reset to default after a pause and resume.
     */
    public void testZoomIdle() throws InterruptedException {
        Log.d(TAG, "testZoomIdle");
        setToDefault();

        if( !mPreview.supportsZoom() ) {
            Log.d(TAG, "zoom not supported");
            return;
        }

        final SeekBar zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar);
        assertEquals(zoomSeekBar.getVisibility(), View.VISIBLE);
        int init_zoom = mPreview.getCameraController().getZoom();
        int max_zoom = mPreview.getMaxZoom();
        zoomSeekBar.setProgress(0);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + max_zoom);
        assertEquals(mPreview.getCameraController().getZoom(), max_zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        pauseAndResume();
        Log.d(TAG, "after pause and resume: compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + max_zoom);
        // as of Open Camera v1.43, zoom is reset when pause/resuming
        //assertTrue(mPreview.getCameraController().getZoom() == max_zoom);
        assertEquals(mPreview.getCameraController().getZoom(), init_zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
    }

    /** Tests that the zoom ratio is preserved when switching between cameras.
     */
    public void testZoomSwitchCamera() throws InterruptedException {
        Log.d(TAG, "testZoomSwitchCamera");
        setToDefault();

        if( !mPreview.supportsZoom() ) {
            Log.d(TAG, "zoom not supported");
            return;
        }
        else if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) {
            return;
        }

        final SeekBar zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar);
        assertEquals(zoomSeekBar.getVisibility(), View.VISIBLE);
        int init_zoom = mPreview.getCameraController().getZoom();
        float init_zoom_ratio = mPreview.getZoomRatio(init_zoom);
        int max_zoom = mPreview.getMaxZoom();
        zoomSeekBar.setProgress(0);
        this.getInstrumentation().waitForIdleSync();
        Thread.sleep(500); // need to wait for zoom transition (for Camera2 API)
        Log.d(TAG, "compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + max_zoom);
        assertEquals(mPreview.getCameraController().getZoom(), max_zoom);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());

        int cameraId = mPreview.getCameraId();
        View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
        clickView(switchCameraButton);
        waitUntilCameraOpened();
        int new_cameraId = mPreview.getCameraId();
        Log.d(TAG, "cameraId: " + cameraId);
        Log.d(TAG, "new_cameraId: " + new_cameraId);
        assertTrue(cameraId != new_cameraId);

        // re-read max zoom: it can differ per camera
        max_zoom = mPreview.getMaxZoom();
        // NOTE(review): this log message ("after pause and resume") looks copy-pasted from
        // testZoomIdle() — here we are after a camera switch, not a pause/resume.
        Log.d(TAG, "after pause and resume: compare actual zoom " + mPreview.getCameraController().getZoom() + " to zoom " + max_zoom);
        // as of Open Camera v1.43, zoom is reset when pause/resuming
        //assertTrue(mPreview.getCameraController().getZoom() == max_zoom);
        assertEquals(mPreview.getZoomRatio(mPreview.getCameraController().getZoom()), init_zoom_ratio);
        assertEquals(max_zoom - zoomSeekBar.getProgress(), mPreview.getCameraController().getZoom());
    }

    /** Switch to front camera, pause and resume, check still on the front camera.
     */
    public void testSwitchCameraIdle() {
        Log.d(TAG, "testSwitchCameraIdle");
        setToDefault();

        if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) {
            return;
        }

        int cameraId = mPreview.getCameraId();
        View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera);
        clickView(switchCameraButton);
        waitUntilCameraOpened();

        int new_cameraId = mPreview.getCameraId();
        assertTrue(cameraId != new_cameraId);

        pauseAndResume();

        // camera selection should survive pause/resume
        int new2_cameraId = mPreview.getCameraId();
        assertEquals(new2_cameraId, new_cameraId);

    }

    /** Tests touching the screen before camera has opened.
     */
    public void testTouchFocusQuick() {
        Log.d(TAG, "testTouchFocusQuick");
        setToDefault();

        pauseAndResume(false); // don't wait for camera to be reopened, as we want to test touch focus whilst it's opening

        // hammer the preview with touches while the camera is still opening; test passes if no crash
        for(int i=0;i<10;i++) {
            TouchUtils.clickView(MainActivityTest.this, mPreview.getView());
        }
    }

    /** Tests trying to switch camera repeatedly, without waiting for camera to open.
+ */ + public void testSwitchCameraRepeat() { + Log.d(TAG, "testSwitchCameraRepeat"); + setToDefault(); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) { + return; + } + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + for(int i=0;i<100;i++) { + clickView(switchCameraButton); + } + waitUntilCameraOpened(); + // n.b., don't check the new camera Id, as it's ill-defined which camera will be open + // the main point of this test is to check we don't crash due to opening camera on background thread + } + + /* Tests repeatedly switching camera, waiting for camera to reopen each time. + * Guards against a bug fixed in 1.44 where we would crash due to memory leak in + * OrientationEventListener.enable() (from Preview.cameraOpened()) when called too many times. + * Note, takes a while (over 1m) to run, test may look like it's hung whilst running! + */ + public void testSwitchCameraRepeat2() { + Log.d(TAG, "testSwitchCameraRepeat2"); + setToDefault(); + + if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) { + return; + } + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + int cameraId = mPreview.getCameraId(); + + for(int i=0;i<130;i++) { + Log.d(TAG, "i = " + i); + + clickView(switchCameraButton); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + assertTrue(new_cameraId != cameraId); + cameraId = new_cameraId; + } + } + + /* Tests going to gallery. + */ + public void testGallery() { + Log.d(TAG, "testGallery"); + setToDefault(); + + View galleryButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.gallery); + clickView(galleryButton); + + } + + /* Tests going to settings, and back. 
+ */ + public void testSettings() throws InterruptedException { + Log.d(TAG, "testSettings"); + setToDefault(); + + restart(false); // so we test going to settings even without waiting for preview to start (for Camera2 API) + + assertFalse(mActivity.isCameraInBackground()); + View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + clickView(settingsButton); + this.getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.isCameraInBackground()); + + Thread.sleep(500); + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "on back pressed..."); + mActivity.onBackPressed(); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(500); + + // check preview starts up + waitUntilPreviewStarted(); + } + + /* Tests going to settings and opening the privacy policy window. + */ + public void testSettingsPrivacyPolicy() throws InterruptedException { + Log.d(TAG, "testSettingsPrivacyPolicy"); + setToDefault(); + + assertFalse(mActivity.isCameraInBackground()); + View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + clickView(settingsButton); + this.getInstrumentation().waitForIdleSync(); + assertTrue(mActivity.isCameraInBackground()); + Thread.sleep(500); + + mActivity.runOnUiThread(new Runnable() { + public void run() { + MyPreferenceFragment fragment = mActivity.getPreferenceFragment(); + assertNotNull(fragment); + fragment.clickedPrivacyPolicy(); + } + }); + getInstrumentation().waitForIdleSync(); + Thread.sleep(1000); + } + + /* Tests save and load settings. 
+ */ + public void testSettingsSaveLoad() throws InterruptedException { + Log.d(TAG, "testSettingsSaveLoad"); + setToDefault(); + + final String test_string = "Test stamp!£$ <&"; // intentionally include characters that need escaping in xml + + // set a non-default setting + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TextStampPreferenceKey, test_string); + editor.apply(); + updateForSettings(); + } + + mActivity.getSettingsManager().saveSettings("test_testSettingsSaveLoad.xml"); + assertNotNull(mActivity.test_save_settings_file); + + // now modify the aforementioned setting + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.TextStampPreferenceKey, ""); + editor.apply(); + updateForSettings(); + } + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + String new_string = settings.getString(PreferenceKeys.TextStampPreferenceKey, ""); + assertEquals("", new_string); + + // now load settings + assertTrue( mActivity.getSettingsManager().loadSettings(mActivity.test_save_settings_file) ); + + // wait - n.b., loadSettings() won't restart due to being test code + Thread.sleep(3000); + /*mActivity = getActivity(); + Log.d(TAG, "mActivity is now: " + mActivity); + mPreview = mActivity.getPreview(); + Log.d(TAG, "mPreview is now: " + mPreview);*/ + + // now check setting is as expected + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + new_string = settings.getString(PreferenceKeys.TextStampPreferenceKey, ""); + Log.d(TAG, "new_string: " + new_string); + assertEquals(test_string, new_string); + + // check again after a restart + restart(); + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + new_string = 
settings.getString(PreferenceKeys.TextStampPreferenceKey, ""); + Log.d(TAG, "new_string: " + new_string); + assertEquals(test_string, new_string); + } + + private void subTestCreateSaveFolder(boolean use_saf, String save_folder, boolean delete_folder) { + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + if( use_saf ) { + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, true); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, save_folder); + } + else { + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, save_folder); + } + editor.apply(); + updateForSettings(); + if( use_saf ) { + // need to call this directly, as we don't call mActivity.onActivityResult + mActivity.updateFolderHistorySAF(save_folder); + } + } + + SaveLocationHistory save_location_history = use_saf ? mActivity.getSaveLocationHistorySAF() : mActivity.getSaveLocationHistory(); + assertTrue(save_location_history.size() > 0); + assertTrue(save_location_history.contains(save_folder)); + assertEquals(save_location_history.get(save_location_history.size() - 1), save_folder); + + File folder = mActivity.getImageFolder(); + if( folder.exists() && delete_folder ) { + // Note when using scoped storage, this won't actually work (although the test code won't fail) + // It's not possible to delete folders with scoped storage unless via SAF which would need permission to have been + // given to such folders. 
+ assertTrue(folder.isDirectory()); + // delete folder - need to delete contents first + if( folder.isDirectory() ) { + String [] children = folder.list(); + if( children != null ) { + for(String child : children) { + File file = new File(folder, child); + //noinspection ResultOfMethodCallIgnored + file.delete(); + MediaScannerConnection.scanFile(mActivity, new String[] { file.getAbsolutePath() }, null, null); + } + } + } + //noinspection ResultOfMethodCallIgnored + folder.delete(); + } + int n_old_files = getNFiles(); + Log.d(TAG, "n_old_files: " + n_old_files); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + mActivity.waitUntilImageQueueEmpty(); + + int n_new_files = getNFiles(); + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(n_new_files, n_old_files + 1); + + // change back to default, so as to not be annoying + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + if( use_saf ) { + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, "content://com.android.externalstorage.documents/tree/primary%3ADCIM%2FOpenCamera"); + } + else { + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera"); + } + editor.apply(); + } + } + + /** Tests taking a photo with a new save folder. + */ + public void testCreateSaveFolder1() { + Log.d(TAG, "testCreateSaveFolder1"); + subTestCreateSaveFolder(false, "OpenCameraTest", true); + } + + /** Tests taking a photo with a new save folder. 
+ */ + public void testCreateSaveFolder2() { + Log.d(TAG, "testCreateSaveFolder2"); + subTestCreateSaveFolder(false, "OpenCameraTest/", true); + } + + /** Tests taking a photo with a new save folder. + */ + public void testCreateSaveFolder3() { + Log.d(TAG, "testCreateSaveFolder3"); + subTestCreateSaveFolder(false, "OpenCameraTest_a/OpenCameraTest_b", true); + } + + /** Tests taking a photo with a new save folder. + */ + @SuppressLint("SdCardPath") + public void testCreateSaveFolder4() { + Log.d(TAG, "testCreateSaveFolder4"); + + if( MainActivity.useScopedStorage() ) { + // can't save outside DCIM when using scoped storage + return; + } + + subTestCreateSaveFolder(false, "/sdcard/Pictures/OpenCameraTest", true); + } + + /** Tests taking a photo with a new save folder. + */ + public void testCreateSaveFolderUnicode() { + Log.d(TAG, "testCreateSaveFolderUnicode"); + subTestCreateSaveFolder(false, "éúíóá!£$%^&()", true); + } + + /** Tests taking a photo with a new save folder. + */ + public void testCreateSaveFolderEmpty() { + Log.d(TAG, "testCreateSaveFolderEmpty"); + subTestCreateSaveFolder(false, "", false); + } + + /** Tests taking a photo with a new save folder. + * If this test fails, make sure we've manually selected that folder (as permission can't be given through the test framework). + */ + public void testCreateSaveFolderSAF() { + Log.d(TAG, "testCreateSaveFolderSAF"); + + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ) { + Log.d(TAG, "SAF requires Android Lollipop or better"); + return; + } + + subTestCreateSaveFolder(true, "content://com.android.externalstorage.documents/tree/primary%3ADCIM", true); + } + + /** Tests code for checking existing non-SAF save locations, when updating to scoped storage. + * Case where save location is invalid for scoped storage. 
+ */ + public void testScopedStorageChecks1() { + Log.d(TAG, "testScopedStorageChecks1"); + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "/storage/emulated/0/Pictures/Camera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", "OpenCamera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", "/storage/emulated/0/DCIM"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", "/storage/emulated/0/DCIM/OpenCameraéúíóá!£$%^&()/test"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_3", "Camera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_4", "/storage/sdcard/DCIM/OpenCamera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_5", "/storage/emulated/0/Pictures/Camera"); + editor.putInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 6); + editor.apply(); + } + + restart(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + if( MainActivity.useScopedStorage() ) { + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + // because the save folder is reset to "OpenCamera", we've also removed it from the original position in the history + assertEquals("", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + assertEquals("OpenCameraéúíóá!£$%^&()/test", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", null)); + assertEquals("Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", null)); + // invalid entry removed here + assertEquals("OpenCamera", 
sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_3", null)); + assertEquals(4, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + else { + // should be unchanged + assertEquals("/storage/emulated/0/Pictures/Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + assertEquals("/storage/emulated/0/DCIM", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", null)); + assertEquals("/storage/emulated/0/DCIM/OpenCameraéúíóá!£$%^&()/test", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", null)); + assertEquals("Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_3", null)); + assertEquals("/storage/sdcard/DCIM/OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_4", null)); + assertEquals("/storage/emulated/0/Pictures/Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_5", null)); + assertEquals(6, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + } + + /** Tests code for checking existing non-SAF save locations, when updating to scoped storage. + * This tests a version where everything is valid and shouldn't change. 
+ */ + public void testScopedStorageChecks2() { + Log.d(TAG, "testScopedStorageChecks2"); + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "Camera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", "OpenCamera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", "test"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", "Camera"); + editor.putInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 3); + editor.apply(); + } + + restart(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + assertEquals("Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + assertEquals("test", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", null)); + assertEquals("Camera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", null)); + assertEquals(3, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + + /** Tests code for checking existing non-SAF save locations, when updating to scoped storage. + * This tests a version with default settings, where nothing should change. 
+ */ + public void testScopedStorageChecks3() { + Log.d(TAG, "testScopedStorageChecks3"); + setToDefault(); + + restart(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + assertEquals(1, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + + /** Tests code for checking existing non-SAF save locations, when updating to scoped storage. + * Case where save location contains sub-folder, so is valid but needs updating for scoped storage. + */ + public void testScopedStorageChecks4() { + Log.d(TAG, "testScopedStorageChecks4"); + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "/storage/emulated/0/DCIM/OpenCamera/subfolder"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", "/storage/emulated/0/DCIM/OpenCamera/subfolder/subfolder"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", "/storage/emulated/0/Pictures"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", "/storage/emulated/0"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_3", "OpenCamera"); + editor.putString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_4", "/storage/emulated/0/DCIM/OpenCamera/subfolder"); + editor.putInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 5); + editor.apply(); + } + + restart(); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(mActivity); + if( MainActivity.useScopedStorage() ) { + 
assertEquals("OpenCamera/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + assertEquals("OpenCamera/subfolder/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + // invalid entry removed here + // invalid entry removed here + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", null)); + assertEquals("OpenCamera/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", null)); + assertEquals(3, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + else { + // should be unchanged + assertEquals("/storage/emulated/0/DCIM/OpenCamera/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera")); + assertEquals("/storage/emulated/0/DCIM/OpenCamera/subfolder/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_0", null)); + assertEquals("/storage/emulated/0/Pictures", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_1", null)); + assertEquals("/storage/emulated/0", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_2", null)); + assertEquals("OpenCamera", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_3", null)); + assertEquals("/storage/emulated/0/DCIM/OpenCamera/subfolder", sharedPreferences.getString(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_4", null)); + assertEquals(5, sharedPreferences.getInt(PreferenceKeys.SaveLocationHistoryBasePreferenceKey + "_size", 0)); + } + } + + /** Tests launching the folder chooser on a new folder. 
+ */ + public void testFolderChooserNew() throws InterruptedException { + Log.d(TAG, "testFolderChooserNew"); + + if( MainActivity.useScopedStorage() ) { + Log.d(TAG, "folder chooser not relevant for scoped storage"); + return; + } + + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCameraTest"); + editor.apply(); + updateForSettings(); + } + + File folder = mActivity.getImageFolder(); + if( folder.exists() ) { + assertTrue(folder.isDirectory()); + // delete folder - need to delete contents first + if( folder.isDirectory() ) { + String [] children = folder.list(); + if( children != null ) { + for(String child : children) { + File file = new File(folder, child); + //noinspection ResultOfMethodCallIgnored + file.delete(); + MediaScannerConnection.scanFile(mActivity, new String[] { file.getAbsolutePath() }, null, null); + } + } + } + //noinspection ResultOfMethodCallIgnored + folder.delete(); + } + + FolderChooserDialog fragment = new FolderChooserDialog(); + fragment.setStartFolder(mActivity.getImageFolder()); + fragment.show(mActivity.getFragmentManager(), "FOLDER_FRAGMENT"); + Thread.sleep(1000); // wait until folderchooser started up + Log.d(TAG, "started folderchooser"); + assertNotNull(fragment.getCurrentFolder()); + assertEquals(fragment.getCurrentFolder(), folder); + assertTrue(folder.exists()); + } + + /** Tests launching the folder chooser on a folder we don't have access to. + * (Shouldn't be possible to get into this state, but just in case.) 
+ */ + public void testFolderChooserInvalid() throws InterruptedException { + Log.d(TAG, "testFolderChooserInvalid"); + + if( MainActivity.useScopedStorage() ) { + Log.d(TAG, "folder chooser not relevant for scoped storage"); + return; + } + + setToDefault(); + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, "/OpenCameraTest"); + editor.apply(); + updateForSettings(); + } + + FolderChooserDialog fragment = new FolderChooserDialog(); + fragment.setStartFolder(mActivity.getImageFolder()); + fragment.show(mActivity.getFragmentManager(), "FOLDER_FRAGMENT"); + Thread.sleep(1000); // wait until folderchooser started up + Log.d(TAG, "started folderchooser"); + assertNotNull(fragment.getCurrentFolder()); + Log.d(TAG, "current folder: " + fragment.getCurrentFolder()); + assertTrue(fragment.getCurrentFolder().exists()); + } + + private void subTestSaveFolderHistory(final boolean use_saf) { + // clearFolderHistory has code that must be run on UI thread + mActivity.runOnUiThread(new Runnable() { + public void run() { + Log.d(TAG, "clearFolderHistory"); + if( use_saf ) + mActivity.clearFolderHistorySAF(); + else + mActivity.clearFolderHistory(); + } + }); + // need to wait for UI code to finish before leaving + this.getInstrumentation().waitForIdleSync(); + SaveLocationHistory save_location_history = use_saf ? mActivity.getSaveLocationHistorySAF() : mActivity.getSaveLocationHistory(); + Log.d(TAG, "save_location_history size: " + save_location_history.size()); + assertEquals(1, save_location_history.size()); + String current_folder; + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + current_folder = use_saf ? 
settings.getString(PreferenceKeys.SaveLocationSAFPreferenceKey, "") : settings.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera"); + Log.d(TAG, "current_folder: " + current_folder); + Log.d(TAG, "save_location_history entry: " + save_location_history.get(0)); + assertEquals(save_location_history.get(0), current_folder); + } + + { + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(use_saf ? PreferenceKeys.SaveLocationSAFPreferenceKey : PreferenceKeys.SaveLocationPreferenceKey, "new_folder_history_entry"); + editor.apply(); + updateForSettings(); + if( use_saf ) { + // need to call this directly, as we don't call mActivity.onActivityResult + mActivity.updateFolderHistorySAF("new_folder_history_entry"); + } + } + save_location_history = use_saf ? mActivity.getSaveLocationHistorySAF() : mActivity.getSaveLocationHistory(); + Log.d(TAG, "save_location_history size: " + save_location_history.size()); + for(int i=0;i scene_modes = mPreview.getSupportedSceneModes(); + if( scene_modes == null ) { + return; + } + Log.d(TAG, "scene mode: " + mPreview.getCameraController().getSceneMode()); + assertTrue( mPreview.getCameraController().getSceneMode() == null || mPreview.getCameraController().getSceneMode().equals(CameraController.SCENE_MODE_DEFAULT) ); + + String scene_mode = null; + // find a scene mode that isn't default + for(String this_scene_mode : scene_modes) { + if( !this_scene_mode.equals(CameraController.SCENE_MODE_DEFAULT) ) { + scene_mode = this_scene_mode; + break; + } + } + if( scene_mode == null ) { + return; + } + Log.d(TAG, "change to scene_mode: " + scene_mode); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.SceneModePreferenceKey, scene_mode); + editor.apply(); + updateForSettings(); + + String new_scene_mode 
= mPreview.getCameraController().getSceneMode(); + Log.d(TAG, "scene_mode is now: " + new_scene_mode); + assertEquals(new_scene_mode, scene_mode); + + // Now set back to default - important as on some devices, non-default scene modes may override e.g. what + // white balance mode can be set. + // This was needed to fix the test testCameraModes() on Galaxy Nexus, which started failing in + // April 2018 for v1.43. Earlier versions (e.g., 1.42) still had the problem despite previously + // testing fine, so something must have changed on the device? + + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + editor = settings.edit(); + editor.putString(PreferenceKeys.SceneModePreferenceKey, CameraController.SCENE_MODE_DEFAULT); + editor.apply(); + updateForSettings(); + + new_scene_mode = mPreview.getCameraController().getSceneMode(); + Log.d(TAG, "scene_mode is now: " + new_scene_mode); + assertEquals(new_scene_mode, CameraController.SCENE_MODE_DEFAULT); + } + + private void subTestColorEffect() { + Log.d(TAG, "subTestColorEffect"); + + setToDefault(); + + List<String> color_effects = mPreview.getSupportedColorEffects(); + if( color_effects == null ) { + return; + } + Log.d(TAG, "color effect: " + mPreview.getCameraController().getColorEffect()); + assertTrue( mPreview.getCameraController().getColorEffect() == null || mPreview.getCameraController().getColorEffect().equals(CameraController.COLOR_EFFECT_DEFAULT) ); + + String color_effect = null; + // find a color effect that isn't default + for(String this_color_effect : color_effects) { + if( !this_color_effect.equals(CameraController.COLOR_EFFECT_DEFAULT) ) { + color_effect = this_color_effect; + break; + } + } + if( color_effect == null ) { + return; + } + Log.d(TAG, "change to color_effect: " + color_effect); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + 
editor.putString(PreferenceKeys.ColorEffectPreferenceKey, color_effect); + editor.apply(); + updateForSettings(); + + String new_color_effect = mPreview.getCameraController().getColorEffect(); + Log.d(TAG, "color_effect is now: " + new_color_effect); + assertEquals(new_color_effect, color_effect); + } + + private void subTestWhiteBalance() { + Log.d(TAG, "subTestWhiteBalance"); + + setToDefault(); + + List<String> white_balances = mPreview.getSupportedWhiteBalances(); + if( white_balances == null ) { + return; + } + Log.d(TAG, "white balance: " + mPreview.getCameraController().getWhiteBalance()); + assertTrue( mPreview.getCameraController().getWhiteBalance() == null || mPreview.getCameraController().getWhiteBalance().equals(CameraController.WHITE_BALANCE_DEFAULT) ); + + String white_balance = null; + // find a white balance that isn't default + for(String this_white_balances : white_balances) { + if( !this_white_balances.equals(CameraController.WHITE_BALANCE_DEFAULT) ) { + white_balance = this_white_balances; + break; + } + } + if( white_balance == null ) { + return; + } + Log.d(TAG, "change to white_balance: " + white_balance); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.WhiteBalancePreferenceKey, white_balance); + editor.apply(); + updateForSettings(); + + String new_white_balance = mPreview.getCameraController().getWhiteBalance(); + Log.d(TAG, "white_balance is now: " + new_white_balance); + assertEquals(new_white_balance, white_balance); + } + + private void subTestImageQuality() { + Log.d(TAG, "subTestImageQuality"); + + setToDefault(); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.QualityPreferenceKey, "100"); + editor.apply(); + updateForSettings(); + + int quality = 
mPreview.getCameraController().getJpegQuality(); + Log.d(TAG, "quality is: " + quality); + assertEquals(100, quality); + } + + /** Note this test fails on Android emulator with old camera API, because we get a + * RuntimeException from setParameters when trying to set white balance (we catch the + * exception, but the test fails because the white balance hasn't been changed to the expected + * value). + */ + public void testCameraModes() { + Log.d(TAG, "testCameraModes"); + subTestSceneMode(); + subTestColorEffect(); + subTestWhiteBalance(); + subTestImageQuality(); + } + + /** Tests that changing resolutions doesn't close the popup. + */ + public void testSwitchResolution() throws InterruptedException { + Log.d(TAG, "testSwitchResolution"); + + setToDefault(); + + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + CameraController.Size old_picture_size = mPreview.getCameraController().getPictureSize(); + + // open popup + openPopupMenu(); + + TextView photoResolutionButton = (TextView)mActivity.getUIButton("PHOTO_RESOLUTIONS"); + assertNotNull(photoResolutionButton); + //String exp_size_string = old_picture_size.width + " x " + old_picture_size.height + " " + Preview.getMPString(old_picture_size.width, old_picture_size.height); + //String exp_size_string = old_picture_size.width + " x " + old_picture_size.height; + String exp_size_string = old_picture_size.width + " x " + old_picture_size.height + " (" + Preview.getMPString(old_picture_size.width, old_picture_size.height) + ")"; + Log.d(TAG, "size string: " + photoResolutionButton.getText()); + assertEquals(exp_size_string, photoResolutionButton.getText()); + + // change photo resolution + View photoResolutionChangeButton = mActivity.getUIButton("PHOTO_RESOLUTIONS_PREV"); + assertNotNull(photoResolutionChangeButton); + this.getInstrumentation().waitForIdleSync(); + clickView(photoResolutionChangeButton); + + // check + Thread.sleep(2000); + CameraController.Size new_picture_size = 
mPreview.getCameraController().getPictureSize(); + Log.d(TAG, "old picture size: " + old_picture_size.width + " x " + old_picture_size.height); + Log.d(TAG, "old new_picture_size size: " + new_picture_size.width + " x " + new_picture_size.height); + assertNotEquals(new_picture_size, old_picture_size); + assertTrue( mActivity.popupIsOpen() ); + + //exp_size_string = new_picture_size.width + " x " + new_picture_size.height + " " + Preview.getMPString(new_picture_size.width, new_picture_size.height); + //exp_size_string = new_picture_size.width + " x " + new_picture_size.height; + exp_size_string = new_picture_size.width + " x " + new_picture_size.height + " (" + Preview.getMPString(new_picture_size.width, new_picture_size.height) + ")"; + Log.d(TAG, "size string: " + photoResolutionButton.getText()); + assertEquals(photoResolutionButton.getText(), exp_size_string); + + // switch to video mode + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + + // open popup + openPopupMenu(); + + TextView videoResolutionButton = (TextView)mActivity.getUIButton("VIDEO_RESOLUTIONS"); + assertNotNull(videoResolutionButton); + CharSequence oldVideoResolutionString = videoResolutionButton.getText(); + + // change video resolution + View videoResolutionChangeButton = mActivity.getUIButton("VIDEO_RESOLUTIONS_PREV"); + assertNotNull(videoResolutionChangeButton); + clickView(videoResolutionChangeButton); + + // check + Thread.sleep(500); + assertTrue( mActivity.popupIsOpen() ); + assertNotEquals(videoResolutionButton.getText(), oldVideoResolutionString); + + } + + /* Test for failing to open camera. 
+ */ + public void testFailOpenCamera() throws InterruptedException { + Log.d(TAG, "testFailOpenCamera"); + + setToDefault(); + + assertNotNull(mPreview.getCameraControllerManager()); + assertNotNull(mPreview.getCameraController()); + mPreview.test_fail_open_camera = true; + + // can't test on startup, as camera is created when we create activity, so instead test by switching camera + if( mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ) { + Log.d(TAG, "switch camera"); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(false); + assertNotNull(mPreview.getCameraControllerManager()); + assertNull(mPreview.getCameraController()); + this.getInstrumentation().waitForIdleSync(); + + assertFalse( mActivity.popupIsOpen() ); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + Log.d(TAG, "about to click popup"); + clickView(popupButton); + Log.d(TAG, "done clicking popup"); + Thread.sleep(500); + // if camera isn't opened, popup shouldn't open + assertFalse( mActivity.popupIsOpen() ); + + View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + Log.d(TAG, "about to click settings"); + clickView(settingsButton); + Log.d(TAG, "done clicking settings"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.VolumeKeysPreferenceKey, "volume_exposure"); + editor.apply(); + this.getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_VOLUME_UP); + } + + public void testTakePhotoDRO() throws InterruptedException { + Log.d(TAG, "testTakePhotoDRO"); + + setToDefault(); + + if( !mActivity.supportsDRO() ) { + return; + } + + assertEquals(90, 
mActivity.getApplicationInterface().getImageQualityPref()); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_dro"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.DRO); + assertEquals(100, mActivity.getApplicationInterface().getImageQualityPref()); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + assertEquals(100, mActivity.getApplicationInterface().getImageQualityPref()); + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + + assertEquals(90, mActivity.getApplicationInterface().getImageQualityPref()); + + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertEquals(100, mActivity.getApplicationInterface().getImageQualityPref()); + + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Standard); + assertEquals(90, mActivity.getApplicationInterface().getImageQualityPref()); + } + + public void testTakePhotoDROPhotoStamp() throws InterruptedException { + Log.d(TAG, "testTakePhotoDROPhotoStamp"); + + setToDefault(); + + if( !mActivity.supportsDRO() ) { + return; + } + + assertEquals(90, mActivity.getApplicationInterface().getImageQualityPref()); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_dro"); + editor.putString(PreferenceKeys.StampPreferenceKey, "preference_stamp_yes"); + editor.apply(); + 
updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.DRO); + assertEquals(100, mActivity.getApplicationInterface().getImageQualityPref()); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + + assertEquals(100, mActivity.getApplicationInterface().getImageQualityPref()); + + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Standard); + assertEquals(90, mActivity.getApplicationInterface().getImageQualityPref()); + } + + /** Tests restarting in HDR mode. + */ + public void testHDRRestart() { + Log.d(TAG, "testHDRRestart"); + setToDefault(); + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Standard); + + if( !mActivity.supportsHDR() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.apply(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + restart(); + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + } + + public void testTakePhotoHDR() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDR"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), 
MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests taking photo in HDR photo mode with fast expo/HDR burst disabled. + */ + public void testTakePhotoHDRSlowBurst() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRSlowBurst"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + if( !mPreview.usingCamera2API() ) { + Log.d(TAG, "test requires camera2 api"); + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putBoolean(PreferenceKeys.Camera2FastBurstPreferenceKey, false); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests taking photo in HDR photo mode with saving base expo images. 
+ */ + public void testTakePhotoHDRSaveExpo() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRSaveExpo"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests taking photo in HDR photo mode with saving base expo images, with RAW. 
+ */ + public void testTakePhotoHDRSaveExpoRaw() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRSaveExpoRaw"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + if( !mPreview.supportsRaw() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_yes"); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, true, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests taking photo in HDR photo mode with saving base expo images, with RAW only. 
+ */ + public void testTakePhotoHDRSaveExpoRawOnly() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRSaveExpoRawOnly"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + if( !mPreview.supportsRaw() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_only"); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, true, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Take photo in HDR mode with front camera. + * Note that this fails on OnePlus 3T with old camera API, due to bug where photo resolution changes when + * exposure compensation set for front camera. 
+ */ + public void testTakePhotoHDRFrontCamera() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRFrontCamera"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + if( mPreview.getCameraControllerManager().getNumberOfCameras() <= 1 ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + + int cameraId = mPreview.getCameraId(); + + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + clickView(switchCameraButton); + waitUntilCameraOpened(); + + int new_cameraId = mPreview.getCameraId(); + + Log.d(TAG, "cameraId: " + cameraId); + Log.d(TAG, "new_cameraId: " + new_cameraId); + + assertTrue(cameraId != new_cameraId); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + public void testTakePhotoHDRAutoStabilise() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRAutoStabilise"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, true); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + 
subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + public void testTakePhotoHDRPhotoStamp() throws InterruptedException { + Log.d(TAG, "testTakePhotoHDRPhotoStamp"); + + setToDefault(); + + if( !mActivity.supportsHDR() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_hdr"); + editor.putString(PreferenceKeys.StampPreferenceKey, "preference_stamp_yes"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.HDR); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests expo bracketing with default values. 
+ */ + public void testTakePhotoExpo() throws InterruptedException { + Log.d(TAG, "testTakePhotoExpo"); + + setToDefault(); + + if( !mActivity.supportsExpoBracketing() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_expo_bracketing"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.ExpoBracketing); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /** Tests expo bracketing with 5 images, 1 stop. + */ + public void testTakePhotoExpo5() throws InterruptedException { + Log.d(TAG, "testTakePhotoExpo5"); + + setToDefault(); + + if( !mActivity.supportsExpoBracketing() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_expo_bracketing"); + editor.putString(PreferenceKeys.ExpoBracketingNImagesPreferenceKey, "5"); + editor.putString(PreferenceKeys.ExpoBracketingStopsPreferenceKey, "1"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.ExpoBracketing); + subTestTakePhoto(false, false, true, true, false, false, false, false); + if( mPreview.usingCamera2API() ) { + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + } + } + + /* Sets focus bracketing seek bars to some 
test positions. + */ + private void setUpFocusBracketing() throws InterruptedException { + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.FocusBracketing); + assertEquals(focusSeekBar.getVisibility(), View.VISIBLE); + focusSeekBar.setProgress( (int)(0.9*(focusSeekBar.getMax()-1)) ); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "source focus_distance: " + mPreview.getCameraController().getFocusDistance()); + mPreview.stoppedSettingFocusDistance(false); // hack, since onStopTrackingTouch() isn't called programmatically! + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(500); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + assertEquals(focusTargetSeekBar.getVisibility(), View.VISIBLE); + focusTargetSeekBar.setProgress( (int)(0.25*(focusTargetSeekBar.getMax()-1)) ); + this.getInstrumentation().waitForIdleSync(); + // test that we temporarily set the focus to the target distance + float target_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "target_actual_focus_distance: " + 
target_actual_focus_distance); + assertTrue(Math.abs(initial_focus_distance - target_actual_focus_distance) > 1.0e-5f); // no assertNotEquals! + mPreview.stoppedSettingFocusDistance(true); // hack, since onStopTrackingTouch() isn't called programmatically! + this.getInstrumentation().waitForIdleSync(); + Thread.sleep(500); // wait for initial focus to be set + + // test that we've reset back to the source distance + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + + /** Tests taking a photo in focus bracketing mode. + */ + public void testTakePhotoFocusBracketing() throws InterruptedException { + Log.d(TAG, "testTakePhotoFocusBracketing"); + + setToDefault(); + + if( !mActivity.supportsFocusBracketing() ) { + Log.d(TAG, "test requires focus bracketing"); + return; + } + + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(focusSeekBar.getVisibility(), View.GONE); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + assertEquals(focusTargetSeekBar.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing"); + editor.apply(); + updateForSettings(); + + setUpFocusBracketing(); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + 
CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + + /** Tests taking a photo in focus bracketing mode, with auto-level and 20 images. 
+ */ + public void testTakePhotoFocusBracketingHeavy() throws InterruptedException { + Log.d(TAG, "testTakePhotoFocusBracketingHeavy"); + + setToDefault(); + + if( !mActivity.supportsFocusBracketing() ) { + Log.d(TAG, "test requires focus bracketing"); + return; + } + + ImageSaver.test_small_queue_size = true; + mActivity.getApplicationInterface().getImageSaver().test_slow_saving = true; + // need to restart for test_small_queue_size to take effect + restart(); + + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(focusSeekBar.getVisibility(), View.GONE); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + assertEquals(focusTargetSeekBar.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing"); + editor.putString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, "20"); + editor.apply(); + updateForSettings(); + + setUpFocusBracketing(); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + subTestTakePhoto(false, false, true, true, false, false, false, false); + 
Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + + /** Tests taking a photo in focus bracketing mode, but with cancelling. + */ + public void testTakePhotoFocusBracketingCancel() throws InterruptedException { + Log.d(TAG, "testTakePhotoFocusBracketingCancel"); + + setToDefault(); + + if( !mActivity.supportsFocusBracketing() ) { + Log.d(TAG, "test requires focus bracketing"); + return; + } + + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(focusSeekBar.getVisibility(), View.GONE); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + assertEquals(focusTargetSeekBar.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing"); + editor.putString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, "200"); + editor.apply(); + updateForSettings(); + + setUpFocusBracketing(); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = 
camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + assertFalse( mPreview.isTakingPhoto() ); + assertTrue( mActivity.getApplicationInterface().canTakeNewPhoto() ); + + for(int i=0;i<2;i++) { + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + assertTrue( mPreview.isTakingPhoto() ); + + Thread.sleep(i==0 ? 500 : 3000); // wait before cancelling + assertTrue( mPreview.isTakingPhoto() ); + + Log.d(TAG, "about to click take photo to cancel"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo cancel"); + + // need to wait until cancelled + Thread.sleep(3000); + assertFalse( mPreview.isTakingPhoto() ); + assertTrue( mActivity.getApplicationInterface().canTakeNewPhoto() ); + + assertTrue(mPreview.isPreviewStarted()); // check preview restarted + Log.d(TAG, "count_cameraTakePicture: " + mPreview.count_cameraTakePicture); + assertEquals(mPreview.count_cameraTakePicture, i + 1); + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(mPreview.getCameraController().test_capture_results, i + 1); + + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + 
assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + } + + /** Tests taking a photo with RAW and focus bracketing mode. + */ + public void testTakePhotoRawFocusBracketing() throws InterruptedException { + Log.d(TAG, "testTakePhotoRawFocusBracketing"); + + setToDefault(); + + if( !mActivity.supportsFocusBracketing() ) { + Log.d(TAG, "test requires focus bracketing"); + return; + } + if( !mPreview.supportsRaw() ) { + Log.d(TAG, "test requires RAW"); + return; + } + + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(focusSeekBar.getVisibility(), View.GONE); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + assertEquals(focusTargetSeekBar.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_yes"); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing"); + editor.apply(); + updateForSettings(); + + setUpFocusBracketing(); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + subTestTakePhoto(false, false, true, true, false, false, 
true, false); + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + + /** Tests taking a photo with RAW only and focus bracketing mode. + */ + public void testTakePhotoRawOnlyFocusBracketing() throws InterruptedException { + Log.d(TAG, "testTakePhotoRawOnlyFocusBracketing"); + + setToDefault(); + + if( !mActivity.supportsFocusBracketing() ) { + Log.d(TAG, "test requires focus bracketing"); + return; + } + if( !mPreview.supportsRaw() ) { + Log.d(TAG, "test requires RAW"); + return; + } + + SeekBar focusSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_seekbar); + assertEquals(focusSeekBar.getVisibility(), View.GONE); + SeekBar focusTargetSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.focus_bracketing_target_seekbar); + assertEquals(focusTargetSeekBar.getVisibility(), View.GONE); + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, "preference_raw_only"); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_focus_bracketing"); + editor.apply(); + updateForSettings(); + + setUpFocusBracketing(); + + float initial_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "initial_focus_distance: " + initial_focus_distance); + CameraController2 camera_controller2 = 
(CameraController2)mPreview.getCameraController(); + CaptureRequest.Builder previewBuilder = camera_controller2.testGetPreviewBuilder(); + // need to use LENS_FOCUS_DISTANCE rather than mPreview.getCameraController().getFocusDistance(), as the latter + // will always return the source focus distance, even if the preview was set to something else + float actual_initial_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + assertEquals(initial_focus_distance, actual_initial_focus_distance, 1.0e-5f); + + subTestTakePhoto(false, false, true, true, false, false, true, false); + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + + float new_focus_distance = mPreview.getCameraController().getFocusDistance(); + Log.d(TAG, "new_focus_distance: " + new_focus_distance); + assertEquals(initial_focus_distance, new_focus_distance, 1.0e-5f); + + float new_actual_focus_distance = previewBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE); + Log.d(TAG, "new_actual_focus_distance: " + new_actual_focus_distance); + assertEquals(initial_focus_distance, new_actual_focus_distance, 1.0e-5f); + } + + /** Tests NR photo mode. 
+ */ + public void testTakePhotoNR() throws InterruptedException { + Log.d(TAG, "testTakePhotoNR"); + + setToDefault(); + + if( !mActivity.supportsNoiseReduction() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_noise_reduction"); + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.NoiseReduction); + + final int n_back_photos = 3; + subTestTakePhoto(false, false, true, true, false, false, false, false); + Log.d(TAG, "test_capture_results: " + mPreview.getCameraController().test_capture_results); + assertEquals(1, mPreview.getCameraController().test_capture_results); + assertTrue(mActivity.getPreview().getCameraController().getBurstTotal() < CameraController.N_IMAGES_NR_DARK_LOW_LIGHT); + + // then try again without waiting + for(int i=1;i= 3 && n_new_files <= 31); + } + else { + // at one photo per 100ms, should have approximately 30 - note that long press can take longer to kick in on some devices, e.g., OnePlus 3T + assertTrue(n_new_files >= 12 && n_new_files <= 31); + } + + mActivity.waitUntilImageQueueEmpty(); + } + + /** Tests continuous burst. + * Fails on Android emulator with Camera2 API, due to a serious camera error occurring for + * fast burst with more than 5 images! + */ + public void testTakePhotoContinuousBurst() throws InterruptedException { + Log.d(TAG, "testTakePhotoContinuousBurst"); + + setToDefault(); + + if( !mActivity.supportsFastBurst() ) { + return; + } + + subTestTakePhotoContinuousBurst(false); + + Thread.sleep(1000); + + // now take a regular photo + subTestTakePhoto(false, false, false, false, false, false, false, false); + } + + /** Tests continuous burst, but with flags set for slow saving and shorter queue. 
+ * Fails on Android emulator with Camera2 API, due to a serious camera error occurring for + * fast burst with more than 5 images! + */ + public void testTakePhotoContinuousBurstSlow() throws InterruptedException { + Log.d(TAG, "testTakePhotoContinuousBurstSlow"); + + setToDefault(); + + if( !mActivity.supportsFastBurst() ) { + return; + } + + ImageSaver.test_small_queue_size = true; + mActivity.getApplicationInterface().getImageSaver().test_slow_saving = true; + // need to restart for test_small_queue_size to take effect + restart(); + + subTestTakePhotoContinuousBurst(true); + } + + private void subTestTakePhotoPanorama(boolean to_max, boolean cancel, boolean cancel_by_settings) throws InterruptedException { + Log.d(TAG, "subTestTakePhotoPanorama"); + setToDefault(); + + if( !mActivity.supportsPanorama() ) { + return; + } + + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.putString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_panorama"); + //editor.putString(PreferenceKeys.PanoramaSaveExpoPreferenceKey, "preference_panorama_save_all"); // test/debug + editor.apply(); + updateForSettings(); + + assertSame(mActivity.getApplicationInterface().getPhotoMode(), MyApplicationInterface.PhotoMode.Panorama); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + Thread.sleep(1000); + assertEquals(0, mPreview.count_cameraTakePicture); + + assertFalse( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + View switchCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_camera); + View switchMultiCameraButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_multi_camera); + View switchVideoButton = 
mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + View exposureButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure); + View exposureLockButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.exposure_lock); + View audioControlButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.audio_control); + View popupButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.popup); + View trashButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.trash); + View shareButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.share); + View settingsButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.settings); + View cancelPanoramaButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.cancel_panorama); + + assertEquals(takePhotoButton.getVisibility(), View.VISIBLE); + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureLockButton.getVisibility(), (mPreview.supportsExposureLock() ? 
View.VISIBLE : View.GONE)); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + assertEquals(settingsButton.getVisibility(), View.VISIBLE); + assertEquals(cancelPanoramaButton.getVisibility(), View.GONE); + + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + + Log.d(TAG, "wait until finished taking photo"); + waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertEquals(1, mPreview.count_cameraTakePicture); + + for(int i=0;i<(to_max ? MyApplicationInterface.max_panorama_pics_c-1 : 4);i++) { + Log.d(TAG, "i = " + i); + assertTrue( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + + assertEquals(takePhotoButton.getVisibility(), View.VISIBLE); + assertEquals(switchCameraButton.getVisibility(), View.GONE); + assertEquals(switchMultiCameraButton.getVisibility(), View.GONE); + assertEquals(switchVideoButton.getVisibility(), View.GONE); + assertEquals(exposureButton.getVisibility(), View.GONE); + assertEquals(exposureLockButton.getVisibility(), View.GONE); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), View.GONE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + assertEquals(settingsButton.getVisibility(), View.GONE); + assertEquals(cancelPanoramaButton.getVisibility(), View.VISIBLE); + + Thread.sleep(2000); + + assertTrue( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + mActivity.runOnUiThread(new Runnable() { + @Override + public void run() { + mActivity.getApplicationInterface().getGyroSensor().testForceTargetAchieved(0); + } + }); + // need to wait for UI code to finish 
before leaving + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "wait for taking photo"); + waitForTakePhoto(); + Log.d(TAG, "done taking photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + Log.d(TAG, "take picture count: " + mPreview.count_cameraTakePicture); + assertEquals(mPreview.count_cameraTakePicture, i + 2); + } + + Thread.sleep(2000); + + if( !to_max ) { + assertTrue( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + + if( cancel ) { + if( cancel_by_settings ) { + Log.d(TAG, "about to click settings"); + clickView(settingsButton); + Log.d(TAG, "done clicking settings"); + this.getInstrumentation().waitForIdleSync(); + } + else { + Log.d(TAG, "about to click cancel"); + clickView(cancelPanoramaButton); + Log.d(TAG, "done clicking cancel"); + this.getInstrumentation().waitForIdleSync(); + } + } + else { + // finish panorama (if to_max, this should have happened automatically) + Log.d(TAG, "about to click take photo"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + } + } + + assertFalse( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + + assertEquals(takePhotoButton.getVisibility(), View.VISIBLE); + assertEquals(switchCameraButton.getVisibility(), (mPreview.getCameraControllerManager().getNumberOfCameras() > 1 ? View.VISIBLE : View.GONE)); + assertEquals(switchMultiCameraButton.getVisibility(), (mActivity.showSwitchMultiCamIcon() ? View.VISIBLE : View.GONE)); + assertEquals(switchVideoButton.getVisibility(), View.VISIBLE); + assertEquals(exposureButton.getVisibility(), View.VISIBLE); + assertEquals(exposureLockButton.getVisibility(), (mPreview.supportsExposureLock() ? 
View.VISIBLE : View.GONE)); + assertEquals(audioControlButton.getVisibility(), View.GONE); + assertEquals(popupButton.getVisibility(), View.VISIBLE); + assertEquals(trashButton.getVisibility(), View.GONE); + assertEquals(shareButton.getVisibility(), View.GONE); + assertEquals(settingsButton.getVisibility(), View.VISIBLE); + assertEquals(cancelPanoramaButton.getVisibility(), View.GONE); + + if( !cancel && !to_max ) { + // test trying to take another photo whilst saving + Thread.sleep(500); + assertTrue( mActivity.getApplicationInterface().getImageSaver().getNImagesToSave() > 0 ); + Log.d(TAG, "about to click take photo whilst saving images"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertFalse( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + + // and again (test for crash that occured in 1.47!) + Thread.sleep(500); + assertTrue( mActivity.getApplicationInterface().getImageSaver().getNImagesToSave() > 0 ); + Log.d(TAG, "about to click take photo whilst saving images"); + clickView(takePhotoButton); + Log.d(TAG, "done clicking take photo"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + assertFalse( mActivity.getApplicationInterface().getGyroSensor().isRecording() ); + } + + mActivity.waitUntilImageQueueEmpty(); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(n_new_files, (cancel ? 0 : 1)); + } + + /* Test for panorama photo mode. + * Can fail on Android emulator due to failing to create panorama image from identical set of + * images (since we don't actually move the camera). 
+ */ + public void testTakePhotoPanorama() throws InterruptedException { + Log.d(TAG, "testTakePhotoPanorama"); + + boolean has_zoom = mPreview.supportsZoom(); // record before switching to panorama mode, so we check if zoom is available (this will be false once in panorama mode) + + subTestTakePhotoPanorama(false, false, false); + + // check zoom seekbar doesn't show + View zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar); + assertEquals(zoomSeekBar.getVisibility(), View.INVISIBLE); + + // switch to video mode, check zoom now shows + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + assertTrue(mPreview.isVideo()); + assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE); + + // pause/resume, check still in video mode, and zoom still available + pauseAndResume(); + assertTrue(mPreview.isVideo()); + zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar); + assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE); + + // restart, check still in video mode, and zoom still available + restart(); + assertTrue(mPreview.isVideo()); + zoomSeekBar = mActivity.findViewById(net.sourceforge.opencamera.R.id.zoom_seekbar); + assertEquals(zoomSeekBar.getVisibility(), has_zoom ? View.VISIBLE : View.INVISIBLE); + } + + /* Test for panorama photo mode, taking max number of panorama shots. + * Can fail on Android emulator due to failing to create panorama image from identical set of + * images (since we don't actually move the camera). + */ + public void testTakePhotoPanoramaMax() throws InterruptedException { + Log.d(TAG, "testTakePhotoPanoramaMax"); + + subTestTakePhotoPanorama(true, false, false); + } + + /* Test for panorama photo mode, but cancelling. 
+ */ + public void testTakePhotoPanoramaCancel() throws InterruptedException { + Log.d(TAG, "testTakePhotoPanoramaCancel"); + + subTestTakePhotoPanorama(false, true, false); + } + + /* Test for panorama photo mode, but cancelling by going to settings. + * No longer relevant now that we hide the Settings while taking photo. + */ + /*public void testTakePhotoPanoramaCancelBySettings() throws InterruptedException { + Log.d(TAG, "testTakePhotoPanoramaCancelBySettings"); + + subTestTakePhotoPanorama(false, true, true); + }*/ + + /*private Bitmap getBitmapFromAssets(String filename) throws IOException { + Log.d(TAG, "getBitmapFromAssets: " + filename); + AssetManager assetManager = getInstrumentation().getContext().getResources().getAssets(); + InputStream is = assetManager.open(filename); + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inMutable = true; + Bitmap bitmap = BitmapFactory.decodeStream(is, null, options); + is.close(); + Log.d(TAG, " done: " + bitmap); + return bitmap; + }*/ + + private Bitmap getBitmapFromFile(String filename) { + return TestUtils.getBitmapFromFile(mActivity, filename); + } + + private Bitmap getBitmapFromFile(String filename, int inSampleSize) { + return TestUtils.getBitmapFromFile(mActivity, filename, inSampleSize); + } + + /* Tests restarting a large number of times - can be useful for testing for memory/resource leaks. 
+ */ + public void testRestart() { + Log.d(TAG, "testRestart"); + setToDefault(); + + final int n_restarts = 150; + for(int i=0;i video_quality = mActivity.getPreview().getVideoQualityHander().getSupportedVideoQuality(); + + assertFalse( mActivity.getApplicationInterface().isVideoPref() ); + assertEquals( 0, mActivity.getApplicationInterface().getVideoMaxDurationPref() ); + // n.b., will fail if not enough storage space on device!: + MyApplicationInterface.VideoMaxFileSize videomaxfilesize = mActivity.getApplicationInterface().getVideoMaxFileSizePref(); + long max_filesize = videomaxfilesize.max_filesize; + assertTrue( max_filesize > 100000000); + assertTrue(videomaxfilesize.auto_restart); + + Intent intent = createDefaultIntent(); + intent.setAction(MediaStore.ACTION_VIDEO_CAPTURE); + intent.putExtra(MediaStore.EXTRA_SIZE_LIMIT, 50123456L); + intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0); + setActivityIntent(intent); + + restart(); + + assertTrue( mActivity.getApplicationInterface().isVideoPref() ); + assertEquals( 0, mActivity.getApplicationInterface().getVideoMaxDurationPref() ); + videomaxfilesize = mActivity.getApplicationInterface().getVideoMaxFileSizePref(); + assertEquals( 50123456, videomaxfilesize.max_filesize ); + assertFalse(videomaxfilesize.auto_restart); + assertEquals(video_quality.get(video_quality.size()-1), mActivity.getApplicationInterface().getVideoQualityPref()); + + intent = createDefaultIntent(); + intent.setAction(MediaStore.ACTION_VIDEO_CAPTURE); + intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1); + setActivityIntent(intent); + + restart(); + + assertTrue( mActivity.getApplicationInterface().isVideoPref() ); + assertEquals( 0, mActivity.getApplicationInterface().getVideoMaxDurationPref() ); + videomaxfilesize = mActivity.getApplicationInterface().getVideoMaxFileSizePref(); + assertTrue( videomaxfilesize.max_filesize > 100000000); + assertTrue( Math.abs(max_filesize - videomaxfilesize.max_filesize) < 5000000 ); // remaining storage 
may vary whilst test is running! + assertTrue(videomaxfilesize.auto_restart); + + assertEquals(video_quality.get(0), mActivity.getApplicationInterface().getVideoQualityPref()); + } + + /* Tests launching with ACTION_VIDEO_CAPTURE intent, along with EXTRA_DURATION_LIMIT. The test + * then tests we actually record video with the duration limit set. + * Fails on Android emulator, for some reason EXTRA_DURATION_LIMIT makes the video stop due to + * hitting max duration immediately. + */ + public void testIntentVideoDurationLimit() throws InterruptedException, ApplicationInterface.NoFreeStorageException { + Log.d(TAG, "testIntentVideoDurationLimit"); + + setToDefault(); + + assertFalse( mActivity.getApplicationInterface().isVideoPref() ); + assertEquals( 0, mActivity.getApplicationInterface().getVideoMaxDurationPref() ); + // n.b., will fail if not enough storage space on device!: + MyApplicationInterface.VideoMaxFileSize videomaxfilesize = mActivity.getApplicationInterface().getVideoMaxFileSizePref(); + long max_filesize = videomaxfilesize.max_filesize; + assertTrue( max_filesize > 100000000); + assertTrue(videomaxfilesize.auto_restart); + + Intent intent = createDefaultIntent(); + intent.setAction(MediaStore.ACTION_VIDEO_CAPTURE); + intent.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 5); + setActivityIntent(intent); + + restart(); + + assertTrue( mActivity.getApplicationInterface().isVideoPref() ); + assertEquals( 5000, mActivity.getApplicationInterface().getVideoMaxDurationPref() ); + // note that max_filesize may vary if device filesize has changed whilst test is running + assertEquals( max_filesize, mActivity.getApplicationInterface().getVideoMaxFileSizePref().max_filesize, 5*1048576 ); + + // count initial files in folder + int n_files = getNFiles(); + Log.d(TAG, "n_files at start: " + n_files); + + View takePhotoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.take_photo); + Log.d(TAG, "about to click take video"); + clickView(takePhotoButton); + 
Log.d(TAG, "done clicking take video"); + this.getInstrumentation().waitForIdleSync(); + Log.d(TAG, "after idle sync"); + + assertTrue( mPreview.isVideoRecording() ); + + Thread.sleep(4000); + Log.d(TAG, "check still taking video"); + assertTrue( mPreview.isVideoRecording() ); + + int n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + // note, if using scoped storage without SAF (i.e., mediastore API), then the video file won't show up until after we've finished recording (IS_PENDING is set to 0) + assertEquals(MainActivity.useScopedStorage() && !mActivity.getStorageUtils().isUsingSAF() ? 0 : 1, n_new_files); + + Thread.sleep(3000); + + Log.d(TAG, "check stopped taking video"); + assertFalse(mPreview.isVideoRecording()); + + n_new_files = getNFiles() - n_files; + Log.d(TAG, "n_new_files: " + n_new_files); + assertEquals(1, n_new_files); + + } + + /** Tests that we handle the upgrade from the preference boolean key "preference_use_camera2" + * to the string key PreferenceKeys.CameraAPIPreferenceKey that occured in v1.48. 
+ */ + public void testCamera2PrefUpgrade() { + Log.d(TAG, "testCamera2PrefUpgrade"); + + // n.b., don't bother calling setToDefault() + waitUntilCameraOpened(); + + if( !mActivity.supportsCamera2() ) { + Log.d(TAG, "test requires camera2 support"); + return; + } + + assertFalse(mPreview.usingCamera2API()); + + // test legacy key present, but set to old api + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + SharedPreferences.Editor editor = settings.edit(); + editor.clear(); + editor.putBoolean("preference_use_camera2", false); + editor.apply(); + restart(); + assertFalse(mPreview.usingCamera2API()); + + // now test legacy key present for camera2 api + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + editor = settings.edit(); + editor.clear(); + editor.putBoolean("preference_use_camera2", true); + editor.apply(); + + for(int i=0;i<2;i++) { + restart(); + assertTrue(mPreview.usingCamera2API()); + + // also check we switched over to the new key + settings = PreferenceManager.getDefaultSharedPreferences(mActivity); + assertFalse(settings.contains("preference_use_camera2")); + assertTrue(settings.contains(PreferenceKeys.CameraAPIPreferenceKey)); + assertEquals("preference_camera_api_camera2", settings.getString(PreferenceKeys.CameraAPIPreferenceKey, PreferenceKeys.CameraAPIPreferenceDefault)); + } + } + + private TestUtils.HistogramDetails checkHistogram(Bitmap bitmap) { + return TestUtils.checkHistogram(mActivity, bitmap); + } + + private TestUtils.HistogramDetails subTestHDR(List inputs, String output_name, boolean test_dro, int iso, long exposure_time) { + return TestUtils.subTestHDR(mActivity, inputs, output_name, test_dro, iso, exposure_time); + } + + private TestUtils.HistogramDetails subTestHDR(List inputs, String output_name, boolean test_dro, int iso, long exposure_time, HDRProcessor.TonemappingAlgorithm tonemapping_algorithm/*, HDRTestCallback test_callback*/) { + return 
TestUtils.subTestHDR(mActivity, inputs, output_name, test_dro, iso, exposure_time, tonemapping_algorithm); + } + + private void checkHDROffsets(int [] exp_offsets_x, int [] exp_offsets_y) { + TestUtils.checkHDROffsets(mActivity, exp_offsets_x, exp_offsets_y); + } + + private void checkHDROffsets(int [] exp_offsets_x, int [] exp_offsets_y, int scale) { + TestUtils.checkHDROffsets(mActivity, exp_offsets_x, exp_offsets_y, scale); + } + + private static void checkHistogramDetails(TestUtils.HistogramDetails hdrHistogramDetails, int exp_min_value, int exp_median_value, int exp_max_value) { + TestUtils.checkHistogramDetails(hdrHistogramDetails, exp_min_value, exp_median_value, exp_max_value); + } + + /** Tests HDR algorithm on test samples "saintpaul". + */ + public void testHDR1() throws IOException, InterruptedException { + Log.d(TAG, "testHDR1"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input3.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input4.jpg") ); + + // actual ISO unknown, so guessing + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR1_output.jpg", false, 1600, 1000000000L); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 44, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 42, 253); + //checkHistogramDetails(hdrHistogramDetails, 1, 24, 254); + checkHistogramDetails(hdrHistogramDetails, 2, 30, 254); + } + + /** Tests HDR algorithm on test samples "saintpaul", but with 5 images. 
+ */ + public void testHDR1_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR1_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input3.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input4.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "saintpaul/input5.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR1_exp5_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 3, 43, 251); + checkHistogramDetails(hdrHistogramDetails, 6, 42, 251); + } + + /** Tests HDR algorithm on test samples "stlouis". + */ + public void testHDR2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "stlouis/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "stlouis/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "stlouis/input3.jpg") ); + + // actual ISO unknown, so guessing + subTestHDR(inputs, "testHDR2_output.jpg", false, 1600, (long)(1000000000L*2.5)); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR3". 
+ */ + public void testHDR3() throws IOException, InterruptedException { + Log.d(TAG, "testHDR3"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR3/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR3/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR3/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR3_output.jpg", false, 40, 1000000000L/680); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 3, 104, 255); + //checkHistogramDetails(hdrHistogramDetails, 4, 113, 255); + checkHistogramDetails(hdrHistogramDetails, 8, 113, 255); + } + + /** Tests HDR algorithm on test samples "testHDR4". + */ + public void testHDR4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR4"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR4/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR4/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR4/input2.jpg") ); + + subTestHDR(inputs, "testHDR4_output.jpg", true, 102, 1000000000L/60); + + int [] exp_offsets_x = {-2, 0, 2}; + int [] exp_offsets_y = {-1, 0, 1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR5". 
+ */ + public void testHDR5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR5/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR5/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR5/input2.jpg") ); + + subTestHDR(inputs, "testHDR5_output.jpg", false, 40, 1000000000L/398); + + // Nexus 6: + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {-1, 0, 0}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR6". + */ + public void testHDR6() throws IOException, InterruptedException { + Log.d(TAG, "testHDR6"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR6/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR6/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR6/input2.jpg") ); + + subTestHDR(inputs, "testHDR6_output.jpg", false, 40, 1000000000L/2458); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR7". 
+ */ + public void testHDR7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR7/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR7/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR7/input2.jpg") ); + + subTestHDR(inputs, "testHDR7_output.jpg", false, 40, 1000000000L/538); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR8". + */ + public void testHDR8() throws IOException, InterruptedException { + Log.d(TAG, "testHDR8"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR8/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR8/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR8/input2.jpg") ); + + subTestHDR(inputs, "testHDR8_output.jpg", false, 40, 1000000000L/148); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR9". 
+ */ + public void testHDR9() throws IOException, InterruptedException { + Log.d(TAG, "testHDR9"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR9/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR9/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR9/input2.jpg") ); + + subTestHDR(inputs, "testHDR9_output.jpg", false, 40, 1000000000L/1313); + + int [] exp_offsets_x = {-1, 0, 1}; + int [] exp_offsets_y = {0, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR10". + */ + public void testHDR10() throws IOException, InterruptedException { + Log.d(TAG, "testHDR10"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR10/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR10/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR10/input2.jpg") ); + + subTestHDR(inputs, "testHDR10_output.jpg", false, 107, 1000000000L/120); + + int [] exp_offsets_x = {2, 0, 0}; + int [] exp_offsets_y = {5, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR11". 
+ */ + public void testHDR11() throws IOException, InterruptedException { + Log.d(TAG, "testHDR11"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR11/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR11/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR11/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR11_output.jpg", true, 40, 1000000000L/2662); + + int [] exp_offsets_x = {-2, 0, 1}; + int [] exp_offsets_y = {1, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 0, 48, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 65, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 62, 254); + } + + /** Tests HDR algorithm on test samples "testHDR12". + */ + public void testHDR12() throws IOException, InterruptedException { + Log.d(TAG, "testHDR12"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR12/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR12/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR12/input2.jpg") ); + + subTestHDR(inputs, "testHDR12_output.jpg", true, 1196, 1000000000L/12); + + int [] exp_offsets_x = {0, 0, 7}; + int [] exp_offsets_y = {0, 0, 8}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR13". 
+ */ + public void testHDR13() throws IOException, InterruptedException { + Log.d(TAG, "testHDR13"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR13/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR13/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR13/input2.jpg") ); + + subTestHDR(inputs, "testHDR13_output.jpg", false, 323, 1000000000L/24); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR14". + */ + public void testHDR14() throws IOException, InterruptedException { + Log.d(TAG, "testHDR14"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR14/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR14/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR14/input2.jpg") ); + + subTestHDR(inputs, "testHDR14_output.jpg", false, 40, 1000000000L/1229); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR15". 
+ */ + public void testHDR15() throws IOException, InterruptedException { + Log.d(TAG, "testHDR15"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR15/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR15/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR15/input2.jpg") ); + + subTestHDR(inputs, "testHDR15_output.jpg", false, 40, 1000000000L/767); + + int [] exp_offsets_x = {1, 0, -1}; + int [] exp_offsets_y = {2, 0, -3}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR16". + */ + public void testHDR16() throws IOException, InterruptedException { + Log.d(TAG, "testHDR16"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR16/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR16/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR16/input2.jpg") ); + + subTestHDR(inputs, "testHDR16_output.jpg", false, 52, 1000000000L/120); + + int [] exp_offsets_x = {-1, 0, 2}; + int [] exp_offsets_y = {1, 0, -6}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR17". 
+ */ + public void testHDR17() throws IOException, InterruptedException { + Log.d(TAG, "testHDR17"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR17/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR17/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR17/input2.jpg") ); + + subTestHDR(inputs, "testHDR17_output.jpg", true, 557, 1000000000L/12); + + // Nexus 6: + //int [] exp_offsets_x = {0, 0, -3}; + //int [] exp_offsets_y = {0, 0, -4}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, -2}; + int [] exp_offsets_y = {0, 0, -3}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR18". + */ + public void testHDR18() throws IOException, InterruptedException { + Log.d(TAG, "testHDR18"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR18/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR18/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR18/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR18_output.jpg", true, 100, 1000000000L/800); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 1, 113, 254); + //checkHistogramDetails(hdrHistogramDetails, 1, 119, 255); + //checkHistogramDetails(hdrHistogramDetails, 5, 120, 255); + checkHistogramDetails(hdrHistogramDetails, 2, 120, 255); + } + + /** Tests HDR algorithm on test samples "testHDR19". 
+ */ + public void testHDR19() throws IOException, InterruptedException { + Log.d(TAG, "testHDR19"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR19/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR19/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR19/input2.jpg") ); + + subTestHDR(inputs, "testHDR19_output.jpg", true, 100, 1000000000L/160); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR20". + */ + public void testHDR20() throws IOException, InterruptedException { + Log.d(TAG, "testHDR20"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR20/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR20/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR20/input2.jpg") ); + + subTestHDR(inputs, "testHDR20_output.jpg", true, 100, 1000000000L*2); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {-1, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR21". 
+ */ + public void testHDR21() throws IOException, InterruptedException { + Log.d(TAG, "testHDR21"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR21/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR21/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR21/input2.jpg") ); + + // ISO and exposure unknown, so guessing + subTestHDR(inputs, "testHDR21_output.jpg", true, 800, 1000000000L/12); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR22". + */ + public void testHDR22() throws IOException, InterruptedException { + Log.d(TAG, "testHDR22"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR22/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR22/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR22/input2.jpg") ); + + subTestHDR(inputs, "testHDR22_output.jpg", true, 391, 1000000000L/12); + + // Nexus 6: + //int [] exp_offsets_x = {1, 0, -5}; + //int [] exp_offsets_y = {1, 0, -6}; + // OnePlus 3T: + int [] exp_offsets_x = {0, 0, -5}; + int [] exp_offsets_y = {1, 0, -6}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 2 images. 
+ */ + public void testHDR23_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp2_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0}; + int [] exp_offsets_y = {0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 13, 72, 250); + checkHistogramDetails(hdrHistogramDetails, 24, 72, 250); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 2 images, and greater exposure gap. + */ + public void testHDR23_exp2b() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp2b"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp2b_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0}; + int [] exp_offsets_y = {0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR23". 
+ */ + public void testHDR23() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + + // ISO unknown, so guessing + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_output.jpg", false, 1600, 1000000000L); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 32, 74, 255); + checkHistogramDetails(hdrHistogramDetails, 29, 68, 255); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 4 images. + */ + public void testHDR23_exp4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp4"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp4_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 15, 69, 254); + checkHistogramDetails(hdrHistogramDetails, 24, 70, 254); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 5 images. 
+ */ + public void testHDR23_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp5_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 28, 82, 255); + checkHistogramDetails(hdrHistogramDetails, 21, 74, 255); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 6 images. 
+ */ + public void testHDR23_exp6() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp6"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0072.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0061.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp6_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 15, 70, 254); + checkHistogramDetails(hdrHistogramDetails, 25, 71, 254); + } + + /** Tests HDR algorithm on test samples "testHDR23", but with 7 images. 
+ */ + public void testHDR23_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR23_exp7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0072.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0070.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0068.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0066.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0064.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0062.png") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR23/memorial0061.png") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR23_exp7_output.jpg", false, -1, -1); + + int [] exp_offsets_x = {0, 0, 0, 0, 0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0, 0, 0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 17, 81, 255); + //checkHistogramDetails(hdrHistogramDetails, 28, 82, 255); + checkHistogramDetails(hdrHistogramDetails, 20, 72, 255); + } + + /** Tests HDR algorithm on test samples "testHDR24". 
+ */ + public void testHDR24() throws IOException, InterruptedException { + Log.d(TAG, "testHDR24"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR24/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR24/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR24/input2.jpg") ); + + subTestHDR(inputs, "testHDR24_output.jpg", true, 40, 1000000000L/422); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR25". + */ + public void testHDR25() throws IOException, InterruptedException { + Log.d(TAG, "testHDR25"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR25/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR25/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR25/input2.jpg") ); + + subTestHDR(inputs, "testHDR25_output.jpg", true, 40, 1000000000L/1917); + + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {1, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR26". 
+ */ + public void testHDR26() throws IOException, InterruptedException { + Log.d(TAG, "testHDR26"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR26/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR26/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR26/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR26_output.jpg", true, 40, 1000000000L/5325); + + int [] exp_offsets_x = {-1, 0, 1}; + int [] exp_offsets_y = {1, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 0, 104, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 119, 254); + } + + /** Tests HDR algorithm on test samples "testHDR27". + */ + public void testHDR27() throws IOException, InterruptedException { + Log.d(TAG, "testHDR27"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR27/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR27/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR27/input2.jpg") ); + + subTestHDR(inputs, "testHDR27_output.jpg", true, 40, 1000000000L/949); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR28". 
+ */ + public void testHDR28() throws IOException, InterruptedException { + Log.d(TAG, "testHDR28"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR28/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR28/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR28/input2.jpg") ); + + subTestHDR(inputs, "testHDR28_output.jpg", true, 294, 1000000000L/20); + + int [] exp_offsets_x = {0, 0, 2}; + int [] exp_offsets_y = {0, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR29". + */ + public void testHDR29() throws IOException, InterruptedException { + Log.d(TAG, "testHDR29"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR29/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR29/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR29/input2.jpg") ); + + subTestHDR(inputs, "testHDR29_output.jpg", false, 40, 1000000000L/978); + + int [] exp_offsets_x = {-1, 0, 3}; + int [] exp_offsets_y = {0, 0, -1}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR30". 
+ */ + public void testHDR30() throws IOException, InterruptedException { + Log.d(TAG, "testHDR30"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR30/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR30/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR30/input2.jpg") ); + + subTestHDR(inputs, "testHDR30_output.jpg", false, 40, 1000000000L/978); + + // offsets for full image + //int [] exp_offsets_x = {-6, 0, -1}; + //int [] exp_offsets_y = {23, 0, -13}; + // offsets using centre quarter image + int [] exp_offsets_x = {-5, 0, 0}; + int [] exp_offsets_y = {22, 0, -13}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR31". + */ + public void testHDR31() throws IOException, InterruptedException { + Log.d(TAG, "testHDR31"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR31/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR31/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR31/input2.jpg") ); + + subTestHDR(inputs, "testHDR31_output.jpg", false, 40, 1000000000L/422); + + // offsets for full image + //int [] exp_offsets_x = {0, 0, 4}; + //int [] exp_offsets_y = {21, 0, -11}; + // offsets using centre quarter image + int [] exp_offsets_x = {0, 0, 3}; + int [] exp_offsets_y = {21, 0, -11}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR32". 
+ */ + public void testHDR32() throws IOException, InterruptedException { + Log.d(TAG, "testHDR32"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR32/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR32/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR32/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR32_output.jpg", true, 40, 1000000000L/1331); + + int [] exp_offsets_x = {1, 0, 0}; + int [] exp_offsets_y = {13, 0, -10}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 3, 101, 251); + //checkHistogramDetails(hdrHistogramDetails, 3, 109, 251); + //checkHistogramDetails(hdrHistogramDetails, 6, 111, 252); + checkHistogramDetails(hdrHistogramDetails, 2, 111, 252); + } + + /** Tests HDR algorithm on test samples "testHDR33". + */ + public void testHDR33() throws IOException, InterruptedException { + Log.d(TAG, "testHDR33"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR33/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR33/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR33/input2.jpg") ); + + subTestHDR(inputs, "testHDR33_output.jpg", true, 40, 1000000000L/354); + + int [] exp_offsets_x = {13, 0, -10}; + int [] exp_offsets_y = {24, 0, -12}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR34". 
+ */ + public void testHDR34() throws IOException, InterruptedException { + Log.d(TAG, "testHDR34"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR34/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR34/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR34/input2.jpg") ); + + subTestHDR(inputs, "testHDR34_output.jpg", true, 40, 1000000000L/4792); + + int [] exp_offsets_x = {5, 0, -8}; + int [] exp_offsets_y = {0, 0, -2}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR35". + */ + public void testHDR35() throws IOException, InterruptedException { + Log.d(TAG, "testHDR35"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR35/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR35/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR35/input2.jpg") ); + + subTestHDR(inputs, "testHDR35_output.jpg", true, 40, 1000000000L/792); + + int [] exp_offsets_x = {-10, 0, 3}; + int [] exp_offsets_y = {7, 0, -3}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR36". 
+ */ + public void testHDR36() throws IOException, InterruptedException { + Log.d(TAG, "testHDR36"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR36/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR36/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR36/input2.jpg") ); + + subTestHDR(inputs, "testHDR36_output.jpg", false, 100, 1000000000L/1148); + + int [] exp_offsets_x = {2, 0, -2}; + int [] exp_offsets_y = {-4, 0, 2}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR37". + */ + public void testHDR37() throws IOException, InterruptedException { + Log.d(TAG, "testHDR37"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR37/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR37/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR37/input2.jpg") ); + + subTestHDR(inputs, "testHDR37_output.jpg", false, 46, 1000000000L/120); + + int [] exp_offsets_x = {0, 0, 3}; + int [] exp_offsets_y = {2, 0, -19}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDR38". + * Tests with Filmic tonemapping. 
+ */ + public void testHDR38Filmic() throws IOException, InterruptedException { + Log.d(TAG, "testHDR38Filmic"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR38/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR38/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR38/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR38_filmic_output.jpg", false, 125, 1000000000L/2965, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_FU2); + + int [] exp_offsets_x = {-1, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + //checkHistogramDetails(hdrHistogramDetails, 0, 92, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 93, 254); + } + + /** Tests HDR algorithm on test samples "testHDR39". + */ + public void testHDR39() throws IOException, InterruptedException { + Log.d(TAG, "testHDR39"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR39/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR39/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR39/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR39_output.jpg", false, 125, 1000000000L/2135); + + int [] exp_offsets_x = {-6, 0, -2}; + int [] exp_offsets_y = {6, 0, -8}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 0, 128, 222); + } + + /** Tests HDR algorithm on test samples "testHDR40". 
+ */ + public void testHDR40() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR40_output.jpg", false, 50, 1000000000L/262); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 138, 254); + } + + /** Tests HDR algorithm on test samples "testHDR40" with Exponential tonemapping. + */ + public void testHDR40Exponential() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40Exponential"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR40_exponential_output.jpg", false, 50, 1000000000L/262, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_EXPONENTIAL); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 138, 254); + } + + /** Tests HDR algorithm on test samples "testHDR40" with Filmic tonemapping. 
+ */ + public void testHDR40Filmic() throws IOException, InterruptedException { + Log.d(TAG, "testHDR40Filmic"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR40/input2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR40_filmic_output.jpg", false, 50, 1000000000L/262, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_FU2); + + int [] exp_offsets_x = {5, 0, -2}; + int [] exp_offsets_y = {13, 0, 24}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + + checkHistogramDetails(hdrHistogramDetails, 1, 130, 254); + } + + /** Tests HDR algorithm on test samples "testHDR41". + */ + public void testHDR41() throws IOException, InterruptedException { + Log.d(TAG, "testHDR41"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR41/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR41/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR41/input2.jpg") ); + + subTestHDR(inputs, "testHDR41_output.jpg", false, 925, 1000000000L/25); + } + + /** Tests HDR algorithm on test samples "testHDR42". 
+ */ + public void testHDR42() throws IOException, InterruptedException { + Log.d(TAG, "testHDR42"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR42/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR42/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR42/input2.jpg") ); + + subTestHDR(inputs, "testHDR42_output.jpg", false, 112, 1000000000L/679); + } + + /** Tests HDR algorithm on test samples "testHDR43". + */ + public void testHDR43() throws IOException, InterruptedException { + Log.d(TAG, "testHDR43"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR43/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR43/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR43/input2.jpg") ); + + subTestHDR(inputs, "testHDR43_output.jpg", false, 1196, 1000000000L/12); + } + + /** Tests HDR algorithm on test samples "testHDR44". + */ + public void testHDR44() throws IOException, InterruptedException { + Log.d(TAG, "testHDR44"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR44/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR44/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR44/input2.jpg") ); + + subTestHDR(inputs, "testHDR44_output.jpg", false, 100, 1000000000L/1016); + } + + /** Tests HDR algorithm on test samples "testHDR45". 
+ */ + public void testHDR45() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + // ISO 100, exposure time 2s, but pass in -1 since these are HDRNTests + subTestHDR(inputs, "testHDR45_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR45". + */ + public void testHDR45_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR45_exp5_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR45". 
+ */ + public void testHDR45_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR45_exp7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6314.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6312.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6310.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6309.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6311.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6313.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR45/IMG_6315.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR45_exp7_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR46". + */ + public void testHDR46() throws IOException, InterruptedException { + Log.d(TAG, "testHDR46"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 06.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 04.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 03.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 02.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 01.jpg") ); + + // ISO 100, exposure time 1/60s, but pass in -1 since these are HDRNTests + subTestHDR(inputs, "testHDR46_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR46". 
+ */ + public void testHDR46_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR46_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 06.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 04.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 03.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 02.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR46/Izmir Harbor - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR46_exp5_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR47". + */ + public void testHDR47_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + + subTestHDR(inputs, "testHDR47_exp2_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR47". 
+ */ + public void testHDR47() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 01.jpg") ); + + // ISO 400, exposure time 1/60s, but pass in -1 since these are HDRNTests + subTestHDR(inputs, "testHDR47_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR47". 
+ */ + public void testHDR47_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR47_exp5_output.jpg", false, -1, -1); + + checkHistogramDetails(hdrHistogramDetails, 1, 73, 255); + } + + /** Tests HDR algorithm on test samples "testHDR47". 
+ */ + public void testHDR47_exp7() throws IOException, InterruptedException { + Log.d(TAG, "testHDR47_exp7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 08.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 07.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 06.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 05.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 04.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 03.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 02.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR47/High Five - ppw - 01.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR47_exp7_output.jpg", false, -1, -1); + + checkHistogramDetails(hdrHistogramDetails, 1, 73, 255); + } + + /** Tests HDR algorithm on test samples "testHDR48". 
+ */ + public void testHDR48() throws IOException, InterruptedException { + Log.d(TAG, "testHDR48"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input3.jpg") ); + //inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input4.jpg") ); + + // ISO 100, exposure time 1/716s, but pass in -1 since these are HDRNTests + subTestHDR(inputs, "testHDR48_output.jpg", false, -1, -1); + } + + /** Tests HDR algorithm on test samples "testHDR48". + */ + public void testHDR48_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR48_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input3.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR48/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR48_exp5_output.jpg", false, -1, -1); + + checkHistogramDetails(hdrHistogramDetails, 0, 59, 241); + } + + /** Tests HDR algorithm on test samples "testHDR49". 
+ */ + public void testHDR49_exp2() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR49_exp2_output.jpg", false, -1, -1); + + checkHistogramDetails(hdrHistogramDetails, 0, 92, 250); + } + + /** Tests HDR algorithm on test samples "testHDR49". + */ + public void testHDR49() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + + // ISO 100, exposure time 1/417s, but pass in -1 since these are HDRNTests + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR49_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 81, 254); + } + + /** Tests HDR algorithm on test samples "testHDR49". 
+ */ + public void testHDR49_exp4() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp4"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR49_exp4_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 100, 245); + checkHistogramDetails(hdrHistogramDetails, 0, 94, 244); + } + + /** Tests HDR algorithm on test samples "testHDR49". + */ + public void testHDR49_exp5() throws IOException, InterruptedException { + Log.d(TAG, "testHDR49_exp5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input2.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input3.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR49/input4.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR49_exp5_output.jpg", false, -1, -1); + + //checkHistogramDetails(hdrHistogramDetails, 0, 72, 244); + checkHistogramDetails(hdrHistogramDetails, 0, 78, 243); + } + + /** Tests HDR algorithm on test samples "testHDR50". 
+ */ + public void testHDR50() throws IOException, InterruptedException { + Log.d(TAG, "testHDR50"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR50/IMG_20180626_221357_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR50_output.jpg", false, 867, 1000000000L/14); + + checkHistogramDetails(hdrHistogramDetails, 0, 69, 255); + } + + /** Tests HDR algorithm on test samples "testHDR51". + */ + public void testHDR51() throws IOException, InterruptedException { + Log.d(TAG, "testHDR51"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR51/IMG_20180323_104702_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR51_output.jpg", true, 1600, 1000000000L/11); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR52". 
+ */ + public void testHDR52() throws IOException, InterruptedException { + Log.d(TAG, "testHDR52"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR52/IMG_20181023_143633_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR52_output.jpg", false, 100, 1000000000L/2105); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR53". + */ + public void testHDR53() throws IOException, InterruptedException { + Log.d(TAG, "testHDR53"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR53/IMG_20181106_135411_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR53_output.jpg", false, 103, 1000000000L/5381); + + //checkHistogramDetails(hdrHistogramDetails, 0, 55, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 64, 255); + } + + /** Tests HDR algorithm on test samples "testHDR54". 
+ */ + public void testHDR54() throws IOException, InterruptedException { + Log.d(TAG, "testHDR54"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR54/IMG_20181107_115508_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR54_output.jpg", false, 752, 1000000000L/14); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR55". + */ + public void testHDR55() throws IOException, InterruptedException { + Log.d(TAG, "testHDR55"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR55/IMG_20181107_115608_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR55_output.jpg", false, 1505, 1000000000L/10); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR56". 
+ */ + public void testHDR56() throws IOException, InterruptedException { + Log.d(TAG, "testHDR56"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR56/180502_141722_OC_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR56_output.jpg", false, 50, 1000000000L/40); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR57". + */ + public void testHDR57() throws IOException, InterruptedException { + Log.d(TAG, "testHDR57"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR57/IMG_20181119_145313_EXP2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR57_output.jpg", true, 100, 1000000000L/204); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR58". 
+ */ + public void testHDR58() throws IOException, InterruptedException { + Log.d(TAG, "testHDR58"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR58/IMG_20190911_210146_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR58_output.jpg", false, 1250, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR58_output.jpg", false, 1250, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + checkHistogramDetails(hdrHistogramDetails, 11, 119, 255); + } + + /** Tests HDR algorithm on test samples "testHDR59". + */ + public void testHDR59() throws IOException, InterruptedException { + Log.d(TAG, "testHDR59"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR59/IMG_20190911_210154_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR59_output.jpg", false, 1250, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR59_output.jpg", false, 1250, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR60". 
+ */ + public void testHDR60() throws IOException, InterruptedException { + Log.d(TAG, "testHDR60"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR60/IMG_20200507_020319_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR60_output.jpg", false, 491, 1000000000L/10); + //HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR60_output.jpg", false, 491, 1000000000L/10, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests HDR algorithm on test samples "testHDR61". + */ + public void testHDR61() throws IOException, InterruptedException { + Log.d(TAG, "testHDR61"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDR61/IMG_20191111_145230_2.jpg") ); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestHDR(inputs, "testHDR61_output.jpg", false, 50, 1000000000L/5025); + + checkHistogramDetails(hdrHistogramDetails, 0, 86, 254); + + int [] exp_offsets_x = {0, 0, 1}; + int [] exp_offsets_y = {0, 0, -2}; + checkHDROffsets(exp_offsets_x, exp_offsets_y); + } + + /** Tests HDR algorithm on test samples "testHDRtemp". + * Used for one-off testing, or to recreate HDR images from the base exposures to test an updated algorithm. + * The test images should be copied to the test device into DCIM/testOpenCamera/testdata/hdrsamples/testHDRtemp/ . 
+ */ + public void testHDRtemp() throws IOException, InterruptedException { + Log.d(TAG, "testHDRtemp"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDRtemp/input0.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDRtemp/input1.jpg") ); + inputs.add( getBitmapFromFile(TestUtils.hdr_images_path + "testHDRtemp/input2.jpg") ); + + subTestHDR(inputs, "testHDRtemp_output.jpg", true, 100, 1000000000L/100); + } + + /** Tests DRO only on a dark image. + */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + public void testDRODark0() throws IOException, InterruptedException { + Log.d(TAG, "testDRODark0"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.avg_images_path + "testAvg3/input0.jpg") ); + + subTestHDR(inputs, "testDRODark0_output.jpg", true, -1, -1); + } + + /** Tests DRO only on a dark image. + */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + public void testDRODark1() throws IOException, InterruptedException { + Log.d(TAG, "testDRODark1"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add( getBitmapFromFile(TestUtils.avg_images_path + "testAvg8/input0.jpg") ); + + subTestHDR(inputs, "testDRODark1_output.jpg", true, -1, -1); + } + + /** Tests calling the DRO routine with 0.0 factor, and DROALGORITHM_NONE - and that the resultant image is identical. 
+ */ + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + public void testDROZero() throws IOException, InterruptedException { + Log.d(TAG, "testDROZero"); + + setToDefault(); + + Bitmap bitmap = getBitmapFromFile(TestUtils.hdr_images_path + "testHDR3/input1.jpg"); + Bitmap bitmap_saved = bitmap.copy(bitmap.getConfig(), false); + + Thread.sleep(1000); // wait for camera to open + + List inputs = new ArrayList<>(); + inputs.add(bitmap); + try { + mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.0f, 4, true, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_NONE); + } + catch(HDRProcessorException e) { + Log.e(TAG, "processHDR failed", e); + throw new RuntimeException(); + } + + saveBitmap(inputs.get(0), "droZerotestHDR3_output.jpg"); + checkHistogram(bitmap); + + // check bitmaps are the same + Log.d(TAG, "compare bitmap " + bitmap); + Log.d(TAG, "with bitmap_saved " + bitmap_saved); + // sameAs doesn't seem to work + //assertTrue( bitmap.sameAs(bitmap_saved) ); + assertEquals(bitmap.getWidth(), bitmap_saved.getWidth()); + assertEquals(bitmap.getHeight(), bitmap_saved.getHeight()); + int [] old_row = new int[bitmap.getWidth()]; + int [] new_row = new int[bitmap.getWidth()]; + for(int y=0;y inputs, String output_name, int iso, long exposure_time, float zoom_factor, TestUtils.TestAvgCallback cb) { + return TestUtils.subTestAvg(mActivity, inputs, output_name, iso, exposure_time, zoom_factor, cb); + } + + /** Tests Avg algorithm on test samples "testAvg1". 
+ */ + public void testAvg1() throws IOException, InterruptedException { + Log.d(TAG, "testAvg1"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg1/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg1/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg1/input2.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg1_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, 1, 0}; + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -4, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests 
Avg algorithm on test samples "testAvg2". + */ + public void testAvg2() throws IOException, InterruptedException { + Log.d(TAG, "testAvg2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg2/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg2/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg2/input2.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg2_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -11, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -12, 0}; + int [] exp_offsets_x = {0, -16, 0}; + int [] exp_offsets_y = {0, -12, 0}; + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, -15, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -13, 0}; + //int [] exp_offsets_y = {0, -12, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -14, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, -12, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg 
algorithm on test samples "testAvg3". + */ + public void testAvg3() throws IOException, InterruptedException { + Log.d(TAG, "testAvg3"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg3/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg3/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg3_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( index == 1 ) { + //int [] exp_offsets_x = {0, 2, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, -1, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -9, 0}; + //int [] exp_offsets_y = {0, -11, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -8, 0}; + assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 0); + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, -18, 0}; + //int [] exp_offsets_y = {0, 17, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] 
exp_offsets_y = {0, 8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, -25, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -9, 0}; + //int [] exp_offsets_y = {0, 14, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, 12, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, -29, 0}; + //int [] exp_offsets_y = {0, -22, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -7, 0}; + //int [] exp_offsets_y = {0, 11, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 14, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 12, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + assertTrue(false); + }*/ + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 177); + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 152); + checkHistogramDetails(hdrHistogramDetails, 0, 21, 166); + } + + /** Tests Avg algorithm on test samples "testAvg4". 
+ */ + public void testAvg4() throws IOException, InterruptedException { + Log.d(TAG, "testAvg4"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg4/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg4/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg4_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 5, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + int [] exp_offsets_x = {0, 5, 0}; + int [] exp_offsets_y = {0, 1, 0}; + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 5, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 7, 0}; + //int [] exp_offsets_x = {0, 1, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, 
mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 7, 0}; + //int [] exp_offsets_x = {0, 3, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, 3, 0}; + int [] exp_offsets_y = {0, 9, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg5". + */ + public void testAvg5() throws IOException, InterruptedException { + Log.d(TAG, "testAvg5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg5/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg5/input4.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg5_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( index == 1 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, 5, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + 
assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 0); + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, -4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, 9, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + int [] exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, 10, 0}; + //int [] exp_offsets_y = {0, -4, 0}; + int [] exp_offsets_x = {0, 11, 0}; + int [] exp_offsets_y = {0, -3, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + assertTrue(false); + }*/ + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg6". 
+ */ + public void testAvg6() throws IOException, InterruptedException { + Log.d(TAG, "testAvg6"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg6/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg6/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg6_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( true ) + return;*/ + if( index == 1 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, 
exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 18, 51, 201); + //checkHistogramDetails(hdrHistogramDetails, 14, 38, 200); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 193); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 199); + //checkHistogramDetails(hdrHistogramDetails, 12, 46, 202); + //checkHistogramDetails(hdrHistogramDetails, 12, 46, 205); + //checkHistogramDetails(hdrHistogramDetails, 12, 44, 209); + //checkHistogramDetails(hdrHistogramDetails, 12, 44, 202); + //checkHistogramDetails(hdrHistogramDetails, 5, 16, 190); + checkHistogramDetails(hdrHistogramDetails, 5, 19, 199); + } + + /** Tests Avg algorithm on test samples "testAvg7". 
+ */ + public void testAvg7() throws IOException, InterruptedException { + Log.d(TAG, "testAvg7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg7/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg7/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg7_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 0, 0}; + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** 
Tests Avg algorithm on test samples "testAvg8". + */ + public void testAvg8() throws IOException, InterruptedException { + Log.d(TAG, "testAvg8"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg8/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg8/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg8_output.jpg", 1600, 1000000000L/16, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 4, 26, 92); + //checkHistogramDetails(hdrHistogramDetails, 3, 19, 68); + //checkHistogramDetails(hdrHistogramDetails, 0, 10, 60); + //checkHistogramDetails(hdrHistogramDetails, 1, 8, 72); + //checkHistogramDetails(hdrHistogramDetails, 1, 6, 64); + //checkHistogramDetails(hdrHistogramDetails, 1, 15, 75); + checkHistogramDetails(hdrHistogramDetails, 1, 16, 78); + } + + /** Tests Avg algorithm on test samples "testAvg9". 
+ */ + public void testAvg9() throws IOException, InterruptedException { + Log.d(TAG, "testAvg9"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + final boolean use_auto_photos = true; + + if( use_auto_photos ) { + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input_auto7.jpg"); + } + else { + inputs.add(TestUtils.avg_images_path + "testAvg9/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg9/input7.jpg"); + } + + String out_filename = use_auto_photos ? "testAvg9_auto_output.jpg" : "testAvg9_output.jpg"; + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, out_filename, 1600, use_auto_photos ? 
1000000000L/16 : 1000000000L/11, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg10". + */ + public void testAvg10() throws IOException, InterruptedException { + Log.d(TAG, "testAvg10"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + final boolean use_auto_photos = false; + + if( use_auto_photos ) { + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input_auto7.jpg"); + } + else { + inputs.add(TestUtils.avg_images_path + "testAvg10/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg10/input7.jpg"); + } + + String out_filename = use_auto_photos ? "testAvg10_auto_output.jpg" : "testAvg10_output.jpg"; + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, out_filename, 1196, use_auto_photos ? 
1000000000L/12 : 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg11". + */ + public void testAvg11() throws IOException, InterruptedException { + Log.d(TAG, "testAvg11"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // note, we don't actually use 8 images for a bright scene like this, but it serves as a good test for + // misalignment/ghosting anyway + inputs.add(TestUtils.avg_images_path + "testAvg11/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg11/input7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg11_output.jpg", 100, 1000000000L/338, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + 
//assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + else if( index == 2 ) { + //int [] exp_offsets_x = {0, -5, 0}; + //int [] exp_offsets_y = {0, -1, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + //int [] exp_offsets_x = {0, -1, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, 0, 0}; + //int [] exp_offsets_y = {0, -16, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, -10, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, -8, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, -12, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + //int [] exp_offsets_x = {0, -3, 0}; + //int [] exp_offsets_y = {0, -20, 0}; + //int [] exp_offsets_x = {0, -2, 0}; + //int [] exp_offsets_y = {0, -18, 0}; + //int [] exp_offsets_x = {0, -6, 0}; + //int [] exp_offsets_y = {0, -12, 0}; + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -12, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + //int [] exp_offsets_x = {0, -8, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + //int [] exp_offsets_x = {0, -10, 0}; + //int [] exp_offsets_y = {0, 4, 0}; + //int [] exp_offsets_x = {0, -12, 0}; + //int [] exp_offsets_y = {0, 10, 0}; + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + //int [] exp_offsets_x = {0, 0, 
0}; + //int [] exp_offsets_y = {0, -6, 0}; + //int [] exp_offsets_x = {0, 2, 0}; + //int [] exp_offsets_y = {0, -6, 0}; + //int [] exp_offsets_x = {0, -4, 0}; + //int [] exp_offsets_y = {0, 2, 0}; + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + //int [] exp_offsets_x = {0, 7, 0}; + //int [] exp_offsets_y = {0, -2, 0}; + //int [] exp_offsets_x = {0, 6, 0}; + //int [] exp_offsets_y = {0, 6, 0}; + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 4, 0}; + //int [] exp_offsets_x = {0, 8, 0}; + //int [] exp_offsets_y = {0, 8, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg12". 
+ */ + public void testAvg12() throws IOException, InterruptedException { + Log.d(TAG, "testAvg12"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg12/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg12/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg12_output.jpg", 100, 1000000000L/1617, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 254); + //checkHistogramDetails(hdrHistogramDetails, 0, 27, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 20, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 17, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 31, 255); + } + + /** Tests Avg algorithm on test samples "testAvg13". + */ + public void testAvg13() throws IOException, InterruptedException { + Log.d(TAG, "testAvg13"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg13/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg13/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg13_output.jpg", 100, 1000000000L/2482, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg14". 
+ */ + public void testAvg14() throws IOException, InterruptedException { + Log.d(TAG, "testAvg14"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg14/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg14/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg14_output.jpg", 1600, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, -8, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + //int [] exp_offsets_x = {0, 4, 0}; + //int [] exp_offsets_y = {0, 28, 0}; + int [] exp_offsets_x = {0, 4, 0}; + int [] exp_offsets_y = {0, 40, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 25, 245); + } + + /** Tests Avg algorithm on test samples "testAvg15". 
+ */ + public void testAvg15() throws IOException, InterruptedException { + Log.d(TAG, "testAvg15"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg15/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg15/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg15_output.jpg", 100, 1000000000L/1525, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 38, 254); + } + + /** Tests Avg algorithm on test samples "testAvg16". + */ + public void testAvg16() throws IOException, InterruptedException { + Log.d(TAG, "testAvg16"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg16/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg16/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg16_output.jpg", 100, 1000000000L/293, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg17". 
+ */ + public void testAvg17() throws IOException, InterruptedException { + Log.d(TAG, "testAvg17"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg17/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg17/input7.jpg"); + + // the input images record ISO=800, but they were taken with OnePlus 3T which has bug where ISO is reported as max + // of 800; in reality for a scene this dark, it was probably more like ISO 1600 + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg17_output.jpg", 1600, 1000000000L/17, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 12, 0}; + int [] exp_offsets_y = {0, 28, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 100, 233); + //checkHistogramDetails(hdrHistogramDetails, 0, 100, 236); + //checkHistogramDetails(hdrHistogramDetails, 0, 92, 234); + //checkHistogramDetails(hdrHistogramDetails, 0, 102, 241); + //checkHistogramDetails(hdrHistogramDetails, 0, 102, 
238); + checkHistogramDetails(hdrHistogramDetails, 0, 103, 244); + } + + /** Tests Avg algorithm on test samples "testAvg18". + */ + public void testAvg18() throws IOException, InterruptedException { + Log.d(TAG, "testAvg18"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg18/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg18/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg18_output.jpg", 100, 1000000000L/591, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + //assertTrue(mActivity.getApplicationInterface().getHDRProcessor().sharp_index == 1); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg19". + */ + public void testAvg19() throws IOException, InterruptedException { + Log.d(TAG, "testAvg19"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg19/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg19/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg19_output.jpg", 100, 1000000000L/2483, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 88, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 77, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 87, 252); + //checkHistogramDetails(hdrHistogramDetails, 0, 74, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 58, 255); + } + + /** Tests Avg algorithm on test samples 
"testAvg20". + */ + public void testAvg20() throws IOException, InterruptedException { + Log.d(TAG, "testAvg20"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg20/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg20/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg20_output.jpg", 100, 1000000000L/3124, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg21". + */ + public void testAvg21() throws IOException, InterruptedException { + Log.d(TAG, "testAvg21"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg21/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg21/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg21_output.jpg", 102, 1000000000L/6918, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg22". 
+ */ + public void testAvg22() throws IOException, InterruptedException { + Log.d(TAG, "testAvg22"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // repeat same image twice + inputs.add(TestUtils.avg_images_path + "testAvg22/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg22/input0.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg22_output.jpg", 100, 1000000000L/3459, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg23". + */ + public void testAvg23() throws IOException, InterruptedException { + Log.d(TAG, "testAvg23"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_3.jpg"); + // only test 4 images, to reflect latest behaviour that we take 4 images for this ISO + /*inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg23/IMG_20180520_111250_7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg23_output.jpg", 1044, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + 
index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, -4, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 4 ) { + int [] exp_offsets_x = {0, -8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 5 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 6 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 81, 251); + //checkHistogramDetails(hdrHistogramDetails, 0, 80, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 83, 255); + } + + /** Tests Avg algorithm on test samples "testAvg24". 
+ */ + public void testAvg24() throws IOException, InterruptedException { + Log.d(TAG, "testAvg24"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg24/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg24/input1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg24_output.jpg", 100, 1000000000L/2421, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 77, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 74, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 250); + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 80, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 56, 254); + } + + /** Tests Avg algorithm on test samples "testAvg25". + */ + public void testAvg25() throws IOException, InterruptedException { + Log.d(TAG, "testAvg25"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg25/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg25/input3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg25_output.jpg", 512, 1000000000L/20, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg26". 
+ */ + public void testAvg26() throws IOException, InterruptedException { + Log.d(TAG, "testAvg26"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // note we now take only 3 images for bright scenes, but still test with 4 images as this serves as a good test + // against ghosting + inputs.add(TestUtils.avg_images_path + "testAvg26/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg26/input3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg26_output.jpg", 100, 1000000000L/365, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + /*if( true ) + return;*/ + if( index == 1 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg27". 
+ */ + public void testAvg27() throws IOException, InterruptedException { + Log.d(TAG, "testAvg27"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg27/IMG_20180610_205929_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg27/IMG_20180610_205929_1.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg27_output.jpg", 100, 1000000000L/482, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + /** Tests Avg algorithm on test samples "testAvg28". + */ + public void testAvg28() throws IOException, InterruptedException { + Log.d(TAG, "testAvg28"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg28/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg28/input008.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg28_output.jpg", 811, 1000000000L/21, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 21, 255); + 
//checkHistogramDetails(hdrHistogramDetails, 0, 18, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 8, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + } + + /** Tests Avg algorithm on test samples "testAvg29". + */ + public void testAvg29() throws IOException, InterruptedException { + Log.d(TAG, "testAvg29"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg29/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg29/input009.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg29_output.jpg", 40, 1000000000L/2660, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 88, 127, 255); + //checkHistogramDetails(hdrHistogramDetails, 92, 134, 255); + } + + /** Tests Avg algorithm on test samples "testAvg30". 
+ */ + public void testAvg30() throws IOException, InterruptedException { + Log.d(TAG, "testAvg30"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg30/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg30/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg30/input003.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg30_output.jpg", 60, 1000000000L/411, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 2 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, 0, 0}; + int [] exp_offsets_y = {0, -4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else { + fail(); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 134, 254); + //checkHistogramDetails(hdrHistogramDetails, 0, 144, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 107, 254); + } + + /** Tests Avg algorithm on test samples "testAvg31". 
+ */ + public void testAvg31() throws IOException, InterruptedException { + Log.d(TAG, "testAvg31"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg31/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg31/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg31_output.jpg", 609, 1000000000L/25, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 24, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 9, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + } + + /** Tests Avg algorithm on test samples "testAvg32". 
+ */ + public void testAvg32() throws IOException, InterruptedException { + Log.d(TAG, "testAvg32"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg32/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg32/input007.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg32_output.jpg", 335, 1000000000L/120, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 34, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 13, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 36, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 61, 254); + } + + /** Tests Avg algorithm on test samples "testAvg33". 
+ */ + public void testAvg33() throws IOException, InterruptedException { + Log.d(TAG, "testAvg33"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg33/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg33/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg33_output.jpg", 948, 1000000000L/18, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 81, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 63, 255); + } + + /** Tests Avg algorithm on test samples "testAvg34". 
+ */ + public void testAvg34() throws IOException, InterruptedException { + Log.d(TAG, "testAvg34"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg34/IMG_20180627_121959_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg34_output.jpg", 100, 1000000000L/289, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 108, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 114, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 103, 255); + } + + /** Tests Avg algorithm on test samples "testAvg35". + */ + public void testAvg35() throws IOException, InterruptedException { + Log.d(TAG, "testAvg35"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg35/IMG_20180711_144453_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg35_output.jpg", 100, 1000000000L/2549, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 165, 247); + checkHistogramDetails(hdrHistogramDetails, 0, 169, 248); + } + + /** Tests Avg algorithm on test samples "testAvg36". 
+ */ + public void testAvg36() throws IOException, InterruptedException { + Log.d(TAG, "testAvg36"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_3.jpg"); + // only test 4 images, to reflect latest behaviour that we take 4 images for this ISO/exposure time + /*inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg36/IMG_20180709_114831_7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg36_output.jpg", 752, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] exp_offsets_x = {0, -12, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + else if( index == 3 ) { + int [] exp_offsets_x = {0, -28, 0}; + int [] exp_offsets_y = {0, 0, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 86, 255); + } + + /** Tests Avg algorithm on test samples "testAvg37". 
+ */ + public void testAvg37() throws IOException, InterruptedException { + Log.d(TAG, "testAvg37"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg37/IMG_20180715_173155_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg37_output.jpg", 131, 1000000000L/50, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 12, 109, 255); + //checkHistogramDetails(hdrHistogramDetails, 3, 99, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 99, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 125, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 94, 255); + checkHistogramDetails(hdrHistogramDetails, 6, 94, 255); + } + + /** Tests Avg algorithm on test samples "testAvg38". 
+ */ + public void testAvg38() throws IOException, InterruptedException { + Log.d(TAG, "testAvg38"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg38/IMG_20180716_232102_7.jpg"); + + // n.b., this was a zoomed in photo, but can't quite remember the exact zoom level! + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg38_output.jpg", 1505, 1000000000L/10, 3.95f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + } + + /** Tests Avg algorithm on test samples "testAvg39". 
+ */ + public void testAvg39() throws IOException, InterruptedException { + Log.d(TAG, "testAvg39"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg39/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg39/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg39_output.jpg", 521, 1000000000L/27, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 64, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 25, 255); + } + + /** Tests Avg algorithm on test samples "testAvg40". 
+ */ + public void testAvg40() throws IOException, InterruptedException { + Log.d(TAG, "testAvg40"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg40/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg40/input009.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg40_output.jpg", 199, 1000000000L/120, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 50, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 19, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 50, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 67, 255); + } + + /** Tests Avg algorithm on test samples "testAvg41". 
+ */ + public void testAvg41() throws IOException, InterruptedException { + Log.d(TAG, "testAvg41"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + // example from Google HDR+ dataset + // note, the number of input images doesn't necessarily match what we'd take for this scene, but we want to compare + // to the Google HDR+ result + inputs.add(TestUtils.avg_images_path + "testAvg41/input001.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input002.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input003.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input004.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input005.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input006.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input007.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input008.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input009.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg41/input010.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg41_output.jpg", 100, 1000000000L/869, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 49, 255); + //checkHistogramDetails(hdrHistogramDetails, 0, 37, 255); + checkHistogramDetails(hdrHistogramDetails, 0, 59, 254); + } + + /** Tests Avg algorithm on test samples "testAvg42". 
+ */ + public void testAvg42() throws IOException, InterruptedException { + Log.d(TAG, "testAvg42"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg42/IMG_20180822_145152_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg42_output.jpg", 100, 1000000000L/2061, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 67, 254); + checkHistogramDetails(hdrHistogramDetails, 0, 61, 255); + } + + /** Tests Avg algorithm on test samples "testAvg43". + */ + public void testAvg43() throws IOException, InterruptedException { + Log.d(TAG, "testAvg43"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg43/IMG_20180831_143226_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg43_output.jpg", 100, 1000000000L/2152, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 69, 253); + } + + /** Tests Avg algorithm on test samples "testAvg44". 
+ */ + public void testAvg44() throws IOException, InterruptedException { + Log.d(TAG, "testAvg44"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg44/IMG_20180830_133917_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg44_output.jpg", 40, 1000000000L/2130, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests Avg algorithm on test samples "testAvg45". + */ + public void testAvg45() throws IOException, InterruptedException { + Log.d(TAG, "testAvg45"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg45/IMG_20180719_133947_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg45_output.jpg", 100, 1000000000L/865, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 75, 255); + } + + /** Tests Avg algorithm on test samples "testAvg46". 
+ */ + public void testAvg46() throws IOException, InterruptedException { + Log.d(TAG, "testAvg46"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg46/IMG_20180903_203141_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg46_output.jpg", 1505, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + } + + /** Tests Avg algorithm on test samples "testAvg47". 
+ */ + public void testAvg47() throws IOException, InterruptedException { + Log.d(TAG, "testAvg47"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg47/IMG_20180911_114752_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg47_output.jpg", 749, 1000000000L/12, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + } + + /** Tests Avg algorithm on test samples "testAvg48". + */ + public void testAvg48() throws IOException, InterruptedException { + Log.d(TAG, "testAvg48"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg48/IMG_20180911_110520_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg48_output.jpg", 1196, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + 
//checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + } + + /** Tests Avg algorithm on test samples "testAvg49". + */ + public void testAvg49() throws IOException, InterruptedException { + Log.d(TAG, "testAvg49"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg49/IMG_20180911_120200_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg49_output.jpg", 1505, 1000000000L/10, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 30, 255); + } + + /** Tests Avg algorithm on test samples "testAvg50". 
+ */ + public void testAvg50() throws IOException, InterruptedException { + Log.d(TAG, "testAvg50"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg50/IMG_20181015_144335_3.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg50_output.jpg", 114, 1000000000L/33, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + } + + /** Tests Avg algorithm on test samples "testAvg51". + */ + public void testAvg51() throws IOException, InterruptedException { + Log.d(TAG, "testAvg51"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_3.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg51/IMG_20181025_182917_7.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg51_output.jpg", 1600, 1000000000L/3, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + if( index == 1 ) { + int [] 
exp_offsets_x = {0, 8, 0}; + int [] exp_offsets_y = {0, 4, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + assertEquals(0, mActivity.getApplicationInterface().getHDRProcessor().sharp_index); + } + else if( index == 7 ) { + int [] exp_offsets_x = {0, 60, 0}; + int [] exp_offsets_y = {0, 28, 0}; + checkHDROffsets(exp_offsets_x, exp_offsets_y, mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize()); + } + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + } + + /** Tests Avg algorithm on test samples "testAvg52". + */ + public void testAvg52() throws IOException, InterruptedException { + Log.d(TAG, "testAvg52"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvg52/IMG_20181119_144836_2.jpg"); + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvg52_output.jpg", 100, 1000000000L/297, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 0, 91, 255); + } + + /** Tests Avg algorithm on test samples "testAvgtemp". + * Used for one-off testing, or to recreate NR images from the base exposures to test an updated alorithm. + * The test images should be copied to the test device into DCIM/testOpenCamera/testdata/hdrsamples/testAvgtemp/ . 
+ */ + public void testAvgtemp() throws IOException, InterruptedException { + Log.d(TAG, "testAvgtemp"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input0.png"); + /*inputs.add(TestUtils.avg_images_path + "testAvgtemp/input0.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input1.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input2.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input3.jpg");*/ + /*inputs.add(TestUtils.avg_images_path + "testAvgtemp/input4.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input5.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input6.jpg"); + inputs.add(TestUtils.avg_images_path + "testAvgtemp/input7.jpg");*/ + + TestUtils.HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvgtemp_output.jpg", 250, 1000000000L/33, 1.0f, new TestUtils.TestAvgCallback() { + @Override + public void doneProcessAvg(int index) { + Log.d(TAG, "doneProcessAvg: " + index); + } + }); + + //checkHistogramDetails(hdrHistogramDetails, 1, 39, 253); + } + + private int tonemapConvert(int in, TonemapCurve curve, int channel) { + float in_f = in/255.0f; + float out_f = 0.0f; + // first need to undo the gamma that's already been applied to the test input images (since the tonemap curve also reapplies gamma) + in_f = (float)Math.pow(in_f, 2.2f); + boolean found = false; + for(int i=0;i= p0.x && in_f <= p1.x ) { + found = true; + float alpha = (in_f - p0.x) / (p1.x - p0.x); + out_f = p0.y + alpha * (p1.y - p0.y); + } + } + if( !found ) { + Log.d(TAG, "failed to convert: " + in_f); + throw new RuntimeException(); + } + return (int)(255.0f * out_f + 0.5f); + } + + private TestUtils.HistogramDetails subTestLogProfile(String image_path, String output_name) throws IOException, InterruptedException { + Log.d(TAG, "subTestLogProfile"); + + if( !mPreview.usingCamera2API() ) { + Log.d(TAG, "test requires camera2 api"); 
// NOTE(review): this hunk is corrupted in the extracted diff. The middle of
// subTestLogProfile (everything between "for(int y=0;y" and "... inputs" - the
// per-pixel tonemap loop and the histogram computation) has been eaten, almost
// certainly by lost '<' characters, and the generic parameter of subTestPanorama's
// List argument is missing for the same reason. Do not edit this region here;
// restore it from the upstream MainActivityTest.java.
+ return null; + } + + View switchVideoButton = mActivity.findViewById(net.sourceforge.opencamera.R.id.switch_video); + clickView(switchVideoButton); + waitUntilCameraOpened(); + + Bitmap bitmap = getBitmapFromFile(image_path); + + CameraController2 camera_controller2 = (CameraController2)mPreview.getCameraController(); + TonemapCurve curve = camera_controller2.testGetTonemapCurve(); + + // compute lookup tables for faster operation + int [][] tonemap_lut = new int[3][]; + for(int channel=0;channel<3;channel++) { + Log.d(TAG, "compute tonemap_lut: " + channel); + tonemap_lut[channel] = new int[256]; + for(int i=0;i<256;i++) { + tonemap_lut[channel][i] = tonemapConvert(i, curve, channel); + } + } + + + int [] buffer = new int[bitmap.getWidth()]; + for(int y=0;y inputs, String output_name, String gyro_debug_info_filename, float panorama_pics_per_screen, float camera_angle_x, float camera_angle_y, float gyro_tol_degrees) { + TestUtils.subTestPanorama(mActivity, inputs, output_name, gyro_debug_info_filename, panorama_pics_per_screen, camera_angle_x, camera_angle_y, gyro_tol_degrees); + } + + /** Tests panorama algorithm on test samples "testPanoramaWhite". + * This tests that auto-alignment fails gracefully if we can't find any matches. + */ + public void testPanoramaWhite() throws IOException, InterruptedException { + Log.d(TAG, "testPanoramaWhite"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + inputs.add(TestUtils.panorama_images_path + "testPanoramaWhite/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanoramaWhite/input0.jpg"); + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + float panorama_pics_per_screen = 2.0f; + String output_name = "testPanoramaWhite_output.jpg"; + + subTestPanorama(inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama1".
+ */ + public void testPanorama1() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama1"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input3.jpg"); + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + float panorama_pics_per_screen = 2.0f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(47.44656/49.56283); + String output_name = "testPanorama1_output.jpg"; + + subTestPanorama(inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama2". + */ + public void testPanorama2() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + /*final float panorama_pics_per_screen = 1.0f; + //inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2xxx/input2.jpg");*/ + /*final float panorama_pics_per_screen = 2.0f; + //inputs.add(TestUtils.panorama_images_path + "testPanorama1/input0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama1/input3.jpg"); + String output_name = "testPanorama1_output.jpg";*/ + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input0.jpg"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama2/input1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama2/input5.jpg"); + String output_name = "testPanorama2_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 2.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama3". + */ + public void testPanorama3() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama3"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131249.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131252.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131255.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131258.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131301.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131303.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131305.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131307.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131315.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131317.jpg"); + inputs.add(TestUtils.panorama_images_path + 
"testPanorama3/IMG_20190214_131320.jpg"); + String output_name = "testPanorama3_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama3", with panorama_pics_per_screen set + * to 4.0. + */ + public void testPanorama3_picsperscreen2() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama3_picsperscreen2"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131249.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131252.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131255.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131258.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131301.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131303.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131305.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131307.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131315.jpg"); + //inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131317.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama3/IMG_20190214_131320.jpg"); + String output_name = "testPanorama3_picsperscreen2_output.jpg"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we 
compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, null, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama4". + */ + public void testPanorama4() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama4"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317_7.jpg"); + String output_name = "testPanorama4_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama4/IMG_20190222_225317.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama5". 
+ */ + public void testPanorama5() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama5"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524_7.jpg"); + String output_name = "testPanorama5_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama5/IMG_20190223_220524.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama6". 
+ */ + public void testPanorama6() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama6"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232_7.jpg"); + String output_name = "testPanorama6_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama6/IMG_20190225_154232.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama7". 
+ */ + public void testPanorama7() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama7"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 4.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510_8.jpg"); + String output_name = "testPanorama7_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama7/IMG_20190225_155510.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama8". 
+ */ + public void testPanorama8() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama8"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431_3.jpg"); + String output_name = "testPanorama8_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama8/IMG_20190227_001431.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/52.26029); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama9". 
+ */ + public void testPanorama9() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama9"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213_6.jpg"); + String output_name = "testPanorama9_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama9/IMG_20190301_145213.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + + Thread.sleep(1000); // need to wait for debug images to be saved/broadcast? + } + + /** Tests panorama algorithm on test samples "testPanorama10". 
+ */ + public void testPanorama10() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama10"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_10.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_11.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948_12.jpg"); + String output_name = "testPanorama10_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama10/IMG_20190301_144948.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama11". 
+ */ + public void testPanorama11() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama11"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652_6.jpg"); + String output_name = "testPanorama11_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama11/IMG_20190306_143652.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama12". 
+ */ + public void testPanorama12() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama12"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008_9.jpg"); + String output_name = "testPanorama12_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama12/IMG_20190308_152008.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + // these images were taken with incorrect camera view angles, so we compensate in the test: + panorama_pics_per_screen *= (float)(50.282097/50.44399); + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama13". 
+ */ + public void testPanorama13() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama13"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152_9.jpg"); + String output_name = "testPanorama13_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama13/IMG_20190512_014152.xml"; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama14". 
+ */ + public void testPanorama14() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama14"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249_9.jpg"); + String output_name = "testPanorama14_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama14/IMG_20190513_151249.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama15". 
+ */ + public void testPanorama15() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama15"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624_9.jpg"); + String output_name = "testPanorama15_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama15/IMG_20190513_151624.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama16". 
+ */ + public void testPanorama16() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama16"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731_9.jpg"); + String output_name = "testPanorama16_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama16/IMG_20190624_151731.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama17". 
+ */ + public void testPanorama17() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama17"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423_9.jpg"); + String output_name = "testPanorama17_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama17/IMG_20190625_135423.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama18". 
+ */ + public void testPanorama18() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama18"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559_9.jpg"); + String output_name = "testPanorama18_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama18/IMG_20190626_152559.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama19". 
+ */ + public void testPanorama19() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama19"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059_9.jpg"); + String output_name = "testPanorama19_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama19/IMG_20190627_134059.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama20". 
+ */ + public void testPanorama20() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama20"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027_9.jpg"); + String output_name = "testPanorama20_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama20/IMG_20190628_145027.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama21". 
+ */ + public void testPanorama21() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama21"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552_9.jpg"); + String output_name = "testPanorama21_output.jpg"; + String gyro_name = TestUtils.panorama_images_path + "testPanorama21/IMG_20190628_145552.xml"; + //gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 0.5f); + } + + /** Tests panorama algorithm on test samples "testPanorama22". 
+ */ + public void testPanorama22() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama22"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama22/IMG_20190629_165627_7.jpg"); + String output_name = "testPanorama22_output.jpg"; + String gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama23". 
+ */ + public void testPanorama23() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama23"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama23/IMG_20190702_145916_4.jpg"); + String output_name = "testPanorama23_output.jpg"; + String gyro_name = null; + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama24". 
+ */ + public void testPanorama24() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama24"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama24/IMG_20190703_154333_9.jpg"); + String output_name = "testPanorama24_output.jpg"; + String gyro_name = null; + // taken with OnePlus 3T, Camera2 API: + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama25". 
+ */ + public void testPanorama25() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama25"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + //float panorama_pics_per_screen = 3.33333f / 2.0f; + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama25/IMG_20190706_215940_6.jpg"); + String output_name = "testPanorama25_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama26". 
+ */ + public void testPanorama26() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama26"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama26/IMG_20190706_214842_6.jpg"); + String output_name = "testPanorama26_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama27". 
+ */ + public void testPanorama27() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama27"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama27/IMG_20190706_192120_6.jpg"); + String output_name = "testPanorama27_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, Camera2 API: + float camera_angle_x = 66.708595f; + float camera_angle_y = 50.282097f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama28". 
+ */ + public void testPanorama28() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama28"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + /*inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_0.jpg");*/ + // converted from original JPEGs to PNG using Nokia 8: + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_8.png"); + 
inputs.add(TestUtils.panorama_images_path + "testPanorama28/input_bitmap_9.png"); + String output_name = "testPanorama28_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama28", but with a nbnq similar set of + * input images. Instead of converting the original JPEGs to PNG on Nokia 8, this was done on + * the Samsung Galaxy S10e, which gives small differences, but enough to show up potential + * stability issues. + */ + public void testPanorama28_galaxys10e() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama28_galaxys10e"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + /*inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/IMG_20190725_134756_0.jpg");*/ + // converted from original JPEGs to PNG using Samsung Galaxy S10e: + 
inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama28/galaxys10e_input_bitmap_9.png"); + String output_name = "testPanorama28_galaxys10e_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama29". 
+ */ + public void testPanorama29() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama29"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama29/IMG_20190719_145852_0.jpg"); + String output_name = "testPanorama29_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, old API: + float camera_angle_x = 66.1062f; + float camera_angle_y = 49.88347f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama30". 
+ */ + public void testPanorama30() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama30"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + /*inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_9.jpg");*/ + // converted from original JPEGs to PNG using Nokia 8: + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + 
"testPanorama30/nokia8_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/nokia8_input_bitmap_9.png"); + String output_name = "testPanorama30_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama30", but with a nbnq similar set of + * input images. Instead of converting the original JPEGs to PNG on Nokia 8, this was done on + * the Samsung Galaxy S10e, which gives small differences, but enough to show up potential + * stability issues. + */ + public void testPanorama30_galaxys10e() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama30_galaxys10e"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + /*inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/IMG_20190723_142934_8.jpg"); + inputs.add(TestUtils.panorama_images_path + 
"testPanorama30/IMG_20190723_142934_9.jpg");*/ + // converted from original JPEGs to PNG using Samsung Galaxy S10e: + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_0.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_1.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_2.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_3.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_4.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_5.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_6.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_7.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_8.png"); + inputs.add(TestUtils.panorama_images_path + "testPanorama30/galaxys10e_input_bitmap_9.png"); + String output_name = "testPanorama30_galaxys10e_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama31". 
+ */ + public void testPanorama31() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama31"); + + setToDefault(); + + // list assets + List inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama31/IMG_20190704_135633_6.jpg"); + String output_name = "testPanorama31_output.jpg"; + String gyro_name = null; + // taken with OnePlus 3T, Camera2 API: + float camera_angle_x = 62.93796f; + float camera_angle_y = 47.44656f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama3". 
+ */ + public void testPanorama32() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama32"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama32/IMG_20190705_145938_8.jpg"); + String output_name = "testPanorama32_output.jpg"; + String gyro_name = null; + // taken with OnePlus 3T, old API: + float camera_angle_x = 60.0f; + float camera_angle_y = 45.0f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama33".
+ */ + public void testPanorama33() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama33"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama33/IMG_20190713_013437_5.jpg"); + String output_name = "testPanorama33_output.jpg"; + String gyro_name = null; + // taken with Nokia 8, old API: + float camera_angle_x = 66.1062f; + float camera_angle_y = 49.88347f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama34".
+ */ + public void testPanorama34() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama34"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + // right-to-left: + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_9.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama34/IMG_20190717_144042_0.jpg"); + String output_name = "testPanorama34_output.jpg"; + String gyro_name = null; + // taken with Nexus 6, old API: + float camera_angle_x = 62.7533f; + float camera_angle_y = 47.298824f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama35".
+ */ + public void testPanorama35() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama35"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama35/IMG_20190717_145114_9.jpg"); + String output_name = "testPanorama35_output.jpg"; + String gyro_name = null; + // taken with Nexus 7, old API: + float camera_angle_x = 55.0f; + float camera_angle_y = 41.401073f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama36".
+ */ + public void testPanorama36() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama36"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama36/IMG_20190722_201331_7.jpg"); + String output_name = "testPanorama36_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, ultra wide rear camera: + float camera_angle_x = 104.00253f; + float camera_angle_y = 81.008804f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama37".
+ */ + public void testPanorama37() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama37"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama37/IMG_20190723_203441_8.jpg"); + String output_name = "testPanorama37_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, old API, standard rear camera: + // n.b., camera angles are indeed the exact same as with Camera2 + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } + + /** Tests panorama algorithm on test samples "testPanorama38".
+ */ + public void testPanorama38() throws IOException, InterruptedException { + Log.d(TAG, "testPanorama38"); + + setToDefault(); + + // list assets + List<String> inputs = new ArrayList<>(); + + float panorama_pics_per_screen = 3.33333f; + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_0.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_1.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_2.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_3.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_4.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_5.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_6.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_7.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_8.jpg"); + inputs.add(TestUtils.panorama_images_path + "testPanorama38/IMG_20190722_141148_9.jpg"); + String output_name = "testPanorama38_output.jpg"; + String gyro_name = null; + // taken with Samsung Galaxy S10e, Camera2 API, standard rear camera: + float camera_angle_x = 66.3177f; + float camera_angle_y = 50.04736f; + + subTestPanorama(inputs, output_name, gyro_name, panorama_pics_per_screen, camera_angle_x, camera_angle_y, 1.0f); + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/MainTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/MainTests.java new file mode 100644 index 0000000..52b7fdc --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/MainTests.java @@ -0,0 +1,109 @@ +package net.sourceforge.opencamera.test; + +import android.os.Build; + +import junit.framework.Test; +import junit.framework.TestSuite; + +import net.sourceforge.opencamera.TestUtils; + +public 
class MainTests { + // Tests that don't fit into another of the Test suites + public static Test suite() { /* NOTE(review): ordering is deliberate — the permission-prompting tests run first, per the comment below. */ + /*return new TestSuiteBuilder(AllTests.class) + .includeAllPackagesUnderHere() + .build();*/ + TestSuite suite = new TestSuite(MainTests.class.getName()); + // put these tests first as they require various permissions be allowed, that can only be set by user action + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationSettings")); + // other tests: + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks1")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks3")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPause")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmediatelyQuit")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testStartCameraPreviewCount")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCamera2PrefUpgrade")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveModes")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashVideoMode")); + //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFlashTorchSwitchCamera")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashStartup")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashStartup2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDRRestart")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewSize")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewSizeWYSIWYG")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testResolutionMaxMP")); + if( TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testResolutionBurst")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAutoFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAutoFocusCorners")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopup")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopupLeftLayout")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testRightLayout")); + //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopupLayout")); // don't autotest for now, see comments for the test + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchResolution")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFaceDetection")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSwitchVideoSwitchCameras")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusRemainMacroSwitchCamera")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusRemainMacroSwitchPhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSaveMacroSwitchPhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSwitchVideoResetContinuous")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureRepeatTouch")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureSwitchAuto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, 
"testContinuousVideoFocusForPhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testStartupAutoFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveQuality")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoom")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoomIdle")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoomSwitchCamera")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraIdle")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraRepeat")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTouchFocusQuick")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGallery")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettings")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsSaveLoad")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserNew")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserInvalid")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistory")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistorySAF")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsPrivacyPolicy")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewRotation")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLayoutNoLimits")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLayoutNoLimitsStartup")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCameraModes")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFailOpenCamera")); + if( !TestUtils.test_camera2 ) { + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAudioControlIcon")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIconsAgainstCameras")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testOnError")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGPSString")); + } + if( TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewBitmap")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoFPSHighSpeed")); + } + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) { + // intensive test, can crash when run as suite on older devices (Nexus 6, Nexus 7) with Camera2 at least + // also run this test last, just in case + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraRepeat2")); + } + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/MultiCameraTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/MultiCameraTests.java new file mode 100644 index 0000000..830e583 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/MultiCameraTests.java @@ -0,0 +1,18 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class MultiCameraTests { + // Tests to run specifically on devices where MainActivity.isMultiCamEnabled() returns true. 
+ public static Test suite() { + TestSuite suite = new TestSuite(MultiCameraTests.class.getName()); /* was MainTests.class.getName() — copy-paste; suite should report its own name */ + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIconsAgainstCameras")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraAll")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCamera")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraMulti")); + + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/Nexus7Tests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/Nexus7Tests.java new file mode 100644 index 0000000..0b3f6f5 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/Nexus7Tests.java @@ -0,0 +1,36 @@ +package net.sourceforge.opencamera.test; + +import android.os.Build; + +import junit.framework.Test; +import junit.framework.TestSuite; + +import net.sourceforge.opencamera.TestUtils; + +public class Nexus7Tests { + // Tests to run specifically on Nexus 7 + public static Test suite() { + TestSuite suite = new TestSuite(Nexus7Tests.class.getName()); + + // we run the following tests on the Nexus 7 as a device that supports SAF, but doesn't have Android 7+ (where we use alternative methods for read/writing Exif tags without needing File) + // update: we now (as of 1.48.2) use the same codepaths for exif tags for before and after Android 7, but might as well keep these tests here anyway + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStampSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOnSAF")); + + // tests useful for device with no flash, and only 1 camera + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability")); + + // 
tests for testing Camera2 API with LEGACY Camera2 functionality + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto")); + /* NOTE(review): the following if-branch is intentionally empty — video tests are skipped on Android 5/6 emulators, per the comment inside it */ + if( TestUtils.isEmulator() && ( Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP || Build.VERSION.SDK_INT == Build.VERSION_CODES.M ) ) { + // video doesn't work on Android 5 or 6 emulator! + } + else { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo")); + } + + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/OldDeviceTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/OldDeviceTests.java new file mode 100644 index 0000000..092db18 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/OldDeviceTests.java @@ -0,0 +1,48 @@ +package net.sourceforge.opencamera.test; + +import android.os.Build; + +import junit.framework.Test; +import junit.framework.TestSuite; + +import net.sourceforge.opencamera.TestUtils; + +public class OldDeviceTests { + // Small set of tests to run on very old devices. 
+ public static Test suite() { + TestSuite suite = new TestSuite(OldDeviceTests.class.getName()); /* was MainTests.class.getName() — copy-paste; suite should report its own name */ + + // put these tests first as they require various permissions be allowed, that can only be set by user action + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPause")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveModes")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGallery")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettings")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsSaveLoad")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserNew")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserInvalid")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistory")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsPrivacyPolicy")); + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOn")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevel")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelLowMemory")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAngles")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAnglesLowMemory")); + + if( TestUtils.isEmulator() && ( Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP || Build.VERSION.SDK_INT == Build.VERSION_CODES.M ) ) { + // video doesn't work on Android 5 or 6 emulator! 
+ } + else { /* video tests run only off-emulator or on emulators newer than Android 6 — see comment above */ + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitles")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideoDurationLimit")); + } + + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/PanoramaTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/PanoramaTests.java new file mode 100644 index 0000000..169c127 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/PanoramaTests.java @@ -0,0 +1,61 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class PanoramaTests { + /** Tests for Panorama algorithm - only need to run on a single device + * Should manually look over the images dumped onto DCIM/ + * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/ + * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes + * time to transfer to the device every time we run the tests. + * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder. + * UPDATE: now deprecated, replaced with PanoramaInstrumentedTests. 
*/ + public static Test suite() { + TestSuite suite = new TestSuite(PanoramaTests.class.getName()); /* was MainTests.class.getName() — copy-paste; suite should report its own name */ + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanoramaWhite")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama3")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama3_picsperscreen2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama6")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama7")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama8")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama9")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama10")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama11")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama12")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama13")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama14")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama15")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama16")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama17")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama18")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama19")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama20")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama21")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama22")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama23")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama24")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama25")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama26")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama27")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama28")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama28_galaxys10e")); /* "_galaxys10e" variants use inputs pre-converted on that device — see MainActivityTest */ + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama29")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama30")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama30_galaxys10e")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama31")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama32")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama33")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama34")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama35")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama36")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama37")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama38")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoCamera2Tests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoCamera2Tests.java new file mode 100644 index 0000000..22c23a4 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoCamera2Tests.java @@ -0,0 +1,42 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import 
junit.framework.TestSuite; + +public class PhotoCamera2Tests { + // Tests related to taking photos that require Camera2 - only need to run this suite with Camera2 + public static Test suite() { + TestSuite suite = new TestSuite(PhotoCamera2Tests.class.getName()); /* was MainTests.class.getName() — copy-paste; suite should report its own name */ + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoFocusReleaseDuringPhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualISOExposure")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualWB")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRaw")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawWaitCaptureResult")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawMulti")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnly")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawExpo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawExpoWaitCaptureResult")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnlyExpo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashRaw")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashRaw2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExpo5")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSlowBurst")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpoRaw")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpoRawOnly")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketing")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketingHeavy")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketingCancel")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawFocusBracketing")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnlyFocusBracketing")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFastBurst")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousBurst")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousBurstSlow")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNR")); + /* NOTE(review): all modes in this suite require the Camera2 API — per the class comment, only run this suite with Camera2 enabled */ + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashAutoFakeMode")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashOnFakeMode")); + // do testTakePhotoRawRepeat last, and is an intensive test, and if it fails for any reason it seems to cause the following test to crash, terminating the run (at least on Nexus 6)! 
+ suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawRepeat")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoTests.java new file mode 100644 index 0000000..d4bb125 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/PhotoTests.java @@ -0,0 +1,109 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +import net.sourceforge.opencamera.TestUtils; + +public class PhotoTests { + // Tests related to taking photos; note that tests to do with photo mode that don't take photos are still part of MainTests + public static Test suite() { + TestSuite suite = new TestSuite(PhotoTests.class.getName()); /* was MainTests.class.getName() — copy-paste; suite should report its own name */ + // put these tests first as they require various permissions be allowed, that can only be set by user action + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOn")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationDirectionOn")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOff")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOnSAF")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOn")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOnSAF")); + } + // then do memory intensive tests: + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevel")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelLowMemory")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAngles")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAnglesLowMemory")); + // 
other tests: + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuous")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousNoTouch")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashAuto")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashOn")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashTorch")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAudioButton")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNoAutofocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNoThumbnail")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashBug")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraAll")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCamera")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraMulti")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraScreenFlash")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExposureCompensation")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedLandscape")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedPortrait")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPaused")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedAudioButton")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrash")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrash2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoQuickFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeatFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeatFocusLocked")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAfterFocus")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSingleTap")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDoubleTap")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAlt")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerBackground")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerSettings")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerPopup")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeat")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPicture1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPicture2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocusRepeat")); + if( TestUtils.test_camera2 ) { + // test_wait_capture_result only relevant for Camera2 API + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocusRepeatWaitCaptureResult")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testKeyboardControls")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStamp")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStampSAF")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDRO")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDROPhotoStamp")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDR")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoBackgroundHDR")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRFrontCamera")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRAutoStabilise")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRPhotoStamp")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExpo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanorama")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaMax")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaCancel")); + //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaCancelBySettings")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder1")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder3")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolderUnicode")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolderEmpty")); + } + // testTakePhotoPreviewPausedShare should be last, as sharing the image may sometimes cause later tests to hang + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedShare")); + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/TempTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/TempTests.java new file mode 100644 index 0000000..fc808e7 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/TempTests.java @@ -0,0 +1,15 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +public class TempTests { + // Dummy test suite for running an arbitrary subset of tests. + public static Test suite() { + TestSuite suite = new TestSuite(MainTests.class.getName()); + + //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoom")); + + return suite; + } +} diff --git a/app/src/androidTest/java/net/sourceforge/opencamera/test/VideoTests.java b/app/src/androidTest/java/net/sourceforge/opencamera/test/VideoTests.java new file mode 100644 index 0000000..41a54c9 --- /dev/null +++ b/app/src/androidTest/java/net/sourceforge/opencamera/test/VideoTests.java @@ -0,0 +1,95 @@ +package net.sourceforge.opencamera.test; + +import junit.framework.Test; +import junit.framework.TestSuite; + +import net.sourceforge.opencamera.TestUtils; + +public class VideoTests { + // Tests related to video recording; note that tests to do with video mode that don't record are still part of MainTests + public static Test suite() { + TestSuite suite = new TestSuite(MainTests.class.getName()); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo")); + // put these tests first as they require various permissions be allowed, that can only be set by user action: + if( !TestUtils.test_camera2 ) { + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAudioControl")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSAF")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitles")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitlesSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitlesGPSSAF")); + } + if( TestUtils.test_camera2 ) { + // tests for video log profile (but these don't actually record video) + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile1")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile3")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile1_extra_strong")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile2_extra_strong")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile3_extra_strong")); + } + + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideo")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideoDurationLimit")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmersiveMode")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmersiveModeEverything")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoStabilization")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoExposureLock")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFocusArea")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoQuick")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoQuickSAF")); + if( !TestUtils.test_camera2 ) { + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDuration")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDurationRestart")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDurationRestartInterrupt")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSettings")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMacro")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoPause")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoPauseStop")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshot")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotTimer")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotPausePreview")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotMax")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFlashVideo")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoTimerInterrupt")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoPopup")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoTimerPopup")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAvailableMemory")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAvailableMemory2")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize1")); + if( !TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize2")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize3")); + 
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize4")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize4SAF")); + } + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoTimeLapse")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceFailure")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceFailureSAF")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceIOException")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceCameraControllerException")); + if( TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoLogProfile")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoJTLogProfile")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoGammaProfile")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoEdgeModeNoiseReductionMode")); + } + // put tests which change bitrate, fps or test 4K at end + if( TestUtils.test_camera2 ) { + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFPSHighSpeedManual")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSlowMotion")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFPS")); + } + // update: now deprecating these tests, as setting these settings can be dodgy on some devices + /*suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoBitrate")); + suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo4K"));*/ + + return suite; + } +} diff --git a/app/src/androidTest/res/drawable-hdpi/ic_launcher.png b/app/src/androidTest/res/drawable-hdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..bee35f00774c07020370a5cd64d64beb1a27707f GIT binary patch literal 3619 
zcmV+;4&3pHP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGr5C8xd5CIrr5!(O&4Y)}}K~!i%?OF#^ zR#y`K>QMk)~j!h=|<%=6{!;$L}v;_sAac@Ey+2yY=09cV_O)+{vSq!t;o{`u}r>T*7i1 zatX_6$V@BVxrgM3D3Bi|iz9DN`vUd}i@1)Bf!KEx3HMQ?FzUOYcW2b7qFZwW>}*6> zB!+*9kEbBz5h|3ytvi_f1AHdn_!-?HXMj6q#>0oI+aOi5qguaJ*AM|gQq!rwdYB@` zRHfn*RcfmI-MXWnZE!zH#oUr#GvX6e(gVvwqPlrU&z4QFWyOQX(ZL&^48*ASP^h4; zyq|>7qL)J;2UG*t!>CQK(oBo-NSB zQ#XA!8*a5w!U5~I;a)uI)xvxI;8;SpK3&+d;=vOrRTA#?)2SjTW=-5sqBu@nkm9vS z%vpvHCt`)aEPdY*4EY>ec3Zjv>$ggg3{1(q5Q?Fb5V&7At=^1Jrowju@<0fix&VvU z=%%L+W85Zk}oJ38-6B*O#?H!9E)S&?AxwjIRFlv5UOa7RjVvbJoKnJ+#ARcg6ARC%qwj~6+L}u z?Pw2F#MfhOtp|eYzFZ5nGC&ND=onGlcEPdt;xZT|50X`~Mekc-7$VsLTNyK>Txpaq zgTjS$(G2cG!2-yWNA#hg@g4`qlCjyx3J#&KV20TYc)G}puqT0fkJb=<~s9&u4;|bNze4xdN_5i_6%jGe-N`+6~V#Yzd!w%UMc20Cdad6slyq!~2}7`GHGbVFUx6;n7+*M*)t%RF!FMYVc|-WT zU2}XoNbFMLi+MN~iZ)H;EtM+Zn~}H}CebnW8frzbP{7{AmjqZ`O5J5zrxd7Vjjca6%k#s4vSXlo^)FW zj+NEV_k-^n5{mn1l5XPm@_#-K5( zMn|>#urAsl=9YT&NI%ngsqU#(|DlVHO;zt9;;Inmuc-RnB?5KsiWI}6mDfPkb$~p_ zC8%D$s;94tiEy>nlJV}u=_bOhmgXqR25tY!72r}$ip7eEKt{#LGez;zHQb2B zlpiJFGijd0SK<>zG6|IjFH}O8m$ZBmd?Vko`!Ig@$Cf?PliqDsU`s)r8gdbFBQ08H zgVmeSvM&aY!@T99Joy^4gz}3RYJhT|mk((w!T3@XOX6;vsFr*22sw{)mvK2H^iO0`kuuGq~Tv*qDftBn2*4v zm_1HbVom{GmzM|f{6eb_u46e#do|!Of zJ;&B_!RB2OOSi%t(lR6VwwwwcjdW0C*C9)7)?lmqs7dEcz{qJBGacXjBs!3@C#PU| ze^Jf7U%}FKh=`W(?S(LrH!HYw;XVsB#;=(&U1Nz3jP}8+LojfRM0F^B{6Jj&_NCIQ z$xr(VFIGkU+88$V~_Rf0Qp=cA`}LJgu9eRy8>d$!t7=K71RU+lpTC;8Kc; z1iIFM@0;@L(NLRONl9qa6g6M4Oks(!a`PM1)pv^^Fxr!6K+Uptvw2bIkPF(=E7qM=h z_>|cQ5@tnwg1QWY|GzRo zS7sA3SoicPXmr!4r^UHqllZm+Uz1`92XQ6NUBUK!k|z~&OS&1ZAsu!49h(B>Ryy*A zbf)RNG6385%R`|8!h3FMxdwZVVD2(3nT7?cP^6GJe%9;K4cl-pLF`@56S!YIJ-(gG zAi|^Z$4*%T-Ii4WI3Fs{rAqvTDQw4WA-JYrHpnWC)Gyl90Xy;07ZR?By(0-woSj|? 
zJDfU?L1S?C3LZYhf#dQ^ujK26;`7rh35^i4`*pKS3QYN4e!Y9)au`ltkl2P1BO!eK zu;T#ycOdu_j-SO3zeyB;UQ&Sq*b|HoJ{NC#{Z_=>mh70IgJ@V=a!N9T-noppeOI-3 zP4ye5i{wVnchrES~#A`#2_E|N}Xjy@3Wn!~4`WMeRaYn;Dc774fm>~gJgV-@{#P7dCNq!qGR#Z2pzO2U~+^{UzGDB;yON?qiaYiC3)Re z4E45@GrGH!&%Aj|(dfroDSOsrGrP#rj@7Ipi*P6+@do41y@4d#aO(I3A+K3I%ovN# zFNxM7_*Ndi*0i>iN7%YgB3#EeAZ&;JJNpM(H<1YT&D+w4D-Su3i)jIKo~RDR(u0jD zsiIwt6E`jaL5Cy(H1ohtmI$j|Llo4sg`yj2njAbKw+(xbO4HA4MM)k#EjeRcR#j)I zlr*@ql5g~T0A?)_vD|%FGUw{l6lpR3D0`kBvWM0<-0^c5JzW&z+CMROxulXeW#nOICe(mucQ2CN&ad3 zw^c`P_0|aG+(uPsrRsE1`;X~v)QWIXKI0`jl?!les{CfFo0jC(Okkfd!kTey#@#q| z?y|aJxp}aHJ*lA0w+99|JltZk@Ki{=pMINRMpBZ_u4@05^8QqE!fBmEUeD0kAT02J zOmgK~R&AgsvLX!&eO)GaR^p16u8JD0kap#129N&Ga8$1% pDktm64VdOMPx#1ZP1_K>z@;j|==^1poj532;bRa{vGr5C8xd5CIrr5!(O&1l>tQK~z{r-B){1 zlvNb}@em(NC=ig33`>!K(mib#y8N)O1i$ zlNkXuhFBpLA0QbI6^Q2w$osXj%hx&k-B<+1c4~(|_|4Alz32Px{oQl!J->U_Op+uF z6wJl%fM7t$3}MvV$8VQ$w-(P&fy)HhW?~4>h%qDrSO0)w1U`*K##J`zbkJ(~TSiwG zFGvR4yU^Jwx<*2HmVXCZcj1T<=H|GVjkuHe;v;O{39BKfYrx(wq3J}#K4>(UH65!L zBXARJtWj|n$}l*Olc)N32+xYkG0B;k2xA;k-^5N--$V50@R^PHGuU?oQ(dw4b^LUk zAKfm+wsirY|zriZyE_f_UvR-R^$74q+P<6Xap=_V|o_OUciAFhVH;6ElO z?U{_`Rw?eJwEK`$P{iH8Dx~GXQtSySMa5mMPCAz%eRn|0yvE(+bV;X^)HTwTT<#hP z16ePWtECN*qHQWXOs?8HFnpM3n*@Vb8x5YdwsF$b)?>*$xH!u;6GM1LjDex7Oe7HJ zF0eASwWF~a@c(ocY7taf+$H-rp;+`1oE#7xj_ZH2Pd}fA?;K3? zz|k}4(7=5Xc5lIgSqNW?6)z)MrJv|Q2(pReQB=l4E+aV|pDA(k7CQ7YuNcmXY@>j! 
z4J^z#jO`kZd3__E7>hUba^pOf*K{}$+x8%@ScJD11Q}gmwj;4|2P;;16iO=a_-LN4 zdwXP%=QJb5-QJdkn_@_ylhq9pbkb6?SXc;4PZ&-k9f3|Us3IZ(q5_hNOsc(}apB6$dIU$SC;A~Hf+?rxtt{tIhM>}%*n&}FPx#1ZP1_K>z@;j|==^1poj532;bRa{vGr5C8xd5CIrr5!(O&2G~hNK~z{r?U;E~ z&1o3Nzs6qnWz^V3F(`yFS*|52B!rlRO3_TCk+H-uojJC##4+Yr$I-FHjHGNM9Fb5m zsF{f@$<~xMDcqKKp6A|o?!8?%os%;ifAl%OQ_uVS-rswl-}`%h@AG|s)r1iEkoZXc z|6r&Js5}e=boU|dJU~f_Y*sR*k~--=#{0swISO7OJqsa0=wm5QRUQ z$R-)>h6rV$C=ntjSCo{9yAMTCkvvP}7l`B(QCuvW&(isC>Qj-Mr)pOeZ9tKiMEB9s zR%1Rw#3hKo?}{E{gu+!g%oJ@0iC@ru{|Cb@o&Y zZ4Nzr0<$*KR@1H}p8Uf`+H)KT!igQ2V2@vZMZs$<-H6|JA!H*akHU^as@{ON3-?** zh2SI7QvtLAHK>O-N@=Uf%|p{hY?w5I>Bnq*W{Kn!+)u^xYz%dS%_RJG3a{VrDMFoa ztxb`Ut!h^YZ9v^EaODPG=F4VU2ET4y%w$u|vkZ>AQ3IGX$F(DfIfNVGShIjnm0IxM zWAWDp)VTvc5j#)pkHEp>P)x-FFF8XAci^`iC!(-17|wQRWr8-J;M>)Ry@jwyoWH8N z{y*Nc6jcul43s9|I21;85PhAW$ed|#8_Q0zk4Is~c$~Wo->Dd>V5jp4+)Kr|E4*AO z`pU;Wz#27c^4Z1>U}?cdMUcr|HuRSDbdq)@U|^-zqiJI}*-E<-sFEM$Q58^m=;u(# zF`T@}FF&nG!#zaAKC3pXd&+F<4cB;H~Fo-kNh;MQ%v`_!k%$l@wX`ozlx zHoY)uB&@q3_a$qGc81ThuwgfE z$EaRXAzQeP_g{T2v34PM_FsioEwF48;ttFGZr~>Fxo#JlG{horG-<@U%KFAQAB(5y znBxYkF5FFzGIQu=67N>MCu2|_*mESTFl%>K6&fKa*i$2loIxe9A(Rpj@bD#D=kv z_PdHEKgg;1Y@$n<0Z|^1l%Zsz<4ae5SeZGjVvlY?a42`ETU-7NN!Z9sNIHdt2tFuX zG~b89&hp==SXgvWwdtwdPOR?!YZW2ik(AOE(GDIM2Ffn^}~8 zh+arh!4fdcp1BK#uelHiv zf#cQ$5c!)7Q+7{c!@q5Wns5}I6W~7op5jq<2new#H45Lm(7dsAQ zeFn9&EbQ&uE+RLj2R(tcSp9`kKNs*CX3?O2>G#~iK~K0NNo!Zb@d zWTp;oAc`RLvgGW7MriFXPbo6~xL8;E8Z_GxbP)qi-n{6 ze@$khIb=yTDPU+}CnG>-bufY|-}6m+!%dcedlZ6F9+)7-no%kPZ3@Fz&8$F1!obIK zl{IUnRl2pSaNkgAThUSyzETZ6BFyO5xu{$pOW@wb1{6`$eYT>j~-*ohlNLA6p3d*sE)L0(7y{Ehn86qH9k4mSA?jE!T<9Uq&(85PHg= z6@U8yFWVt^gbFQgHr>m8qZ1^?27gJaWSY#?)fR=sE6;pHa>?xYpE!tq+J`t}h)(6& zT7+UPM6C_Z9+QSdR@T-sQ90>e8AMSIAnPwIG$hSrjk%7LZzs+b4tsEWkD4NBoNgy zN1b+pAnz3mz?N+3&w0G!?@`M$vx+xI(EMf1b5!l8!WL8aW(etP)x}ei<~qpN8@NH* znF&PEi8XPMHoHT^EJe6>N%)kTOQq2#&Njl2M|HR-YCl4MA@a<` zBx+rzsN=%A`pVl(lpp4Xiz1yPl4wry5a6h-9E@A=}gJE@WkFVzc+I( zYjP|aX+=UMg(A(Lf=1n@Ti&<$rG0sleF6>of?B(u&enZ4+Rr(R^a7)%V9lI#V`nEf 
zgA6X##J+lA6rI;$%T5$RT5(TalUlVmsyEpqJrCm zFeEIQ%vafDIp_I<1k~GY^xg{((0%DsTOEjPk2K*%FMW>8yj<@89MJl3pOF(o#xO=) zdCPLEo2x`Ko&gS}({WgS%b26}ZGpgP>%AA6-L9m&6cRCB7$o2^Ae6&~c=a@a597gb zt>q+J3%GC6FuRmG^V6JI1>mvBzSq7}5`;@W+$tYg44$YGk_!ue#fRh}ycmXsfv?@d z2sr*fjReg^ku6Ixb6{+t+i2 z?G!e)@PD54xoT8BsnvsgQgUHtp%L~CF1yCgWkzPFz|wo81qdru2lF=SRo;9vgq?ow z(d+X=_!uwlJad;fiPVLeP5WnV{SB-jPj>KrVnQP2T^gtyrzaQSJs39z^W-^;@0#~T zex#l@EMy*#<@J!T>^GkE&-|BKB261sv&Cmu^i24njpkIp8I0M`ph{Bg@#6py-}L4N zLZE`}c%+2C&>w|wTZ$i4T8Sh^EEmvLGK6o^g9B1Aw`h{3I${j|1Da}`q4+4B*hqYt zFE)C{xTY+h&4+NOKF7|q)T&cW{{pbMB_K?ARFeu(#2ZAa)xlmrKCR_E6QnEgG1(I}zefRu=e@goAigsn5jcE)Sf{DGLX^zB z!2LMnEFPMpg^u)^_gA6AniQ5=P9lQ}gkEc}ikY9`PmpcN3X)u3 zQtg+>?r07)DST*rhxf#!FZrdRLzcS)8intDskrr<;NhU&SZ6VbI4cz0irr1g0;bG^!1hyJRo-%mP@Zg3o)<0n1=EzF^Y zg^#Tti2<+d_sY@1O$xjzP=R&+?U=BQ&HSl|okgwmMlfw|>Sd{tTEgBJsh=cQ?nCTr z)-upC&CR$`dqL_PvgA&T>o4^{uKRrSoedn`S?q*$iI`NK_Hfv53lxLCZJ<`bo0UA5 z$}z2JBzbh$`CVs83C_5y0>;8EX|0x`;*cw^Jw1#52eYK@bFtvH`=>iJ30!(PvY2_* zyat-V{iJnDwPC}67+P_@`)C-JZ6uYJ1T3FTm11wDp$KT zTZUT$ma|~HbC%!D%884N;ubOqt-=*oTwd;AHzIiCOwyTH2!|Ht|s zY8xo%LI+8z4}D-Umn>_)WHL19y2f0y8QoCs4@ukGE^m?#Fl zz7RaTh=Y#|tbYHa|MYB?)2 zK2R94zCHy1`?V#AkW?3%2vdfoN5G4O=t*Z{Fq&(?t+G^en%BFzx)sI%;Vb zI?+?D_|Se`x7~c?LkLAz-VN^TnV-`ORxpyvSW-E`J(#g)EtzH|yrX=yUDTqeIPL97{edq1*EyI7YFJA9TNXL%8EV|5~TIt_bgZN2c9T1o-8mm5AS-f&lpkR8> zeYD7KLrhkJBL)mE|DFvD?3@4|iAH2LkO@!M+`>Gs* zWV`JnzmU+LHFvJXTUshI#?T{Q9fT>|Ir*Eq!lHbxGok}ItxN4`{^a0^pd=|&6*u?_zqNEz6il0G~xVfkMoxTbqfgHs{#h{+vc38g0Q(vdy2&M+W+-^vXC5en~ZSLr!QzzUw_L0b!Ba(k8o7* Fe*mA#Y|{V$ literal 0 HcmV?d00001 diff --git a/app/src/androidTest/res/values/strings.xml b/app/src/androidTest/res/values/strings.xml new file mode 100644 index 0000000..6835339 --- /dev/null +++ b/app/src/androidTest/res/values/strings.xml @@ -0,0 +1,6 @@ + + + + OpenCamera.testTest + + diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000..6020ec4 --- /dev/null +++ b/app/src/main/AndroidManifest.xml @@ -0,0 
+1,159 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/assets/androidx_LICENSE-2.0.txt b/app/src/main/assets/androidx_LICENSE-2.0.txt new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/app/src/main/assets/androidx_LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app/src/main/assets/google_material_design_icons_LICENSE-2.0.txt b/app/src/main/assets/google_material_design_icons_LICENSE-2.0.txt new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/app/src/main/assets/google_material_design_icons_LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app/src/main/assets/gpl-3.0.txt b/app/src/main/assets/gpl-3.0.txt new file mode 100644 index 0000000..94a9ed0 --- /dev/null +++ b/app/src/main/assets/gpl-3.0.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. 
diff --git a/app/src/main/java/net/sourceforge/opencamera/AudioListener.java b/app/src/main/java/net/sourceforge/opencamera/AudioListener.java new file mode 100644 index 0000000..a913748 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/AudioListener.java @@ -0,0 +1,185 @@ +package net.sourceforge.opencamera; + +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaRecorder; +import android.util.Log; + +import androidx.annotation.RequiresPermission; + +/** Sets up a listener to listen for noise level. + */ +class AudioListener { + private static final String TAG = "AudioListener"; + private volatile boolean is_running = true; // should be volatile, as used to communicate between threads + private int buffer_size = -1; + private AudioRecord ar; // modification to ar should always be synchronized (on AudioListener.this), as the ar can be released in the AudioListener's own thread + private Thread thread; + + public interface AudioListenerCallback { + void onAudio(int level); + } + + /** Create a new AudioListener. The caller should call the start() method to start listening. 
+ */ + @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) + AudioListener(final AudioListenerCallback cb) { + if( MyDebug.LOG ) + Log.d(TAG, "new AudioListener"); + final int sample_rate = 8000; + int channel_config = AudioFormat.CHANNEL_IN_MONO; + int audio_format = AudioFormat.ENCODING_PCM_16BIT; + try { + buffer_size = AudioRecord.getMinBufferSize(sample_rate, channel_config, audio_format); + //buffer_size = -1; // test + if( MyDebug.LOG ) + Log.d(TAG, "buffer_size: " + buffer_size); + if( buffer_size <= 0 ) { + if( MyDebug.LOG ) { + if( buffer_size == AudioRecord.ERROR ) + Log.e(TAG, "getMinBufferSize returned ERROR"); + else if( buffer_size == AudioRecord.ERROR_BAD_VALUE ) + Log.e(TAG, "getMinBufferSize returned ERROR_BAD_VALUE"); + } + return; + } + + synchronized(AudioListener.this) { + ar = new AudioRecord(MediaRecorder.AudioSource.MIC, sample_rate, channel_config, audio_format, buffer_size); + AudioListener.this.notifyAll(); // probably not needed currently as no thread should be waiting for creation, but just for consistency + } + } + catch(Exception e) { + MyDebug.logStackTrace(TAG, "failed to create audiorecord", e); + return; + } + + // check initialised + synchronized(AudioListener.this) { + if( ar.getState() == AudioRecord.STATE_INITIALIZED ) { + if( MyDebug.LOG ) + Log.d(TAG, "audiorecord is initialised"); + } + else { + Log.e(TAG, "audiorecord failed to initialise"); + ar.release(); + ar = null; + AudioListener.this.notifyAll(); // again probably not needed, but just in case + return; + } + } + + final short[] buffer = new short[buffer_size]; + ar.startRecording(); + + this.thread = new Thread() { + @Override + public void run() { + /*int sample_delay = (1000 * buffer_size) / sample_rate; + if( MyDebug.LOG ) + Log.e(TAG, "sample_delay: " + sample_delay);*/ + + while( is_running ) { + /*try{ + Thread.sleep(sample_delay); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "InterruptedException from sleep", e); + }*/ + try { 
+ int n_read = ar.read(buffer, 0, buffer_size); + if( n_read > 0 ) { + int average_noise = 0; + int max_noise = 0; + for(int i=0;i targetVectors = new ArrayList<>(); + private float targetAngle; // target angle in radians + private float uprightAngleTol; // in radians + private boolean targetAchieved; + private float tooFarAngle; // in radians + private TargetCallback targetCallback; + private boolean has_lastTargetAngle; + private float lastTargetAngle; + private int is_upright; // if hasTarget==true, this stores whether the "upright" orientation of the device is close enough to the orientation when recording was started: 0 for yes, otherwise -1 for too anti-clockwise, +1 for too clockwise + + GyroSensor(Context context) { + mSensorManager = (SensorManager)context.getSystemService(Context.SENSOR_SERVICE); + + mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); + mSensorAccel = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); + + //mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR); + //mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GAME_ROTATION_VECTOR); + //mSensorAccel = null; + + if( MyDebug.LOG ) { + Log.d(TAG, "GyroSensor"); + if( mSensor == null ) + Log.d(TAG, "gyroscope not available"); + else if( mSensorAccel == null ) + Log.d(TAG, "accelerometer not available"); + } + setToIdentity(); + } + + boolean hasSensors() { + // even though the gyro sensor works if mSensorAccel is not present, for best behaviour we require them both + return mSensor != null && mSensorAccel != null; + } + + private void setToIdentity() { + for(int i=0;i<9;i++) { + currentRotationMatrix[i] = 0.0f; + } + currentRotationMatrix[0] = 1.0f; + currentRotationMatrix[4] = 1.0f; + currentRotationMatrix[8] = 1.0f; + System.arraycopy(currentRotationMatrix, 0, currentRotationMatrixGyroOnly, 0, 9); + + for(int i=0;i<3;i++) { + initAccelVector[i] = 0.0f; + // don't set accelVector, rotationVector, gyroVector to 0 here, as we continually smooth 
the values even when not recording + } + has_init_accel = false; + has_original_rotation_matrix = false; + } + + /** Helper method to set a 3D vector. + */ + static void setVector(final float[] vector, float x, float y, float z) { + vector[0] = x; + vector[1] = y; + vector[2] = z; + } + + /** Helper method to access the (i, j)th component of a 3x3 matrix. + */ + private static float getMatrixComponent(final float [] matrix, int row, int col) { + return matrix[row*3+col]; + } + + /** Helper method to set the (i, j)th component of a 3x3 matrix. + */ + private static void setMatrixComponent(final float [] matrix, int row, int col, float value) { + matrix[row*3+col] = value; + } + + /** Helper method to multiply 3x3 matrix with a 3D vector. + */ + public static void transformVector(final float [] result, final float [] matrix, final float [] vector) { + // result[i] = matrix[ij] . vector[j] + for(int i=0;i<3;i++) { + result[i] = 0.0f; + for(int j=0;j<3;j++) { + result[i] += getMatrixComponent(matrix, i, j) * vector[j]; + } + } + } + + /** Helper method to multiply the transpose of a 3x3 matrix with a 3D vector. + * For 3x3 rotation (orthonormal) matrices, the transpose is the inverse. + */ + private void transformTransposeVector(final float [] result, final float [] matrix, final float [] vector) { + // result[i] = matrix[ji] . vector[j] + for(int i=0;i<3;i++) { + result[i] = 0.0f; + for(int j=0;j<3;j++) { + result[i] += getMatrixComponent(matrix, j, i) * vector[j]; + } + } + } + + /* We should enable sensors before startRecording(), so that we can apply smoothing to the + * sensors to reduce noise. + * This should be limited to when we might want to use the gyro, to help battery life. 
+ */ + void enableSensors() { + if( MyDebug.LOG ) + Log.d(TAG, "enableSensors"); + has_rotationVector = false; + has_gyroVector = false; + for(int i=0;i<3;i++) { + accelVector[i] = 0.0f; + rotationVector[i] = 0.0f; + gyroVector[i] = 0.0f; + } + + if( mSensor != null ) + mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_UI); + if( mSensorAccel != null ) + mSensorManager.registerListener(this, mSensorAccel, SensorManager.SENSOR_DELAY_UI); + } + + void disableSensors() { + if( MyDebug.LOG ) + Log.d(TAG, "disableSensors"); + mSensorManager.unregisterListener(this); + } + + void startRecording() { + if( MyDebug.LOG ) + Log.d(TAG, "startRecording"); + is_recording = true; + timestamp = 0; + setToIdentity(); + } + + void stopRecording() { + if( is_recording ) { + if( MyDebug.LOG ) + Log.d(TAG, "stopRecording"); + is_recording = false; + timestamp = 0; + } + } + + public boolean isRecording() { + return this.is_recording; + } + + void setTarget(float target_x, float target_y, float target_z, float targetAngle, float uprightAngleTol, float tooFarAngle, TargetCallback targetCallback) { + this.hasTarget = true; + this.targetVectors.clear(); + addTarget(target_x, target_y, target_z); + this.targetAngle = targetAngle; + this.uprightAngleTol = uprightAngleTol; + this.tooFarAngle = tooFarAngle; + this.targetCallback = targetCallback; + this.has_lastTargetAngle = false; + this.lastTargetAngle = 0.0f; + } + + void addTarget(float target_x, float target_y, float target_z) { + float [] vector = new float[]{target_x, target_y, target_z}; + this.targetVectors.add(vector); + } + + void clearTarget() { + this.hasTarget = false; + this.targetVectors.clear(); + this.targetCallback = null; + this.has_lastTargetAngle = false; + this.lastTargetAngle = 0.0f; + } + + void disableTargetCallback() { + this.targetCallback = null; + } + + boolean hasTarget() { + return this.hasTarget; + } + + boolean isTargetAchieved() { + return this.hasTarget && this.targetAchieved; + } + 
+ public int isUpright() { + return this.is_upright; + } + + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + } + + private void adjustGyroForAccel() { + if( timestamp == 0 ) { + // don't have a gyro matrix yet + return; + } + else if( !has_init_accel ) { + return; + } + /*if( true ) + return;*/ // don't use accelerometer for now + + //transformVector(tempVector, currentRotationMatrix, initAccelVector); + // tempVector is now the initAccelVector transformed by the gyro matrix + //transformTransposeVector(tempVector, currentRotationMatrix, initAccelVector); + transformVector(tempVector, currentRotationMatrix, accelVector); + // tempVector is now the accelVector transformed by the gyro matrix + double cos_angle = (tempVector[0] * initAccelVector[0] + tempVector[1] * initAccelVector[1] + tempVector[2] * initAccelVector[2]); + /*if( MyDebug.LOG ) { + Log.d(TAG, "adjustGyroForAccel:"); + Log.d(TAG, "### currentRotationMatrix row 0: " + currentRotationMatrix[0] + " , " + currentRotationMatrix[1] + " , " + currentRotationMatrix[2]); + Log.d(TAG, "### currentRotationMatrix row 1: " + currentRotationMatrix[3] + " , " + currentRotationMatrix[4] + " , " + currentRotationMatrix[5]); + Log.d(TAG, "### currentRotationMatrix row 2: " + currentRotationMatrix[6] + " , " + currentRotationMatrix[7] + " , " + currentRotationMatrix[8]); + Log.d(TAG, "### initAccelVector: " + initAccelVector[0] + " , " + initAccelVector[1] + " , " + initAccelVector[2]); + Log.d(TAG, "### accelVector: " + accelVector[0] + " , " + accelVector[1] + " , " + accelVector[2]); + Log.d(TAG, "### tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]); + Log.d(TAG, "### cos_angle: " + cos_angle); + }*/ + if( cos_angle >= 0.99999999995 ) { + // gyroscope already matches accelerometer + return; + } + + double angle = Math.acos(cos_angle); + angle *= 0.02f; // filter + cos_angle = Math.cos(angle); + + /* + // compute matrix to transform tempVector to accelVector + // 
compute (tempVector X accelVector) normalised + double a_x = tempVector[1] * accelVector[2] - tempVector[2] * accelVector[1]; + double a_y = tempVector[2] * accelVector[0] - tempVector[0] * accelVector[2]; + double a_z = tempVector[0] * accelVector[1] - tempVector[1] * accelVector[0]; + */ + // compute matrix to transform tempVector to initAccelVector + // compute (tempVector X initAccelVector) normalised + double a_x = tempVector[1] * initAccelVector[2] - tempVector[2] * initAccelVector[1]; + double a_y = tempVector[2] * initAccelVector[0] - tempVector[0] * initAccelVector[2]; + double a_z = tempVector[0] * initAccelVector[1] - tempVector[1] * initAccelVector[0]; + double a_mag = Math.sqrt(a_x*a_x + a_y*a_y + a_z*a_z); + if( a_mag < 1.0e-5 ) { + // parallel or anti-parallel case + return; + } + a_x /= a_mag; + a_y /= a_mag; + a_z /= a_mag; + double sin_angle = Math.sqrt(1.0-cos_angle*cos_angle); + // from http://immersivemath.com/forum/question/rotation-matrix-from-one-vector-to-another/ + setMatrixComponent(tempMatrix, 0, 0, (float)(a_x*a_x*(1.0-cos_angle)+cos_angle)); + setMatrixComponent(tempMatrix, 0, 1, (float)(a_x*a_y*(1.0-cos_angle)-sin_angle*a_z)); + setMatrixComponent(tempMatrix, 0, 2, (float)(a_x*a_z*(1.0-cos_angle)+sin_angle*a_y)); + setMatrixComponent(tempMatrix, 1, 0, (float)(a_x*a_y*(1.0-cos_angle)+sin_angle*a_z)); + setMatrixComponent(tempMatrix, 1, 1, (float)(a_y*a_y*(1.0-cos_angle)+cos_angle)); + setMatrixComponent(tempMatrix, 1, 2, (float)(a_y*a_z*(1.0-cos_angle)-sin_angle*a_x)); + setMatrixComponent(tempMatrix, 2, 0, (float)(a_x*a_z*(1.0-cos_angle)-sin_angle*a_y)); + setMatrixComponent(tempMatrix, 2, 1, (float)(a_y*a_z*(1.0-cos_angle)+sin_angle*a_x)); + setMatrixComponent(tempMatrix, 2, 2, (float)(a_z*a_z*(1.0-cos_angle)+cos_angle)); + /*if( MyDebug.LOG ) { + // test: + System.arraycopy(tempVector, 0, inVector, 0, 3); + transformVector(tempVector, tempMatrix, inVector); + Log.d(TAG, "### tempMatrix row 0: " + tempMatrix[0] + " , " + 
tempMatrix[1] + " , " + tempMatrix[2]); + Log.d(TAG, "### tempMatrix row 1: " + tempMatrix[3] + " , " + tempMatrix[4] + " , " + tempMatrix[5]); + Log.d(TAG, "### tempMatrix row 2: " + tempMatrix[6] + " , " + tempMatrix[7] + " , " + tempMatrix[8]); + Log.d(TAG, "### rotated tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]); + }*/ + // replace currentRotationMatrix with tempMatrix.currentRotationMatrix + // since [tempMatrix.currentRotationMatrix].[initAccelVector] = tempMatrix.tempVector = accelVector + // since [tempMatrix.currentRotationMatrix].[accelVector] = tempMatrix.tempVector = initAccelVector + for(int i=0;i<3;i++) { + for(int j=0;j<3;j++) { + float value = 0.0f; + // temp2Matrix[ij] = tempMatrix[ik] * currentRotationMatrix[kj] + for(int k=0;k<3;k++) { + value += getMatrixComponent(tempMatrix, i, k) * getMatrixComponent(currentRotationMatrix, k, j); + } + setMatrixComponent(temp2Matrix, i, j, value); + } + } + + System.arraycopy(temp2Matrix, 0, currentRotationMatrix, 0, 9); + + /*if( MyDebug.LOG ) { + // test: + //transformVector(tempVector, temp2Matrix, initAccelVector); + //transformTransposeVector(tempVector, currentRotationMatrix, initAccelVector); + transformVector(tempVector, temp2Matrix, accelVector); + Log.d(TAG, "### new currentRotationMatrix row 0: " + temp2Matrix[0] + " , " + temp2Matrix[1] + " , " + temp2Matrix[2]); + Log.d(TAG, "### new currentRotationMatrix row 1: " + temp2Matrix[3] + " , " + temp2Matrix[4] + " , " + temp2Matrix[5]); + Log.d(TAG, "### new currentRotationMatrix row 2: " + temp2Matrix[6] + " , " + temp2Matrix[7] + " , " + temp2Matrix[8]); + Log.d(TAG, "### new tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]); + }*/ + } + + @Override + public void onSensorChanged(SensorEvent event) { + /*if( MyDebug.LOG ) + Log.d(TAG, "onSensorChanged: " + event);*/ + if( event.sensor.getType() == Sensor.TYPE_ACCELEROMETER ) { + final float sensor_alpha = 0.8f; // for filter + for(int 
i=0;i<3;i++) { + //this.accelVector[i] = event.values[i]; + this.accelVector[i] = sensor_alpha * this.accelVector[i] + (1.0f-sensor_alpha) * event.values[i]; + } + + double mag = Math.sqrt(accelVector[0]*accelVector[0] + accelVector[1]*accelVector[1] + accelVector[2]*accelVector[2]); + if( mag > 1.0e-8 ) { + //noinspection lossy-conversions + accelVector[0] /= mag; + //noinspection lossy-conversions + accelVector[1] /= mag; + //noinspection lossy-conversions + accelVector[2] /= mag; + } + + if( !has_init_accel ) { + System.arraycopy(accelVector, 0, initAccelVector, 0, 3); + has_init_accel = true; + } + + adjustGyroForAccel(); + } + else if( event.sensor.getType() == Sensor.TYPE_GYROSCOPE ) { + if( has_gyroVector ) { + final float sensor_alpha = 0.5f; // for filter + for(int i=0;i<3;i++) { + //this.gyroVector[i] = event.values[i]; + this.gyroVector[i] = sensor_alpha * this.gyroVector[i] + (1.0f-sensor_alpha) * event.values[i]; + } + } + else { + System.arraycopy(event.values, 0, this.gyroVector, 0, 3); + has_gyroVector = true; + } + + // This timestep's delta rotation to be multiplied by the current rotation + // after computing it from the gyro sample data. + if( timestamp != 0 ) { + final float dT = (event.timestamp - timestamp) * NS2S; + // Axis of the rotation sample, not normalized yet. 
+ float axisX = gyroVector[0]; + float axisY = gyroVector[1]; + float axisZ = gyroVector[2]; + + // Calculate the angular speed of the sample + double omegaMagnitude = Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ); + + // Normalize the rotation vector if it's big enough to get the axis + // (that is, EPSILON should represent your maximum allowable margin of error) + if( omegaMagnitude > 1.0e-5 ) { + //noinspection lossy-conversions + axisX /= omegaMagnitude; + //noinspection lossy-conversions + axisY /= omegaMagnitude; + //noinspection lossy-conversions + axisZ /= omegaMagnitude; + } + + // Integrate around this axis with the angular speed by the timestep + // in order to get a delta rotation from this sample over the timestep + // We will convert this axis-angle representation of the delta rotation + // into a quaternion before turning it into the rotation matrix. + double thetaOverTwo = omegaMagnitude * dT / 2.0f; + float sinThetaOverTwo = (float)Math.sin(thetaOverTwo); + float cosThetaOverTwo = (float)Math.cos(thetaOverTwo); + deltaRotationVector[0] = sinThetaOverTwo * axisX; + deltaRotationVector[1] = sinThetaOverTwo * axisY; + deltaRotationVector[2] = sinThetaOverTwo * axisZ; + deltaRotationVector[3] = cosThetaOverTwo; + /*if( MyDebug.LOG ) { + Log.d(TAG, "### values: " + event.values[0] + " , " + event.values[1] + " , " + event.values[2]); + Log.d(TAG, "smoothed values: " + gyroVector[0] + " , " + gyroVector[1] + " , " + gyroVector[2]); + }*/ + + SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector); + // User code should concatenate the delta rotation we computed with the current rotation + // in order to get the updated rotation. 
+ // currentRotationMatrix = currentRotationMatrix * deltaRotationMatrix; + for(int i=0;i<3;i++) { + for(int j=0;j<3;j++) { + float value = 0.0f; + // tempMatrix[ij] = currentRotationMatrix[ik] * deltaRotationMatrix[kj] + for(int k=0;k<3;k++) { + value += getMatrixComponent(currentRotationMatrix, i, k) * getMatrixComponent(deltaRotationMatrix, k, j); + } + setMatrixComponent(tempMatrix, i, j, value); + } + } + + System.arraycopy(tempMatrix, 0, currentRotationMatrix, 0, 9); + + for(int i=0;i<3;i++) { + for(int j=0;j<3;j++) { + float value = 0.0f; + // tempMatrix[ij] = currentRotationMatrixGyroOnly[ik] * deltaRotationMatrix[kj] + for(int k=0;k<3;k++) { + value += getMatrixComponent(currentRotationMatrixGyroOnly, i, k) * getMatrixComponent(deltaRotationMatrix, k, j); + } + setMatrixComponent(tempMatrix, i, j, value); + } + } + System.arraycopy(tempMatrix, 0, currentRotationMatrixGyroOnly, 0, 9); + + + /*if( MyDebug.LOG ) { + setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen + transformVector(tempVector, currentRotationMatrix, inVector); + //transformTransposeVector(tempVector, currentRotationMatrix, inVector); + Log.d(TAG, "### gyro vector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]); + }*/ + + adjustGyroForAccel(); + + } + + timestamp = event.timestamp; + } + else if( event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR || event.sensor.getType() == Sensor.TYPE_GAME_ROTATION_VECTOR ) { + if( has_rotationVector ) { + //final float sensor_alpha = 0.7f; // for filter + final float sensor_alpha = 0.8f; // for filter + for(int i=0;i<3;i++) { + //this.rotationVector[i] = event.values[i]; + this.rotationVector[i] = sensor_alpha * this.rotationVector[i] + (1.0f-sensor_alpha) * event.values[i]; + } + } + else { + System.arraycopy(event.values, 0, this.rotationVector, 0, 3); + has_rotationVector = true; + } + + SensorManager.getRotationMatrixFromVector(tempMatrix, rotationVector); + + if( !has_original_rotation_matrix 
) { + System.arraycopy(tempMatrix, 0, originalRotationMatrix, 0, 9); + has_original_rotation_matrix = event.values[3] != 1.0; + } + + // current = originalT.new + for(int i=0;i<3;i++) { + for(int j=0;j<3;j++) { + float value = 0.0f; + // currentRotationMatrix[ij] = originalRotationMatrix[ki] * tempMatrix[kj] + for(int k=0;k<3;k++) { + value += getMatrixComponent(originalRotationMatrix, k, i) * getMatrixComponent(tempMatrix, k, j); + } + setMatrixComponent(currentRotationMatrix, i, j, value); + } + } + + if( MyDebug.LOG ) { + Log.d(TAG, "### values: " + event.values[0] + " , " + event.values[1] + " , " + event.values[2] + " , " + event.values[3]); + Log.d(TAG, " " + currentRotationMatrix[0] + " , " + currentRotationMatrix[1] + " , " + currentRotationMatrix[2]); + Log.d(TAG, " " + currentRotationMatrix[3] + " , " + currentRotationMatrix[4] + " , " + currentRotationMatrix[5]); + Log.d(TAG, " " + currentRotationMatrix[6] + " , " + currentRotationMatrix[7] + " , " + currentRotationMatrix[8]); + } + } + + if( hasTarget ) { + int n_too_far = 0; + targetAchieved = false; + for(int indx=0;indx 0) ? 
1 : -1;*/ + // store up vector + is_upright = 0; + + float ux = tempVector[0]; + float uy = tempVector[1]; + float uz = tempVector[2]; + + // project up vector into plane perpendicular to targetVector + // v' = v - (v.n)n + float u_dot_n = ux * targetVector[0] + uy * targetVector[1] + uz * targetVector[2]; + float p_ux = ux - u_dot_n * targetVector[0]; + float p_uy = uy - u_dot_n * targetVector[1]; + float p_uz = uz - u_dot_n * targetVector[2]; + /*if( MyDebug.LOG ) { + Log.d(TAG, " u: " + ux + " , " + uy + " , " + uz); + Log.d(TAG, " p_u: " + p_ux + " , " + p_uy + " , " + p_uz); + }*/ + double p_u_mag = Math.sqrt(p_ux*p_ux + p_uy*p_uy + p_uz*p_uz); + if( p_u_mag > 1.0e-5 ) { + /*if( MyDebug.LOG ) { + Log.d(TAG, " p_u norm: " + p_ux/p_u_mag + " , " + p_uy/p_u_mag + " , " + p_uz/p_u_mag); + }*/ + // normalise p_u + //noinspection lossy-conversions + p_ux /= p_u_mag; + //p_uy /= p_u_mag; // commented out as not needed + //noinspection lossy-conversions + p_uz /= p_u_mag; + + // compute p_u X (0 1 0) + float cx = - p_uz; + float cy = 0.0f; + float cz = p_ux; + /*if( MyDebug.LOG ) { + Log.d(TAG, " c: " + cx + " , " + cy + " , " + cz); + }*/ + float sin_angle_up = (float)Math.sqrt(cx*cx + cy*cy + cz*cz); + float angle_up = (float)Math.asin(sin_angle_up); + + setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen + transformVector(tempVector, currentRotationMatrix, inVector); + + if( Math.abs(angle_up) > this.uprightAngleTol ) { + float dot = cx*tempVector[0] + cy*tempVector[1] + cz*tempVector[2]; + is_upright = (dot < 0) ? 
1 : -1; + } + } + + float cos_angle = tempVector[0] * targetVector[0] + tempVector[1] * targetVector[1] + tempVector[2] * targetVector[2]; + float angle = (float)Math.acos(cos_angle); + if( is_upright == 0 ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "gyro vector angle with target: " + Math.toDegrees(angle) + " degrees");*/ + if( angle <= targetAngle ) { + if( MyDebug.LOG ) + Log.d(TAG, " ### achieved target angle: " + Math.toDegrees(angle) + " degrees"); + targetAchieved = true; + if( targetCallback != null ) { + //targetCallback.onAchieved(indx); + if( has_lastTargetAngle ) { + if( MyDebug.LOG ) + Log.d(TAG, " last target angle: " + Math.toDegrees(lastTargetAngle) + " degrees"); + if( angle > lastTargetAngle ) { + // started to get worse, so call callback + targetCallback.onAchieved(indx); + } + // else, don't call callback yet, as we may get closer to the target + } + } + // only bother setting the lastTargetAngle if within the target angle - otherwise we'll have problems if there is more than one target set + has_lastTargetAngle = true; + lastTargetAngle = angle; + } + } + + if( angle > tooFarAngle ) { + n_too_far++; + } + /*if( MyDebug.LOG ) + Log.d(TAG, "targetAchieved? " + targetAchieved);*/ + } + if( n_too_far > 0 && n_too_far == targetVectors.size() ) { + if( targetCallback != null ) { + targetCallback.onTooFar(); + } + } + } + } + + /* This returns a 3D vector, that represents the current direction that the device is pointing (looking towards the screen), + * relative to when startRecording() was called. + * That is, the coordinate system is defined by the device's initial orientation when startRecording() was called: + * X: -ve to +ve is left to right + * Y: -ve to +ve is down to up + * Z: -ve to +ve is out of the screen to behind the screen + * So if the device hasn't changed orientation, this will return (0, 0, -1). + * (1, 0, 0) means the device has rotated 90 degrees so it's now pointing to the right. 
+ * @param result An array of length 3 to store the returned vector. + */ + /*void getRelativeVector(float [] result) { + setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen + transformVector(result, currentRotationMatrix, inVector); + }*/ + + /*void getRelativeInverseVector(float [] result) { + setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen + transformTransposeVector(result, currentRotationMatrix, inVector); + }*/ + + public void getRelativeInverseVector(float [] out, float [] in) { + transformTransposeVector(out, currentRotationMatrix, in); + } + + public void getRelativeInverseVectorGyroOnly(float [] out, float [] in) { + transformTransposeVector(out, currentRotationMatrixGyroOnly, in); + } + + public void getRotationMatrix(float [] out) { + System.arraycopy(currentRotationMatrix, 0, out, 0, 9); + } + + // for testing + + public void testForceTargetAchieved(int indx) { + if( MyDebug.LOG ) + Log.d(TAG, "testForceTargetAchieved: " + indx); + if( targetCallback != null ) { + targetCallback.onAchieved(indx); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/HDRProcessor.java b/app/src/main/java/net/sourceforge/opencamera/HDRProcessor.java new file mode 100644 index 0000000..9ca7b72 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/HDRProcessor.java @@ -0,0 +1,2362 @@ +package net.sourceforge.opencamera; + +/*import java.io.File; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.OutputStream;*/ +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.Matrix; +//import android.media.MediaScannerConnection; +//import android.os.Environment; + +import androidx.annotation.NonNull; +import android.util.Log; + +public class HDRProcessor { + private static final 
String TAG = "HDRProcessor"; + + private final Context context; + private final boolean is_test; + + // public for access by testing + public int [] offsets_x = null; + public int [] offsets_y = null; + @SuppressWarnings("CanBeFinal") + public int sharp_index = 0; + + private enum HDRAlgorithm { + HDRALGORITHM_STANDARD, + HDRALGORITHM_SINGLE_IMAGE + } + + public enum TonemappingAlgorithm { + TONEMAPALGORITHM_CLAMP, + TONEMAPALGORITHM_EXPONENTIAL, + TONEMAPALGORITHM_REINHARD, + TONEMAPALGORITHM_FU2, + TONEMAPALGORITHM_ACES + } + public static final TonemappingAlgorithm default_tonemapping_algorithm_c = TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD; + public enum DROTonemappingAlgorithm { + DROALGORITHM_NONE, + DROALGORITHM_GAINGAMMA + } + + public HDRProcessor(Context context, boolean is_test) { + this.context = context; + this.is_test = is_test; + } + + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + } + + /** Given a set of data Xi and Yi, this function estimates a relation between X and Y + * using linear least squares. + * We use it to modify the pixels of images taken at the brighter or darker exposure + * levels, to estimate what the pixel should be at the "base" exposure. + * We estimate as y = parameter_A * x + parameter_B. + */ + private static class ResponseFunction { + float parameter_A; + float parameter_B; + + private ResponseFunction(float parameter_A, float parameter_B) { + this.parameter_A = parameter_A; + this.parameter_B = parameter_B; + } + + static ResponseFunction createIdentity() { + return new ResponseFunction(1.0f, 0.0f); + } + + /** Computes the response function. + * We pass the context, so this inner class can be made static. + * @param x_samples List of Xi samples. Must be at least 3 samples. + * @param y_samples List of Yi samples. Must be same length as x_samples. + * @param weights List of weights. Must be same length as x_samples. 
+ */ + ResponseFunction(Context context, int id, List x_samples, List y_samples, List weights) { + if( MyDebug.LOG ) + Log.d(TAG, "ResponseFunction"); + + if( x_samples.size() != y_samples.size() ) { + if( MyDebug.LOG ) + Log.e(TAG, "unequal number of samples"); + // throw RuntimeException, as this is a programming error + throw new RuntimeException(); + } + else if( x_samples.size() != weights.size() ) { + if( MyDebug.LOG ) + Log.e(TAG, "unequal number of samples"); + // throw RuntimeException, as this is a programming error + throw new RuntimeException(); + } + else if( x_samples.size() <= 3 ) { + if( MyDebug.LOG ) + Log.e(TAG, "not enough samples"); + // throw RuntimeException, as this is a programming error + throw new RuntimeException(); + } + + // linear Y = AX + B + boolean done = false; + double sum_wx = 0.0; + double sum_wx2 = 0.0; + double sum_wxy = 0.0; + double sum_wy = 0.0; + double sum_w = 0.0; + for(int i=0;i sort_order); + } + + /** Converts a list of bitmaps into a HDR image, which is then tonemapped to a final RGB image. + * @param bitmaps The list of bitmaps, which should be in order of increasing brightness (exposure). + * Currently only supports a list of either 1 image, or 3 images (the 2nd should be + * at the desired exposure level for the resultant image). + * The bitmaps must all be the same resolution. + * @param release_bitmaps If true, the resultant image will be stored in one of the input bitmaps. + * The bitmaps array will be updated so that the first entry will contain + * the output bitmap. If assume_sorted is true, this will be equal to the + * input bitmaps.get( (bitmaps.size()-1) / 2). The remainder bitmaps will have + * recycle() called on them. + * If false, the resultant image is copied to output_bitmap. + * @param output_bitmap If release_bitmaps is false, the resultant image is stored in this bitmap. + * If release_bitmaps is true, this parameter is ignored. 
+ * @param assume_sorted If true, the input bitmaps should be sorted in order from darkest to brightest + * exposure. If false, the function will automatically resort. + * @param sort_cb If assume_sorted is false and this is non-null, sort_cb.sortOrder() will be + * called to indicate the sort order when this is known. + * @param hdr_alpha A value from 0.0f to 1.0f indicating the "strength" of the HDR effect. Specifically, + * this controls the level of the local contrast enhancement done in adjustHistogram(). + * @param n_tiles A value of 1 or greater indicating how local the contrast enhancement algorithm should be. + * @param ce_preserve_blacks + * If true (recommended), then we apply a modification to the contrast enhancement algorithm to avoid + * making darker pixels too dark. A value of false gives more contrast on the darker regions of the + * resultant image. + * @param tonemapping_algorithm + * Algorithm to use for tonemapping (if multiple images are received). + * @param dro_tonemapping_algorithm + * Algorithm to use for tonemapping (if single image is received). 
+ */ + public void processHDR(List bitmaps, boolean release_bitmaps, Bitmap output_bitmap, boolean assume_sorted, SortCallback sort_cb, float hdr_alpha, int n_tiles, boolean ce_preserve_blacks, TonemappingAlgorithm tonemapping_algorithm, DROTonemappingAlgorithm dro_tonemapping_algorithm) throws HDRProcessorException { + if( MyDebug.LOG ) + Log.d(TAG, "processHDR"); + if( !assume_sorted && !release_bitmaps ) { + if( MyDebug.LOG ) + Log.d(TAG, "take a copy of bitmaps array"); + // if !release_bitmaps, then we shouldn't be modifying the input bitmaps array - but if !assume_sorted, we need to sort them + // so make sure we take a copy + bitmaps = new ArrayList<>(bitmaps); + } + int n_bitmaps = bitmaps.size(); + //if( n_bitmaps != 1 && n_bitmaps != 3 && n_bitmaps != 5 && n_bitmaps != 7 ) { + if( n_bitmaps < 1 || n_bitmaps > 7 ) { + if( MyDebug.LOG ) + Log.e(TAG, "n_bitmaps not supported: " + n_bitmaps); + throw new HDRProcessorException(HDRProcessorException.INVALID_N_IMAGES); + } + for(int i=1;i sort_order = new ArrayList<>(); + sort_order.add(0); + sort_cb.sortOrder(sort_order); + } + processSingleImage(bitmaps, release_bitmaps, output_bitmap, hdr_alpha, n_tiles, ce_preserve_blacks, dro_tonemapping_algorithm); + break; + case HDRALGORITHM_STANDARD: + processHDRCore(bitmaps, release_bitmaps, output_bitmap, assume_sorted, sort_cb, hdr_alpha, n_tiles, ce_preserve_blacks, tonemapping_algorithm); + break; + default: + if( MyDebug.LOG ) + Log.e(TAG, "unknown algorithm " + algorithm); + // throw RuntimeException, as this is a programming error + throw new RuntimeException(); + } + } + + /** Creates a ResponseFunction to estimate how pixels from the in_bitmap should be adjusted to + * match the exposure level of out_bitmap. + * The supplied offsets offset_x, offset_y give the offset for in_bitmap as computed by + * autoAlignment(). 
+ */ + private ResponseFunction createFunctionFromBitmaps(int id, Bitmap in_bitmap, Bitmap out_bitmap, int offset_x, int offset_y) { + if( MyDebug.LOG ) + Log.d(TAG, "createFunctionFromBitmaps"); + List x_samples = new ArrayList<>(); + List y_samples = new ArrayList<>(); + List weights = new ArrayList<>(); + + final int n_samples_c = 100; + final int n_w_samples = (int)Math.sqrt(n_samples_c); + final int n_h_samples = n_samples_c/n_w_samples; + + double avg_in = 0.0; + double avg_out = 0.0; + for(int y=0;y= in_bitmap.getWidth() || y_coord + offset_y < 0 || y_coord + offset_y >= in_bitmap.getHeight() ) { + continue; + } + int in_col = in_bitmap.getPixel(x_coord + offset_x, y_coord + offset_y); + int out_col = out_bitmap.getPixel(x_coord, y_coord); + double in_value = averageRGB(in_col); + double out_value = averageRGB(out_col); + avg_in += in_value; + avg_out += out_value; + x_samples.add(in_value); + y_samples.add(out_value); + } + } + if( x_samples.isEmpty() ) { + Log.e(TAG, "no samples for response function!"); + // shouldn't happen, but could do with a very large offset - just make up a dummy sample + double in_value = 255.0; + double out_value = 255.0; + avg_in += in_value; + avg_out += out_value; + x_samples.add(in_value); + y_samples.add(out_value); + } + avg_in /= x_samples.size(); + avg_out /= x_samples.size(); + boolean is_dark_exposure = avg_in < avg_out; + if( MyDebug.LOG ) { + Log.d(TAG, "avg_in: " + avg_in); + Log.d(TAG, "avg_out: " + avg_out); + Log.d(TAG, "is_dark_exposure: " + is_dark_exposure); + } + { + // calculate weights + double min_value = x_samples.get(0); + double max_value = x_samples.get(0); + for(int i=1;i max_value ) + max_value = value; + } + double med_value = 0.5*(min_value + max_value); + if( MyDebug.LOG ) { + Log.d(TAG, "min_value: " + min_value); + Log.d(TAG, "max_value: " + max_value); + Log.d(TAG, "med_value: " + med_value); + } + double min_value_y = y_samples.get(0); + double max_value_y = y_samples.get(0); + for(int i=1;i 
max_value_y ) + max_value_y = value; + } + double med_value_y = 0.5*(min_value_y + max_value_y); + if( MyDebug.LOG ) { + Log.d(TAG, "min_value_y: " + min_value_y); + Log.d(TAG, "max_value_y: " + max_value_y); + Log.d(TAG, "med_value_y: " + med_value_y); + } + for(int i=0;i> 16; + int g = (color & 0xFF00) >> 8; + int b = (color & 0xFF); + return (r + g + b)/3.0; + //return 0.27*r + 0.67*g + 0.06*b; + } + + /** Core implementation of HDR algorithm. + */ + private void processHDRCore(List bitmaps, boolean release_bitmaps, Bitmap output_bitmap, boolean assume_sorted, SortCallback sort_cb, float hdr_alpha, int n_tiles, boolean ce_preserve_blacks, TonemappingAlgorithm tonemapping_algorithm) { + if( MyDebug.LOG ) + Log.d(TAG, "processHDRCore"); + + long time_s = System.currentTimeMillis(); + + int n_bitmaps = bitmaps.size(); + int width = bitmaps.get(0).getWidth(); + int height = bitmaps.get(0).getHeight(); + ResponseFunction [] response_functions = new ResponseFunction[n_bitmaps]; // ResponseFunction for each image (the ResponseFunction entry can be left null to indicate the Identity) + offsets_x = new int[n_bitmaps]; + offsets_y = new int[n_bitmaps]; + /*int [][] buffers = new int[n_bitmaps][]; + for(int i=0;i " + response_functions[i].parameter_A); + Log.d(TAG, " B: " + this_B + " -> " + response_functions[i].parameter_B); + } + } + } + + /* + // calculate average luminance by sampling + final int n_samples_c = 100; + final int n_w_samples = (int)Math.sqrt(n_samples_c); + final int n_h_samples = n_samples_c/n_w_samples; + + double sum_log_luminance = 0.0; + int count = 0; + for(int y=0;y T = M . (Vmax + C) / (Vmax . (M + C)) + // => (T/M).(M + C) = (Vmax + C) / Vmax = 1 + C/Vmax + // => C . ( T/M - 1/Vmax ) = 1 - T + // => C = (1-T) / (T/M - 1/Vmax) + // Since we want C <= 1, we must have: + // 1-T <= T/M - 1/Vmax + // => 1/Vmax <= T/M + T - 1 + // If this isn't the case, we set C to 1 (to preserve the median as close as possible). 
+ // Note that if we weren't doing the linear scaling below, this would reduce to choosing + // C = M(1-T)/T. We also tend to that as max_possible_value tends to infinity. So even though + // we only sometimes enter this case, it's important for cases where max_possible_value + // might be estimated too large (also consider that if we ever support more than 3 images, + // we'd risk having too large values). + // If T=M, then this simplifies to C = 1-M. + // I've tested that using "C = 1-M" always (and no linear scaling) also gives good results: + // much better compared to Open Camera 1.39, though not quite as good as doing both this + // and linear scaling (testHDR18, testHDR26, testHDR32 look too grey and/or bright). + final float tonemap_denom = ((float)median_target)/(float)median_brightness - (255.0f / max_possible_value); + if( MyDebug.LOG ) + Log.d(TAG, "tonemap_denom: " + tonemap_denom); + if( tonemap_denom != 0.0f ) { // just in case + tonemap_scale_c = (255.0f - median_target) / tonemap_denom; + if( MyDebug.LOG ) + Log.d(TAG, "tonemap_scale_c (before setting min): " + tonemap_scale_c); + /*if( tonemap_scale_c < 0.5f*255.0f ) { + throw new RuntimeException("tonemap_scale_c: " + tonemap_scale_c); + }*/ + // important to set a min value, see testHDR58, testHDR59, testHDR60 - at least 0.25, but 0.5 works better: + //tonemap_scale_c = Math.max(tonemap_scale_c, 0.25f*255.0f); + tonemap_scale_c = Math.max(tonemap_scale_c, 0.5f*255.0f); + } + //throw new RuntimeException(); // test + } + // Higher tonemap_scale_c values means darker results from the Reinhard tonemapping. + // Colours brighter than 255-tonemap_scale_c will be made darker, colours darker than 255-tonemap_scale_c will be made brighter + // (tonemap_scale_c==255 means therefore that colours will only be made darker). 
+ if( MyDebug.LOG ) + Log.d(TAG, "tonemap_scale_c: " + tonemap_scale_c); + + // algorithm specific parameters + float linear_scale = 0.0f; + float W = 0.0f; + switch( tonemapping_algorithm ) { + case TONEMAPALGORITHM_EXPONENTIAL: + { + // The basic algorithm is f(V) = 1 - exp( - E * V ), where V is the HDR value, E is a + // constant. This maps [0, infinity] to [0, 1]. However we have an estimate of the maximum + // possible value, Vmax, so we can set a linear scaling S so that [0, Vmax] maps to [0, 1] + // f(V) = S . (1 - exp( - E * V )) + // so 1 = S . (1 - exp( - E * Vmax )) + // => S = 1 / (1 - exp( - E * Vmax )) + // Note that Vmax should be set to a minimum of 255, else we'll make darker images brighter. + final float exposure = 1.2f; // should match setting in process_hdr.rs + linear_scale = (float)(1.0 / (1.0 - Math.exp(-exposure * max_possible_value / 255.0))); + if( MyDebug.LOG ) + Log.d(TAG, "linear_scale: " + linear_scale); + break; + } + case TONEMAPALGORITHM_REINHARD: { + // The basic algorithm is f(V) = V / (V+C), where V is the HDR value, C is tonemap_scale_c + // This was used until Open Camera 1.39, but has the problem of making images too dark: it + // maps [0, infinity] to [0, 1], but since in practice we never have very large V values, we + // won't use the full [0, 1] range. So we apply a linear scale S: + // f(V) = V.S / (V+C) + // S is chosen such that the maximum possible value, Vmax, maps to 1. So: + // 1 = Vmax . S / (Vmax + C) + // => S = (Vmax + C)/Vmax + // Note that we don't actually know the maximum HDR value, but instead we estimate it with + // max_possible_value, which gives the maximum value we'd have if even the darkest image was + // 255.0. + // Note that if max_possible_value was less than 255, we'd end up scaling a max value less than + // 1, to [0, 1], i.e., making dark images brighter, which we don't want, which is why above we + // set max_possible_value to a minimum of 255. 
In practice, this is unlikely to ever happen + // since max_possible_value is calculated as a maximum possible based on the response functions + // (as opposed to the real brightest HDR value), so even for dark photos we'd expect to have + // max_possible_value >= 255. + // Note that the original Reinhard tonemapping paper describes a non-linear scaling by (1 + CV/Vmax^2), + // though this is poorer performance (in terms of calculation time). + linear_scale = (max_possible_value + tonemap_scale_c) / max_possible_value; + if( MyDebug.LOG ) + Log.d(TAG, "linear_scale: " + linear_scale); + break; + } + case TONEMAPALGORITHM_FU2: + { + // For FU2, we have f(V) = U(EV) / U(W), where V is the HDR value, U is a function. + // We want f(Vmax) = 1, so EVmax = W + final float fu2_exposure_bias = 2.0f / 255.0f; // should match setting in process_hdr.rs + W = fu2_exposure_bias * max_possible_value; + if( MyDebug.LOG ) + Log.d(TAG, "fu2 W: " + W); + break; + } + } + + { + if( release_bitmaps ) { + output_bitmap = bitmaps.get(base_bitmap); + } + + float [] parameters_A = new float[response_functions.length]; + float [] parameters_B = new float[response_functions.length]; + for(int i=0;i bitmaps, boolean release_bitmaps, Bitmap output_bitmap, float hdr_alpha, int n_tiles, boolean ce_preserve_blacks, DROTonemappingAlgorithm dro_tonemapping_algorithm) { + if( MyDebug.LOG ) + Log.d(TAG, "processSingleImage"); + + long time_s = System.currentTimeMillis(); + + int width = bitmaps.get(0).getWidth(); + int height = bitmaps.get(0).getHeight(); + + Bitmap input_bitmap = bitmaps.get(0); + + if( release_bitmaps ) { + output_bitmap = input_bitmap; + } + + if( dro_tonemapping_algorithm == DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA ) { + // brighten? 
+ int [] histo = computeHistogram(input_bitmap, HDRProcessor.HistogramType.HISTOGRAM_TYPE_VALUE); + HistogramInfo histogramInfo = getHistogramInfo(histo); + int brightness = histogramInfo.median_brightness; + int max_brightness = histogramInfo.max_brightness; + if( MyDebug.LOG ) + Log.d(TAG, "### processSingleImage: time after computeHistogram: " + (System.currentTimeMillis() - time_s)); + if( MyDebug.LOG ) { + Log.d(TAG, "median brightness: " + brightness); + Log.d(TAG, "max brightness: " + max_brightness); + } + BrightenFactors brighten_factors = computeBrightenFactors(false, 0, 0, brightness, max_brightness); + float gain = brighten_factors.gain; + float gamma = brighten_factors.gamma; + float low_x = brighten_factors.low_x; + float mid_x = brighten_factors.mid_x; + if( MyDebug.LOG ) { + Log.d(TAG, "gain: " + gain); + Log.d(TAG, "gamma: " + gamma); + Log.d(TAG, "low_x: " + low_x); + Log.d(TAG, "mid_x: " + mid_x); + } + + if( Math.abs(gain - 1.0) > 1.0e-5 || max_brightness != 255 || Math.abs(gamma - 1.0) > 1.0e-5 ) { + if( MyDebug.LOG ) + Log.d(TAG, "apply gain/gamma"); + //if( true ) + // throw new HDRProcessorException(HDRProcessorException.UNEQUAL_SIZES); // test + + JavaImageFunctions.DROBrightenApplyFunction function = new JavaImageFunctions.DROBrightenApplyFunction(gain, gamma, low_x, mid_x, max_brightness); + JavaImageProcessing.applyFunction(function, input_bitmap, output_bitmap, 0, 0, width, height); + + // output is now the input for subsequent operations + input_bitmap = output_bitmap; + if( MyDebug.LOG ) + Log.d(TAG, "### processSingleImage: time after dro_brighten: " + (System.currentTimeMillis() - time_s)); + } + } + + adjustHistogram(input_bitmap, output_bitmap, width, height, hdr_alpha, n_tiles, ce_preserve_blacks, time_s); + + if( MyDebug.LOG ) + Log.d(TAG, "### time for processSingleImage: " + (System.currentTimeMillis() - time_s)); + } + + void brightenImage(Bitmap bitmap, int brightness, int max_brightness, int brightness_target) { + if( 
MyDebug.LOG ) { + Log.d(TAG, "brightenImage"); + Log.d(TAG, "brightness: " + brightness); + Log.d(TAG, "max_brightness: " + max_brightness); + Log.d(TAG, "brightness_target: " + brightness_target); + } + BrightenFactors brighten_factors = computeBrightenFactors(false, 0, 0, brightness, max_brightness, brightness_target, false); + float gain = brighten_factors.gain; + float gamma = brighten_factors.gamma; + float low_x = brighten_factors.low_x; + float mid_x = brighten_factors.mid_x; + if( MyDebug.LOG ) { + Log.d(TAG, "gain: " + gain); + Log.d(TAG, "gamma: " + gamma); + Log.d(TAG, "low_x: " + low_x); + Log.d(TAG, "mid_x: " + mid_x); + } + + if( Math.abs(gain - 1.0) > 1.0e-5 || max_brightness != 255 || Math.abs(gamma - 1.0) > 1.0e-5 ) { + if( MyDebug.LOG ) + Log.d(TAG, "apply gain/gamma"); + + JavaImageFunctions.DROBrightenApplyFunction function = new JavaImageFunctions.DROBrightenApplyFunction(gain, gamma, low_x, mid_x, max_brightness); + JavaImageProcessing.applyFunction(function, bitmap, bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight()); + } + } + + public static boolean sceneIsLowLight(int iso, long exposure_time) { + final int ISO_FOR_DARK = 1100; + // For Nexus 6, max reported ISO is 1196, so the limit for dark scenes shouldn't be more than this + // Nokia 8's max reported ISO is 1551 + // Note that OnePlus 3T has max reported ISO of 800, but this is a device bug + // The addition of the iso*exposure_time helps behaviour on Galaxy S10e which uses ISO >= 1600 + // far more often, even for non-dark scenes. Potentially we could drop the requirement for + // "iso >= ISO_FOR_DARK" and instead have iso*exposure_time >= 91 to 115, but we need the + // dedicated iso check for Nexus 6 (iso 1196 exposure time 1/12s should be dark) and + // Nokia 8 testAvg23 (iso 1044 exposure time 0.1s shouldn't be dark). 
+ // We also assume dark for long exposure times (which in practice is probably set in + // manual mode) - since long exposure times will give lower ISOs (e.g., on Galaxy S10e) + // (also useful for cameras where max ISO isn't as high as ISO_FOR_DARK) + //return iso >= ISO_FOR_DARK; + return ( iso >= ISO_FOR_DARK && iso*exposure_time >= 69*1000000000L ) || exposure_time >= (1000000000L/5-10000L); + } + + private int cached_avg_sample_size = 1; + + /** As part of the noise reduction process, the caller should scale the input images down by the factor returned + * by this method. This both provides a spatial smoothing, as well as improving performance and memory usage. + */ + public int getAvgSampleSize(int capture_result_iso, long capture_result_exposure_tim) { + // If changing this, may also want to change the radius of the spatial filter in avg_brighten.rs ? + //this.cached_avg_sample_size = (n_images>=8) ? 2 : 1; + this.cached_avg_sample_size = sceneIsLowLight(capture_result_iso, capture_result_exposure_tim) ? 
2 : 1; + //this.cached_avg_sample_size = 1; + //this.cached_avg_sample_size = 2; + if( MyDebug.LOG ) + Log.d(TAG, "getAvgSampleSize: " + cached_avg_sample_size); + return cached_avg_sample_size; + } + + public int getAvgSampleSize() { + return cached_avg_sample_size; + } + + public static class AvgData { + float [] pixels_rgbf_out; + Bitmap bitmap_avg_align; + Bitmap bitmap_orig; // first bitmap, need to keep until all images are processed + + AvgData(float [] pixels_rgbf_out, Bitmap bitmap_avg_align, Bitmap bitmap_orig) { + this.pixels_rgbf_out = pixels_rgbf_out; + this.bitmap_avg_align = bitmap_avg_align; + this.bitmap_orig = bitmap_orig; + } + + public void destroy() { + if( MyDebug.LOG ) + Log.d(TAG, "AvgData.destroy()"); + pixels_rgbf_out = null; + if( bitmap_avg_align != null ) { + bitmap_avg_align.recycle(); + bitmap_avg_align = null; + } + if( bitmap_orig != null ) { + bitmap_orig.recycle(); + bitmap_orig = null; + } + } + } + + /** Combines two images by averaging them. Each pixel of bitmap_avg is modified to contain: + * (avg_factor * bitmap_avg + bitmap_new)/(avg_factor+1) + * A simple average is therefore obtained by calling this function with avg_factor = 1.0f. + * For averaging multiple images, first call this function with avg_factor 1.0 for the first + * two images, then call updateAvg() for subsequent images, increasing avg_factor by 1.0 each + * time. + * The reason we do it this way (rather than just receiving a list of bitmaps) is so that we + * can average multiple images without having to keep them all in memory simultaneously. + * @param bitmap_avg One of the input images. The bitmap is recycled. + * @param bitmap_new The other input image. The bitmap is recycled. + * @param avg_factor The weighting factor for bitmap_avg. + * @param iso The ISO used to take the photos. + * @param exposure_time The exposure time used to take the photos. + * @param zoom_factor The digital zoom factor used to take the photos. 
+ */ + public AvgData processAvg(Bitmap bitmap_avg, Bitmap bitmap_new, float avg_factor, int iso, long exposure_time, float zoom_factor) throws HDRProcessorException { + if( MyDebug.LOG ) { + Log.d(TAG, "processAvg"); + Log.d(TAG, "avg_factor: " + avg_factor); + } + if( bitmap_avg.getWidth() != bitmap_new.getWidth() || + bitmap_avg.getHeight() != bitmap_new.getHeight() ) { + if( MyDebug.LOG ) { + Log.e(TAG, "bitmaps not of same resolution"); + } + throw new HDRProcessorException(HDRProcessorException.UNEQUAL_SIZES); + } + + long time_s = System.currentTimeMillis(); + + int width = bitmap_avg.getWidth(); + int height = bitmap_avg.getHeight(); + + /*final boolean use_sharpness_test = false; // disabled for now - takes about 1s extra, and no evidence this helps quality + if( use_sharpness_test ) { + float sharpness_avg = computeSharpness(allocation_avg, width, time_s); + float sharpness_new = computeSharpness(allocation_new, width, time_s); + if( sharpness_new > sharpness_avg ) { + if( MyDebug.LOG ) + Log.d(TAG, "use new image as reference"); + Allocation dummy_allocation = allocation_avg; + allocation_avg = allocation_new; + allocation_new = dummy_allocation; + Bitmap dummy_bitmap = bitmap_avg; + bitmap_avg = bitmap_new; + bitmap_new = dummy_bitmap; + sharp_index = 1; + } + else { + sharp_index = 0; + } + if( MyDebug.LOG ) + Log.d(TAG, "sharp_index: " + sharp_index); + }*/ + + /*LuminanceInfo luminanceInfo = computeMedianLuminance(bitmap_avg, 0, 0, width, height); + if( MyDebug.LOG ) + Log.d(TAG, "median: " + luminanceInfo.median_value);*/ + + AvgData avg_data = processAvgCore(null, bitmap_avg, bitmap_new, width, height, avg_factor, iso, exposure_time, zoom_factor, time_s); + + if( MyDebug.LOG ) + Log.d(TAG, "### time for processAvg: " + (System.currentTimeMillis() - time_s)); + + return avg_data; + } + + /** Combines multiple images by averaging them. See processAvg() for more details. + * @param avg_data The argument returned by processAvg(). 
+ * @param width The width of the images. + * @param height The height of the images. + * @param bitmap_new The new input image. The bitmap is recycled. + * @param avg_factor The weighting factor for bitmap_avg. + * @param iso The ISO used to take the photos. + * @param exposure_time The exposure time used to take the photos. + * @param zoom_factor The digital zoom factor used to take the photos. + */ + public void updateAvg(AvgData avg_data, int width, int height, Bitmap bitmap_new, float avg_factor, int iso, long exposure_time, float zoom_factor) throws HDRProcessorException { + if( MyDebug.LOG ) { + Log.d(TAG, "updateAvg"); + Log.d(TAG, "avg_factor: " + avg_factor); + } + if( width != bitmap_new.getWidth() || + height != bitmap_new.getHeight() ) { + if( MyDebug.LOG ) { + Log.e(TAG, "bitmaps not of same resolution"); + } + throw new HDRProcessorException(HDRProcessorException.UNEQUAL_SIZES); + } + + long time_s = System.currentTimeMillis(); + + processAvgCore(avg_data, null, bitmap_new, width, height, avg_factor, iso, exposure_time, zoom_factor, time_s); + + if( MyDebug.LOG ) + Log.d(TAG, "### time for updateAvg: " + (System.currentTimeMillis() - time_s)); + } + + /** Core algorithm for Noise Reduction algorithm. + * @param avg_data Should be null for first call, and non-null for subsequent calls. This should + * be the AvgData returned by the first call. + * @param bitmap_avg If non-null, the first bitmap (which will be recycled when the returned + * AvgData is destroyed). If null, an avg_data should be supplied. + * @param bitmap_new The new bitmap to combined. The bitmap will be recycled. + * @param width The width of the bitmaps. + * @param height The height of the bitmaps. + * @param avg_factor The averaging factor. + * @param iso The ISO used for the photos. + * @param zoom_factor The digital zoom factor used to take the photos. + * @param time_s Time, for debugging. 
 */
    private AvgData processAvgCore(AvgData avg_data, Bitmap bitmap_avg, Bitmap bitmap_new, int width, int height, float avg_factor, int iso, long exposure_time, float zoom_factor, long time_s) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "processAvgCore");
            Log.d(TAG, "iso: " + iso);
            Log.d(TAG, "zoom_factor: " + zoom_factor);
        }

        float [] pixels_rgbf_out = null; // running floating-point RGB accumulator, carried across calls
        Bitmap bitmap_avg_align = null; // if non-null, use this bitmap for alignment for averaged image.
        Bitmap bitmap_orig = null; // if non-null, this is a bitmap representing the first image.
        if( avg_data != null ) {
            pixels_rgbf_out = avg_data.pixels_rgbf_out;
            bitmap_avg_align = avg_data.bitmap_avg_align;
            bitmap_orig = avg_data.bitmap_orig;
        }

        offsets_x = new int[2];
        offsets_y = new int[2];
        // Exactly one of bitmap_avg (first call) or pixels_rgbf_out (subsequent calls) must be set.
        boolean floating_point;
        if( bitmap_avg != null && pixels_rgbf_out == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "process first bitmap");
            floating_point = false;
        }
        else if( bitmap_avg == null && pixels_rgbf_out != null ) {
            floating_point = true;
            if( MyDebug.LOG )
                Log.d(TAG, "processing existing result");
        }
        else {
            throw new RuntimeException("only one of bitmap_avg or pixels_rgbf_out should be supplied");
        }

        {
            // perform auto-alignment
            // NOTE(review): generic parameter lost in extraction; List<Bitmap> inferred from usage
            // (Bitmaps are added below) - confirm against upstream
            List<Bitmap> align_bitmaps = new ArrayList<>();
            Bitmap bitmap_new_align;
            int alignment_width;
            int alignment_height;
            int full_alignment_width = width;
            int full_alignment_height = height;

            //final int scale_align_size = 2;
            //final int scale_align_size = 4;
            //final int scale_align_size = Math.max(4 / this.cached_avg_sample_size, 1);
            final int scale_align_size = (zoom_factor > 3.9f) ?
                    1 :
                    Math.max(4 / this.getAvgSampleSize(iso, exposure_time), 1);
            if( MyDebug.LOG )
                Log.d(TAG, "scale_align_size: " + scale_align_size);
            boolean crop_to_centre = true;
            {
                // use scaled down and/or cropped bitmaps for alignment
                if( MyDebug.LOG )
                    Log.d(TAG, "### time before creating bitmaps for autoalignment: " + (System.currentTimeMillis() - time_s));
                Matrix align_scale_matrix = new Matrix();
                align_scale_matrix.postScale(1.0f/scale_align_size, 1.0f/scale_align_size);
                full_alignment_width /= scale_align_size;
                full_alignment_height /= scale_align_size;

                final boolean full_align = false; // whether alignment images should be created as being cropped to the centre
                //final boolean full_align = true; // whether alignment images should be created as being cropped to the centre
                int align_width = width;
                int align_height = height;
                int align_x = 0;
                int align_y = 0;
                if( !full_align ) {
                    // need to use /2 rather than /4 to prevent misalignment in testAvg26
                    //align_width = width/4;
                    //align_height = height/4;
                    align_width = width/2;
                    align_height = height/2;
                    align_x = (width - align_width)/2;
                    align_y = (height - align_height)/2;
                    crop_to_centre = false; // no need to crop in autoAlignment, as we're cropping here
                }

                final boolean filter_align = false;
                //final boolean filter_align = true;
                if( bitmap_avg_align == null ) {
                    // only needed on the first call; reused from AvgData on later calls
                    bitmap_avg_align = Bitmap.createBitmap(bitmap_avg, align_x, align_y, align_width, align_height, align_scale_matrix, filter_align);
                    if( MyDebug.LOG )
                        Log.d(TAG, "### time after creating avg bitmap for autoalignment: " + (System.currentTimeMillis() - time_s));
                }
                bitmap_new_align = Bitmap.createBitmap(bitmap_new, align_x, align_y, align_width, align_height, align_scale_matrix, filter_align);

                alignment_width = bitmap_new_align.getWidth();
                alignment_height = bitmap_new_align.getHeight();

                align_bitmaps.add(bitmap_avg_align);
                align_bitmaps.add(bitmap_new_align);
                if( MyDebug.LOG )
                    Log.d(TAG, "### time after creating bitmaps for autoalignment: " + (System.currentTimeMillis() - time_s));
            }

            // misalignment more likely in "dark" images with more images and/or longer exposures
            // using max_align_scale=2 needed to prevent misalignment in testAvg51; also helps testAvg14
            boolean wider = sceneIsLowLight(iso, exposure_time);
            autoAlignment(offsets_x, offsets_y, alignment_width, alignment_height, align_bitmaps, 0, true, null, false, 1, crop_to_centre, wider ? 2 : 1, full_alignment_width, full_alignment_height, time_s);

            {
                // NOTE(review): a passage was lost in extraction here; reconstructed per the
                // upstream algorithm (offsets computed on the downscaled alignment bitmaps are
                // scaled back up to full resolution) - TODO confirm against upstream HDRProcessor
                for(int i=0;i<align_bitmaps.size();i++) {
                    offsets_x[i] *= scale_align_size;
                    offsets_y[i] *= scale_align_size;
                }
            }
        }

        // Wiener filter parameters, tuned by ISO: higher ISO means noisier input, so allow
        // stronger averaging (larger wiener_C).
        // NOTE(review): the declarations of wiener_cutoff_factor/limited_iso were in the lost
        // passage above; reconstructed from their uses below - TODO confirm against upstream
        float wiener_cutoff_factor = 1.0f;
        int limited_iso = Math.min(iso, 400);
        if( iso >= 700 ) {
            // helps reduce speckles in testAvg17, testAvg23, testAvg33, testAvg36, testAvg38
            // using this level for testAvg31 (ISO 609) would increase ghosting
            //limited_iso = 500;
            limited_iso = 800;
            if( iso >= 1100 ) {
                // helps further reduce speckles in testAvg17, testAvg38
                // but don't do for iso >= 700 as makes "vicks" text in testAvg23 slightly more blurred
                wiener_cutoff_factor = 8.0f;
            }
        }
        limited_iso = Math.max(limited_iso, 100);
        float wiener_C = 10.0f * limited_iso;
        //float wiener_C = 1000.0f;
        //float wiener_C = 4000.0f;

        // Tapering the wiener scale means that we do more averaging for earlier images in the stack, the
        // logic being we'll have more chance of ghosting or misalignment with later images.
        // This helps: testAvg31, testAvg33.
        // Also slightly helps testAvg17, testAvg23 (slightly less white speckle on tv), testAvg28
        // (one less white speckle on face).
        // Note that too much tapering risks increasing ghosting in testAvg26, testAvg39.
        float tapered_wiener_scale = 1.0f - (float)Math.pow(0.5, avg_factor);
        if( MyDebug.LOG ) {
            Log.d(TAG, "avg_factor: " + avg_factor);
            Log.d(TAG, "tapered_wiener_scale: " + tapered_wiener_scale);
        }
        wiener_C /= tapered_wiener_scale;

        float wiener_C_cutoff = wiener_cutoff_factor * wiener_C;
        if( MyDebug.LOG ) {
            Log.d(TAG, "wiener_C: " + wiener_C);
            Log.d(TAG, "wiener_cutoff_factor: " + wiener_cutoff_factor);
        }

        if( bitmap_orig == null ) {
            // bitmap_orig should only be null for the first pair of images, which should be when we
            // have the avg image not in floating point format
            if( floating_point ) {
                throw new RuntimeException("is in floating point mode, but no bitmap_orig supplied");
            }
            bitmap_orig = bitmap_avg;
        }

        /*if( !floating_point )*/ {
            if( pixels_rgbf_out == null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "need to create pixels_rgbf_out");
                // 3 floats (R,G,B) per pixel
                pixels_rgbf_out = new float[3*width*height];
                if( MyDebug.LOG )
                    Log.d(TAG, "### time after create pixels_rgbf_out: " + (System.currentTimeMillis() - time_s));
            }
            // Blend the new frame into the accumulator, using offsets from auto-alignment and the
            // Wiener weights computed above.
            JavaImageFunctions.AvgApplyFunction function = new JavaImageFunctions.AvgApplyFunction(pixels_rgbf_out, bitmap_new, bitmap_orig, width, height, offsets_x[1], offsets_y[1], avg_factor, wiener_C, wiener_C_cutoff);
            JavaImageProcessing.applyFunction(function, bitmap_avg, null, 0, 0, width, height);
            if( MyDebug.LOG )
                Log.d(TAG, "### time after AvgApplyFunction: " + (System.currentTimeMillis() - time_s));
        }

        // N.B., we don't recycle bitmap_avg (if non-null), we need to use when processing later iterations.
        // Instead the first bitmap is recycled in
        // AvgData.destroy(). Also note that if we did recycle bitmap_avg, we get a native crash in
        // process_avg.rs when reading from bitmap_orig, but only on some devices (crash can be
        // reproduced on Android 11 with Android emulator, e.g., running testTakePhotoNR).
        /*if( bitmap_avg != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "release bitmap_avg");
            bitmap_avg.recycle();
            bitmap_avg = null;
        }*/
        if( bitmap_new != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "release bitmap_new");
            bitmap_new.recycle();
            //noinspection UnusedAssignment
            bitmap_new = null;
        }

        if( MyDebug.LOG )
            Log.d(TAG, "### time for processAvgCore: " + (System.currentTimeMillis() - time_s));
        return new AvgData(pixels_rgbf_out, bitmap_avg_align, bitmap_avg);
    }

    // Convenience overload: delegates to the full autoAlignment() with default options
    // (assume_sorted=true, no sort callback, min_step_size=1, no centre crop).
    // NOTE(review): generic parameter lost in extraction; List<Bitmap> inferred - confirm
    void autoAlignment(int[] offsets_x, int[] offsets_y, int width, int height, List<Bitmap> bitmaps, int base_bitmap, boolean use_mtb, int max_align_scale) {
        if( MyDebug.LOG )
            Log.d(TAG, "autoAlignment");
        autoAlignment(offsets_x, offsets_y, width, height, bitmaps, base_bitmap, true, null, use_mtb, 1, false, max_align_scale, width, height, 0);
    }

    /** Result of the brightness analysis performed during auto-alignment. */
    static class BrightnessDetails {
        final int median_brightness; // median brightness value of the median image

        BrightnessDetails(int median_brightness) {
            this.median_brightness = median_brightness;
        }
    }

    /**
     *
     * @param bitmaps Bitmaps to align.
     * @param base_bitmap Index of bitmap in bitmaps that should be kept fixed; the other bitmaps
     *                    will be aligned relative to this.
     * @param assume_sorted If assume_sorted if false, and use_mtb is true, this function will also
     *                      sort the bitmaps from darkest to brightest.
     * @param use_mtb Whether to align based on the median threshold bitmaps or not.
     * @param max_align_scale If larger than 1, start from a larger start area.
+ */ + private BrightnessDetails autoAlignment(int [] offsets_x, int [] offsets_y, int width, int height, List bitmaps, int base_bitmap, boolean assume_sorted, SortCallback sort_cb, boolean use_mtb, int min_step_size, boolean crop_to_centre, int max_align_scale, int full_width, int full_height, long time_s) { + if( MyDebug.LOG ) { + Log.d(TAG, "autoAlignment"); + Log.d(TAG, "width: " + width); + Log.d(TAG, "height: " + height); + Log.d(TAG, "use_mtb: " + use_mtb); + Log.d(TAG, "max_align_scale: " + max_align_scale); + Log.d(TAG, "bitmaps: " + bitmaps.size()); + } + + int n_images = bitmaps.size(); + if( bitmaps.size() != offsets_x.length ) { + throw new RuntimeException("unequal bitmaps and offsets_x lengths"); + } + else if( bitmaps.size() != offsets_y.length ) { + throw new RuntimeException("unequal bitmaps and offsets_y lengths"); + } + + // initialise + for(int i=0;i bitmapInfos = new ArrayList<>(bitmaps.size()); + for(int i=0;i() { + @Override + public int compare(BitmapInfo o1, BitmapInfo o2) { + // important to use the code in LuminanceInfo.compareTo(), as that's also tested via the unit test + // sortLuminanceInfo() + return o1.luminanceInfo.compareTo(o2.luminanceInfo); + } + }); + bitmaps.clear(); + for(int i=0;i sort_order = new ArrayList<>(); + for(int i=0;i 255-(min_diff_c+1) ) { + throw new RuntimeException("image " + i + " has median_value: " + median_value); // test + }*/ + median_value = Math.max(median_value, min_diff_c+1); + median_value = Math.min(median_value, 255-(min_diff_c+1)); + if( MyDebug.LOG ) + Log.d(TAG, i + ": median_value is now: " + median_value); + + { + Bitmap output_mtb_bitmap = Bitmap.createBitmap(mtb_width, mtb_height, Bitmap.Config.ALPHA_8); + JavaImageFunctions.CreateMTBApplyFunction function = new JavaImageFunctions.CreateMTBApplyFunction(use_mtb, median_value); + JavaImageProcessing.applyFunction(function, bitmaps.get(i), output_mtb_bitmap, mtb_x, mtb_y, mtb_x+mtb_width, mtb_y+mtb_height, 0, 0); + if( MyDebug.LOG ) + 
Log.d(TAG, "### time after CreateMTBApplyFunction: " + (System.currentTimeMillis() - time_s)); + mtb_bitmaps[i] = output_mtb_bitmap; + } + } + + // The initial step_size N should be a power of 2; the maximum offset we can achieve by the algorithm is N-1. + // For pictures resolution 4160x3120, this gives max_ideal_size 27, and initial_step_size 32. + // On tests testHDR1 to testHDR35, the max required offset was 24 pixels (for testHDR33) even when using + // inital_step_size of 64. + // Note for old renderscript codepath: there isn't really a performance cost in allowing higher initial step sizes (as larger sizes have less + // sampling - since we sample every step_size pixels - though there might be some overhead for every extra call + // to renderscript that we do). But high step sizes have a risk of producing really bad results if we were + // to misidentify cases as needing a large offset. + int max_dim = Math.max(full_width, full_height); // n.b., use the full width and height here, not the mtb_width, height + //int max_ideal_size = max_dim / (wider ? 
75 : 150); + int max_ideal_size = (max_align_scale * max_dim) / 150; + int initial_step_size = 1; + while( initial_step_size < max_ideal_size ) { + initial_step_size *= 2; + } + //initial_step_size = 64; + if( MyDebug.LOG ) { + Log.d(TAG, "max_dim: " + max_dim); + Log.d(TAG, "max_ideal_size: " + max_ideal_size); + Log.d(TAG, "initial_step_size: " + initial_step_size); + } + + if( mtb_bitmaps != null && mtb_bitmaps[base_bitmap] == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "base image not suitable for image alignment"); + for(int i=0;i min_step_size ) { + step_size /= 2; + int pixel_step_size = step_size * pixel_step; + if( pixel_step_size > mtb_width || pixel_step_size > mtb_height ) + pixel_step_size = step_size; + + if( MyDebug.LOG ) { + Log.d(TAG, "call align for image: " + i); + Log.d(TAG, " versus base image: " + base_bitmap); + Log.d(TAG, "step_size: " + step_size); + Log.d(TAG, "pixel_step_size: " + pixel_step_size); + } + + final boolean use_pyramid = false; + //final boolean use_pyramid = true; + + int stop_x, stop_y; + if( use_pyramid ) { + stop_x = mtb_width; + stop_y = mtb_height; + } + else { + // see note inside align_mtb.rs/align_mtb() for why we sample over a subset of the image + stop_x = mtb_width/pixel_step_size; + stop_y = mtb_height/pixel_step_size; + } + if( MyDebug.LOG ) { + Log.d(TAG, "stop_x: " + stop_x); + Log.d(TAG, "stop_y: " + stop_y); + } + + int [] errors; + + { + JavaImageFunctions.AlignMTBApplyFunction function = new JavaImageFunctions.AlignMTBApplyFunction(use_mtb, mtb_bitmaps[base_bitmap], mtb_bitmaps[i], offsets_x[i], offsets_y[i], pixel_step_size); + JavaImageProcessing.applyFunction(function, null, null, 0, 0, stop_x, stop_y); + if( MyDebug.LOG ) + Log.d(TAG, "### time after AlignMTBApplyFunction: " + (System.currentTimeMillis() - time_s)); + errors = function.getErrors(); + } + + int best_error = -1; + int best_id = -1; + for(int j=0;j<9;j++) { + int this_error = errors[j]; + if( MyDebug.LOG ) + Log.d(TAG, " errors[" + j + "]: 
" + this_error); + if( best_id==-1 || this_error < best_error ) { + best_error = this_error; + best_id = j; + } + } + if( MyDebug.LOG ) + Log.d(TAG, " best_id " + best_id + " error: " + best_error); + if( best_error >= 2000000000 ) { + Log.e(TAG, " auto-alignment failed due to overflow"); + // hitting overflow means behaviour will be unstable under SMP, and auto-alignment won't be reliable anyway + best_id = 4; // default to centre + if( is_test ) { + throw new RuntimeException(); + } + } + /*if( best_id != 4 ) { + int this_off_x = best_id % 3; + int this_off_y = best_id/3; + this_off_x--; + this_off_y--; + for(int j=0;j<9;j++) { + int that_off_x = j % 3; + int that_off_y = j/3; + that_off_x--; + that_off_y--; + if( this_off_x * that_off_x == -1 || this_off_y * that_off_y == -1 ) { + float diff = ((float)(best_error - errors[j]))/(float)errors[j]; + if( MyDebug.LOG ) + Log.d(TAG, " opposite errors[" + j + "] diff: " + diff); + if( Math.abs(diff) <= 0.02f ) { + if( MyDebug.LOG ) + Log.d(TAG, " reject auto-alignment"); + best_id = 4; + break; + } + } + } + }*/ + if( best_id != -1 ) { + int this_off_x = best_id % 3; + int this_off_y = best_id/3; + this_off_x--; + this_off_y--; + if( MyDebug.LOG ) { + Log.d(TAG, "this_off_x: " + this_off_x); + Log.d(TAG, "this_off_y: " + this_off_y); + } + offsets_x[i] += this_off_x * step_size; + offsets_y[i] += this_off_y * step_size; + if( MyDebug.LOG ) { + Log.d(TAG, "offsets_x is now: " + offsets_x[i]); + Log.d(TAG, "offsets_y is now: " + offsets_y[i]); + } + /*if( wider && step_size == initial_step_size/2 && (this_off_x != 0 || this_off_y != 0 ) ) { + throw new RuntimeException(); // test + }*/ + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "resultant offsets for image: " + i); + Log.d(TAG, "resultant offsets_x: " + offsets_x[i]); + Log.d(TAG, "resultant offsets_y: " + offsets_y[i]); + } + } + + /*for(int i=0;i { + final int min_value; + final int median_value; + final int hi_value; + final boolean noisy; + + public LuminanceInfo(int 
min_value, int median_value, int hi_value, boolean noisy) {
            this.min_value = min_value;
            this.median_value = median_value;
            this.hi_value = hi_value;
            this.noisy = noisy;
        }

        @Override
        @NonNull
        public String toString() {
            return "min: " + min_value + " , median: " + median_value + " , hi: " + hi_value + " , noisy: " + noisy;
        }

        @Override
        public int compareTo(LuminanceInfo o) {
            // Order primarily by median luminance; ties broken by min then hi values so that
            // images with equal medians still get a deterministic ordering.
            int value = this.median_value - o.median_value;
            if( value == 0 ) {
                // fall back to using min_value
                value = this.min_value - o.min_value;
            }
            if( value == 0 ) {
                // fall back to using hi_value
                value = this.hi_value - o.hi_value;
            }
            return value;
        }
    }

    /** Computes luminance statistics (min, median and a high percentile) over a sparse grid of
     *  sample pixels within the supplied region of the bitmap.
     */
    private LuminanceInfo computeMedianLuminance(Bitmap bitmap, int mtb_x, int mtb_y, int mtb_width, int mtb_height) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "computeMedianLuminance");
            Log.d(TAG, "mtb_x: " + mtb_x);
            Log.d(TAG, "mtb_y: " + mtb_y);
            Log.d(TAG, "mtb_width: " + mtb_width);
            Log.d(TAG, "mtb_height: " + mtb_height);
        }
        // sample on a sparse ~10x10 grid rather than reading every pixel, for speed
        final int n_samples_c = 100;
        final int n_w_samples = (int)Math.sqrt(n_samples_c);
        final int n_h_samples = n_samples_c/n_w_samples;

        int [] histo = new int[256];
        for(int i=0;i<256;i++)
            histo[i] = 0;
        int total = 0;
        //double sum_log_luminance = 0.0;
        // NOTE(review): the interior of this sampling loop was lost in extraction; reconstructed
        // per the upstream algorithm (evenly spaced sample coordinates within the region,
        // luminance = max(r,g,b)) - TODO confirm against upstream HDRProcessor
        for(int y=0;y<n_h_samples;y++) {
            double alpha = ((double)y+1.0) / ((double)n_h_samples+1.0);
            int y_coord = mtb_y + (int)(alpha * mtb_height);
            for(int x=0;x<n_w_samples;x++) {
                double beta = ((double)x+1.0) / ((double)n_w_samples+1.0);
                int x_coord = mtb_x + (int)(beta * mtb_width);
                int color = bitmap.getPixel(x_coord, y_coord);
                int r = (color & 0xFF0000) >> 16;
                int g = (color & 0xFF00) >> 8;
                int b = (color & 0xFF);
                int luminance = Math.max(r, g);
                luminance = Math.max(luminance, b);
                histo[luminance]++;
                //sum_log_luminance += Math.log(luminance+1.0); // add 1 so we don't take log of 0...;
                total++;
            }
        }
        /*float avg_luminance = (float)(Math.exp( sum_log_luminance / total ));
        if( MyDebug.LOG )
            Log.d(TAG, "avg_luminance: " + avg_luminance);*/
        int middle = total/2;
        int count = 0;
        boolean noisy = false;
        int min_value = -1, hi_value = -1;
        // first count backwards to get hi_value
        for(int i=255;i>=0;i--) {
            /*if( histo[i] > 0 ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "max luminance " + i);
                max_value = i;
                break;
            }*/
            count += histo[i];
            if( count >= total/10 ) {
                // hi_value = brightness of the top 10% of samples
                if( MyDebug.LOG )
                    Log.d(TAG, "hi luminance " + i);
                hi_value = i;
                break;
            }
        }

        // then count forwards to get min and median values
        count = 0;
        for(int i=0;i<256;i++) {
            count += histo[i];
            if( min_value == -1 && histo[i] > 0 ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "min luminance " + i);
                min_value = i;
            }
            if( count >= middle ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "median luminance " + i);
                // classify the image as "noisy" if too few samples are clearly below the median,
                // i.e., the histogram is crushed into a narrow dark band
                final int noise_threshold = 4;
                int n_below = 0, n_above = 0;
                for(int j=0;j<=i-noise_threshold;j++) {
                    n_below += histo[j];
                }
                for(int j=0;j<=i+noise_threshold && j<256;j++) {
                    n_above += histo[j];
                }
                double frac_below = n_below / (double)total;
                if( MyDebug.LOG ) {
                    double frac_above = 1.0 - n_above / (double)total;
                    Log.d(TAG, "count: " + count);
                    Log.d(TAG, "n_below: " + n_below);
                    Log.d(TAG, "n_above: " + n_above);
                    Log.d(TAG, "frac_below: " + frac_below);
                    Log.d(TAG, "frac_above: " + frac_above);
                }
                if( frac_below < 0.2 ) {
                    // needed for testHDR2, testHDR28
                    // note that we don't exclude cases where frac_above is too small, as this could be an overexposed image - see testHDR31
                    if( MyDebug.LOG )
                        Log.d(TAG, "too dark/noisy");
                    noisy = true;
                }
                return new LuminanceInfo(min_value, i, hi_value, noisy);
            }
        }
        // should be unreachable when total > 0; return a safe fallback marked as noisy
        Log.e(TAG, "computeMedianLuminance failed");
        return new LuminanceInfo(min_value, 127, hi_value, true);
    }

    /** Clips the histogram, for Contrast Limited AHE algorithm.
     * @param histogram Histogram to modify (length 256).
     * @param temp_c_histogram Temporary workspace (length 256).
     * @param sub_width Width of the region being processed.
     * @param sub_height Height of the region being processed.
 */
    void clipHistogram(int [] histogram, int [] temp_c_histogram, int sub_width, int sub_height, boolean ce_preserve_blacks) {
        int n_pixels = sub_width * sub_height;
        // initial clip limit: 5x the mean bucket count
        int clip_limit = (5 * n_pixels) / 256;
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "clip_limit: " + clip_limit);
            Log.d(TAG, " relative clip limit: " + clip_limit*256.0f/n_pixels);
        }*/
        {
            // find real clip limit: binary search for the limit where the clipped excess
            // roughly matches what redistribution can absorb
            int bottom = 0, top = clip_limit;
            while( top - bottom > 1 ) {
                int middle = (top + bottom)/2;
                int sum = 0;
                for(int x=0;x<256;x++) {
                    if( histogram[x] > middle ) {
                        sum += (histogram[x] - clip_limit);
                    }
                }
                if( sum > (clip_limit - middle) * 256 )
                    top = middle;
                else
                    bottom = middle;
            }
            clip_limit = (top + bottom)/2;
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "updated clip_limit: " + clip_limit);
                Log.d(TAG, " relative updated clip limit: " + clip_limit*256.0f/n_pixels);
            }*/
        }
        // clip each bucket at the limit, counting the clipped excess...
        int n_clipped = 0;
        for(int x=0;x<256;x++) {
            if( histogram[x] > clip_limit ) {
                /*if( MyDebug.LOG ) {
                    Log.d(TAG, " " + x + " : " + histogram[x] + " : " + (histogram[x]*256.0f/n_pixels));
                }*/
                n_clipped += (histogram[x] - clip_limit);
                histogram[x] = clip_limit;
            }
        }
        // ...then redistribute the excess evenly over all buckets
        int n_clipped_per_bucket = n_clipped / 256;
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "n_clipped: " + n_clipped);
            Log.d(TAG, "n_clipped_per_bucket: " + n_clipped_per_bucket);
        }*/
        for(int x=0;x<256;x++) {
            histogram[x] += n_clipped_per_bucket;
        }

        if( ce_preserve_blacks ) {
            // This helps tests such as testHDR52, testHDR57, testAvg26, testAvg30
            // The basic idea is that we want to avoid making darker pixels darker (by too
            // much). We do this by adjusting the histogram:
            // * We can set a minimum value of each histogram value. E.g., if we set all
            //   pixels up to a certain brightness to a value equal to n_pixels/256, then
            //   we prevent those pixels from being made darker. In practice, we choose
            //   a tapered minimum, starting at (n_pixels/256) for black pixels, linearly
            //   interpolating to no minimum at brightness 128 (dark_threshold_c).
            // * For any adjusted value of the histogram, we redistribute, by reducing
            //   the histogram values of brighter pixels with values larger than (n_pixels/256),
            //   reducing them to a minimum of (n_pixels/256).
            // * Lastly, we only modify a given histogram value if pixels of that brightness
            //   would be made darker by the CLAHE algorithm. We can do this by looking at
            //   the cumulative histogram (as computed before modifying any values).
            /*if( MyDebug.LOG ) {
                for(int x=0;x<256;x++) {
                    Log.d(TAG, "pre-brighten histogram[" + x + "] = " + histogram[x]);
                }
            }*/

            temp_c_histogram[0] = histogram[0];
            for(int x=1;x<256;x++) {
                temp_c_histogram[x] = temp_c_histogram[x-1] + histogram[x];
            }

            // avoid making pixels too dark
            int equal_limit = n_pixels / 256;
            if( MyDebug.LOG )
                Log.d(TAG, "equal_limit: " + equal_limit);
            //final int dark_threshold_c = 64;
            final int dark_threshold_c = 128;
            //final int dark_threshold_c = 256;
            // NOTE(review): this loop header and the cumulative-histogram test were lost in
            // extraction; reconstructed per the upstream algorithm - TODO confirm
            for(int x=0;x<dark_threshold_c;x++) {
                int c_equal_limit = (x+1) * equal_limit;
                if( temp_c_histogram[x] >= c_equal_limit ) {
                    continue;
                }
                // tapered minimum: full weight at brightness 0, none at dark_threshold_c
                float alpha = 1.0f - ((float)x)/((float)dark_threshold_c);
                //float alpha = 1.0f - ((float)x)/256.0f;
                int limit = (int)(alpha * equal_limit);
                //int limit = equal_limit;
                /*if( MyDebug.LOG )
                    Log.d(TAG, "x: " + x + " ; limit: " + limit);*/
                /*histogram[x] = Math.max(histogram[x], limit);
                if( MyDebug.LOG )
                    Log.d(TAG, " histogram pulled up to: " + histogram[x]);*/
                if( histogram[x] < limit ) {
                    // top up by redistributing later values
                    for(int y=x+1;y<256 && histogram[x] < limit;y++) {
                        if( histogram[y] > equal_limit ) {
                            int move = histogram[y] - equal_limit;
                            move = Math.min(move, limit - histogram[x]);
                            histogram[x] += move;
                            histogram[y] -= move;
                        }
                    }
                    /*if( MyDebug.LOG )
                        Log.d(TAG, " histogram pulled up to: " + histogram[x]);*/
                    /*if( temp_c_histogram[x] >= c_equal_limit )
throw new RuntimeException(); // test*/ + } + } + } + } + + void adjustHistogram(Bitmap bitmap_in, Bitmap bitmap_out, int width, int height, float hdr_alpha, int n_tiles, boolean ce_preserve_blacks, long time_s) { + if( MyDebug.LOG ) + Log.d(TAG, "adjustHistogram [bitmap]"); + + //final boolean adjust_histogram_local = false; + final boolean adjust_histogram_local = true; + + if( adjust_histogram_local ) { + // Contrast Limited Adaptive Histogram Equalisation (CLAHE) + // Note we don't fully equalise the histogram, rather the resultant image is the mid-point of the non-equalised and fully-equalised images + // See https://en.wikipedia.org/wiki/Adaptive_histogram_equalization#Contrast_Limited_AHE + // Also see "Adaptive Histogram Equalization and its Variations" ( http://www.cs.unc.edu/Research/MIDAG/pubs/papers/Adaptive%20Histogram%20Equalization%20and%20Its%20Variations.pdf ), + // Pizer, Amburn, Austin, Cromartie, Geselowitz, Greer, ter Haar Romeny, Zimmerman, Zuiderveld (1987). + // Also note that if ce_preserve_blacks is true, we apply a modification to this algorithm, see below. + + if( MyDebug.LOG ) + Log.d(TAG, "time before creating histograms: " + (System.currentTimeMillis() - time_s)); + // create histograms + + //final int n_tiles_c = 8; + //final int n_tiles_c = 4; + //final int n_tiles_c = 1; + int [] c_histogram = new int[n_tiles*n_tiles*256]; + int [] temp_c_histogram = new int[256]; + for(int i=0;i= middle && median_brightness == -1 ) { + median_brightness = i; + } + if( histo[i] > 0 ) { + max_brightness = i; + } + } + int mean_brightness = (int)(sum_brightness/count + 0.1); + + return new HistogramInfo(total, mean_brightness, median_brightness, max_brightness); + } + + private static int getBrightnessTarget(int brightness, float max_gain_factor, int ideal_brightness) { + if( brightness > 0 ) { + // At least try to achieve a minimum brightness. 
// Increasing max_gain_factor helps the following tests significantly: testAvg12, testAvg14, testAvg15,
            // testAvg28, testAvg31, testAvg32.
            // Other tests also helped to a lesser degree are: testAvg1, testAvg5, testAvg6, testAvg40, testAvg41,
            // testAvg42, testHDR1, testHDR1_exp5, testHDR11 (DRO example), testHDR20 (DRO example), testHDR28 (DRO example),
            // testHDR48, testHDR49, testHDR49_exp5, testHDR53.
            // We need to be careful of increasing max_gain_factor too high in some cases - for AvgTests, see comment in
            // computeBrightenFactors() for examples of tests that would be affected.

            final float min_brightness_c = 42.0f;
            float min_max_gain_factor = min_brightness_c / brightness;
            max_gain_factor = Math.max(max_gain_factor, min_max_gain_factor);

            // still set some maximum max_gain_factor - highest max_gain_factor in tests is
            // testAvg14 with max_gain_factor=14.0, which benefits from this, but some parts starting
            // to look overblown
            max_gain_factor = Math.min(max_gain_factor, 15.0f);
        }

        // guard against division/multiplication with a non-positive brightness
        if( brightness <= 0 )
            brightness = 1;
        if( MyDebug.LOG ) {
            Log.d(TAG, "brightness: " + brightness);
            Log.d(TAG, "max_gain_factor: " + max_gain_factor);
            Log.d(TAG, "ideal_brightness: " + ideal_brightness);
        }
        int median_target = Math.min(ideal_brightness, (int)(max_gain_factor*brightness));
        return Math.max(brightness, median_target); // don't make darker
    }

    /** Holds the parameters of the piecewise gain/gamma brightening curve computed by
     *  computeBrightenFactors().
     */
    public static class BrightenFactors {
        public final float gain;
        public final float low_x;
        public final float mid_x;
        public final float gamma;

        BrightenFactors(float gain, float low_x, float mid_x, float gamma) {
            this.gain = gain;
            this.low_x = low_x;
            this.mid_x = mid_x;
            this.gamma = gamma;
        }
    }

    /** Computes various factors used in the avg_brighten.rs script.
+ */ + public static BrightenFactors computeBrightenFactors(boolean has_iso_exposure, int iso, long exposure_time, int brightness, int max_brightness) { + // For outdoor/bright images, don't want max_gain_factor 4, otherwise we lose variation in grass colour in testAvg42 + // and having max_gain_factor at 1.5 prevents testAvg43, testAvg44 being too bright and oversaturated + // for other images, we also don't want max_gain_factor 4, as makes cases too bright and overblown if it would + // take the max_possible_value over 255. Especially testAvg46, but also testAvg25, testAvg31, testAvg38, + // testAvg39. + // Note however that we now do allow increasing the max_gain_factor in getBrightnessTarget(), depending on + // brightness levels. + float max_gain_factor = 1.5f; + int ideal_brightness = 119; + if( has_iso_exposure && iso < 1100 && exposure_time < 1000000000L/59 ) { + // this helps: testAvg12, testAvg21, testAvg35 + // but note we don't want to treat the following as "bright": testAvg17, testAvg23, testAvg36, testAvg37, testAvg50 + ideal_brightness = 199; + } + int brightness_target = getBrightnessTarget(brightness, max_gain_factor, ideal_brightness); + //int max_target = Math.min(255, (int)((max_brightness*brightness_target)/(float)brightness + 0.5f) ); + if( MyDebug.LOG ) { + Log.d(TAG, "brightness: " + brightness); + Log.d(TAG, "max_brightness: " + max_brightness); + Log.d(TAG, "ideal_brightness: " + ideal_brightness); + Log.d(TAG, "brightness target: " + brightness_target); + //Log.d(TAG, "max target: " + max_target); + } + + return computeBrightenFactors(has_iso_exposure, iso, exposure_time, brightness, max_brightness, brightness_target, true); + } + + /** Computes various factors used in the avg_brighten.rs script. 
 */
    // Private overload: additionally takes the precomputed brightness_target and a flag for
    // whether the curve is only ever allowed to brighten (gain clamped to >= 1).
    private static BrightenFactors computeBrightenFactors(boolean has_iso_exposure, int iso, long exposure_time, int brightness, int max_brightness, int brightness_target, boolean brighten_only) {
        /* We use a combination of gain and gamma to brighten images if required. Gain works best for
         * dark images (e.g., see testAvg8), gamma works better for bright images (e.g., testAvg12).
         */
        // guard against division by zero below
        if( brightness <= 0 )
            brightness = 1;
        float gain = brightness_target / (float)brightness;
        if( MyDebug.LOG )
            Log.d(TAG, "gain " + gain);
        if( gain < 1.0f && brighten_only ) {
            gain = 1.0f;
            if( MyDebug.LOG ) {
                Log.d(TAG, "clamped gain to: " + gain);
            }
        }
        float gamma = 1.0f;
        // the brightest value the gain alone would produce; decides whether we need the
        // piecewise gain/gamma curve below
        float max_possible_value = gain*max_brightness;
        if( MyDebug.LOG )
            Log.d(TAG, "max_possible_value: " + max_possible_value);

        /*if( max_possible_value > 255.0f ) {
            gain = 255.0f / max_brightness;
            if( MyDebug.LOG )
                Log.d(TAG, "limit gain to: " + gain);
            // use gamma correction for the remainder
            if( brightness_target > gain * brightness ) {
                gamma = (float) (Math.log(brightness_target / 255.0f) / Math.log(gain * brightness / 255.0f));
            }
        }

        //float gamma = (float)(Math.log(brightness_target/255.0f) / Math.log(brightness/255.0f));
        if( MyDebug.LOG )
            Log.d(TAG, "gamma " + gamma);
        final float min_gamma_non_bright_c = 0.75f;
        //final float min_gamma_non_bright_c = 0.5f;
        if( gamma > 1.0f ) {
            gamma = 1.0f;
            if( MyDebug.LOG ) {
                Log.d(TAG, "clamped gamma to : " + gamma);
            }
        }
        else if( has_iso_exposure && iso > 150 && gamma < min_gamma_non_bright_c ) {
            // too small gamma on non-bright reduces contrast too much (e.g., see testAvg9)
            // however we can't clamp too much, see testAvg28, testAvg32
            gamma = min_gamma_non_bright_c;
            if( MyDebug.LOG ) {
                Log.d(TAG, "clamped gamma to : " + gamma);
            }
        }*/

        float mid_x = 255.5f;
        if( max_possible_value > 255.0f ) {
            if( MyDebug.LOG )
                Log.d(TAG, "use piecewise gain/gamma");
            // use piecewise function with gain and gamma
            // changed from 0.5 to 0.6 to help grass colour variation in testAvg42; also helps testAvg6; using 0.8 helps testAvg46 and testAvg50 further
            //float mid_y = ( has_iso_exposure && iso <= 150 ) ? 0.6f*255.0f : 0.8f*255.0f;
            float mid_y = ( has_iso_exposure && iso < 1100 && exposure_time < 1000000000L/59 ) ? 0.6f*255.0f : 0.8f*255.0f;
            mid_x = mid_y / gain;
            gamma = (float)(Math.log(mid_y/255.0f) / Math.log(mid_x/max_brightness));
        }
        else if( brighten_only && max_possible_value < 255.0f && max_brightness > 0 ) {
            // slightly brightens testAvg17; also brightens testAvg8 to be clearer
            float alt_gain = 255.0f / max_brightness;
            // okay to allow higher max than max_gain_factor, when it isn't going to take us over 255
            alt_gain = Math.min(alt_gain, 4.0f);
            if( MyDebug.LOG )
                Log.d(TAG, "alt_gain: " + alt_gain);
            if( alt_gain > gain ) {
                gain = alt_gain;
                if( MyDebug.LOG )
                    Log.d(TAG, "increased gain to: " + gain);
            }
        }
        float low_x = 0.0f;
        if( has_iso_exposure && iso >= 400 ) {
            // this helps: testAvg10, testAvg28, testAvg31, testAvg33
            //low_x = Math.min(8.0f, 0.125f*mid_x);
            // don't use mid_x directly, otherwise we get unstable behaviour depending on whether we
            // entered "use piecewise gain/gamma" above or not
            // see unit test testBrightenFactors().
            float piecewise_mid_y = 0.5f*255.0f;
            float piecewise_mid_x = piecewise_mid_y / gain;
            low_x = Math.min(8.0f, 0.125f*piecewise_mid_x);
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "low_x " + low_x);
            Log.d(TAG, "mid_x " + mid_x);
            Log.d(TAG, "gamma " + gamma);
        }

        return new BrightenFactors(gain, low_x, mid_x, gamma);
    }

    /** Estimates a black level for a quick dehaze: the brightness below which the darkest 0.1%
     *  of pixels lie, capped depending on ISO so noise isn't exaggerated on dark images.
     */
    private float computeBlackLevel(HistogramInfo histogramInfo, int [] histo, int iso) {
        float black_level = 0.0f;
        {
            // quick and dirty dehaze algorithm
            // helps (among others): testAvg1 to testAvg10, testAvg27, testAvg30, testAvg31, testAvg39, testAvg40
            int total = histogramInfo.total;
            int percentile = (int)(total*0.001f);
            int count = 0;
            int darkest_brightness = -1;
            for(int i = 0; i < histo.length; i++) {
                count += histo[i];
                if( count >= percentile && darkest_brightness == -1 ) {
                    darkest_brightness = i;
                }
            }
            black_level = Math.max(black_level, darkest_brightness);
            // don't allow black_level too high for "dark" images, as this can cause problems due to exaggerating noise (e.g.,
            // see testAvg38)
            black_level = Math.min(black_level, iso <= 700 ? 18 : 4);
            if( MyDebug.LOG ) {
                Log.d(TAG, "percentile: " + percentile);
                Log.d(TAG, "darkest_brightness: " + darkest_brightness);
                Log.d(TAG, "black_level is now: " + black_level);
            }
        }
        return black_level;
    }

    /** Final stage of the noise reduction algorithm.
     * @param pixels_in_rgbf The pixels in floating point RGB format.
     * @param width Width of the input.
     * @param height Height of the input.
     * @param iso ISO used for the original images.
     * @param exposure_time Exposure time used for the original images.
     * @return Resultant bitmap.
+ */ + private Bitmap avgBrightenRGBf(float [] pixels_in_rgbf, int width, int height, int iso, long exposure_time) { + if( MyDebug.LOG ) { + Log.d(TAG, "avgBrightenRGBf"); + Log.d(TAG, "iso: " + iso); + Log.d(TAG, "exposure_time: " + exposure_time); + } + + long time_s = System.currentTimeMillis(); + + int [] histo = computeHistogram(pixels_in_rgbf, width, height, false); + + HistogramInfo histogramInfo = getHistogramInfo(histo); + int brightness = histogramInfo.median_brightness; + int max_brightness = histogramInfo.max_brightness; + if( MyDebug.LOG ) + Log.d(TAG, "### time after computeHistogram: " + (System.currentTimeMillis() - time_s)); + + if( MyDebug.LOG ) { + Log.d(TAG, "median brightness: " + histogramInfo.median_brightness); + Log.d(TAG, "mean brightness: " + histogramInfo.mean_brightness); + Log.d(TAG, "max brightness: " + max_brightness); + /*for(int i=0;i<256;i++) { + Log.d(TAG, "histogram[" + i + "]: " + histo[i]); + }*/ + } + + BrightenFactors brighten_factors = computeBrightenFactors(true, iso, exposure_time, brightness, max_brightness); + float gain = brighten_factors.gain; + float low_x = brighten_factors.low_x; + float mid_x = brighten_factors.mid_x; + float gamma = brighten_factors.gamma; + + //float gain = brightness_target / (float)brightness; + /*float gamma = (float)(Math.log(max_target/(float)brightness_target) / Math.log(max_brightness/(float)brightness)); + float gain = brightness_target / ((float)Math.pow(brightness/255.0f, gamma) * 255.0f); + if( MyDebug.LOG ) { + Log.d(TAG, "gamma " + gamma); + Log.d(TAG, "gain " + gain); + Log.d(TAG, "gain2 " + max_target / ((float)Math.pow(max_brightness/255.0f, gamma) * 255.0f)); + }*/ + /*float gain = brightness_target / (float)brightness; + if( MyDebug.LOG ) { + Log.d(TAG, "gain: " + gain); + } + if( gain < 1.0f ) { + gain = 1.0f; + if( MyDebug.LOG ) { + Log.d(TAG, "clamped gain to : " + gain); + } + }*/ + + float black_level = computeBlackLevel(histogramInfo, histo, iso); + + // use a lower 
median filter strength for pixel binned images, so that we don't blur testAvg46 so much (especially sign text) + float median_filter_strength = (cached_avg_sample_size >= 2) ? 0.5f : 1.0f; + if( MyDebug.LOG ) + Log.d(TAG, "median_filter_strength: " + median_filter_strength); + + Bitmap output_bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + JavaImageFunctions.AvgBrightenApplyFunction function = new JavaImageFunctions.AvgBrightenApplyFunction(pixels_in_rgbf, width, height, gain, gamma, low_x, mid_x, max_brightness, median_filter_strength, black_level); + //JavaImageProcessing.applyFunction(function, input_bitmap, output_bitmap, 0, 0, width, height); + JavaImageProcessing.applyFunction(function, null, output_bitmap, 0, 0, width, height); + if( MyDebug.LOG ) + Log.d(TAG, "### time after AvgBrightenApplyFunction: " + (System.currentTimeMillis() - time_s)); + + //if( iso <= 150 ) { + if( iso < 1100 && exposure_time < 1000000000L/59 ) { + // for bright scenes, contrast enhancement helps improve the quality of images (especially where we may have both + // dark and bright regions, e.g., testAvg12); but for dark scenes, it just blows up the noise too much + // keep n_tiles==1 - get too much contrast enhancement with n_tiles==4 e.g. 
for testAvg34 + // tests that are better at 25% (median brightness in brackets): testAvg16 (90), testAvg26 (117), testAvg30 (79), + // testAvg43 (55), testAvg44 (82) + // tests that are better at 50%: testAvg12 (8), testAvg13 (38), testAvg15 (10), testAvg18 (39), testAvg19 (37) + // other tests improved by doing contrast enhancement: testAvg32, testAvg40 + final int median_lo = 60, median_hi = 35; + float alpha = (histogramInfo.median_brightness - median_lo) / (float)(median_hi - median_lo); + alpha = Math.max(alpha, 0.0f); + alpha = Math.min(alpha, 1.0f); + float amount = (1.0f-alpha) * 0.25f + alpha * 0.5f; + if( MyDebug.LOG ) { + Log.d(TAG, "dro alpha: " + alpha); + Log.d(TAG, "dro amount: " + amount); + } + adjustHistogram(output_bitmap, output_bitmap, width, height, amount, 1, true, time_s); + if( MyDebug.LOG ) + Log.d(TAG, "### time after adjustHistogram: " + (System.currentTimeMillis() - time_s)); + } + + if( MyDebug.LOG ) + Log.d(TAG, "### total time for avgBrighten: " + (System.currentTimeMillis() - time_s)); + return output_bitmap; + } + + /** Final stage of the noise reduction algorithm. + * @param avg_data AvgData returned from call to processAvg(). + * @param width Width of the input. + * @param height Height of the input. + * @param iso ISO used for the original images. + * @param exposure_time Exposure time used for the original images. + * @return Resultant bitmap. + */ + public Bitmap avgBrighten(AvgData avg_data, int width, int height, int iso, long exposure_time) { + return avgBrightenRGBf(avg_data.pixels_rgbf_out, width, height, iso, exposure_time); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/HDRProcessorException.java b/app/src/main/java/net/sourceforge/opencamera/HDRProcessorException.java new file mode 100644 index 0000000..056e288 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/HDRProcessorException.java @@ -0,0 +1,19 @@ +package net.sourceforge.opencamera; + +/** Exception for HDRProcessor class. 
+ */ +@SuppressWarnings("WeakerAccess") +public class HDRProcessorException extends Exception { + final static public int INVALID_N_IMAGES = 0; // the supplied number of images is not supported + final static public int UNEQUAL_SIZES = 1; // images not of the same resolution + + final private int code; + + HDRProcessorException(int code) { + this.code = code; + } + + public int getCode() { + return code; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ImageSaver.java b/app/src/main/java/net/sourceforge/opencamera/ImageSaver.java new file mode 100644 index 0000000..79582a4 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ImageSaver.java @@ -0,0 +1,4607 @@ +package net.sourceforge.opencamera; + +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.RawImage; +import net.sourceforge.opencamera.preview.ApplicationInterface; +import net.sourceforge.opencamera.preview.Preview; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileDescriptor; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.StringWriter; +import java.io.Writer; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.TimeZone; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.app.ActivityManager; +import android.content.ContentValues; +import android.content.Intent; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; 
+import android.graphics.Paint; +import android.graphics.Paint.Align; +//import android.location.Address; // don't use until we have info for data privacy! +//import android.location.Geocoder; // don't use until we have info for data privacy! +import android.location.Location; + +import androidx.annotation.RequiresApi; +import androidx.exifinterface.media.ExifInterface; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.net.Uri; +import android.os.Build; +import android.os.ParcelFileDescriptor; +import android.provider.MediaStore; +import android.util.Log; +import android.util.Range; +import android.util.TypedValue; +import android.util.Xml; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.Surface; +import android.view.View; +import android.widget.LinearLayout; +import android.widget.TextView; + +import org.xmlpull.v1.XmlPullParser; +import org.xmlpull.v1.XmlSerializer; + +/** Handles the saving (and any required processing) of photos. + */ +public class ImageSaver extends Thread { + private static final String TAG = "ImageSaver"; + + static final String hdr_suffix = "_HDR"; + static final String nr_suffix = "_NR"; + static final String pano_suffix = "_PANO"; + + private final Paint p = new Paint(); + + private final MainActivity main_activity; + private final HDRProcessor hdrProcessor; + private final PanoramaProcessor panoramaProcessor; + + /* We use a separate count n_images_to_save, rather than just relying on the queue size, so we can take() an image from queue, + * but only decrement the count when we've finished saving the image. + * In general, n_images_to_save represents the number of images still to process, including ones currently being processed. + * Therefore we should always have n_images_to_save >= queue.size(). 
+ * Also note, main_activity.imageQueueChanged() should be called on UI thread after n_images_to_save increases or + * decreases. + * Access to n_images_to_save should always be synchronized to this (i.e., the ImageSaver class). + * n_real_images_to_save excludes "Dummy" or "on_destroy" requests, and should also be synchronized, and modified + * at the same time as n_images_to_save. + */ + private int n_images_to_save = 0; + private int n_real_images_to_save = 0; + private final int queue_capacity; + private final BlockingQueue queue; + private final static int queue_cost_jpeg_c = 1; // also covers WEBP + private final static int queue_cost_dng_c = 6; + //private final static int queue_cost_dng_c = 1; + + // Should be same as MainActivity.app_is_paused, but we keep our own copy to make threading easier (otherwise, all + // accesses of MainActivity.app_is_paused would need to be synchronized). + // Access to app_is_paused should always be synchronized to this (i.e., the ImageSaver class). + private boolean app_is_paused = true; + + // for testing; must be volatile for test project reading the state + // n.b., avoid using static, as static variables are shared between different instances of an application, + // and won't be reset in subsequent tests in a suite! 
+ public static volatile boolean test_small_queue_size; // needs to be static, as it needs to be set before activity is created to take effect + public volatile boolean test_slow_saving; + public volatile boolean test_queue_blocked; + + static class Request { + enum Type { + JPEG, // also covers WEBP + RAW, + DUMMY, + ON_DESTROY // indicate that application is being destroyed, so should exit thread + } + final Type type; + enum ProcessType { + NORMAL, + HDR, // also covers DRO, if only 1 image in the request + AVERAGE, + PANORAMA, + X_NIGHT + } + final ProcessType process_type; // for type==JPEG + final boolean force_suffix; // affects filename suffixes for saving jpeg_images: if true, filenames will always be appended with a suffix like _0, even if there's only 1 image in jpeg_images + final int suffix_offset; // affects filename suffixes for saving jpeg_images, when force_suffix is true or there are multiple images in jpeg_images: the suffixes will be offset by this number + enum SaveBase { + SAVEBASE_NONE, + SAVEBASE_FIRST, + SAVEBASE_ALL, + SAVEBASE_ALL_PLUS_DEBUG // for PANORAMA + } + final SaveBase save_base; // whether to save the base images, for process_type HDR, AVERAGE or PANORAMA + /* jpeg_images: for jpeg (may be null otherwise). + * If process_type==HDR, this should be 1 or 3 images, and the images are combined/converted to a HDR image (if there's only 1 + * image, this uses fake HDR or "DRO"). + * If process_type==NORMAL, then multiple images are saved sequentially. + */ + final List jpeg_images; + final List preshot_bitmaps; // if non-null, bitmaps for preshots; bitmaps will be recycled once processed + final RawImage raw_image; // for raw + final boolean image_capture_intent; + final Uri image_capture_intent_uri; + final boolean using_camera2; + final boolean using_camera_extensions; + /* image_format allows converting the standard JPEG image into another file format. 
+ */ + enum ImageFormat { + STD, // leave unchanged from the standard JPEG format + WEBP, + PNG + } + ImageFormat image_format; + int image_quality; + boolean do_auto_stabilise; + final double level_angle; // in degrees + final List gyro_rotation_matrix; // used for panorama (one 3x3 matrix per jpeg_images entry), otherwise can be null + boolean panorama_dir_left_to_right; // used for panorama + float camera_view_angle_x; // used for panorama + float camera_view_angle_y; // used for panorama + final boolean is_front_facing; + boolean mirror; + final Date current_date; + final HDRProcessor.TonemappingAlgorithm preference_hdr_tonemapping_algorithm; // for HDR + final String preference_hdr_contrast_enhancement; // for HDR + final int iso; // not applicable for RAW image + final long exposure_time; // not applicable for RAW image + final float zoom_factor; // not applicable for RAW image + String preference_stamp; + String preference_textstamp; + final int font_size; + final int color; + final String pref_style; + final String preference_stamp_dateformat; + final String preference_stamp_timeformat; + final String preference_stamp_gpsformat; + //final String preference_stamp_geo_address; + final String preference_units_distance; + final boolean panorama_crop; // used for panorama + enum RemoveDeviceExif { + OFF, // don't remove any device exif tags + ON, // remove all device exif tags + KEEP_DATETIME // remove all device exif tags except datetime tags + } + final RemoveDeviceExif remove_device_exif; + final boolean store_location; + final Location location; + final boolean store_geo_direction; + final double geo_direction; // in radians + final boolean store_ypr; // whether to store geo_angle, pitch_angle, level_angle in USER_COMMENT if exif (for JPEGs) + final double pitch_angle; // the pitch that the phone is at, in degrees + final String custom_tag_artist; + final String custom_tag_copyright; + final int sample_factor; // sampling factor for thumbnail, higher means 
lower quality + + Request(Type type, + ProcessType process_type, + boolean force_suffix, + int suffix_offset, + SaveBase save_base, + List jpeg_images, + List preshot_bitmaps, + RawImage raw_image, + boolean image_capture_intent, Uri image_capture_intent_uri, + boolean using_camera2, boolean using_camera_extensions, + ImageFormat image_format, int image_quality, + boolean do_auto_stabilise, double level_angle, List gyro_rotation_matrix, + boolean is_front_facing, + boolean mirror, + Date current_date, + HDRProcessor.TonemappingAlgorithm preference_hdr_tonemapping_algorithm, + String preference_hdr_contrast_enhancement, + int iso, + long exposure_time, + float zoom_factor, + String preference_stamp, String preference_textstamp, int font_size, int color, String pref_style, String preference_stamp_dateformat, String preference_stamp_timeformat, String preference_stamp_gpsformat, + //String preference_stamp_geo_address, + String preference_units_distance, + boolean panorama_crop, + RemoveDeviceExif remove_device_exif, + boolean store_location, Location location, boolean store_geo_direction, double geo_direction, + double pitch_angle, boolean store_ypr, + String custom_tag_artist, + String custom_tag_copyright, + int sample_factor) { + this.type = type; + this.process_type = process_type; + this.force_suffix = force_suffix; + this.suffix_offset = suffix_offset; + this.save_base = save_base; + this.jpeg_images = jpeg_images; + this.preshot_bitmaps = preshot_bitmaps; + this.raw_image = raw_image; + this.image_capture_intent = image_capture_intent; + this.image_capture_intent_uri = image_capture_intent_uri; + this.using_camera2 = using_camera2; + this.using_camera_extensions = using_camera_extensions; + this.image_format = image_format; + this.image_quality = image_quality; + this.do_auto_stabilise = do_auto_stabilise; + this.level_angle = level_angle; + this.gyro_rotation_matrix = gyro_rotation_matrix; + this.is_front_facing = is_front_facing; + this.mirror = mirror; + 
this.current_date = current_date; + this.preference_hdr_tonemapping_algorithm = preference_hdr_tonemapping_algorithm; + this.preference_hdr_contrast_enhancement = preference_hdr_contrast_enhancement; + this.iso = iso; + this.exposure_time = exposure_time; + this.zoom_factor = zoom_factor; + this.preference_stamp = preference_stamp; + this.preference_textstamp = preference_textstamp; + this.font_size = font_size; + this.color = color; + this.pref_style = pref_style; + this.preference_stamp_dateformat = preference_stamp_dateformat; + this.preference_stamp_timeformat = preference_stamp_timeformat; + this.preference_stamp_gpsformat = preference_stamp_gpsformat; + //this.preference_stamp_geo_address = preference_stamp_geo_address; + this.preference_units_distance = preference_units_distance; + this.panorama_crop = panorama_crop; + this.remove_device_exif = remove_device_exif; + this.store_location = store_location; + this.location = location; + this.store_geo_direction = store_geo_direction; + this.geo_direction = geo_direction; + this.pitch_angle = pitch_angle; + this.store_ypr = store_ypr; + this.custom_tag_artist = custom_tag_artist; + this.custom_tag_copyright = custom_tag_copyright; + this.sample_factor = sample_factor; + } + + /** Returns a copy of this object. Note that it is not a deep copy - data such as JPEG and RAW + * data will not be copied. 
+ */ + Request copy() { + return new Request(this.type, + this.process_type, + this.force_suffix, + this.suffix_offset, + this.save_base, + this.jpeg_images, + this.preshot_bitmaps, + this.raw_image, + this.image_capture_intent, this.image_capture_intent_uri, + this.using_camera2, this.using_camera_extensions, + this.image_format, this.image_quality, + this.do_auto_stabilise, this.level_angle, this.gyro_rotation_matrix, + this.is_front_facing, + this.mirror, + this.current_date, + this.preference_hdr_tonemapping_algorithm, + this.preference_hdr_contrast_enhancement, + this.iso, + this.exposure_time, + this.zoom_factor, + this.preference_stamp, this.preference_textstamp, this.font_size, this.color, this.pref_style, this.preference_stamp_dateformat, this.preference_stamp_timeformat, this.preference_stamp_gpsformat, + //this.preference_stamp_geo_address, + this.preference_units_distance, + this.panorama_crop, this.remove_device_exif, this.store_location, this.location, this.store_geo_direction, this.geo_direction, + this.pitch_angle, this.store_ypr, + this.custom_tag_artist, + this.custom_tag_copyright, + this.sample_factor); + } + } + + ImageSaver(MainActivity main_activity) { + super("ImageSaver"); + if( MyDebug.LOG ) + Log.d(TAG, "ImageSaver"); + this.main_activity = main_activity; + + ActivityManager activityManager = (ActivityManager) main_activity.getSystemService(Activity.ACTIVITY_SERVICE); + this.queue_capacity = computeQueueSize(activityManager.getLargeMemoryClass()); + this.queue = new ArrayBlockingQueue<>(queue_capacity); // since we remove from the queue and then process in the saver thread, in practice the number of background photos - including the one being processed - is one more than the length of this queue + + this.hdrProcessor = new HDRProcessor(main_activity, main_activity.is_test); + this.panoramaProcessor = new PanoramaProcessor(main_activity, hdrProcessor); + + p.setAntiAlias(true); + } + + /** Returns the length of the image saver queue. 
In practice, the number of images that can be taken at once before the UI + * blocks is 1 more than this, as 1 image will be taken off the queue to process straight away. + */ + public int getQueueSize() { + return this.queue_capacity; + } + + /** Compute a sensible size for the queue, based on the device's memory (large heap). + */ + public static int computeQueueSize(int large_heap_memory) { + if( MyDebug.LOG ) + Log.d(TAG, "large max memory = " + large_heap_memory + "MB"); + int max_queue_size; + if( MyDebug.LOG ) + Log.d(TAG, "test_small_queue_size?: " + test_small_queue_size); + if( test_small_queue_size ) { + large_heap_memory = 0; + } + + if( large_heap_memory >= 512 ) { + // This should be at least 5*(queue_cost_jpeg_c+queue_cost_dng_c)-1 so we can take a burst of 5 photos + // (e.g., in expo mode) with RAW+JPEG without blocking (we subtract 1, as the first image can be immediately + // taken off the queue). + // This should also be at least 19 so we can take a burst of 20 photos with JPEG without blocking (we subtract 1, + // as the first image can be immediately taken off the queue). + // This should be at most 70 for large heap 512MB (estimate based on reserving 160MB for post-processing and HDR + // operations, then estimate a JPEG image at 5MB). + max_queue_size = 34; + } + else if( large_heap_memory >= 256 ) { + // This should be at most 19 for large heap 256MB. + max_queue_size = 12; + } + else if( large_heap_memory >= 128 ) { + // This should be at least 1*(queue_cost_jpeg_c+queue_cost_dng_c)-1 so we can take a photo with RAW+JPEG + // without blocking (we subtract 1, as the first image can be immediately taken off the queue). + // This should be at most 8 for large heap 128MB (allowing 80MB for post-processing). + max_queue_size = 8; + } + else { + // This should be at least 1*(queue_cost_jpeg_c+queue_cost_dng_c)-1 so we can take a photo with RAW+JPEG + // without blocking (we subtract 1, as the first image can be immediately taken off the queue). 
+ max_queue_size = 6; + } + //max_queue_size = 1; + //max_queue_size = 3; + if( MyDebug.LOG ) + Log.d(TAG, "max_queue_size = " + max_queue_size); + return max_queue_size; + } + + /** Computes the cost for a particular request. + * Note that for RAW+DNG mode, computeRequestCost() is called twice for a given photo (one for each + * of the two requests: one RAW, one JPEG). + * @param is_raw Whether RAW/DNG or JPEG. + * @param n_images This is the number of JPEG or RAW images that are in the request. + */ + public static int computeRequestCost(boolean is_raw, int n_images) { + if( MyDebug.LOG ) { + Log.d(TAG, "computeRequestCost"); + Log.d(TAG, "is_raw: " + is_raw); + Log.d(TAG, "n_images: " + n_images); + } + int cost; + if( is_raw ) + cost = n_images * queue_cost_dng_c; + else { + cost = n_images * queue_cost_jpeg_c; + //cost = (n_images > 1 ? 2 : 1) * queue_cost_jpeg_c; + } + return cost; + } + + /** Computes the cost (in terms of number of slots on the image queue) of a new photo. + * @param n_raw The number of RAW images that will be taken. + * @param n_jpegs The number of JPEGs that will be taken. + */ + int computePhotoCost(int n_raw, int n_jpegs) { + if( MyDebug.LOG ) { + Log.d(TAG, "computePhotoCost"); + Log.d(TAG, "n_raw: " + n_raw); + Log.d(TAG, "n_jpegs: " + n_jpegs); + } + int cost = 0; + if( n_raw > 0 ) + cost += computeRequestCost(true, n_raw); + if( n_jpegs > 0 ) + cost += computeRequestCost(false, n_jpegs); + if( MyDebug.LOG ) + Log.d(TAG, "cost: " + cost); + return cost; + } + + /** Whether taking an extra photo would overflow the queue, resulting in the UI hanging. + * @param n_raw The number of RAW images that will be taken. + * @param n_jpegs The number of JPEGs that will be taken. + */ + boolean queueWouldBlock(int n_raw, int n_jpegs) { + int photo_cost = this.computePhotoCost(n_raw, n_jpegs); + return this.queueWouldBlock(photo_cost); + } + + /** Whether taking an extra photo would overflow the queue, resulting in the UI hanging. 
+ * @param photo_cost The result returned by computePhotoCost(). + */ + synchronized boolean queueWouldBlock(int photo_cost) { + if( MyDebug.LOG ) { + Log.d(TAG, "queueWouldBlock"); + Log.d(TAG, "photo_cost: " + photo_cost); + Log.d(TAG, "n_images_to_save: " + n_images_to_save); + Log.d(TAG, "queue_capacity: " + queue_capacity); + } + // we add one to queue, to account for the image currently being processed; n_images_to_save includes an image + // currently being processed + if( n_images_to_save == 0 ) { + // In theory, we should never have the extra_cost large enough to block the queue even when no images are being + // saved - but we have this just in case. This means taking the photo will likely block the UI, but we don't want + // to disallow ever taking photos! + if( MyDebug.LOG ) + Log.d(TAG, "queue is empty"); + return false; + } + else if( n_images_to_save + photo_cost > queue_capacity + 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "queue would block"); + return true; + } + if( MyDebug.LOG ) + Log.d(TAG, "queue would not block"); + return false; + } + + /** Returns the maximum number of DNG images that might be held by the image saver queue at once, before blocking. + */ + int getMaxDNG() { + int max_dng = (queue_capacity+1)/queue_cost_dng_c; + max_dng++; // increase by 1, as the user can still take one extra photo if the queue is exactly full + if( MyDebug.LOG ) + Log.d(TAG, "max_dng = " + max_dng); + return max_dng; + } + + /** Returns the number of images to save, weighted by their cost (e.g., so a single RAW image + * will be counted as multiple images). + */ + public synchronized int getNImagesToSave() { + return n_images_to_save; + } + + /** Returns the number of images to save (e.g., so a single RAW image will only be counted as + * one image, unlike getNImagesToSave()). + + */ + public synchronized int getNRealImagesToSave() { + return n_real_images_to_save; + } + + /** Application has paused. 
+ */ + void onPause() { + synchronized(this) { + app_is_paused = true; + } + } + + /** Application has resumed. + */ + void onResume() { + synchronized(this) { + app_is_paused = false; + } + } + + void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + { + // a request so that the imagesaver thread will complete + Request request = new Request(Request.Type.ON_DESTROY, + Request.ProcessType.NORMAL, + false, + 0, + Request.SaveBase.SAVEBASE_NONE, + null, + null, + null, + false, null, + false, false, + Request.ImageFormat.STD, 0, + false, 0.0, null, + false, + false, + null, + HDRProcessor.default_tonemapping_algorithm_c, + null, + 0, + 0, + 1.0f, + null, null, 0, 0, null, null, null, null, + //null, + null, + false, Request.RemoveDeviceExif.OFF, false, null, false, 0.0, + 0.0, false, + null, null, + 1); + if( MyDebug.LOG ) + Log.d(TAG, "add on_destroy request"); + addRequest(request, 1); + } + if( panoramaProcessor != null ) { + panoramaProcessor.onDestroy(); + } + if( hdrProcessor != null ) { + hdrProcessor.onDestroy(); + } + } + + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "starting ImageSaver thread..."); + while( true ) { + try { + if( MyDebug.LOG ) + Log.d(TAG, "ImageSaver thread reading from queue, size: " + queue.size()); + Request request = queue.take(); // if empty, take() blocks until non-empty + // Only decrement n_images_to_save after we've actually saved the image! Otherwise waitUntilDone() will return + // even though we still have a last image to be saved. 
+ if( MyDebug.LOG ) + Log.d(TAG, "ImageSaver thread found new request from queue, size is now: " + queue.size()); + boolean success; + boolean on_destroy = false; + switch (request.type) { + case RAW: + if (MyDebug.LOG) + Log.d(TAG, "request is raw"); + success = saveImageNowRaw(request); + break; + case JPEG: + if (MyDebug.LOG) + Log.d(TAG, "request is jpeg"); + success = saveImageNow(request); + break; + case DUMMY: + if (MyDebug.LOG) + Log.d(TAG, "request is dummy"); + success = true; + break; + case ON_DESTROY: + if( MyDebug.LOG ) + Log.d(TAG, "request is on_destroy"); + success = true; + on_destroy = true; + break; + default: + if (MyDebug.LOG) + Log.e(TAG, "request is unknown type!"); + success = false; + break; + } + if( test_slow_saving ) { + // ignore warning about "Call to Thread.sleep in a loop", this is only activated in test code + //noinspection BusyWait + Thread.sleep(2000); + } + if( MyDebug.LOG ) { + if( success ) + Log.d(TAG, "ImageSaver thread successfully saved image"); + else + Log.e(TAG, "ImageSaver thread failed to save image"); + } + synchronized( this ) { + n_images_to_save--; + if( request.type != Request.Type.DUMMY && request.type != Request.Type.ON_DESTROY ) + n_real_images_to_save--; + if( MyDebug.LOG ) + Log.d(TAG, "ImageSaver thread processed new request from queue, images to save is now: " + n_images_to_save); + if( MyDebug.LOG && n_images_to_save < 0 ) { + Log.e(TAG, "images to save has become negative"); + throw new RuntimeException(); + } + else if( MyDebug.LOG && n_real_images_to_save < 0 ) { + Log.e(TAG, "real images to save has become negative"); + throw new RuntimeException(); + } + notifyAll(); + + main_activity.runOnUiThread(new Runnable() { + public void run() { + main_activity.imageQueueChanged(); + } + }); + } + if( on_destroy ) { + break; + } + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "interrupted while trying to read from ImageSaver queue", e); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "stopping 
ImageSaver thread...");
+ }
+
+ /** Saves a photo.
+ * If do_in_background is true, the photo will be saved in a background thread. If the queue is full, the function will wait
+ * until it isn't full. Otherwise it will return immediately. The function always returns true for background saving.
+ * If do_in_background is false, the photo is saved on the current thread, and the function returns whether the photo was saved
+ * successfully.
+ */
+ // NOTE(review): generic type parameters (e.g. on the List parameters below) appear to have been
+ // stripped by the patch extraction throughout this file - restore from upstream before compiling.
+ // This is a thin forwarding wrapper for the JPEG path: it fixes is_raw=false and raw_image=null,
+ // and save_expo is mapped onto a Request.SaveBase value inside saveImage(). The argument lists
+ // here and in saveImage() are long and positional - keep them in lock-step when editing either.
+ boolean saveImageJpeg(boolean do_in_background,
+ Request.ProcessType processType,
+ boolean force_suffix,
+ int suffix_offset,
+ boolean save_expo,
+ List images,
+ List preshot_bitmaps,
+ boolean image_capture_intent, Uri image_capture_intent_uri,
+ boolean using_camera2, boolean using_camera_extensions,
+ Request.ImageFormat image_format, int image_quality,
+ boolean do_auto_stabilise, double level_angle,
+ boolean is_front_facing,
+ boolean mirror,
+ Date current_date,
+ HDRProcessor.TonemappingAlgorithm preference_hdr_tonemapping_algorithm,
+ String preference_hdr_contrast_enhancement,
+ int iso,
+ long exposure_time,
+ float zoom_factor,
+ String preference_stamp, String preference_textstamp, int font_size, int color, String pref_style, String preference_stamp_dateformat, String preference_stamp_timeformat, String preference_stamp_gpsformat,
+ //String preference_stamp_geo_address,
+ String preference_units_distance,
+ boolean panorama_crop,
+ Request.RemoveDeviceExif remove_device_exif,
+ boolean store_location, Location location, boolean store_geo_direction, double geo_direction,
+ double pitch_angle, boolean store_ypr,
+ String custom_tag_artist,
+ String custom_tag_copyright,
+ int sample_factor) {
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "saveImageJpeg");
+ Log.d(TAG, "do_in_background? " + do_in_background);
+ Log.d(TAG, "number of images: " + images.size());
+ }
+ return saveImage(do_in_background,
+ false,
+ processType,
+ force_suffix,
+ suffix_offset,
+ save_expo,
+ images,
+ preshot_bitmaps,
+ null,
+ image_capture_intent, image_capture_intent_uri,
+ using_camera2, using_camera_extensions,
+ image_format, image_quality,
+ do_auto_stabilise, level_angle,
+ is_front_facing,
+ mirror,
+ current_date,
+ preference_hdr_tonemapping_algorithm,
+ preference_hdr_contrast_enhancement,
+ iso,
+ exposure_time,
+ zoom_factor,
+ preference_stamp, preference_textstamp, font_size, color, pref_style, preference_stamp_dateformat, preference_stamp_timeformat, preference_stamp_gpsformat,
+ //preference_stamp_geo_address,
+ preference_units_distance,
+ panorama_crop, remove_device_exif, store_location, location, store_geo_direction, geo_direction,
+ pitch_angle, store_ypr,
+ custom_tag_artist,
+ custom_tag_copyright,
+ sample_factor);
+ }
+
+ /** Saves a RAW photo.
+ * If do_in_background is true, the photo will be saved in a background thread. If the queue is full, the function will wait
+ * until it isn't full. Otherwise it will return immediately. The function always returns true for background saving.
+ * If do_in_background is false, the photo is saved on the current thread, and the function returns whether the photo was saved
+ * successfully.
+ */
+ // RAW counterpart of saveImageJpeg(): all JPEG-only parameters are passed to saveImage() as
+ // null/zero/false placeholders, since they are ignored for a raw_image request.
+ boolean saveImageRaw(boolean do_in_background,
+ boolean force_suffix,
+ int suffix_offset,
+ RawImage raw_image,
+ Date current_date) {
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "saveImageRaw");
+ Log.d(TAG, "do_in_background? " + do_in_background);
+ }
+ return saveImage(do_in_background,
+ true,
+ Request.ProcessType.NORMAL,
+ force_suffix,
+ suffix_offset,
+ false,
+ null,
+ null,
+ raw_image,
+ false, null,
+ false, false,
+ Request.ImageFormat.STD, 0,
+ false, 0.0,
+ false,
+ false,
+ current_date,
+ HDRProcessor.default_tonemapping_algorithm_c,
+ null,
+ 0,
+ 0,
+ 1.0f,
+ null, null, 0, 0, null, null, null, null,
+ //null,
+ null,
+ false, Request.RemoveDeviceExif.OFF, false, null, false, 0.0,
+ 0.0, false,
+ null, null,
+ 1);
+ }
+
+ // Holds the in-progress batch request between startImageBatch() and finishImageBatch()/flushImageBatch().
+ private Request pending_image_average_request = null;
+
+ /** Used for a batch of images that will be combined into a single request. This applies to
+ * processType AVERAGE and PANORAMA.
+ */
+ // Creates the pending request with an empty image list; the captured JPEGs are appended later
+ // via addImageBatch().
+ void startImageBatch(boolean do_in_background,
+ Request.ProcessType processType,
+ List preshot_bitmaps,
+ Request.SaveBase save_base,
+ boolean image_capture_intent, Uri image_capture_intent_uri,
+ boolean using_camera2, boolean using_camera_extensions,
+ Request.ImageFormat image_format, int image_quality,
+ boolean do_auto_stabilise, double level_angle, boolean want_gyro_matrices,
+ boolean is_front_facing,
+ boolean mirror,
+ Date current_date,
+ int iso,
+ long exposure_time,
+ float zoom_factor,
+ String preference_stamp, String preference_textstamp, int font_size, int color, String pref_style, String preference_stamp_dateformat, String preference_stamp_timeformat, String preference_stamp_gpsformat,
+ //String preference_stamp_geo_address,
+ String preference_units_distance,
+ boolean panorama_crop,
+ Request.RemoveDeviceExif remove_device_exif,
+ boolean store_location, Location location, boolean store_geo_direction, double geo_direction,
+ double pitch_angle, boolean store_ypr,
+ String custom_tag_artist,
+ String custom_tag_copyright,
+ int sample_factor) {
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "startImageBatch");
+ Log.d(TAG, "do_in_background? 
" + do_in_background); + } + pending_image_average_request = new Request(Request.Type.JPEG, + processType, + false, + 0, + save_base, + new ArrayList<>(), + preshot_bitmaps, + null, + image_capture_intent, image_capture_intent_uri, + using_camera2, using_camera_extensions, + image_format, image_quality, + do_auto_stabilise, level_angle, want_gyro_matrices ? new ArrayList<>() : null, + is_front_facing, + mirror, + current_date, + HDRProcessor.default_tonemapping_algorithm_c, + null, + iso, + exposure_time, + zoom_factor, + preference_stamp, preference_textstamp, font_size, color, pref_style, preference_stamp_dateformat, preference_stamp_timeformat, preference_stamp_gpsformat, + //preference_stamp_geo_address, + preference_units_distance, + panorama_crop, remove_device_exif, store_location, location, store_geo_direction, geo_direction, + pitch_angle, store_ypr, + custom_tag_artist, + custom_tag_copyright, + sample_factor); + } + + void addImageBatch(byte [] image, float [] gyro_rotation_matrix) { + if( MyDebug.LOG ) + Log.d(TAG, "addImageBatch"); + if( pending_image_average_request == null ) { + Log.e(TAG, "addImageBatch called but no pending_image_average_request"); + return; + } + pending_image_average_request.jpeg_images.add(image); + if( gyro_rotation_matrix != null ) { + float [] copy = new float[gyro_rotation_matrix.length]; + System.arraycopy(gyro_rotation_matrix, 0, copy, 0, gyro_rotation_matrix.length); + pending_image_average_request.gyro_rotation_matrix.add(copy); + } + if( MyDebug.LOG ) + Log.d(TAG, "image average request images: " + pending_image_average_request.jpeg_images.size()); + } + + Request getImageBatchRequest() { + return pending_image_average_request; + } + + void finishImageBatch(boolean do_in_background) { + if( MyDebug.LOG ) + Log.d(TAG, "finishImageBatch"); + if( pending_image_average_request == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "finishImageBatch called but no pending_image_average_request"); + return; + } + if( do_in_background ) 
{ + if( MyDebug.LOG ) + Log.d(TAG, "add background request"); + int cost = computeRequestCost(false, pending_image_average_request.jpeg_images.size()); + addRequest(pending_image_average_request, cost); + } + else { + // wait for queue to be empty + waitUntilDone(); + saveImageNow(pending_image_average_request); + } + pending_image_average_request = null; + } + + void flushImageBatch() { + if( MyDebug.LOG ) + Log.d(TAG, "flushImageBatch"); + // aside from resetting the state, this allows the allocated JPEG data to be garbage collected + pending_image_average_request = null; + } + + /** Internal saveImage method to handle both JPEG and RAW. + */ + private boolean saveImage(boolean do_in_background, + boolean is_raw, + Request.ProcessType processType, + boolean force_suffix, + int suffix_offset, + boolean save_expo, + List jpeg_images, + List preshot_bitmaps, + RawImage raw_image, + boolean image_capture_intent, Uri image_capture_intent_uri, + boolean using_camera2, boolean using_camera_extensions, + Request.ImageFormat image_format, int image_quality, + boolean do_auto_stabilise, double level_angle, + boolean is_front_facing, + boolean mirror, + Date current_date, + HDRProcessor.TonemappingAlgorithm preference_hdr_tonemapping_algorithm, + String preference_hdr_contrast_enhancement, + int iso, + long exposure_time, + float zoom_factor, + String preference_stamp, String preference_textstamp, int font_size, int color, String pref_style, String preference_stamp_dateformat, String preference_stamp_timeformat, String preference_stamp_gpsformat, + //String preference_stamp_geo_address, + String preference_units_distance, + boolean panorama_crop, + Request.RemoveDeviceExif remove_device_exif, + boolean store_location, Location location, boolean store_geo_direction, double geo_direction, + double pitch_angle, boolean store_ypr, + String custom_tag_artist, + String custom_tag_copyright, + int sample_factor) { + if( MyDebug.LOG ) { + Log.d(TAG, "saveImage"); + Log.d(TAG, 
"do_in_background? " + do_in_background); + } + boolean success; + + //do_in_background = false; + + Request request = new Request(is_raw ? Request.Type.RAW : Request.Type.JPEG, + processType, + force_suffix, + suffix_offset, + save_expo ? Request.SaveBase.SAVEBASE_ALL : Request.SaveBase.SAVEBASE_NONE, + jpeg_images, + preshot_bitmaps, + raw_image, + image_capture_intent, image_capture_intent_uri, + using_camera2, using_camera_extensions, + image_format, image_quality, + do_auto_stabilise, level_angle, null, + is_front_facing, + mirror, + current_date, + preference_hdr_tonemapping_algorithm, + preference_hdr_contrast_enhancement, + iso, + exposure_time, + zoom_factor, + preference_stamp, preference_textstamp, font_size, color, pref_style, preference_stamp_dateformat, preference_stamp_timeformat, preference_stamp_gpsformat, + //preference_stamp_geo_address, + preference_units_distance, + panorama_crop, remove_device_exif, store_location, location, store_geo_direction, geo_direction, + pitch_angle, store_ypr, + custom_tag_artist, + custom_tag_copyright, + sample_factor); + + if( do_in_background ) { + if( MyDebug.LOG ) + Log.d(TAG, "add background request"); + int cost = computeRequestCost(is_raw, is_raw ? 
1 : request.jpeg_images.size()); + addRequest(request, cost); + success = true; // always return true when done in background + } + else { + // wait for queue to be empty + waitUntilDone(); + if( is_raw ) { + success = saveImageNowRaw(request); + } + else { + success = saveImageNow(request); + } + } + + if( MyDebug.LOG ) + Log.d(TAG, "success: " + success); + return success; + } + + /** Adds a request to the background queue, blocking if the queue is already full + */ + private void addRequest(Request request, int cost) { + if( MyDebug.LOG ) + Log.d(TAG, "addRequest, cost: " + cost); + if( main_activity.isDestroyed() ) { + // If the application is being destroyed as a new photo is being taken, it's not safe to continue + // MainDestroy.onDestroy() does call waitUntilDone(), but this is extra protection in case an image comes in after that. + Log.e(TAG, "application is destroyed, image lost!"); + return; + } + // this should not be synchronized on "this": BlockingQueue is thread safe, and if it's blocking in queue.put(), we'll hang because + // the saver queue will need to synchronize on "this" in order to notifyAll() the main thread + boolean done = false; + while( !done ) { + try { + if( MyDebug.LOG ) + Log.d(TAG, "ImageSaver thread adding to queue, size: " + queue.size()); + synchronized( this ) { + // see above for why we don't synchronize the queue.put call + // but we synchronize modification to avoid risk of problems related to compiler optimisation (local caching or reordering) + // also see FindBugs warning due to inconsistent synchronisation + n_images_to_save++; // increment before adding to the queue, just to make sure the main thread doesn't think we're all done + if( request.type != Request.Type.DUMMY && request.type != Request.Type.ON_DESTROY ) + n_real_images_to_save++; + + main_activity.runOnUiThread(new Runnable() { + public void run() { + main_activity.imageQueueChanged(); + } + }); + } + if( queue.size() + 1 > queue_capacity ) { + Log.e(TAG, 
"ImageSaver thread is going to block, queue already full: " + queue.size()); + test_queue_blocked = true; + //throw new RuntimeException(); // test + } + queue.put(request); // if queue is full, put() blocks until it isn't full + if( MyDebug.LOG ) { + synchronized( this ) { // keep FindBugs happy + Log.d(TAG, "ImageSaver thread added to queue, size is now: " + queue.size()); + Log.d(TAG, "images still to save is now: " + n_images_to_save); + Log.d(TAG, "real images still to save is now: " + n_real_images_to_save); + } + } + done = true; + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "interrupted while trying to add to ImageSaver queue", e); + } + } + if( cost > 0 ) { + // add "dummy" requests to simulate the cost + for(int i=0;i 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "wait until done..."); + try { + wait(); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "interrupted while waiting for ImageSaver queue to be empty", e); + } + if( MyDebug.LOG ) { + Log.d(TAG, "waitUntilDone: queue is size " + queue.size()); + Log.d(TAG, "waitUntilDone: images still to save " + n_images_to_save); + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "waitUntilDone: images all saved"); + } + + private void setBitmapOptionsSampleSize(BitmapFactory.Options options, int inSampleSize) { + if( MyDebug.LOG ) + Log.d(TAG, "setBitmapOptionsSampleSize: " + inSampleSize); + //options.inSampleSize = inSampleSize; + if( inSampleSize > 1 ) { + // use inDensity for better quality, as inSampleSize uses nearest neighbour + options.inDensity = inSampleSize; + options.inTargetDensity = 1; + } + } + + /** Loads a single jpeg as a Bitmaps. + * @param mutable Whether the bitmap should be mutable. Note that when converting to bitmaps + * for the image post-processing (auto-stabilise etc), in general we need the + * bitmap to be mutable (for photostamp to work). 
+ */
+ // Returns null on decode failure (after logging) rather than throwing.
+ private Bitmap loadBitmap(byte [] jpeg_image, boolean mutable, int inSampleSize) {
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "loadBitmap");
+ Log.d(TAG, "mutable?: " + mutable);
+ }
+ BitmapFactory.Options options = new BitmapFactory.Options();
+ if( MyDebug.LOG )
+ Log.d(TAG, "options.inMutable is: " + options.inMutable);
+ options.inMutable = mutable;
+ setBitmapOptionsSampleSize(options, inSampleSize);
+ Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg_image, 0, jpeg_image.length, options);
+ if( bitmap == null ) {
+ Log.e(TAG, "failed to decode bitmap");
+ }
+ return bitmap;
+ }
+
+ /** Helper class for loadBitmaps().
+ */
+ // One worker thread per JPEG so the decodes run in parallel; the result lands in this.bitmap.
+ private static class LoadBitmapThread extends Thread {
+ Bitmap bitmap;
+ final BitmapFactory.Options options;
+ final byte [] jpeg;
+ LoadBitmapThread(BitmapFactory.Options options, byte [] jpeg) {
+ super("LoadBitmapThread");
+ this.options = options;
+ this.jpeg = jpeg;
+ }
+
+ public void run() {
+ this.bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length, options);
+ }
+ }
+
+ /** Converts the array of jpegs to Bitmaps. The bitmap with index mutable_id will be marked as mutable (or set to -1 to have no mutable bitmaps, or -2 to have all be mutable bitmaps). 
+ */
+ private List loadBitmaps(List jpeg_images, int mutable_id, int inSampleSize) {
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "loadBitmaps");
+ Log.d(TAG, "mutable_id: " + mutable_id);
+ }
+ BitmapFactory.Options mutable_options = new BitmapFactory.Options();
+ mutable_options.inMutable = true; // bitmap that needs to be writable
+ setBitmapOptionsSampleSize(mutable_options, inSampleSize);
+ BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inMutable = false; // later bitmaps don't need to be writable
+ setBitmapOptionsSampleSize(options, inSampleSize);
+ LoadBitmapThread [] threads = new LoadBitmapThread[jpeg_images.size()];
+ // NOTE(review): large spans were lost in extraction below ('<'..'>' stripped): the thread
+ // creation/start/join loops, the null checks, the remainder of loadBitmaps(), and the
+ // GyroDebugInfo class header and its tag constants are missing - restore from upstream.
+ for(int i=0;i bitmaps = new ArrayList<>();
+ for(int i=0;i image_info;
+
+ public GyroDebugInfo() {
+ image_info = new ArrayList<>();
+ }
+ }
+
+ /** Parses the panorama gyro debug XML from inputStream into info.
+ * Returns false on malformed input (with error log); always closes inputStream.
+ */
+ public static boolean readGyroDebugXml(InputStream inputStream, GyroDebugInfo info) {
+ try {
+ XmlPullParser parser = Xml.newPullParser();
+ parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, false);
+ parser.setInput(inputStream, null);
+ parser.nextTag();
+
+ parser.require(XmlPullParser.START_TAG, null, gyro_info_doc_tag);
+ GyroDebugInfo.GyroImageDebugInfo image_info = null;
+
+ while( parser.next() != XmlPullParser.END_DOCUMENT ) {
+ switch( parser.getEventType() ) {
+ case XmlPullParser.START_TAG: {
+ String name = parser.getName();
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "start tag, name: " + name);
+ }
+
+ switch( name ) {
+ case gyro_info_image_tag:
+ info.image_info.add( image_info = new GyroDebugInfo.GyroImageDebugInfo() );
+ break;
+ case gyro_info_vector_tag:
+ if( image_info == null ) {
+ Log.e(TAG, "vector tag outside of image tag");
+ return false;
+ }
+ String type = parser.getAttributeValue(null, "type");
+ String x_s = parser.getAttributeValue(null, "x");
+ String y_s = parser.getAttributeValue(null, "y");
+ String z_s = parser.getAttributeValue(null, "z");
+ float [] vector = new float[3];
+ vector[0] = Float.parseFloat(x_s);
+ vector[1] = 
Float.parseFloat(y_s);
+ vector[2] = Float.parseFloat(z_s);
+ switch( type ) {
+ case gyro_info_vector_right_type:
+ image_info.vectorRight = vector;
+ break;
+ case gyro_info_vector_up_type:
+ image_info.vectorUp = vector;
+ break;
+ case gyro_info_vector_screen_type:
+ image_info.vectorScreen = vector;
+ break;
+ default:
+ Log.e(TAG, "unknown type in vector tag: " + type);
+ return false;
+ }
+ break;
+ }
+ break;
+ }
+ case XmlPullParser.END_TAG: {
+ String name = parser.getName();
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "end tag, name: " + name);
+ }
+
+ //noinspection SwitchStatementWithTooFewBranches
+ switch( name ) {
+ case gyro_info_image_tag:
+ image_info = null;
+ break;
+ }
+ break;
+ }
+ }
+ }
+ }
+ catch(Exception e) {
+ MyDebug.logStackTrace(TAG, "failed to parse xml", e);
+ return false;
+ }
+ finally {
+ try {
+ inputStream.close();
+ }
+ catch(IOException e) {
+ MyDebug.logStackTrace(TAG, "failed to close inputStream", e);
+ }
+ }
+ return true;
+ }
+
+ /** Merges the input bitmaps into a single HDR (or DRO) result, left in bitmaps.get(0);
+ * all other bitmaps are recycled by HDRProcessor. Returns false on UNEQUAL_SIZES (after
+ * clearing bitmaps); throws RuntimeException for any other HDRProcessorException, as that
+ * would be a programming error.
+ */
+ private boolean processHDR(List bitmaps, final Request request, long time_s) {
+ float hdr_alpha = getHDRAlpha(request.preference_hdr_contrast_enhancement, request.exposure_time, bitmaps.size());
+ if( MyDebug.LOG )
+ Log.d(TAG, "before HDR first bitmap: " + bitmaps.get(0) + " is mutable? " + bitmaps.get(0).isMutable());
+ try {
+ hdrProcessor.processHDR(bitmaps, true, null, true, null, hdr_alpha, 4, true, request.preference_hdr_tonemapping_algorithm, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA); // this will recycle all the bitmaps except bitmaps.get(0), which will contain the hdr image
+ }
+ catch(HDRProcessorException e) {
+ MyDebug.logStackTrace(TAG, "HDRProcessorException from processHDR", e);
+ if( e.getCode() == HDRProcessorException.UNEQUAL_SIZES ) {
+ // this can happen on OnePlus 3T with old camera API with front camera, seems to be a bug that resolution changes when exposure compensation is set! 
+ Log.e(TAG, "UNEQUAL_SIZES");
+ bitmaps.clear();
+ System.gc();
+ return false;
+ }
+ else {
+ // throw RuntimeException, as we shouldn't ever get the error INVALID_N_IMAGES, if we do it's a programming error
+ throw new RuntimeException();
+ }
+ }
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "HDR performance: time after creating HDR image: " + (System.currentTimeMillis() - time_s));
+ }
+ if( MyDebug.LOG )
+ Log.d(TAG, "after HDR first bitmap: " + bitmaps.get(0) + " is mutable? " + bitmaps.get(0).isMutable());
+ return true;
+ }
+
+ /** May be run in saver thread or picture callback thread (depending on whether running in background).
+ */
+ // Dispatches a JPEG request on its process_type (AVERAGE / HDR / PANORAMA / plain saves).
+ // Throws RuntimeException for non-JPEG or empty requests, as those are programming errors.
+ private boolean saveImageNow(final Request request) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "saveImageNow");
+
+ if( request.type != Request.Type.JPEG ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "saveImageNow called with non-jpeg request");
+ // throw runtime exception, as this is a programming error
+ throw new RuntimeException();
+ }
+ else if( request.jpeg_images.isEmpty() ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "saveImageNow called with zero images");
+ // throw runtime exception, as this is a programming error
+ throw new RuntimeException();
+ }
+
+ if( request.preshot_bitmaps != null && !request.preshot_bitmaps.isEmpty() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
+ savePreshotBitmaps(request);
+ }
+
+ boolean success;
+ if( request.process_type == Request.ProcessType.AVERAGE ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "average");
+
+ saveBaseImages(request, "_");
+ main_activity.savingImage(true);
+
+ /*List bitmaps = loadBitmaps(request.jpeg_images, 0);
+ if (bitmaps == null) {
+ if (MyDebug.LOG)
+ Log.e(TAG, "failed to load bitmaps");
+ main_activity.savingImage(false);
+ return false;
+ }*/
+ /*Bitmap nr_bitmap = loadBitmap(request.jpeg_images.get(0), true);
+
+ if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP ) {
+ try {
+ for(int i = 1; i < request.jpeg_images.size(); i++) {
+ Log.d(TAG, "processAvg for image: " + i);
+ Bitmap 
new_bitmap = loadBitmap(request.jpeg_images.get(i), false);
+ float avg_factor = (float) i;
+ hdrProcessor.processAvg(nr_bitmap, new_bitmap, avg_factor, true);
+ // processAvg recycles new_bitmap
+ }
+ //hdrProcessor.processAvgMulti(bitmaps, hdr_strength, 4);
+ //hdrProcessor.avgBrighten(nr_bitmap);
+ }
+ catch(HDRProcessorException e) {
+ MyDebug.logStackTrace(TAG, "HDRProcessorException from processAvg", e);
+ throw new RuntimeException();
+ }
+ }
+ else {
+ Log.e(TAG, "shouldn't have offered NoiseReduction as an option if not on Android 5");
+ throw new RuntimeException();
+ }*/
+ // Noise-reduction (AVERAGE) path: accumulate the burst into avg_data, then brighten into nr_bitmap.
+ Bitmap nr_bitmap;
+ {
+ try {
+ long time_s = System.currentTimeMillis();
+ int inSampleSize = hdrProcessor.getAvgSampleSize(request.iso, request.exposure_time);
+ //final boolean use_smp = false;
+ final boolean use_smp = true;
+ // n_smp_images is how many bitmaps to decompress at once if use_smp==true. Beware of setting too high -
+ // e.g., storing 4 16MP bitmaps takes 256MB of heap (NR requires at least 512MB large heap); also need to make
+ // sure there isn't a knock on effect on performance
+ //final int n_smp_images = 2;
+ final int n_smp_images = 4;
+ long this_time_s = System.currentTimeMillis();
+ List bitmaps = null;
+ Bitmap bitmap0, bitmap1;
+ if( use_smp ) {
+ /*List sub_jpeg_list = new ArrayList<>();
+ sub_jpeg_list.add(request.jpeg_images.get(0));
+ sub_jpeg_list.add(request.jpeg_images.get(1));
+ bitmaps = loadBitmaps(sub_jpeg_list, -1, inSampleSize);
+ bitmap0 = bitmaps.get(0);
+ bitmap1 = bitmaps.get(1);*/
+ int n_remaining = request.jpeg_images.size();
+ int n_load = Math.min(n_smp_images, n_remaining);
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "n_remaining: " + n_remaining);
+ Log.d(TAG, "n_load: " + n_load);
+ }
+ // NOTE(review): code was lost in extraction below ('<'..'>' spans stripped): the initial
+ // batched decode, the avg_data creation (hdrProcessor.processAvg), width/height setup and
+ // the main per-image loop header are missing - restore from upstream before compiling.
+ List sub_jpeg_list = new ArrayList<>();
+ for(int j=0;j sub_jpeg_list = new ArrayList<>();
+ for(int j=i;j new_bitmaps = loadBitmaps(sub_jpeg_list, -1, inSampleSize);
+ bitmaps.addAll(new_bitmaps);
+ if( MyDebug.LOG )
+ Log.d(TAG, "length of bitmaps list is now: " + bitmaps.size());
+ new_bitmap = bitmaps.get(i);
+ }
+ }
+ else {
+ new_bitmap = loadBitmap(request.jpeg_images.get(i), false, inSampleSize);
+ }
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "*** time for loading extra bitmap: " + (System.currentTimeMillis() - this_time_s));
+ }
+ avg_factor = (float)i;
+ this_time_s = System.currentTimeMillis();
+ hdrProcessor.updateAvg(avg_data, width, height, new_bitmap, avg_factor, request.iso, request.exposure_time, request.zoom_factor);
+ // updateAvg recycles new_bitmap
+ if( bitmaps != null ) {
+ // drop our reference so the recycled bitmap can be collected
+ bitmaps.set(i, null);
+ }
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "*** time for updating extra bitmap: " + (System.currentTimeMillis() - this_time_s));
+ }
+ }
+
+ this_time_s = System.currentTimeMillis();
+ nr_bitmap = hdrProcessor.avgBrighten(avg_data, width, height, request.iso, request.exposure_time);
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "*** time for brighten: " + (System.currentTimeMillis() - this_time_s));
+ }
+ avg_data.destroy();
+ //noinspection UnusedAssignment
+ avg_data = null;
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "*** total time for saving NR image: " + (System.currentTimeMillis() - time_s));
+ }
+ }
+ catch(HDRProcessorException e) {
+ MyDebug.logStackTrace(TAG, "HDRProcessorException", e);
+ throw new RuntimeException();
+ }
+ }
+
+ if( MyDebug.LOG )
+ Log.d(TAG, "nr_bitmap: " + nr_bitmap + " is mutable? " + nr_bitmap.isMutable());
+ System.gc();
+ main_activity.savingImage(false);
+
+ if( MyDebug.LOG )
+ Log.d(TAG, "save NR image");
+ success = saveSingleImageNow(request, request.jpeg_images.get(0), nr_bitmap, nr_suffix, true, true, true, false);
+ if( MyDebug.LOG && !success )
+ Log.e(TAG, "saveSingleImageNow failed for nr image");
+ nr_bitmap.recycle();
+ System.gc();
+ }
+ else if( request.process_type == Request.ProcessType.HDR ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "hdr");
+ // 1 image means DRO mode; 3 images means a true HDR bracket
+ if( request.jpeg_images.size() != 1 && request.jpeg_images.size() != 3 ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "saveImageNow expected either 1 or 3 images for hdr, not " + request.jpeg_images.size());
+ // throw runtime exception, as this is a programming error
+ throw new RuntimeException();
+ }
+
+ long time_s = System.currentTimeMillis();
+ if( request.jpeg_images.size() > 1 ) {
+ // if there's only 1 image, we're in DRO mode, and shouldn't save the base image
+ // note that in earlier Open Camera versions, we used "_EXP" as the suffix. We now use just "_" from 1.42 onwards, so Google
+ // Photos will group them together. (Unfortunately using "_EXP_" doesn't work, the images aren't grouped!)
+ saveBaseImages(request, "_");
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "HDR performance: time after saving base exposures: " + (System.currentTimeMillis() - time_s));
+ }
+ }
+
+ // note, even if we failed saving some of the expo images, still try to save the HDR image
+ if( MyDebug.LOG )
+ Log.d(TAG, "create HDR image");
+ main_activity.savingImage(true);
+
+ // see documentation for HDRProcessor.processHDR() - because we're using release_bitmaps==true, we need to make sure that
+ // the bitmap that will hold the output HDR image is mutable (in case of options like photo stamp)
+ // see test testTakePhotoHDRPhotoStamp. 
+ int base_bitmap = (request.jpeg_images.size()-1)/2;
+ if( MyDebug.LOG )
+ Log.d(TAG, "base_bitmap: " + base_bitmap);
+ List bitmaps = loadBitmaps(request.jpeg_images, base_bitmap, 1);
+ if( bitmaps == null ) {
+ if( MyDebug.LOG )
+ Log.e(TAG, "failed to load bitmaps");
+ main_activity.savingImage(false);
+ return false;
+ }
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "HDR performance: time after decompressing base exposures: " + (System.currentTimeMillis() - time_s));
+ }
+
+ if( !processHDR(bitmaps, request, time_s) ) {
+ main_activity.getPreview().showToast(null, R.string.failed_to_process_hdr);
+ main_activity.savingImage(false);
+ return false;
+ }
+
+ Bitmap hdr_bitmap = bitmaps.get(0);
+ if( MyDebug.LOG )
+ Log.d(TAG, "hdr_bitmap: " + hdr_bitmap + " is mutable? " + hdr_bitmap.isMutable());
+ bitmaps.clear();
+ System.gc();
+ main_activity.savingImage(false);
+
+ if( MyDebug.LOG )
+ Log.d(TAG, "save HDR image");
+ int base_image_id = ((request.jpeg_images.size()-1)/2);
+ if( MyDebug.LOG )
+ Log.d(TAG, "base_image_id: " + base_image_id);
+ String suffix = request.jpeg_images.size() == 1 ? "_DRO" : hdr_suffix;
+ success = saveSingleImageNow(request, request.jpeg_images.get(base_image_id), hdr_bitmap, suffix, true, true, true, false);
+ if( MyDebug.LOG && !success )
+ Log.e(TAG, "saveSingleImageNow failed for hdr image");
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "HDR performance: time after saving HDR image: " + (System.currentTimeMillis() - time_s));
+ }
+ hdr_bitmap.recycle();
+ System.gc();
+ }
+ else if( request.process_type == Request.ProcessType.PANORAMA ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "panorama");
+
+ // save text file with gyro info
+ if( !request.image_capture_intent && request.save_base == Request.SaveBase.SAVEBASE_ALL_PLUS_DEBUG ) {
+ /*final StringBuilder gyro_text = new StringBuilder();
+ gyro_text.append("Panorama gyro debug info\n");
+ gyro_text.append("n images: " + request.gyro_rotation_matrix.size() + ":\n");
+
+ float [] inVector = new float[3];
+ float [] outVector = new float[3];
+ // NOTE(review): very large spans were lost in extraction below ('<'..'>' stripped): the rest
+ // of the gyro debug output, the panorama bitmap processing/stitching, the remainder of
+ // saveImageNow(), the end of the commented-out block, and the IntRange class header with its
+ // lower/upper fields and primary constructor are all missing - restore from upstream.
+ for(int i=0;i bitmaps = loadBitmaps(request.jpeg_images, -2, 1);
+ if( bitmaps == null ) {
+ if( MyDebug.LOG )
+ Log.e(TAG, "failed to load bitmaps");
+ main_activity.savingImage(false);
+ return false;
+ }
+ if( MyDebug.LOG ) {
+ Log.d(TAG, "panorama performance: time after decompressing base exposures: " + (System.currentTimeMillis() - time_s));
+ }
+
+ // rotate the bitmaps if necessary for exif tags
+ for(int i=0;i upper ) {
+ throw new IllegalArgumentException("lower must be <= upper");
+ }
+ }
+
+ IntRange(Range range) {
+ this(range.getLower(), range.getUpper());
+ }
+
+ boolean contains(int value) {
+ return value >= lower && value <= upper;
+ }
+
+ int clamp(int value) {
+ if( value <= lower )
+ return lower;
+ else if( value >= upper )
+ return upper;
+ return value;
+ }
+ }
+
+ /** Clamps the requested video size into the encoder's supported width/height ranges (trying to
+ * preserve aspect ratio), then rounds each dimension up to the encoder's alignment.
+ */
+ public static CameraController.Size adjustResolutionForVideoCapabilities(int video_width, int video_height, IntRange supported_widths, IntRange supported_heights, int width_alignment, int height_alignment) {
+ if( !supported_widths.contains(video_width) ) {
+ double 
aspect = ((double)video_height) / ((double)video_width);
+ video_width = supported_widths.clamp(video_width);
+ video_height = (int)(aspect * video_width + 0.5);
+ if( MyDebug.LOG )
+ Log.d(TAG, "limit video (width) to: " + video_width + " x " + video_height);
+ }
+ if( !supported_heights.contains(video_height) ) {
+ double aspect = ((double)video_height) / ((double)video_width);
+ video_height = supported_heights.clamp(video_height);
+ video_width = (int)(video_height / aspect + 0.5);
+ if( MyDebug.LOG )
+ Log.d(TAG, "limit video (height) to: " + video_width + " x " + video_height);
+ // test width again
+ if( !supported_widths.contains(video_width) ) {
+ video_width = supported_widths.clamp(video_width);
+ if( MyDebug.LOG )
+ Log.d(TAG, "can't find valid size that preserves aspect ratios! limit video (width) to: " + video_width + " x " + video_height);
+ }
+ }
+ // Adjust for alignment - we could be cleverer and try to find an adjustment that preserves the aspect
+ // ratio. But we'd hope that camera preview sizes already satisfy alignments - or if we had to adjust due to
+ // being outside the supported widths or heights, then we should have clamped to something that already
+ // satisfies the alignments
+ int alignment = width_alignment;
+ if( video_width % alignment != 0 ) {
+ video_width += alignment - (video_width % alignment);
+ if( MyDebug.LOG )
+ Log.d(TAG, "adjust video width for alignment to: " + video_width);
+ }
+ alignment = height_alignment;
+ if( video_height % alignment != 0 ) {
+ video_height += alignment - (video_height % alignment);
+ if( MyDebug.LOG )
+ Log.d(TAG, "adjust height for alignment to: " + video_height);
+ }
+ return new CameraController.Size(video_width, video_height);
+ }
+
+ /** Bundles a MediaMuxer with its started flag and video track index, as updated by encodeVideoFrame(). */
+ private static class MuxerInfo {
+ MediaMuxer muxer;
+ boolean muxer_started = false;
+ int videoTrackIndex = -1;
+ }
+
+ /** Drains one encoded frame from the encoder into the muxer, overriding its presentation time
+ * with presentationTimeUs. If end_of_stream is set, first signals end-of-input to the encoder.
+ * Starts the muxer (and records the video track) on INFO_OUTPUT_FORMAT_CHANGED.
+ * @throws IOException if the encoder returns a null output buffer.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.O)
+ private void encodeVideoFrame(final MediaCodec encoder, MuxerInfo muxer_info, long presentationTimeUs, boolean end_of_stream) throws IOException {
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ if( end_of_stream ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, " signal end of stream");
+ encoder.signalEndOfInputStream();
+ }
+ while( true ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, " start of loop for saving pre-shot");
+ final int timeout_us = 10000;
+ int outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, timeout_us);
+ if( MyDebug.LOG )
+ Log.d(TAG, " outputBufferIndex: " + outputBufferIndex);
+ if( outputBufferIndex >= 0 ) {
+ bufferInfo.presentationTimeUs = presentationTimeUs;
+ ByteBuffer outputBuffer = encoder.getOutputBuffer(outputBufferIndex);
+ if( outputBuffer == null ) {
+ Log.e(TAG, "getOutputBuffer returned null");
+ throw new IOException();
+ }
+
+ if( (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, "BUFFER_FLAG_CODEC_CONFIG");
+ // codec config data is supplied via the format - don't write it as a sample
+ bufferInfo.size = 0;
+ }
+
+ if( bufferInfo.size != 0 ) {
+ outputBuffer.position(bufferInfo.offset);
+ outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
+
+ // NOTE(review): this assumes the muxer has already been started (i.e. that
+ // INFO_OUTPUT_FORMAT_CHANGED always precedes the first data buffer) - confirm.
+ muxer_info.muxer.writeSampleData(muxer_info.videoTrackIndex, outputBuffer, bufferInfo);
+ }
+
+ encoder.releaseOutputBuffer(outputBufferIndex, false);
+
+ // one frame drained per call
+ break;
+ /*if( (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0 ) {
+ if( MyDebug.LOG ) {
+ if( !end_of_stream ) {
+ Log.e(TAG, " reached end of stream unexpectedly");
+ }
+ else {
+ Log.d(TAG, " end of stream reached");
+ }
+ }
+ break;
+ }*/
+ }
+ else {
+ if( outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, " INFO_TRY_AGAIN_LATER");
+ /*if( !end_of_stream ) {
+ break;
+ }*/
+ }
+ else if( outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED ) {
+ if( MyDebug.LOG )
+ Log.d(TAG, " INFO_OUTPUT_FORMAT_CHANGED");
+ muxer_info.videoTrackIndex = muxer_info.muxer.addTrack(encoder.getOutputFormat());
+ muxer_info.muxer.start();
+ muxer_info.muxer_started = true;
+ }
+ }
+ }
+ }
+ + @RequiresApi(api = Build.VERSION_CODES.O) + private void savePreshotBitmaps(final Request request) { + if( MyDebug.LOG ) + Log.d(TAG, "savePreshotBitmaps"); + + main_activity.savingImage(true); + + List preshot_bitmaps = request.preshot_bitmaps; + if( MyDebug.LOG ) + Log.d(TAG, "number of preshots: " + preshot_bitmaps.size()); + + ApplicationInterface.VideoMethod method = ApplicationInterface.VideoMethod.FILE; + Uri video_uri = null; + String video_filename = null; + ParcelFileDescriptor video_pfd_saf = null; + MediaMuxer muxer = null; + //boolean muxer_started = false; + MuxerInfo muxer_info = new MuxerInfo(); + MediaCodec encoder = null; + boolean saved_preshots = false; + try { + // rotate if necessary + // see comments in Preview.RefreshPreviewBitmapTask for update_preshot for why we need to rotote + int rotation_degrees = main_activity.getPreview().getDisplayRotationDegrees(false); + if( MyDebug.LOG ) + Log.d(TAG, "rotation_degrees: " + rotation_degrees); + if( rotation_degrees != 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "rotate preshots"); + Matrix matrix = new Matrix(); + matrix.postRotate(-rotation_degrees); + for(int i=0;i preshot_height) != (video_width > video_height) ) { + int dummy = video_height; + //noinspection SuspiciousNameCombination + video_height = video_width; + video_width = dummy; + } + if( MyDebug.LOG ) { + Log.d(TAG, "preshot: " + preshot_width + " x " + preshot_height); + Log.d(TAG, "preview: " + video_width + " x " + video_height); + } + + long time_s = System.currentTimeMillis(); + final String mime_type = MediaFormat.MIMETYPE_VIDEO_AVC; + MediaCodecList codecs = new MediaCodecList(MediaCodecList.REGULAR_CODECS); + MediaCodecInfo best_codec_info = null; + int best_error = 0; + int best_offset = 0; + { + MediaCodecInfo [] codec_infos = codecs.getCodecInfos(); + for(MediaCodecInfo codec_info : codec_infos) { + if( !codec_info.isEncoder() ) { + continue; + } + + boolean valid = false; + String [] types = codec_info.getSupportedTypes(); + 
for(String type : types) { + if( type.equalsIgnoreCase(mime_type) ) { + valid = true; + break; + } + } + + if( valid ) { + MediaCodecInfo.CodecCapabilities capabilities = codec_info.getCapabilitiesForType(mime_type); + MediaCodecInfo.VideoCapabilities video_capabilities = capabilities.getVideoCapabilities(); + if( video_capabilities != null ) { + int error_w = Math.abs(video_capabilities.getSupportedWidths().clamp(video_width) - video_width); + int error_h = Math.abs(video_capabilities.getSupportedHeights().clamp(video_height) - video_height); + int error = error_w*error_h; + int offset_w = video_width % video_capabilities.getWidthAlignment(); + int offset_h = video_height % video_capabilities.getHeightAlignment(); + int offset = offset_w*offset_h; + if( MyDebug.LOG ) { + Log.d(TAG, "video_capabilities:"); + Log.d(TAG, " width range: " + video_capabilities.getSupportedWidths()); + Log.d(TAG, " height range: " + video_capabilities.getSupportedHeights()); + Log.d(TAG, " width alignment: " + video_capabilities.getWidthAlignment()); + Log.d(TAG, " height alignment: " + video_capabilities.getHeightAlignment()); + Log.d(TAG, " error_w: " + error_w); + Log.d(TAG, " error_h: " + error_h); + Log.d(TAG, " offset_w: " + offset_w); + Log.d(TAG, " offset_h: " + offset_h); + } + // prefer codec that's closest to supporting the width/height; among those, prefer codec with smallest adjustment needed for alignment + if( best_codec_info == null || error < best_error || offset < best_offset ) { + best_codec_info = codec_info; + } + } + } + } + } + + if( best_codec_info == null ) { + Log.e(TAG, "can't find a valid codecinfo"); + // don't fail - hope for the best that we might find an encoder below anyway + } + else { + MediaCodecInfo.CodecCapabilities capabilities = best_codec_info.getCapabilitiesForType(mime_type); + MediaCodecInfo.VideoCapabilities video_capabilities = capabilities.getVideoCapabilities(); + Range supported_widths = video_capabilities.getSupportedWidths(); + Range 
supported_heights = video_capabilities.getSupportedHeights(); + int width_alignment = video_capabilities.getWidthAlignment(); + int height_alignment = video_capabilities.getHeightAlignment(); + CameraController.Size adjusted_size = adjustResolutionForVideoCapabilities(video_width, video_height, new IntRange(supported_widths), new IntRange(supported_heights), width_alignment, height_alignment); + video_width = adjusted_size.width; + video_height = adjusted_size.height; + } + if( MyDebug.LOG ) + Log.d(TAG, "time for querying codec capabilities: " + (System.currentTimeMillis() - time_s)); + + if( MyDebug.LOG ) + Log.d(TAG, "chosen video resolution: " + video_width + " x " + video_height); + if( preshot_width != video_width || preshot_height != video_height ) { + if( MyDebug.LOG ) + Log.d(TAG, "resize preshot bitmaps to: " + video_width + " x " + video_height); + for(int i=0;i bitmaps = new ArrayList<>(); + bitmaps.add(bitmap); + if( !processHDR(bitmaps, request, time_s) ) { + Log.e(TAG, "failed to apply DRO to preshot bitmap: " + i); + throw new IOException(); + } + bitmap = bitmaps.get(0); + }*/ + + PostProcessBitmapResult postProcessBitmapResult = postProcessBitmap(preshot_request, null, bitmap, true); + bitmap = postProcessBitmapResult.bitmap; + preshot_bitmaps.set(i, bitmap); + } + + if( MyDebug.LOG ) + Log.d(TAG, "convert preshot bitmaps to video"); + + method = main_activity.getApplicationInterface().createOutputVideoMethod(); + + if( MyDebug.LOG ) + Log.d(TAG, "method? 
" + method); + final String extension = "mp4"; + final int muxer_format = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4; + if( method == ApplicationInterface.VideoMethod.FILE ) { + File videoFile = main_activity.getApplicationInterface().createOutputVideoFile(true, extension, request.current_date); + video_filename = videoFile.getAbsolutePath(); + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + video_filename); + muxer = new MediaMuxer(video_filename, muxer_format); + } + else { + Uri uri; + if( method == ApplicationInterface.VideoMethod.SAF ) { + uri = main_activity.getApplicationInterface().createOutputVideoSAF(true, extension, request.current_date); + } + else if( method == ApplicationInterface.VideoMethod.MEDIASTORE ) { + uri = main_activity.getApplicationInterface().createOutputVideoMediaStore(true, extension, request.current_date); + } + else { + uri = main_activity.getApplicationInterface().createOutputVideoUri(); + } + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + uri); + video_pfd_saf = main_activity.getContentResolver().openFileDescriptor(uri, "rw"); + video_uri = uri; + muxer = new MediaMuxer(video_pfd_saf.getFileDescriptor(), muxer_format); + } + muxer_info.muxer = muxer; + + if( MyDebug.LOG ) { + Log.d(TAG, "preshot width: " + video_width); + Log.d(TAG, "preshot height: " + video_height); + } + MediaFormat format = MediaFormat.createVideoFormat(mime_type, video_width, video_height); + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, preshot_bitmaps.size()*500000*8); // 500KB per frame + format.setString(MediaFormat.KEY_FRAME_RATE, null); // format passed to MediaCodecList.findEncoderForFormat() must not specify a KEY_FRAME_RATE - so we set the KEY_FRAME_RATE later + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); + //int videoTrackIndex = muxer.addTrack(format); + //muxer.start(); + + //encoder = MediaCodec.createEncoderByType(mime_type); + String 
encoder_name = codecs.findEncoderForFormat(format); + if( MyDebug.LOG ) + Log.d(TAG, "encoder_name: " + encoder_name); + if( encoder_name == null ) { + Log.e(TAG, "failed to find encoder"); + throw new IOException(); + } + else { + encoder = MediaCodec.createByCodecName(encoder_name); + + // now set KEY_FRAME_RATE (must be after findEncoderForFormat(), see note above) + format.setInteger(MediaFormat.KEY_FRAME_RATE, 1000/Preview.preshot_interval_ms); + + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + Surface inputSurface = encoder.createInputSurface(); + encoder.start(); + + if( request.store_location ) { + muxer.setLocation((float)request.location.getLatitude(), (float)request.location.getLongitude()); + } + + //int videoTrackIndex = -1; + //int videoTrackIndex = muxer.addTrack(encoder.getOutputFormat()); + //muxer.start(); + + long presentationTimeUs = 0; + for(int i=0;i= 0 ) { + // ByteBuffer inputBuffer = encoder.getInputBuffer(inputBufferIndex); + // inputBuffer.clear(); + // inputBuffer.put(buffer); + // encoder.queueInputBuffer(inputBufferIndex, 0, buffer.limit(), presentationTimeUs, 0); + //} + + //encodeVideoFrame(encoder, muxer_info, presentationTimeUs, false); + + Canvas canvas = inputSurface.lockCanvas(null); + int xpos = (canvas.getWidth() - bitmap.getWidth())/2; + int ypos = (canvas.getHeight() - bitmap.getHeight())/2; + if( MyDebug.LOG ) + Log.d(TAG, "render at: " + xpos + " , " + ypos); + canvas.drawBitmap(bitmap, xpos, ypos, null); + inputSurface.unlockCanvasAndPost(canvas); + + encodeVideoFrame(encoder, muxer_info, presentationTimeUs, false); + //if( true ) + // throw new IOException(); // test + + preshot_bitmaps.set(i, null); // so we know this bitmap is recycled + bitmap.recycle(); + presentationTimeUs += (Preview.preshot_interval_ms*1000); + } + + encodeVideoFrame(encoder, muxer_info, presentationTimeUs, true); + } + + saved_preshots = true; // success! 
+ } + catch(IOException | IllegalStateException e) { + // ideally want to catch MediaCodec.CodecException, but then entire class would need to target + // Android L - instead we catch its superclass IllegalStateException + MyDebug.logStackTrace(TAG, "failed saving preshots video", e); + // cleanup + for(int i=0;i 1 && !first_only; + String filename_suffix = (multiple_jpegs || request.force_suffix) ? suffix + (i + request.suffix_offset) : ""; + if( request.process_type == Request.ProcessType.X_NIGHT ) { + filename_suffix = "_Night" + filename_suffix; + } + boolean share_image = share && (i == mid_image); + if( !saveSingleImageNow(request, image, null, filename_suffix, update_thumbnail, share_image, false, false) ) { + if( MyDebug.LOG ) + Log.e(TAG, "saveSingleImageNow failed for image: " + i); + success = false; + } + if( first_only ) + break; // only requested the first + } + return success; + } + + /** Saves all the images in request.jpeg_images, depending on the save_base option. + */ + private void saveBaseImages(Request request, String suffix) { + if( MyDebug.LOG ) + Log.d(TAG, "saveBaseImages"); + if( !request.image_capture_intent && request.save_base != Request.SaveBase.SAVEBASE_NONE ) { + if( MyDebug.LOG ) + Log.d(TAG, "save base images"); + + Request base_request = request; + if( request.process_type == Request.ProcessType.PANORAMA ) { + // Important to save base images for panorama in PNG format, to avoid risk of not being able to reproduce the + // same issue - decompressing JPEGs can vary between devices! + // Also disable options that don't really make sense for base panorama images. 
+ base_request = request.copy(); + base_request.image_format = Request.ImageFormat.PNG; + base_request.preference_stamp = "preference_stamp_no"; + base_request.preference_textstamp = ""; + base_request.do_auto_stabilise = false; + base_request.mirror = false; + } + else if( request.process_type == Request.ProcessType.AVERAGE ) { + // In case the base image needs to be postprocessed, we still want to save base images for NR at the 100% JPEG quality + base_request = request.copy(); + base_request.image_quality = 100; + } + // don't update the thumbnails, only do this for the final image - so user doesn't think it's complete, click gallery, then wonder why the final image isn't there + // also don't mark these images as being shared + saveImages(base_request, suffix, base_request.save_base == Request.SaveBase.SAVEBASE_FIRST, false, false); + // ignore return of saveImages - as for deciding whether to pause preview or not (which is all we use the success return for), all that matters is whether we saved the final HDR image + } + } + + /** Computes the width and height of a centred crop region after having rotated an image. + * @param result - Array of length 2 which will be filled with the returned width and height. + * @param level_angle_rad_abs - Absolute value of angle of rotation, in radians. + * @param w0 - Rotated width. + * @param h0 - Rotated height. + * @param w1 - Original width. + * @param h1 - Original height. + * @param max_width - Maximum width to return. + * @param max_height - Maximum height to return. + * @return - Whether a crop region could be successfully calculated. 
+ */ + public static boolean autoStabiliseCrop(int [] result, double level_angle_rad_abs, double w0, double h0, int w1, int h1, int max_width, int max_height) { + boolean ok = false; + result[0] = 0; + result[1] = 0; + + double tan_theta = Math.tan(level_angle_rad_abs); + double sin_theta = Math.sin(level_angle_rad_abs); + double denom = ( h0/w0 + tan_theta ); + double alt_denom = ( w0/h0 + tan_theta ); + if( denom < 1.0e-14 ) { + if( MyDebug.LOG ) + Log.d(TAG, "zero denominator?!"); + } + else if( alt_denom < 1.0e-14 ) { + if( MyDebug.LOG ) + Log.d(TAG, "zero alt denominator?!"); + } + else { + int w2 = (int)(( h0 + 2.0*h1*sin_theta*tan_theta - w0*tan_theta ) / denom); + int h2 = (int)(w2*h0/w0); + int alt_h2 = (int)(( w0 + 2.0*w1*sin_theta*tan_theta - h0*tan_theta ) / alt_denom); + int alt_w2 = (int)(alt_h2*w0/h0); + if( MyDebug.LOG ) { + //Log.d(TAG, "h0 " + h0 + " 2.0*h1*sin_theta*tan_theta " + 2.0*h1*sin_theta*tan_theta + " w0*tan_theta " + w0*tan_theta + " / h0/w0 " + h0/w0 + " tan_theta " + tan_theta); + Log.d(TAG, "w2 = " + w2 + " , h2 = " + h2); + Log.d(TAG, "alt_w2 = " + alt_w2 + " , alt_h2 = " + alt_h2); + } + if( alt_w2 < w2 ) { + if( MyDebug.LOG ) { + Log.d(TAG, "chose alt!"); + } + w2 = alt_w2; + h2 = alt_h2; + } + if( w2 <= 0 ) + w2 = 1; + else if( w2 > max_width ) + w2 = max_width; + if( h2 <= 0 ) + h2 = 1; + else if( h2 > max_height ) + h2 = max_height; + + ok = true; + result[0] = w2; + result[1] = h2; + } + return ok; + } + + /** Performs the auto-stabilise algorithm on the image. + * @param data The jpeg data. + * @param bitmap Optional argument - the bitmap if already unpacked from the jpeg data. + * @param level_angle The angle in degrees to rotate the image. + * @param is_front_facing Whether the camera is front-facing. + * @return A bitmap representing the auto-stabilised jpeg. 
+ */ + private Bitmap autoStabilise(byte [] data, Bitmap bitmap, double level_angle, boolean is_front_facing) { + if( MyDebug.LOG ) { + Log.d(TAG, "autoStabilise"); + Log.d(TAG, "level_angle: " + level_angle); + Log.d(TAG, "is_front_facing: " + is_front_facing); + } + while( level_angle < -90 ) + level_angle += 180; + while( level_angle > 90 ) + level_angle -= 180; + if( MyDebug.LOG ) + Log.d(TAG, "auto stabilising... angle: " + level_angle); + if( bitmap == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to decode bitmap to auto-stabilise"); + // bitmap doesn't need to be mutable here, as this won't be the final bitmap returned from the auto-stabilise code + bitmap = loadBitmapWithRotation(data, false); + if( bitmap == null ) { + main_activity.getPreview().showToast(null, R.string.failed_to_auto_stabilise); + System.gc(); + } + } + if( bitmap != null ) { + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + if( MyDebug.LOG ) { + Log.d(TAG, "level_angle: " + level_angle); + Log.d(TAG, "decoded bitmap size " + width + ", " + height); + Log.d(TAG, "bitmap size: " + width*height*4); + } + /*for(int y=0;y= 7500 ) + scale *= 1.5f; + else + scale *= 2.0f; + } + if( MyDebug.LOG ) { + Log.d(TAG, "w0 = " + w0 + " , h0 = " + h0); + Log.d(TAG, "w1 = " + w1 + " , h1 = " + h1); + Log.d(TAG, "scale = sqrt " + orig_size + " / " + rotated_size + " = " + scale); + } + matrix.postScale(scale, scale); + w0 *= scale; + h0 *= scale; + // warning "Possibly lossy implicit cast in compound assignment" suppressed: + // it's intentional that we multiply int by float, and implicitly cast back to int + // (the suggested solution is to first cast the float to int before multiplying, which + // we don't want) + //noinspection lossy-conversions + w1 *= scale; + //noinspection lossy-conversions + h1 *= scale; + if( MyDebug.LOG ) { + Log.d(TAG, "after scaling: w0 = " + w0 + " , h0 = " + h0); + Log.d(TAG, "after scaling: w1 = " + w1 + " , h1 = " + h1); + } + if( is_front_facing ) { 
+ matrix.postRotate((float)-level_angle); + } + else { + matrix.postRotate((float)level_angle); + } + Bitmap new_bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, true); + // careful, as new_bitmap is sometimes not a copy! + if( new_bitmap != bitmap ) { + bitmap.recycle(); + bitmap = new_bitmap; + } + System.gc(); + if( MyDebug.LOG ) { + Log.d(TAG, "rotated and scaled bitmap size " + bitmap.getWidth() + ", " + bitmap.getHeight()); + Log.d(TAG, "rotated and scaled bitmap size: " + bitmap.getWidth()*bitmap.getHeight()*4); + } + + int [] crop = new int [2]; + if( autoStabiliseCrop(crop, level_angle_rad_abs, w0, h0, w1, h1, bitmap.getWidth(), bitmap.getHeight()) ) { + int w2 = crop[0]; + int h2 = crop[1]; + int x0 = (bitmap.getWidth()-w2)/2; + int y0 = (bitmap.getHeight()-h2)/2; + if( MyDebug.LOG ) { + Log.d(TAG, "x0 = " + x0 + " , y0 = " + y0); + } + new_bitmap = Bitmap.createBitmap(bitmap, x0, y0, w2, h2); + if( new_bitmap != bitmap ) { + bitmap.recycle(); + bitmap = new_bitmap; + } + System.gc(); + } + + if( MyDebug.LOG ) + Log.d(TAG, "bitmap is mutable?: " + bitmap.isMutable()); + // Usually createBitmap will return a mutable bitmap, but not if the source bitmap (which we set as immutable) + // is returned (if the level angle is (tolerantly) 0. + // see testPhotoStamp() for testing this. + if( !bitmap.isMutable() ) { + new_bitmap = bitmap.copy(bitmap.getConfig(), true); + bitmap.recycle(); + bitmap = new_bitmap; + } + } + return bitmap; + } + + /** Mirrors the image. + * @param data The jpeg data. + * @param bitmap Optional argument - the bitmap if already unpacked from the jpeg data. + * @return A bitmap representing the mirrored jpeg. 
+ */ + private Bitmap mirrorImage(byte [] data, Bitmap bitmap) { + if( MyDebug.LOG ) { + Log.d(TAG, "mirrorImage"); + } + if( bitmap == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to decode bitmap to mirror"); + // bitmap doesn't need to be mutable here, as this won't be the final bitmap returned from the mirroring code + bitmap = loadBitmapWithRotation(data, false); + if( bitmap == null ) { + // don't bother warning to the user - we simply won't mirror the image + System.gc(); + } + } + if( bitmap != null ) { + Matrix matrix = new Matrix(); + matrix.preScale(-1.0f, 1.0f); + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + Bitmap new_bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, true); + // careful, as new_bitmap is sometimes not a copy! + if( new_bitmap != bitmap ) { + bitmap.recycle(); + bitmap = new_bitmap; + } + if( MyDebug.LOG ) + Log.d(TAG, "bitmap is mutable?: " + bitmap.isMutable()); + } + return bitmap; + } + + /** Applies any photo stamp options (if they exist). + * @param data The jpeg data. + * @param bitmap Optional argument - the bitmap if already unpacked from the jpeg data. + * @return A bitmap representing the stamped jpeg. Will be null if the input bitmap is null and + * no photo stamp is applied. 
+ */ + private Bitmap stampImage(final Request request, byte [] data, Bitmap bitmap) { + if( MyDebug.LOG ) { + Log.d(TAG, "stampImage"); + } + //final MyApplicationInterface applicationInterface = main_activity.getApplicationInterface(); + boolean dategeo_stamp = request.preference_stamp.equals("preference_stamp_yes"); + boolean text_stamp = !request.preference_textstamp.isEmpty(); + if( dategeo_stamp || text_stamp ) { + if( bitmap == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "decode bitmap in order to stamp info"); + bitmap = loadBitmapWithRotation(data, true); + if( bitmap == null ) { + main_activity.getPreview().showToast(null, R.string.failed_to_stamp); + System.gc(); + } + } + if( bitmap != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "stamp info to bitmap: " + bitmap); + if( MyDebug.LOG ) + Log.d(TAG, "bitmap is mutable?: " + bitmap.isMutable()); + + String stamp_string = ""; + /* We now stamp via a TextView instead of using MyApplicationInterface.drawTextWithBackground(). + * This is important in order to satisfy the Google emoji policy... 
+ */ + + int font_size = request.font_size; + int color = request.color; + String pref_style = request.pref_style; + if( MyDebug.LOG ) + Log.d(TAG, "pref_style: " + pref_style); + String preference_stamp_dateformat = request.preference_stamp_dateformat; + String preference_stamp_timeformat = request.preference_stamp_timeformat; + String preference_stamp_gpsformat = request.preference_stamp_gpsformat; + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + if( MyDebug.LOG ) { + Log.d(TAG, "decoded bitmap size " + width + ", " + height); + Log.d(TAG, "bitmap size: " + width*height*4); + } + Canvas canvas = new Canvas(bitmap); + p.setColor(Color.WHITE); + // we don't use the density of the screen, because we're stamping to the image, not drawing on the screen (we don't want the font height to depend on the device's resolution) + // instead we go by 1 pt == 1/72 inch height, and scale for an image height (or width if in portrait) of 4" (this means the font height is also independent of the photo resolution) + int smallest_size = Math.min(width, height); + float scale = ((float)smallest_size) / (72.0f*4.0f); + int font_size_pixel = (int)(font_size * scale + 0.5f); // convert pt to pixels + if( MyDebug.LOG ) { + Log.d(TAG, "scale: " + scale); + Log.d(TAG, "font_size: " + font_size); + Log.d(TAG, "font_size_pixel: " + font_size_pixel); + } + p.setTextSize(font_size_pixel); + int offset_x = (int)(8 * scale + 0.5f); // convert pt to pixels + int offset_y = (int)(8 * scale + 0.5f); // convert pt to pixels + int diff_y = (int)((font_size+4) * scale + 0.5f); // convert pt to pixels + int ypos = height - offset_y; + p.setTextAlign(Align.RIGHT); + MyApplicationInterface.Shadow draw_shadowed = MyApplicationInterface.Shadow.SHADOW_NONE; + switch( pref_style ) { + case "preference_stamp_style_shadowed": + draw_shadowed = MyApplicationInterface.Shadow.SHADOW_OUTLINE; + break; + case "preference_stamp_style_plain": + draw_shadowed = 
MyApplicationInterface.Shadow.SHADOW_NONE; + break; + case "preference_stamp_style_background": + draw_shadowed = MyApplicationInterface.Shadow.SHADOW_BACKGROUND; + break; + } + if( MyDebug.LOG ) + Log.d(TAG, "draw_shadowed: " + draw_shadowed); + if( dategeo_stamp ) { + if( MyDebug.LOG ) + Log.d(TAG, "stamp date"); + // doesn't respect user preferences such as 12/24 hour - see note about in draw() about DateFormat.getTimeInstance() + String date_stamp = TextFormatter.getDateString(preference_stamp_dateformat, request.current_date); + String time_stamp = TextFormatter.getTimeString(preference_stamp_timeformat, request.current_date); + if( MyDebug.LOG ) { + Log.d(TAG, "date_stamp: " + date_stamp); + Log.d(TAG, "time_stamp: " + time_stamp); + } + if( !date_stamp.isEmpty() || !time_stamp.isEmpty() ) { + String datetime_stamp = ""; + if( !date_stamp.isEmpty() ) + datetime_stamp += date_stamp; + if( !time_stamp.isEmpty() ) { + if( !datetime_stamp.isEmpty() ) + datetime_stamp += " "; + datetime_stamp += time_stamp; + } + //applicationInterface.drawTextWithBackground(canvas, p, datetime_stamp, color, Color.BLACK, width - offset_x, ypos, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, draw_shadowed); + if( stamp_string.isEmpty() ) + stamp_string = datetime_stamp; + else + stamp_string = datetime_stamp + "\n" + stamp_string; + } + ypos -= diff_y; + String gps_stamp = main_activity.getTextFormatter().getGPSString(preference_stamp_gpsformat, request.preference_units_distance, request.store_location, request.location, request.store_geo_direction, request.geo_direction); + if( !gps_stamp.isEmpty() ) { + // don't log gps_stamp, in case of privacy! 
+ + /*Address address = null; + if( request.store_location && !request.preference_stamp_geo_address.equals("preference_stamp_geo_address_no") ) { + boolean block_geocoder; + synchronized(this) { + block_geocoder = app_is_paused; + } + // try to find an address + // n.b., if we update the class being used, consider whether the info on Geocoder in preference_stamp_geo_address_summary needs updating + if( block_geocoder ) { + // seems safer to not try to initiate potential network connections (via geocoder) if Open Camera + // has paused and we're still saving images + if( MyDebug.LOG ) + Log.d(TAG, "don't call geocoder for photostamp as app is paused"); + } + else if( Geocoder.isPresent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "geocoder is present"); + Geocoder geocoder = new Geocoder(main_activity, Locale.getDefault()); + try { + List
addresses = geocoder.getFromLocation(request.location.getLatitude(), request.location.getLongitude(), 1); + if( addresses != null && addresses.size() > 0 ) { + address = addresses.get(0); + // don't log address, in case of privacy! + if( MyDebug.LOG ) { + Log.d(TAG, "max line index: " + address.getMaxAddressLineIndex()); + } + } + } + catch(Exception e) { + MyDebug.logStackTrace(TAG, "failed to read from geocoder", e); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "geocoder not present"); + } + }*/ + + //if( address == null || request.preference_stamp_geo_address.equals("preference_stamp_geo_address_both") ) + { + if( MyDebug.LOG ) + Log.d(TAG, "display gps coords"); + // want GPS coords (either in addition to the address, or we don't have an address) + // we'll also enter here if store_location is false, but we have geo direction to display + //applicationInterface.drawTextWithBackground(canvas, p, gps_stamp, color, Color.BLACK, width - offset_x, ypos, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, draw_shadowed); + if( stamp_string.isEmpty() ) + stamp_string = gps_stamp; + else + stamp_string = gps_stamp + "\n" + stamp_string; + ypos -= diff_y; + } + /*else if( request.store_geo_direction ) { + if( MyDebug.LOG ) + Log.d(TAG, "not displaying gps coords, but need to display geo direction"); + // we are displaying an address instead of GPS coords, but we still need to display the geo direction + gps_stamp = main_activity.getTextFormatter().getGPSString(preference_stamp_gpsformat, request.preference_units_distance, false, null, request.store_geo_direction, request.geo_direction); + if( gps_stamp.length() > 0 ) { + // don't log gps_stamp, in case of privacy! 
+ //applicationInterface.drawTextWithBackground(canvas, p, gps_stamp, color, Color.BLACK, width - offset_x, ypos, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, draw_shadowed); + if( stamp_string.length() == 0 ) + stamp_string = gps_stamp; + else + stamp_string = gps_stamp + "\n" + stamp_string; + ypos -= diff_y; + } + }*/ + + /*if( address != null ) { + for(int i=0;i<=address.getMaxAddressLineIndex();i++) { + // write in reverse order + String addressLine = address.getAddressLine(address.getMaxAddressLineIndex()-i); + //applicationInterface.drawTextWithBackground(canvas, p, addressLine, color, Color.BLACK, width - offset_x, ypos, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, draw_shadowed); + if( stamp_string.length() == 0 ) + stamp_string = addressLine; + else + stamp_string = addressLine + "\n" + stamp_string; + ypos -= diff_y; + } + }*/ + } + } + if( text_stamp ) { + if( MyDebug.LOG ) + Log.d(TAG, "stamp text"); + + //applicationInterface.drawTextWithBackground(canvas, p, request.preference_textstamp, color, Color.BLACK, width - offset_x, ypos, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, draw_shadowed); + if( stamp_string.isEmpty() ) + stamp_string = request.preference_textstamp; + else + stamp_string = request.preference_textstamp + "\n" + stamp_string; + + //noinspection UnusedAssignment + ypos -= diff_y; + } + + if( !stamp_string.isEmpty() ) { + // don't log stamp_string, in case of privacy! 
+ + @SuppressLint("InflateParams") + final View stamp_view = LayoutInflater.from(main_activity).inflate(R.layout.stamp_image_text, null); + final LinearLayout layout = stamp_view.findViewById(R.id.layout); + final TextView textview = stamp_view.findViewById(R.id.text_view); + + textview.setVisibility(View.VISIBLE); + textview.setTextColor(color); + textview.setTextSize(TypedValue.COMPLEX_UNIT_PX, font_size_pixel); + textview.setText(stamp_string); + if( draw_shadowed == MyApplicationInterface.Shadow.SHADOW_OUTLINE ) { + //noinspection PointlessArithmeticExpression + float shadow_radius = (1.0f * scale + 0.5f); // convert pt to pixels + shadow_radius = Math.max(shadow_radius, 1.0f); + if( MyDebug.LOG ) + Log.d(TAG, "shadow_radius: " + shadow_radius); + textview.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK); + } + else if( draw_shadowed == MyApplicationInterface.Shadow.SHADOW_BACKGROUND ) { + textview.setBackgroundColor(Color.argb(64, 0, 0, 0)); + } + //textview.setBackgroundColor(Color.BLACK); // test + textview.setGravity(Gravity.END); // so text is right-aligned - important when there are multiple lines + + layout.measure(canvas.getWidth(), canvas.getHeight()); + layout.layout(0, 0, canvas.getWidth(), canvas.getHeight()); + canvas.translate(width - offset_x - textview.getWidth(), height - offset_y - textview.getHeight()); + layout.draw(canvas); + } + } + } + return bitmap; + } + + private static class PostProcessBitmapResult { + final Bitmap bitmap; + + PostProcessBitmapResult(Bitmap bitmap) { + this.bitmap = bitmap; + } + } + + /** Performs post-processing on the data, or bitmap if non-null, for saveSingleImageNow. 
+ */ + private PostProcessBitmapResult postProcessBitmap(final Request request, byte [] data, Bitmap bitmap, boolean ignore_exif_orientation) throws IOException { + if( MyDebug.LOG ) + Log.d(TAG, "postProcessBitmap"); + long time_s = System.currentTimeMillis(); + + if( !ignore_exif_orientation ) { + if( bitmap != null ) { + // rotate the bitmap if necessary for exif tags + if( MyDebug.LOG ) + Log.d(TAG, "rotate pre-existing bitmap for exif tags?"); + bitmap = rotateForExif(bitmap, data); + } + } + + if( request.do_auto_stabilise ) { + bitmap = autoStabilise(data, bitmap, request.level_angle, request.is_front_facing); + } + if( MyDebug.LOG ) { + Log.d(TAG, "Save single image performance: time after auto-stabilise: " + (System.currentTimeMillis() - time_s)); + } + if( request.mirror ) { + bitmap = mirrorImage(data, bitmap); + } + if( request.image_format != Request.ImageFormat.STD && bitmap == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to decode bitmap to convert file format"); + bitmap = loadBitmapWithRotation(data, true); + if( bitmap == null ) { + // if we can't load bitmap for converting file formats, don't want to continue + System.gc(); + throw new IOException(); + } + } + if( request.remove_device_exif != Request.RemoveDeviceExif.OFF && bitmap == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to decode bitmap to strip exif tags"); + // if removing device exif data, it's easier to do this by going through the codepath that + // resaves the bitmap, and then we avoid transferring/adding exif tags that we don't want + bitmap = loadBitmapWithRotation(data, true); + if( bitmap == null ) { + // if we can't load bitmap for removing device tags, don't want to continue + System.gc(); + throw new IOException(); + } + } + bitmap = stampImage(request, data, bitmap); + if( MyDebug.LOG ) { + Log.d(TAG, "Save single image performance: time after photostamp: " + (System.currentTimeMillis() - time_s)); + } + return new PostProcessBitmapResult(bitmap); + } + + /** Converts 
from Request.ImageFormat to Bitmap.CompressFormat. + */ + private static Bitmap.CompressFormat getBitmapCompressFormat(Request.ImageFormat image_format) { + Bitmap.CompressFormat compress_format; + switch( image_format ) { + case WEBP: + compress_format = Bitmap.CompressFormat.WEBP; + break; + case PNG: + compress_format = Bitmap.CompressFormat.PNG; + break; + default: + compress_format = Bitmap.CompressFormat.JPEG; + break; + } + return compress_format; + } + + /** May be run in saver thread or picture callback thread (depending on whether running in background). + * The requests.images field is ignored, instead we save the supplied data or bitmap. + * If bitmap is null, then the supplied jpeg data is saved. If bitmap is non-null, then the bitmap is + * saved, but the supplied data is still used to read EXIF data from. + * @param update_thumbnail - Whether to update the thumbnail (and show the animation). + * @param share_image - Whether this image should be marked as the one to share (if multiple images can + * be saved from a single shot (e.g., saving exposure images with HDR). + * @param ignore_raw_only - If true, then save even if RAW Only is set (needed for HDR mode + * where we always save the HDR image even though it's a JPEG - the + * RAW preference only affects the base images. + * @param ignore_exif_orientation - If bitmap is non-null, then set this to true if the bitmap has already + * been rotated to account for Exif orientation tags in the data. 
 */
    @SuppressLint("SimpleDateFormat")
    private boolean saveSingleImageNow(final Request request, byte [] data, Bitmap bitmap, String filename_suffix, boolean update_thumbnail, boolean share_image, boolean ignore_raw_only, boolean ignore_exif_orientation) {
        if( MyDebug.LOG )
            Log.d(TAG, "saveSingleImageNow");

        if( request.type != Request.Type.JPEG ) {
            if( MyDebug.LOG )
                Log.d(TAG, "saveImageNow called with non-jpeg request");
            // throw runtime exception, as this is a programming error
            throw new RuntimeException();
        }
        else if( data == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "saveSingleImageNow called with no data");
            // throw runtime exception, as this is a programming error
            throw new RuntimeException();
        }
        long time_s = System.currentTimeMillis();

        boolean success = false;
        final MyApplicationInterface applicationInterface = main_activity.getApplicationInterface();
        boolean raw_only = !ignore_raw_only && applicationInterface.isRawOnly();
        if( MyDebug.LOG )
            Log.d(TAG, "raw_only: " + raw_only);
        StorageUtils storageUtils = main_activity.getStorageUtils();

        // file extension follows the requested output format (STD -> jpg)
        String extension;
        switch( request.image_format ) {
            case WEBP:
                extension = "webp";
                break;
            case PNG:
                extension = "png";
                break;
            default:
                extension = "jpg";
                break;
        }
        if( MyDebug.LOG )
            Log.d(TAG, "extension: " + extension);

        main_activity.savingImage(true);

        // If using SAF or image_capture_intent is true, or using scoped storage, only saveUri is non-null
        // Otherwise, only picFile is non-null
        File picFile = null;
        Uri saveUri = null;
        boolean use_media_store = false;
        ContentValues contentValues = null; // used if using scoped storage
        try {
            if( !raw_only ) {
                // apply rotation/auto-stabilise/mirror/stamp etc. before saving
                PostProcessBitmapResult postProcessBitmapResult = postProcessBitmap(request, data, bitmap, ignore_exif_orientation);
                bitmap = postProcessBitmapResult.bitmap;
            }

            // Decide the save destination: none (raw only), intent-supplied Uri or parcel,
            // SAF document Uri, MediaStore (scoped storage), or a plain File.
            if( raw_only ) {
                // don't save the JPEG
                success = true;
            }
            else if( request.image_capture_intent ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "image_capture_intent");
                if( request.image_capture_intent_uri != null )
                {
                    // Save the bitmap to the specified URI (use a try/catch block)
                    if( MyDebug.LOG )
                        Log.d(TAG, "save to: " + request.image_capture_intent_uri);
                    saveUri = request.image_capture_intent_uri;
                }
                else
                {
                    // If the intent doesn't contain an URI, send the bitmap as a parcel
                    // (it is a good idea to reduce its size to ~50k pixels before)
                    if( MyDebug.LOG )
                        Log.d(TAG, "sent to intent via parcel");
                    if( bitmap == null ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "create bitmap");
                        // bitmap we return doesn't need to be mutable
                        bitmap = loadBitmapWithRotation(data, false);
                    }
                    if( bitmap != null ) {
                        int width = bitmap.getWidth();
                        int height = bitmap.getHeight();
                        if( MyDebug.LOG ) {
                            Log.d(TAG, "decoded bitmap size " + width + ", " + height);
                            Log.d(TAG, "bitmap size: " + width*height*4);
                        }
                        final int small_size_c = 128;
                        if( width > small_size_c ) {
                            float scale = ((float)small_size_c)/(float)width;
                            if( MyDebug.LOG )
                                Log.d(TAG, "scale to " + scale);
                            Matrix matrix = new Matrix();
                            matrix.postScale(scale, scale);
                            Bitmap new_bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, true);
                            // careful, as new_bitmap is sometimes not a copy!
                            if( new_bitmap != bitmap ) {
                                bitmap.recycle();
                                bitmap = new_bitmap;
                            }
                        }
                    }
                    if( MyDebug.LOG ) {
                        if( bitmap != null ) {
                            Log.d(TAG, "returned bitmap size " + bitmap.getWidth() + ", " + bitmap.getHeight());
                            Log.d(TAG, "returned bitmap size: " + bitmap.getWidth()*bitmap.getHeight()*4);
                        }
                        else {
                            Log.e(TAG, "no bitmap created");
                        }
                    }
                    if( bitmap != null )
                        main_activity.setResult(Activity.RESULT_OK, new Intent("inline-data").putExtra("data", bitmap));
                    main_activity.finish();
                }
            }
            else if( storageUtils.isUsingSAF() ) {
                saveUri = storageUtils.createOutputMediaFileSAF(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, extension, request.current_date);
            }
            else if( MainActivity.useScopedStorage() ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "use media store");
                use_media_store = true;
                Uri folder = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ?
                        MediaStore.Images.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY) :
                        MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                contentValues = new ContentValues();
                String picName = storageUtils.createMediaFilename(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, 0, "." + extension, request.current_date);
                if( MyDebug.LOG )
                    Log.d(TAG, "picName: " + picName);
                contentValues.put(MediaStore.Images.Media.DISPLAY_NAME, picName);
                String mime_type = storageUtils.getImageMimeType(extension);
                if( MyDebug.LOG )
                    Log.d(TAG, "mime_type: " + mime_type);
                contentValues.put(MediaStore.Images.Media.MIME_TYPE, mime_type);
                if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                    String relative_path = storageUtils.getSaveRelativeFolder();
                    if( MyDebug.LOG )
                        Log.d(TAG, "relative_path: " + relative_path);
                    contentValues.put(MediaStore.Images.Media.RELATIVE_PATH, relative_path);
                    // IS_PENDING=1 hides the entry from other apps until we've finished writing (cleared below)
                    contentValues.put(MediaStore.Images.Media.IS_PENDING, 1);
                }

                // Note, we catch exceptions specific to insert() here and rethrow as IOException,
                // rather than catching below, to avoid catching things too broadly - e.g.,
                // IllegalStateException can also be thrown via "new Canvas" (from
                // postProcessBitmap()) but this is a programming error that we shouldn't catch.
                // Catching too broadly could mean we miss genuine problems that should be fixed.
                try {
                    saveUri = main_activity.getContentResolver().insert(folder, contentValues);
                }
                catch(IllegalArgumentException e) {
                    // can happen for mediastore method if invalid ContentResolver.insert() call
                    MyDebug.logStackTrace(TAG, "IllegalArgumentException inserting to mediastore", e);
                    throw new IOException();
                }
                catch(IllegalStateException e) {
                    // have received Google Play crashes from ContentResolver.insert() call for mediastore method
                    MyDebug.logStackTrace(TAG, "IllegalStateException inserting to mediastore", e);
                    throw new IOException();
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "saveUri: " + saveUri);
                if( saveUri == null ) {
                    throw new IOException();
                }
            }
            else {
                picFile = storageUtils.createOutputMediaFile(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, extension, request.current_date);
                if( MyDebug.LOG )
                    Log.d(TAG, "save to: " + picFile.getAbsolutePath());
            }

            if( MyDebug.LOG )
                Log.d(TAG, "saveUri: " + saveUri);

            if( picFile != null || saveUri != null ) {
                // write either the re-encoded bitmap (if post-processing produced one) or the raw jpeg bytes
                OutputStream outputStream;
                if( picFile != null )
                    outputStream = new FileOutputStream(picFile);
                else
                    outputStream = main_activity.getContentResolver().openOutputStream(saveUri);
                try {
                    if( bitmap != null ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "compress bitmap, quality " + request.image_quality);
                        Bitmap.CompressFormat compress_format = getBitmapCompressFormat(request.image_format);
                        bitmap.compress(compress_format, request.image_quality, outputStream);
                    }
                    else {
                        outputStream.write(data);
                    }
                }
                finally {
                    outputStream.close();
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "saveImageNow saved photo");
                if( MyDebug.LOG ) {
                    Log.d(TAG, "Save single image performance: time after saving photo: " + (System.currentTimeMillis() - time_s));
                }

                if( saveUri == null ) { // if saveUri is non-null, then we haven't succeeded until we've copied to the saveUri
                    success = true;
                }

                if( request.image_format == Request.ImageFormat.STD ) {
                    // handle transferring/setting Exif tags (JPEG format only)
                    if( bitmap != null ) {
                        // need to update EXIF data! (only supported for JPEG image formats)
                        if( MyDebug.LOG )
                            Log.d(TAG, "set Exif tags from data");
                        if( picFile != null ) {
                            setExifFromData(request, data, picFile);
                        }
                        else {
                            ParcelFileDescriptor parcelFileDescriptor = main_activity.getContentResolver().openFileDescriptor(saveUri, "rw");
                            try {
                                if( parcelFileDescriptor != null ) {
                                    FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
                                    setExifFromData(request, data, fileDescriptor);
                                }
                                else {
                                    Log.e(TAG, "failed to create ParcelFileDescriptor for saveUri: " + saveUri);
                                }
                            }
                            finally {
                                if( parcelFileDescriptor != null ) {
                                    try {
                                        parcelFileDescriptor.close();
                                    }
                                    catch(IOException e) {
                                        MyDebug.logStackTrace(TAG, "fail to close parcelFileDescriptor", e);
                                    }
                                }
                            }
                        }
                    }
                    else {
                        updateExif(request, picFile, saveUri);
                        if( MyDebug.LOG ) {
                            Log.d(TAG, "Save single image performance: time after updateExif: " + (System.currentTimeMillis() - time_s));
                        }
                    }
                }

                if( update_thumbnail ) {
                    // clear just in case we're unable to update this - don't want an out of date cached uri
                    storageUtils.clearLastMediaScanned();
                }

                boolean hasnoexifdatetime = request.remove_device_exif != Request.RemoveDeviceExif.OFF && request.remove_device_exif != Request.RemoveDeviceExif.KEEP_DATETIME;

                if( picFile != null && saveUri == null ) {
                    // broadcast for SAF is done later, when we've actually written out the file
                    storageUtils.broadcastFile(picFile, true, false, update_thumbnail, hasnoexifdatetime, null);
                    main_activity.test_last_saved_image = picFile.getAbsolutePath();
                }

                if( request.image_capture_intent ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "finish activity due to being called from intent");
                    main_activity.setResult(Activity.RESULT_OK);
                    main_activity.finish();
                }

                if( saveUri != null ) {
                    success = true;

                    if( use_media_store ) {
                        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                            // clear IS_PENDING now that the image is fully written, making it visible to other apps
                            contentValues.clear();
                            contentValues.put(MediaStore.Images.Media.IS_PENDING, 0);
                            main_activity.getContentResolver().update(saveUri, contentValues, null, null);
                        }

                        // no need to broadcast when using mediastore method
                        if( !request.image_capture_intent ) {
                            if( MyDebug.LOG )
                                Log.d(TAG, "announce mediastore uri");
                            // in theory this is pointless, as announceUri no longer does anything on Android 7+,
                            // and mediastore method is only used on Android 10+, but keep this just in case
                            // announceUri does something in future
                            storageUtils.announceUri(saveUri, true, false);
                            if( update_thumbnail ) {
                                // we also want to save the uri - we can use the media uri directly, rather than having to scan it
                                storageUtils.setLastMediaScanned(saveUri, false, hasnoexifdatetime, saveUri);
                            }
                        }
                    }
                    else {
                        broadcastSAFFile(saveUri, update_thumbnail, hasnoexifdatetime, request.image_capture_intent);
                    }

                    main_activity.test_last_saved_imageuri = saveUri;
                }
            }
        }
        catch(FileNotFoundException e) {
            MyDebug.logStackTrace(TAG, "file not found", e);
            main_activity.getPreview().showToast(null, R.string.failed_to_save_photo);
        }
        catch(IOException e) {
            MyDebug.logStackTrace(TAG, "I/O error writing file", e);
            main_activity.getPreview().showToast(null, R.string.failed_to_save_photo);
        }
        catch(SecurityException e) {
            // received security exception from copyFileToUri()->openOutputStream() from Google Play
            // update: no longer have copyFileToUri() (as no longer use temporary files for SAF), but might as well keep this
            MyDebug.logStackTrace(TAG, "security exception writing file", e);
            main_activity.getPreview().showToast(null, R.string.failed_to_save_photo);
        }

        // record the saved image so "share"/"trash" etc. can find it
        if( raw_only ) {
            // no saved image to record
        }
        else if( success && saveUri == null ) {
            applicationInterface.addLastImage(picFile, share_image);
        }
        else if( success && storageUtils.isUsingSAF() ){
            applicationInterface.addLastImageSAF(saveUri, share_image);
        }
        else if( success && use_media_store ){
            applicationInterface.addLastImageMediaStore(saveUri, share_image);
        }

        // I have received crashes where camera_controller was null - could perhaps happen if this thread was running just as the camera is closing?
        if( success && main_activity.getPreview().getCameraController() != null && update_thumbnail ) {
            // update thumbnail - this should be done after restarting preview, so that the preview is started asap
            CameraController.Size size = main_activity.getPreview().getCameraController().getPictureSize();
            int ratio = (int) Math.ceil((double) size.width / main_activity.getPreview().getView().getWidth());
            int sample_size = Integer.highestOneBit(ratio);
            sample_size *= request.sample_factor;
            if( MyDebug.LOG ) {
                Log.d(TAG, " picture width: " + size.width);
                Log.d(TAG, " preview width: " + main_activity.getPreview().getView().getWidth());
                Log.d(TAG, " ratio : " + ratio);
                Log.d(TAG, " sample_size : " + sample_size);
            }
            Bitmap thumbnail;
            if( bitmap == null ) {
                // no decoded bitmap available - decode a subsampled thumbnail from the jpeg data
                BitmapFactory.Options options = new BitmapFactory.Options();
                options.inMutable = false;
                options.inSampleSize = sample_size;
                thumbnail = BitmapFactory.decodeByteArray(data, 0, data.length, options);
                if( MyDebug.LOG ) {
                    Log.d(TAG, "thumbnail width: " + thumbnail.getWidth());
                    Log.d(TAG, "thumbnail height: " + thumbnail.getHeight());
                }
                // now get the rotation from the Exif data
                if( MyDebug.LOG )
                    Log.d(TAG, "rotate thumbnail for exif tags?");
                thumbnail = rotateForExif(thumbnail, data);
            }
            else {
                int width = bitmap.getWidth();
                int height = bitmap.getHeight();
                Matrix matrix = new Matrix();
                float scale = 1.0f / (float)sample_size;
                matrix.postScale(scale, scale);
                if( MyDebug.LOG )
                    Log.d(TAG, " scale: " + scale);
                try {
                    thumbnail = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, true);
                    if( MyDebug.LOG ) {
                        Log.d(TAG, "thumbnail width: " + thumbnail.getWidth());
                        Log.d(TAG, "thumbnail height: " + thumbnail.getHeight());
                    }
                    // don't need to rotate for exif, as we already did that when creating the bitmap
                }
                catch(IllegalArgumentException e) {
                    // received IllegalArgumentException on Google Play from Bitmap.createBitmap; documentation suggests this
                    // means width or height are 0 - but trapping that didn't fix the problem
                    // or "the x, y, width, height values are outside of the dimensions of the source bitmap", but that can't be
                    // true here
                    // crashes seem to all be Android 7.1 or earlier, so maybe this is a bug that's been fixed - but catch it anyway
                    // as it's grown popular
                    MyDebug.logStackTrace(TAG, "can't create thumbnail bitmap due to IllegalArgumentException?!", e);
                    thumbnail = null;
                }
            }
            if( thumbnail == null ) {
                // received crashes on Google Play suggesting that thumbnail could not be created
                if( MyDebug.LOG )
                    Log.e(TAG, "failed to create thumbnail bitmap");
            }
            else {
                final Bitmap thumbnail_f = thumbnail;
                main_activity.runOnUiThread(new Runnable() {
                    public void run() {
                        applicationInterface.updateThumbnail(thumbnail_f, false);
                    }
                });
                if( MyDebug.LOG ) {
                    Log.d(TAG, "Save single image performance: time after creating thumbnail: " + (System.currentTimeMillis() - time_s));
                }
            }
        }

        if( bitmap != null ) {
            bitmap.recycle();
        }

        System.gc();

        main_activity.savingImage(false);

        if( MyDebug.LOG ) {
            Log.d(TAG, "Save single image performance: total time: " + (System.currentTimeMillis() - time_s));
        }
        return success;
    }

    /** As setExifFromFile, but can read the Exif tags directly from the jpeg data rather than a file.
+ */ + private void setExifFromData(final Request request, byte [] data, File to_file) throws IOException { + if( MyDebug.LOG ) { + Log.d(TAG, "setExifFromData"); + Log.d(TAG, "to_file: " + to_file); + } + InputStream inputStream = null; + try { + inputStream = new ByteArrayInputStream(data); + ExifInterface exif = new ExifInterface(inputStream); + ExifInterface exif_new = new ExifInterface(to_file.getAbsolutePath()); + setExif(request, exif, exif_new); + } + finally { + if( inputStream != null ) { + inputStream.close(); + } + } + } + + private void broadcastSAFFile(Uri saveUri, boolean set_last_scanned, boolean hasnoexifdatetime, boolean image_capture_intent) { + if( MyDebug.LOG ) + Log.d(TAG, "broadcastSAFFile"); + StorageUtils storageUtils = main_activity.getStorageUtils(); + storageUtils.broadcastUri(saveUri, true, false, set_last_scanned, hasnoexifdatetime, image_capture_intent); + } + + /** As setExifFromFile, but can read the Exif tags directly from the jpeg data, and to a file descriptor, rather than a file. + */ + private void setExifFromData(final Request request, byte [] data, FileDescriptor to_file_descriptor) throws IOException { + if( MyDebug.LOG ) { + Log.d(TAG, "setExifFromData"); + Log.d(TAG, "to_file_descriptor: " + to_file_descriptor); + } + InputStream inputStream = null; + try { + inputStream = new ByteArrayInputStream(data); + ExifInterface exif = new ExifInterface(inputStream); + ExifInterface exif_new = new ExifInterface(to_file_descriptor); + setExif(request, exif, exif_new); + } + finally { + if( inputStream != null ) { + inputStream.close(); + } + } + } + + /** Transfers device exif info. Should only be called if request.remove_device_exif == Request.RemoveDeviceExif.OFF. 
+ */ + private void transferDeviceExif(ExifInterface exif, ExifInterface exif_new) { + if( MyDebug.LOG ) + Log.d(TAG, "transferDeviceExif"); + + if( MyDebug.LOG ) + Log.d(TAG, "read back EXIF data"); + + String exif_aperture = exif.getAttribute(ExifInterface.TAG_F_NUMBER); // previously TAG_APERTURE + String exif_exposure_time = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME); + String exif_flash = exif.getAttribute(ExifInterface.TAG_FLASH); + String exif_focal_length = exif.getAttribute(ExifInterface.TAG_FOCAL_LENGTH); + // leave TAG_IMAGE_WIDTH/TAG_IMAGE_LENGTH, as this may have changed! + //noinspection deprecation + String exif_iso = exif.getAttribute(ExifInterface.TAG_ISO_SPEED_RATINGS); // previously TAG_ISO + String exif_make = exif.getAttribute(ExifInterface.TAG_MAKE); + String exif_model = exif.getAttribute(ExifInterface.TAG_MODEL); + // leave orientation - since we rotate bitmaps to account for orientation, we don't want to write it to the saved image! + String exif_white_balance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE); + + String exif_aperture_value; + String exif_brightness_value; + String exif_cfa_pattern; + String exif_color_space; + String exif_components_configuration; + String exif_compressed_bits_per_pixel; + String exif_compression; + String exif_contrast; + String exif_device_setting_description; + String exif_digital_zoom_ratio; + String exif_exposure_bias_value; + String exif_exposure_index; + String exif_exposure_mode; + String exif_exposure_program; + String exif_flash_energy; + String exif_focal_length_in_35mm_film; + String exif_focal_plane_resolution_unit; + String exif_focal_plane_x_resolution; + String exif_focal_plane_y_resolution; + String exif_gain_control; + String exif_gps_area_information; + String exif_gps_differential; + String exif_gps_dop; + String exif_gps_measure_mode; + String exif_image_description; + String exif_light_source; + String exif_maker_note; + String exif_max_aperture_value; + String 
exif_metering_mode; + String exif_oecf; + String exif_photometric_interpretation; + String exif_saturation; + String exif_scene_capture_type; + String exif_scene_type; + String exif_sensing_method; + String exif_sharpness; + String exif_shutter_speed_value; + String exif_software; + String exif_user_comment; + { + // tags that are new in Android N - note we skip tags unlikely to be relevant for camera photos + // update, now available in all Android versions thanks to using AndroidX ExifInterface + exif_aperture_value = exif.getAttribute(ExifInterface.TAG_APERTURE_VALUE); + exif_brightness_value = exif.getAttribute(ExifInterface.TAG_BRIGHTNESS_VALUE); + exif_cfa_pattern = exif.getAttribute(ExifInterface.TAG_CFA_PATTERN); + exif_color_space = exif.getAttribute(ExifInterface.TAG_COLOR_SPACE); + exif_components_configuration = exif.getAttribute(ExifInterface.TAG_COMPONENTS_CONFIGURATION); + exif_compressed_bits_per_pixel = exif.getAttribute(ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL); + exif_compression = exif.getAttribute(ExifInterface.TAG_COMPRESSION); + exif_contrast = exif.getAttribute(ExifInterface.TAG_CONTRAST); + exif_device_setting_description = exif.getAttribute(ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION); + exif_digital_zoom_ratio = exif.getAttribute(ExifInterface.TAG_DIGITAL_ZOOM_RATIO); + // unclear if we should transfer TAG_EXIF_VERSION - don't want to risk conficting with whatever ExifInterface writes itself + exif_exposure_bias_value = exif.getAttribute(ExifInterface.TAG_EXPOSURE_BIAS_VALUE); + exif_exposure_index = exif.getAttribute(ExifInterface.TAG_EXPOSURE_INDEX); + exif_exposure_mode = exif.getAttribute(ExifInterface.TAG_EXPOSURE_MODE); + exif_exposure_program = exif.getAttribute(ExifInterface.TAG_EXPOSURE_PROGRAM); + exif_flash_energy = exif.getAttribute(ExifInterface.TAG_FLASH_ENERGY); + exif_focal_length_in_35mm_film = exif.getAttribute(ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM); + exif_focal_plane_resolution_unit = 
exif.getAttribute(ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT); + exif_focal_plane_x_resolution = exif.getAttribute(ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION); + exif_focal_plane_y_resolution = exif.getAttribute(ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION); + // TAG_F_NUMBER same as TAG_APERTURE + exif_gain_control = exif.getAttribute(ExifInterface.TAG_GAIN_CONTROL); + exif_gps_area_information = exif.getAttribute(ExifInterface.TAG_GPS_AREA_INFORMATION); + // don't care about TAG_GPS_DEST_* + exif_gps_differential = exif.getAttribute(ExifInterface.TAG_GPS_DIFFERENTIAL); + exif_gps_dop = exif.getAttribute(ExifInterface.TAG_GPS_DOP); + // TAG_GPS_IMG_DIRECTION, TAG_GPS_IMG_DIRECTION_REF won't have been recorded in the image yet - we add this ourselves in setGPSDirectionExif() + // don't care about TAG_GPS_MAP_DATUM? + exif_gps_measure_mode = exif.getAttribute(ExifInterface.TAG_GPS_MEASURE_MODE); + // don't care about TAG_GPS_SATELLITES? + // don't care about TAG_GPS_STATUS, TAG_GPS_TRACK, TAG_GPS_TRACK_REF, TAG_GPS_VERSION_ID + exif_image_description = exif.getAttribute(ExifInterface.TAG_IMAGE_DESCRIPTION); + // unclear what TAG_IMAGE_UNIQUE_ID, TAG_INTEROPERABILITY_INDEX are + // TAG_ISO_SPEED_RATINGS same as TAG_ISO + // skip TAG_JPEG_INTERCHANGE_FORMAT, TAG_JPEG_INTERCHANGE_FORMAT_LENGTH + exif_light_source = exif.getAttribute(ExifInterface.TAG_LIGHT_SOURCE); + exif_maker_note = exif.getAttribute(ExifInterface.TAG_MAKER_NOTE); + exif_max_aperture_value = exif.getAttribute(ExifInterface.TAG_MAX_APERTURE_VALUE); + exif_metering_mode = exif.getAttribute(ExifInterface.TAG_METERING_MODE); + exif_oecf = exif.getAttribute(ExifInterface.TAG_OECF); + exif_photometric_interpretation = exif.getAttribute(ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION); + // skip PIXEL_X/Y_DIMENSION, as it may have changed + // don't care about TAG_PLANAR_CONFIGURATION + // don't care about TAG_PRIMARY_CHROMATICITIES, TAG_REFERENCE_BLACK_WHITE? 
+ // don't care about TAG_RESOLUTION_UNIT + // TAG_ROWS_PER_STRIP may have changed (if it's even relevant) + // TAG_SAMPLES_PER_PIXEL may no longer be relevant if we've changed the image dimensions? + exif_saturation = exif.getAttribute(ExifInterface.TAG_SATURATION); + exif_scene_capture_type = exif.getAttribute(ExifInterface.TAG_SCENE_CAPTURE_TYPE); + exif_scene_type = exif.getAttribute(ExifInterface.TAG_SCENE_TYPE); + exif_sensing_method = exif.getAttribute(ExifInterface.TAG_SENSING_METHOD); + exif_sharpness = exif.getAttribute(ExifInterface.TAG_SHARPNESS); + exif_shutter_speed_value = exif.getAttribute(ExifInterface.TAG_SHUTTER_SPEED_VALUE); + exif_software = exif.getAttribute(ExifInterface.TAG_SOFTWARE); + // don't care about TAG_SPATIAL_FREQUENCY_RESPONSE, TAG_SPECTRAL_SENSITIVITY? + // don't care about TAG_STRIP_* + // don't care about TAG_SUBJECT_* + // TAG_SUBSEC_TIME_DIGITIZED same as TAG_SUBSEC_TIME_DIG + // TAG_SUBSEC_TIME_ORIGINAL same as TAG_SUBSEC_TIME_ORIG + // TAG_THUMBNAIL_IMAGE_* may have changed + // don't care about TAG_TRANSFER_FUNCTION? + exif_user_comment = exif.getAttribute(ExifInterface.TAG_USER_COMMENT); + // don't care about TAG_WHITE_POINT? + // TAG_X_RESOLUTION may have changed? + // don't care about TAG_Y_*? 
+ } + + String exif_photographic_sensitivity = exif.getAttribute(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY); + String exif_sensitivity_type = exif.getAttribute(ExifInterface.TAG_SENSITIVITY_TYPE); + String exif_standard_output_sensitivity = exif.getAttribute(ExifInterface.TAG_STANDARD_OUTPUT_SENSITIVITY); + String exif_recommended_exposure_index = exif.getAttribute(ExifInterface.TAG_RECOMMENDED_EXPOSURE_INDEX); + String exif_iso_speed = exif.getAttribute(ExifInterface.TAG_ISO_SPEED); + String exif_custom_rendered = exif.getAttribute(ExifInterface.TAG_CUSTOM_RENDERED); + String exif_lens_specification = exif.getAttribute(ExifInterface.TAG_LENS_SPECIFICATION); + String exif_lens_name = exif.getAttribute(ExifInterface.TAG_LENS_MAKE); + String exif_lens_model = exif.getAttribute(ExifInterface.TAG_LENS_MODEL); + + if( MyDebug.LOG ) + Log.d(TAG, "now write new EXIF data"); + if( exif_aperture != null ) + exif_new.setAttribute(ExifInterface.TAG_F_NUMBER, exif_aperture); + if( exif_exposure_time != null ) + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_TIME, exif_exposure_time); + if( exif_flash != null ) + exif_new.setAttribute(ExifInterface.TAG_FLASH, exif_flash); + if( exif_focal_length != null ) + exif_new.setAttribute(ExifInterface.TAG_FOCAL_LENGTH, exif_focal_length); + if( exif_iso != null ) + //noinspection deprecation + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED_RATINGS, exif_iso); + if( exif_make != null ) + exif_new.setAttribute(ExifInterface.TAG_MAKE, exif_make); + if( exif_model != null ) + exif_new.setAttribute(ExifInterface.TAG_MODEL, exif_model); + if( exif_white_balance != null ) + exif_new.setAttribute(ExifInterface.TAG_WHITE_BALANCE, exif_white_balance); + + { + if( exif_aperture_value != null ) + exif_new.setAttribute(ExifInterface.TAG_APERTURE_VALUE, exif_aperture_value); + if( exif_brightness_value != null ) + exif_new.setAttribute(ExifInterface.TAG_BRIGHTNESS_VALUE, exif_brightness_value); + if( exif_cfa_pattern != null ) + 
exif_new.setAttribute(ExifInterface.TAG_CFA_PATTERN, exif_cfa_pattern); + if( exif_color_space != null ) + exif_new.setAttribute(ExifInterface.TAG_COLOR_SPACE, exif_color_space); + if( exif_components_configuration != null ) + exif_new.setAttribute(ExifInterface.TAG_COMPONENTS_CONFIGURATION, exif_components_configuration); + if( exif_compressed_bits_per_pixel != null ) + exif_new.setAttribute(ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL, exif_compressed_bits_per_pixel); + if( exif_compression != null ) + exif_new.setAttribute(ExifInterface.TAG_COMPRESSION, exif_compression); + if( exif_contrast != null ) + exif_new.setAttribute(ExifInterface.TAG_CONTRAST, exif_contrast); + if( exif_device_setting_description != null ) + exif_new.setAttribute(ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION, exif_device_setting_description); + if( exif_digital_zoom_ratio != null ) + exif_new.setAttribute(ExifInterface.TAG_DIGITAL_ZOOM_RATIO, exif_digital_zoom_ratio); + if( exif_exposure_bias_value != null ) + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_BIAS_VALUE, exif_exposure_bias_value); + if( exif_exposure_index != null ) + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_INDEX, exif_exposure_index); + if( exif_exposure_mode != null ) + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_MODE, exif_exposure_mode); + if( exif_exposure_program != null ) + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_PROGRAM, exif_exposure_program); + if( exif_flash_energy != null ) + exif_new.setAttribute(ExifInterface.TAG_FLASH_ENERGY, exif_flash_energy); + if( exif_focal_length_in_35mm_film != null ) + exif_new.setAttribute(ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM, exif_focal_length_in_35mm_film); + if( exif_focal_plane_resolution_unit != null ) + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT, exif_focal_plane_resolution_unit); + if( exif_focal_plane_x_resolution != null ) + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION, 
exif_focal_plane_x_resolution); + if( exif_focal_plane_y_resolution != null ) + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION, exif_focal_plane_y_resolution); + if( exif_gain_control != null ) + exif_new.setAttribute(ExifInterface.TAG_GAIN_CONTROL, exif_gain_control); + if( exif_gps_area_information != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_AREA_INFORMATION, exif_gps_area_information); + if( exif_gps_differential != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_DIFFERENTIAL, exif_gps_differential); + if( exif_gps_dop != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_DOP, exif_gps_dop); + if( exif_gps_measure_mode != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_MEASURE_MODE, exif_gps_measure_mode); + if( exif_image_description != null ) + exif_new.setAttribute(ExifInterface.TAG_IMAGE_DESCRIPTION, exif_image_description); + if( exif_light_source != null ) + exif_new.setAttribute(ExifInterface.TAG_LIGHT_SOURCE, exif_light_source); + if( exif_maker_note != null ) + exif_new.setAttribute(ExifInterface.TAG_MAKER_NOTE, exif_maker_note); + if( exif_max_aperture_value != null ) + exif_new.setAttribute(ExifInterface.TAG_MAX_APERTURE_VALUE, exif_max_aperture_value); + if( exif_metering_mode != null ) + exif_new.setAttribute(ExifInterface.TAG_METERING_MODE, exif_metering_mode); + if( exif_oecf != null ) + exif_new.setAttribute(ExifInterface.TAG_OECF, exif_oecf); + if( exif_photometric_interpretation != null ) + exif_new.setAttribute(ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION, exif_photometric_interpretation); + if( exif_saturation != null ) + exif_new.setAttribute(ExifInterface.TAG_SATURATION, exif_saturation); + if( exif_scene_capture_type != null ) + exif_new.setAttribute(ExifInterface.TAG_SCENE_CAPTURE_TYPE, exif_scene_capture_type); + if( exif_scene_type != null ) + exif_new.setAttribute(ExifInterface.TAG_SCENE_TYPE, exif_scene_type); + if( exif_sensing_method != null ) + 
exif_new.setAttribute(ExifInterface.TAG_SENSING_METHOD, exif_sensing_method); + if( exif_sharpness != null ) + exif_new.setAttribute(ExifInterface.TAG_SHARPNESS, exif_sharpness); + if( exif_shutter_speed_value != null ) + exif_new.setAttribute(ExifInterface.TAG_SHUTTER_SPEED_VALUE, exif_shutter_speed_value); + if( exif_software != null ) + exif_new.setAttribute(ExifInterface.TAG_SOFTWARE, exif_software); + if( exif_user_comment != null ) + exif_new.setAttribute(ExifInterface.TAG_USER_COMMENT, exif_user_comment); + } + + if( exif_photographic_sensitivity != null ) + exif_new.setAttribute(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY, exif_photographic_sensitivity); + if( exif_sensitivity_type != null ) + exif_new.setAttribute(ExifInterface.TAG_SENSITIVITY_TYPE, exif_sensitivity_type); + if( exif_standard_output_sensitivity != null ) + exif_new.setAttribute(ExifInterface.TAG_STANDARD_OUTPUT_SENSITIVITY, exif_standard_output_sensitivity); + if( exif_recommended_exposure_index != null ) + exif_new.setAttribute(ExifInterface.TAG_RECOMMENDED_EXPOSURE_INDEX, exif_recommended_exposure_index); + if( exif_iso_speed != null ) + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED, exif_iso_speed); + if( exif_custom_rendered != null ) + exif_new.setAttribute(ExifInterface.TAG_CUSTOM_RENDERED, exif_custom_rendered); + if( exif_lens_specification != null ) + exif_new.setAttribute(ExifInterface.TAG_LENS_SPECIFICATION, exif_lens_specification); + if( exif_lens_name != null ) + exif_new.setAttribute(ExifInterface.TAG_LENS_MAKE, exif_lens_name); + if( exif_lens_model != null ) + exif_new.setAttribute(ExifInterface.TAG_LENS_MODEL, exif_lens_model); + + } + + /** Transfers device exif info related to date and time. 
+ */ + private void transferDeviceExifDateTime(ExifInterface exif, ExifInterface exif_new) { + if( MyDebug.LOG ) + Log.d(TAG, "transferDeviceExifDateTime"); + + // tags related to date and time + + String exif_datetime = exif.getAttribute(ExifInterface.TAG_DATETIME); + String exif_datetime_original = exif.getAttribute(ExifInterface.TAG_DATETIME_ORIGINAL); + String exif_datetime_digitized = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED); + String exif_subsec_time = exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME); + String exif_subsec_time_orig = exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL); // previously TAG_SUBSEC_TIME_ORIG + String exif_subsec_time_dig = exif.getAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED); // previously TAG_SUBSEC_TIME_DIG + String exif_offset_time = exif.getAttribute(ExifInterface.TAG_OFFSET_TIME); + String exif_offset_time_orig = exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL); + String exif_offset_time_dig = exif.getAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED); + + if( exif_datetime != null ) + exif_new.setAttribute(ExifInterface.TAG_DATETIME, exif_datetime); + if( exif_datetime_original != null ) + exif_new.setAttribute(ExifInterface.TAG_DATETIME_ORIGINAL, exif_datetime_original); + if( exif_datetime_digitized != null ) + exif_new.setAttribute(ExifInterface.TAG_DATETIME_DIGITIZED, exif_datetime_digitized); + if( exif_subsec_time != null ) + exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME, exif_subsec_time); + if( exif_subsec_time_orig != null ) + exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL, exif_subsec_time_orig); + if( exif_subsec_time_dig != null ) + exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED, exif_subsec_time_dig); + if( exif_offset_time != null ) + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME, exif_offset_time); + if( exif_offset_time_orig != null ) + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL, exif_offset_time_orig); + if( 
exif_offset_time_dig != null ) + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED, exif_offset_time_dig); + + } + + /** Transfers device exif info related to gps location. + */ + private void transferDeviceExifGPS(ExifInterface exif, ExifInterface exif_new) { + if( MyDebug.LOG ) + Log.d(TAG, "transferDeviceExifGPS"); + + // tags for gps info + + String exif_gps_processing_method = exif.getAttribute(ExifInterface.TAG_GPS_PROCESSING_METHOD); + String exif_gps_latitude = exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE); + String exif_gps_latitude_ref = exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE_REF); + String exif_gps_longitude = exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE); + String exif_gps_longitude_ref = exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF); + String exif_gps_altitude = exif.getAttribute(ExifInterface.TAG_GPS_ALTITUDE); + String exif_gps_altitude_ref = exif.getAttribute(ExifInterface.TAG_GPS_ALTITUDE_REF); + String exif_gps_datestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP); + String exif_gps_timestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP); + String exif_gps_speed = exif.getAttribute(ExifInterface.TAG_GPS_SPEED); + String exif_gps_speed_ref = exif.getAttribute(ExifInterface.TAG_GPS_SPEED_REF); + + if( exif_gps_processing_method != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_PROCESSING_METHOD, exif_gps_processing_method); + if( exif_gps_latitude != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_LATITUDE, exif_gps_latitude); + if( exif_gps_latitude_ref != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_LATITUDE_REF, exif_gps_latitude_ref); + if( exif_gps_longitude != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_LONGITUDE, exif_gps_longitude); + if( exif_gps_longitude_ref != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF, exif_gps_longitude_ref); + if( exif_gps_altitude != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_ALTITUDE, 
exif_gps_altitude); + if( exif_gps_altitude_ref != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_ALTITUDE_REF, exif_gps_altitude_ref); + if( exif_gps_datestamp != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_DATESTAMP, exif_gps_datestamp); + if( exif_gps_timestamp != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_TIMESTAMP, exif_gps_timestamp); + if( exif_gps_speed != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_SPEED, exif_gps_speed); + if( exif_gps_speed_ref != null ) + exif_new.setAttribute(ExifInterface.TAG_GPS_SPEED_REF, exif_gps_speed_ref); + } + + /** Explicitly removes tags based on the RemoveDeviceExif option. + * Note that in theory this method is unnecessary: we implement the RemoveDeviceExif options + * (if not OFF) by resaving the JPEG via a bitmap, and then limiting what Exif tags are + * transferred across. This method is for extra paranoia: first to reduce the risk of future + * bugs, secondly just in case saving via a bitmap does ever add exif tags. 
+ */ + private void removeExifTags(ExifInterface exif_new, final Request request) { + if( MyDebug.LOG ) + Log.d(TAG, "removeExifTags"); + + if( request.remove_device_exif != Request.RemoveDeviceExif.OFF ) { + if( MyDebug.LOG ) + Log.d(TAG, "remove exif tags"); + exif_new.setAttribute(ExifInterface.TAG_F_NUMBER, null); + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_TIME, null); + exif_new.setAttribute(ExifInterface.TAG_FLASH, null); + exif_new.setAttribute(ExifInterface.TAG_FOCAL_LENGTH, null); + exif_new.setAttribute(ExifInterface.TAG_IMAGE_WIDTH, null); + exif_new.setAttribute(ExifInterface.TAG_IMAGE_LENGTH, null); + //noinspection deprecation + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED_RATINGS, null); + exif_new.setAttribute(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY, null); + exif_new.setAttribute(ExifInterface.TAG_MAKE, null); + exif_new.setAttribute(ExifInterface.TAG_MODEL, null); + exif_new.setAttribute(ExifInterface.TAG_WHITE_BALANCE, null); + exif_new.setAttribute(ExifInterface.TAG_APERTURE_VALUE, null); + exif_new.setAttribute(ExifInterface.TAG_BRIGHTNESS_VALUE, null); + exif_new.setAttribute(ExifInterface.TAG_CFA_PATTERN, null); + exif_new.setAttribute(ExifInterface.TAG_COLOR_SPACE, null); + exif_new.setAttribute(ExifInterface.TAG_COMPONENTS_CONFIGURATION, null); + exif_new.setAttribute(ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL, null); + exif_new.setAttribute(ExifInterface.TAG_COMPRESSION, null); + exif_new.setAttribute(ExifInterface.TAG_CONTRAST, null); + exif_new.setAttribute(ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION, null); + exif_new.setAttribute(ExifInterface.TAG_DIGITAL_ZOOM_RATIO, null); + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_BIAS_VALUE, null); + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_INDEX, null); + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_MODE, null); + exif_new.setAttribute(ExifInterface.TAG_EXPOSURE_PROGRAM, null); + exif_new.setAttribute(ExifInterface.TAG_FLASH_ENERGY, null); + 
exif_new.setAttribute(ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM, null); + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT, null); + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION, null); + exif_new.setAttribute(ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION, null); + exif_new.setAttribute(ExifInterface.TAG_GAIN_CONTROL, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_AREA_INFORMATION, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_BEARING, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_BEARING_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_DISTANCE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_DISTANCE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_LATITUDE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_LATITUDE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_LONGITUDE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DEST_LONGITUDE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DIFFERENTIAL, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DOP, null); + if( !request.store_geo_direction ) { + exif_new.setAttribute(ExifInterface.TAG_GPS_IMG_DIRECTION, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_IMG_DIRECTION_REF, null); + } + exif_new.setAttribute(ExifInterface.TAG_GPS_MAP_DATUM, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_MEASURE_MODE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_SATELLITES, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_STATUS, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_TRACK, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_TRACK_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_VERSION_ID, null); + exif_new.setAttribute(ExifInterface.TAG_IMAGE_DESCRIPTION, null); + exif_new.setAttribute(ExifInterface.TAG_IMAGE_UNIQUE_ID, null); + exif_new.setAttribute(ExifInterface.TAG_INTEROPERABILITY_INDEX, null); + 
exif_new.setAttribute(ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT, null); + exif_new.setAttribute(ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH, null); + exif_new.setAttribute(ExifInterface.TAG_LIGHT_SOURCE, null); + exif_new.setAttribute(ExifInterface.TAG_MAKER_NOTE, null); + exif_new.setAttribute(ExifInterface.TAG_MAX_APERTURE_VALUE, null); + exif_new.setAttribute(ExifInterface.TAG_METERING_MODE, null); + exif_new.setAttribute(ExifInterface.TAG_OECF, null); + exif_new.setAttribute(ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION, null); + exif_new.setAttribute(ExifInterface.TAG_PIXEL_X_DIMENSION, null); + exif_new.setAttribute(ExifInterface.TAG_PIXEL_Y_DIMENSION, null); + exif_new.setAttribute(ExifInterface.TAG_PLANAR_CONFIGURATION, null); + exif_new.setAttribute(ExifInterface.TAG_PRIMARY_CHROMATICITIES, null); + exif_new.setAttribute(ExifInterface.TAG_REFERENCE_BLACK_WHITE, null); + exif_new.setAttribute(ExifInterface.TAG_RESOLUTION_UNIT, null); + exif_new.setAttribute(ExifInterface.TAG_ROWS_PER_STRIP, null); + exif_new.setAttribute(ExifInterface.TAG_SAMPLES_PER_PIXEL, null); + exif_new.setAttribute(ExifInterface.TAG_SATURATION, null); + exif_new.setAttribute(ExifInterface.TAG_SCENE_CAPTURE_TYPE, null); + exif_new.setAttribute(ExifInterface.TAG_SCENE_TYPE, null); + exif_new.setAttribute(ExifInterface.TAG_SENSING_METHOD, null); + exif_new.setAttribute(ExifInterface.TAG_SHARPNESS, null); + exif_new.setAttribute(ExifInterface.TAG_SHUTTER_SPEED_VALUE, null); + exif_new.setAttribute(ExifInterface.TAG_SOFTWARE, null); + exif_new.setAttribute(ExifInterface.TAG_SPATIAL_FREQUENCY_RESPONSE, null); + exif_new.setAttribute(ExifInterface.TAG_SPECTRAL_SENSITIVITY, null); + exif_new.setAttribute(ExifInterface.TAG_STRIP_BYTE_COUNTS, null); + exif_new.setAttribute(ExifInterface.TAG_STRIP_OFFSETS, null); + exif_new.setAttribute(ExifInterface.TAG_SUBJECT_AREA, null); + exif_new.setAttribute(ExifInterface.TAG_SUBJECT_DISTANCE, null); + 
exif_new.setAttribute(ExifInterface.TAG_SUBJECT_DISTANCE_RANGE, null); + exif_new.setAttribute(ExifInterface.TAG_SUBJECT_LOCATION, null); + exif_new.setAttribute(ExifInterface.TAG_THUMBNAIL_IMAGE_WIDTH, null); + exif_new.setAttribute(ExifInterface.TAG_THUMBNAIL_IMAGE_LENGTH, null); + exif_new.setAttribute(ExifInterface.TAG_TRANSFER_FUNCTION, null); + if( !request.store_ypr ) { + exif_new.setAttribute(ExifInterface.TAG_USER_COMMENT, null); + } + exif_new.setAttribute(ExifInterface.TAG_WHITE_POINT, null); + exif_new.setAttribute(ExifInterface.TAG_X_RESOLUTION, null); + exif_new.setAttribute(ExifInterface.TAG_Y_CB_CR_COEFFICIENTS, null); + exif_new.setAttribute(ExifInterface.TAG_Y_CB_CR_POSITIONING, null); + exif_new.setAttribute(ExifInterface.TAG_Y_CB_CR_SUB_SAMPLING, null); + exif_new.setAttribute(ExifInterface.TAG_Y_RESOLUTION, null); + if( !(request.custom_tag_artist != null && !request.custom_tag_artist.isEmpty() ) ) { + exif_new.setAttribute(ExifInterface.TAG_ARTIST, null); + } + if( !(request.custom_tag_copyright != null && !request.custom_tag_copyright.isEmpty() ) ) { + exif_new.setAttribute(ExifInterface.TAG_COPYRIGHT, null); + } + + exif_new.setAttribute(ExifInterface.TAG_BITS_PER_SAMPLE, null); + exif_new.setAttribute(ExifInterface.TAG_EXIF_VERSION, null); + exif_new.setAttribute(ExifInterface.TAG_FLASHPIX_VERSION, null); + exif_new.setAttribute(ExifInterface.TAG_GAMMA, null); + exif_new.setAttribute(ExifInterface.TAG_RELATED_SOUND_FILE, null); + exif_new.setAttribute(ExifInterface.TAG_SENSITIVITY_TYPE, null); + exif_new.setAttribute(ExifInterface.TAG_STANDARD_OUTPUT_SENSITIVITY, null); + exif_new.setAttribute(ExifInterface.TAG_RECOMMENDED_EXPOSURE_INDEX, null); + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED, null); + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED_LATITUDE_YYY, null); + exif_new.setAttribute(ExifInterface.TAG_ISO_SPEED_LATITUDE_ZZZ, null); + exif_new.setAttribute(ExifInterface.TAG_FILE_SOURCE, null); + 
exif_new.setAttribute(ExifInterface.TAG_CUSTOM_RENDERED, null); + exif_new.setAttribute(ExifInterface.TAG_CAMERA_OWNER_NAME, null); + exif_new.setAttribute(ExifInterface.TAG_BODY_SERIAL_NUMBER, null); + exif_new.setAttribute(ExifInterface.TAG_LENS_SPECIFICATION, null); + exif_new.setAttribute(ExifInterface.TAG_LENS_MAKE, null); + exif_new.setAttribute(ExifInterface.TAG_LENS_MODEL, null); + exif_new.setAttribute(ExifInterface.TAG_LENS_SERIAL_NUMBER, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_H_POSITIONING_ERROR, null); + exif_new.setAttribute(ExifInterface.TAG_DNG_VERSION, null); + exif_new.setAttribute(ExifInterface.TAG_DEFAULT_CROP_SIZE, null); + exif_new.setAttribute(ExifInterface.TAG_ORF_THUMBNAIL_IMAGE, null); + exif_new.setAttribute(ExifInterface.TAG_ORF_PREVIEW_IMAGE_START, null); + exif_new.setAttribute(ExifInterface.TAG_ORF_PREVIEW_IMAGE_LENGTH, null); + exif_new.setAttribute(ExifInterface.TAG_ORF_ASPECT_FRAME, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_SENSOR_BOTTOM_BORDER, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_SENSOR_LEFT_BORDER, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_SENSOR_RIGHT_BORDER, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_SENSOR_TOP_BORDER, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_ISO, null); + exif_new.setAttribute(ExifInterface.TAG_RW2_JPG_FROM_RAW, null); + exif_new.setAttribute(ExifInterface.TAG_XMP, null); + exif_new.setAttribute(ExifInterface.TAG_NEW_SUBFILE_TYPE, null); + exif_new.setAttribute(ExifInterface.TAG_SUBFILE_TYPE, null); + + if( request.remove_device_exif != Request.RemoveDeviceExif.KEEP_DATETIME ) { + if( MyDebug.LOG ) + Log.d(TAG, "remove datetime tags"); + exif_new.setAttribute(ExifInterface.TAG_DATETIME, null); + exif_new.setAttribute(ExifInterface.TAG_DATETIME_ORIGINAL, null); + exif_new.setAttribute(ExifInterface.TAG_DATETIME_DIGITIZED, null); + exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME, null); + 
exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL, null); + exif_new.setAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED, null); + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME, null); + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL, null); + exif_new.setAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED, null); + } + + if( !request.store_location ) { + if( MyDebug.LOG ) + Log.d(TAG, "remove gps tags"); + exif_new.setAttribute(ExifInterface.TAG_GPS_PROCESSING_METHOD, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_LATITUDE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_LATITUDE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_LONGITUDE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_ALTITUDE, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_ALTITUDE_REF, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_DATESTAMP, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_TIMESTAMP, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_SPEED, null); + exif_new.setAttribute(ExifInterface.TAG_GPS_SPEED_REF, null); + } + } + } + + /** Transfers exif tags from exif to exif_new, and then applies any extra Exif tags according to the preferences in the request. + * Note that we use several ExifInterface tags that are now deprecated in API level 23 and 24. These are replaced with new tags that have + * the same string value (e.g., TAG_APERTURE replaced with TAG_F_NUMBER, but both have value "FNumber"). We use the deprecated versions + * to avoid complicating the code (we'd still have to read the deprecated values for older devices). 
+ */ + private void setExif(final Request request, ExifInterface exif, ExifInterface exif_new) throws IOException { + if( MyDebug.LOG ) + Log.d(TAG, "setExif"); + + if( request.remove_device_exif == Request.RemoveDeviceExif.OFF ) { + transferDeviceExif(exif, exif_new); + } + + if( request.remove_device_exif == Request.RemoveDeviceExif.OFF || request.remove_device_exif == Request.RemoveDeviceExif.KEEP_DATETIME ) { + transferDeviceExifDateTime(exif, exif_new); + } + + if( request.remove_device_exif == Request.RemoveDeviceExif.OFF || request.store_location ) { + // If geotagging is enabled, we explicitly override the remove_device_exif setting. + // Arguably we don't need an if statement here at all - but if there was some device strangely + // setting GPS tags even when we haven't set them, it's better to remove them if the user has not + // requested RemoveDeviceExif.OFF. + transferDeviceExifGPS(exif, exif_new); + } + + modifyExif(exif_new, request.remove_device_exif, request.type == Request.Type.JPEG, request.using_camera2, request.using_camera_extensions, request.current_date, request.store_location, request.location, request.store_geo_direction, request.geo_direction, request.custom_tag_artist, request.custom_tag_copyright, request.level_angle, request.pitch_angle, request.store_ypr); + + removeExifTags(exif_new, request); // must be last, before saving attributes + exif_new.saveAttributes(); + } + + /** May be run in saver thread or picture callback thread (depending on whether running in background). 
     */
    private boolean saveImageNowRaw(Request request) {
        if( MyDebug.LOG )
            Log.d(TAG, "saveImageNowRaw");

        StorageUtils storageUtils = main_activity.getStorageUtils();
        boolean success = false;

        main_activity.savingImage(true);

        OutputStream output = null;
        RawImage raw_image = request.raw_image;
        try {
            // Exactly one of picFile/saveUri ends up non-null, depending on the storage method:
            // plain File, Storage Access Framework, or MediaStore (scoped storage).
            File picFile = null;
            Uri saveUri = null;
            boolean use_media_store = false;
            ContentValues contentValues = null; // used if using scoped storage

            String suffix = "_";
            String filename_suffix = (request.force_suffix) ? suffix + (request.suffix_offset) : "";
            if( storageUtils.isUsingSAF() ) {
                saveUri = storageUtils.createOutputMediaFileSAF(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, "dng", request.current_date);
                if( MyDebug.LOG )
                    Log.d(TAG, "saveUri: " + saveUri);
                // When using SAF, we don't save to a temp file first (unlike for JPEGs). Firstly we don't need to modify Exif, so don't
                // need a real file; secondly copying to a temp file is much slower for RAW.
            }
            else if( MainActivity.useScopedStorage() ) {
                use_media_store = true;
                Uri folder = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ?
                        MediaStore.Images.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY) :
                        MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                contentValues = new ContentValues();
                String picName = storageUtils.createMediaFilename(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, 0, ".dng", request.current_date);
                contentValues.put(MediaStore.Images.Media.DISPLAY_NAME, picName);
                contentValues.put(MediaStore.Images.Media.MIME_TYPE, "image/dng");
                if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                    contentValues.put(MediaStore.Images.Media.RELATIVE_PATH, storageUtils.getSaveRelativeFolder());
                    // IS_PENDING=1 hides the entry from other apps until we finish writing (cleared below)
                    contentValues.put(MediaStore.Images.Media.IS_PENDING, 1);
                }

                // Note, we catch exceptions specific to insert() here and rethrow as IOException,
                // rather than catching below, to avoid catching things too broadly.
                // Catching too broadly could mean we miss genuine problems that should be fixed.
                try {
                    saveUri = main_activity.getContentResolver().insert(folder, contentValues);
                }
                catch(IllegalArgumentException e) {
                    // can happen for mediastore method if invalid ContentResolver.insert() call
                    MyDebug.logStackTrace(TAG, "IllegalArgumentException inserting to mediastore", e);
                    throw new IOException();
                }
                catch(IllegalStateException e) {
                    MyDebug.logStackTrace(TAG, "IllegalStateException inserting to mediastore", e);
                    throw new IOException();
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "saveUri: " + saveUri);
                if( saveUri == null )
                    throw new IOException();
            }
            else {
                picFile = storageUtils.createOutputMediaFile(StorageUtils.MEDIA_TYPE_IMAGE, filename_suffix, "dng", request.current_date);
                if( MyDebug.LOG )
                    Log.d(TAG, "save to: " + picFile.getAbsolutePath());
            }

            if( picFile != null ) {
                output = new FileOutputStream(picFile);
            }
            else {
                output = main_activity.getContentResolver().openOutputStream(saveUri);
            }
            raw_image.writeImage(output);
            // null out after closing so the finally block doesn't close/release twice
            raw_image.close();
            raw_image = null;
            output.close();
            output = null;
            success = true;

            // set last image for share/trash options for pause preview
            // Must be done before broadcastFile() (because on Android 7+ with non-SAF, we update
            // the LastImage's uri from the MediaScannerConnection.scanFile() callback from
            // StorageUtils.broadcastFile(), which assumes the last image has already been set.
            MyApplicationInterface applicationInterface = main_activity.getApplicationInterface();
            boolean raw_only = applicationInterface.isRawOnly();
            if( MyDebug.LOG )
                Log.d(TAG, "raw_only: " + raw_only);
            if( saveUri == null ) {
                applicationInterface.addLastImage(picFile, raw_only);
            }
            else if( storageUtils.isUsingSAF() ){
                applicationInterface.addLastImageSAF(saveUri, raw_only);
            }
            else if( success && use_media_store ){
                applicationInterface.addLastImageMediaStore(saveUri, raw_only);
            }

            // if RAW only, need to update the cached uri
            if( raw_only ) {
                // clear just in case we're unable to update this - don't want an out of date cached uri
                storageUtils.clearLastMediaScanned();
            }

            // n.b., at time of writing, remove_device_exif will always be OFF for RAW, but have added the code for future proofing
            boolean hasnoexifdatetime = request.remove_device_exif != Request.RemoveDeviceExif.OFF && request.remove_device_exif != Request.RemoveDeviceExif.KEEP_DATETIME;

            if( saveUri == null ) {
                storageUtils.broadcastFile(picFile, true, false, raw_only, hasnoexifdatetime, null);
            }
            else if( use_media_store ) {
                if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                    // writing complete: clear IS_PENDING so the image becomes visible to other apps
                    contentValues.clear();
                    contentValues.put(MediaStore.Images.Media.IS_PENDING, 0);
                    main_activity.getContentResolver().update(saveUri, contentValues, null, null);
                }

                // no need to broadcast when using mediastore method

                // in theory this is pointless, as announceUri no longer does anything on Android 7+,
                // and mediastore method is only used on Android 10+, but keep this just in case
                // announceUri does something in future
                storageUtils.announceUri(saveUri, true, false);

                if( raw_only ) {
                    // we also want to save the uri - we can use the media uri directly, rather than having to scan it
                    storageUtils.setLastMediaScanned(saveUri, true, hasnoexifdatetime, saveUri);
                }
            }
            else {
                storageUtils.broadcastUri(saveUri, true, false, raw_only, hasnoexifdatetime, false);
            }
        }
        catch(FileNotFoundException e) {
            MyDebug.logStackTrace(TAG, "file not found", e);
            main_activity.getPreview().showToast(null, R.string.failed_to_save_photo_raw);
        }
        catch(IOException e) {
            MyDebug.logStackTrace(TAG, "ioexception writing raw image file", e);
            main_activity.getPreview().showToast(null, R.string.failed_to_save_photo_raw);
        }
        finally {
            // clean up anything not already closed on the success path
            if( output != null ) {
                try {
                    output.close();
                }
                catch(IOException e) {
                    MyDebug.logStackTrace(TAG, "ioexception closing raw output", e);
                }
            }
            if( raw_image != null ) {
                raw_image.close();
            }
        }

        System.gc();

        main_activity.savingImage(false);

        return success;
    }

    /** Rotates the supplied bitmap according to the orientation tag stored in the exif data. If no
     *  rotation is required, the input bitmap is returned. If rotation is required, the input
     *  bitmap is recycled.
     * @param data Jpeg data containing the Exif information to use.
     */
    private Bitmap rotateForExif(Bitmap bitmap, byte [] data) {
        if( MyDebug.LOG )
            Log.d(TAG, "rotateForExif");
        if( bitmap == null ) {
            // support thumbnail being null - as this can happen according to Google Play crashes, see comment in saveSingleImageNow()
            return null;
        }
        InputStream inputStream = null;
        try {
            ExifInterface exif;

            if( MyDebug.LOG )
                Log.d(TAG, "use data stream to read exif tags");
            inputStream = new ByteArrayInputStream(data);
            exif = new ExifInterface(inputStream);

            int exif_orientation_s = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
            if( MyDebug.LOG )
                Log.d(TAG, "    exif orientation string: " + exif_orientation_s);
            boolean needs_tf = false;
            int exif_orientation = 0;
            // see http://jpegclub.org/exif_orientation.html
            // and http://stackoverflow.com/questions/20478765/how-to-get-the-correct-orientation-of-the-image-selected-from-the-default-image
            switch (exif_orientation_s) {
                case ExifInterface.ORIENTATION_UNDEFINED:
                case ExifInterface.ORIENTATION_NORMAL:
                    // leave unchanged
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    needs_tf = true;
                    exif_orientation = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_90:
                    needs_tf = true;
                    exif_orientation = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    needs_tf = true;
                    exif_orientation = 270;
                    break;
                default:
                    // just leave unchanged for now
                    // NOTE(review): flip/transpose/transverse orientations fall through here and are not corrected
                    if (MyDebug.LOG)
                        Log.e(TAG, "    unsupported exif orientation: " + exif_orientation_s);
                    break;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "    exif orientation: " + exif_orientation);

            if( needs_tf ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "    need to rotate bitmap due to exif orientation tag");
                Matrix m = new Matrix();
                m.setRotate(exif_orientation, bitmap.getWidth() * 0.5f, bitmap.getHeight() * 0.5f);
                Bitmap rotated_bitmap = Bitmap.createBitmap(bitmap, 0, 0,bitmap.getWidth(), bitmap.getHeight(), m, true);
                if( rotated_bitmap != bitmap ) {
                    // createBitmap may return the same bitmap if the transform is a no-op; only recycle when a new one was made
                    bitmap.recycle();
                    bitmap = rotated_bitmap;
                }
            }
        }
        catch(IOException e) {
            MyDebug.logStackTrace(TAG, "exif orientation ioexception", e);
        }
        catch(NoClassDefFoundError e) {
            // have had Google Play crashes from new ExifInterface() for Galaxy Ace4 (vivalto3g), Galaxy S Duos3 (vivalto3gvn)
            MyDebug.logStackTrace(TAG, "exif orientation NoClassDefFoundError", e);
        }
        finally {
            if( inputStream != null ) {
                try {
                    inputStream.close();
                }
                catch(IOException e) {
                    MyDebug.logStackTrace(TAG, "failed to close inputStream", e);
                }
            }
        }
        return bitmap;
    }

    /** Loads the bitmap from the supplied jpeg data, rotating if necessary according to the
     *  supplied EXIF orientation tag.
     * @param data The jpeg data.
     * @param mutable Whether to create a mutable bitmap.
     * @return A bitmap representing the correctly rotated jpeg.
+ */ + private Bitmap loadBitmapWithRotation(byte [] data, boolean mutable) { + Bitmap bitmap = loadBitmap(data, mutable, 1); + if( bitmap != null ) { + // rotate the bitmap if necessary for exif tags + if( MyDebug.LOG ) + Log.d(TAG, "rotate bitmap for exif tags?"); + bitmap = rotateForExif(bitmap, data); + } + return bitmap; + } + + /* In some cases we may create an ExifInterface with a FileDescriptor obtained from a + * ParcelFileDescriptor (via getFileDescriptor()). It's important to keep a reference to the + * ParcelFileDescriptor object for as long as the exif interface, otherwise there's a risk of + * the ParcelFileDescriptor being garbage collected, invalidating the file descriptor still + * being used by the ExifInterface! + * This didn't cause any known bugs, but good practice to fix, similar to the issue reported in + * https://sourceforge.net/p/opencamera/tickets/417/ . + * Also important to call the close() method when done with it, to close the + * ParcelFileDescriptor (if one was created). + */ + private static class ExifInterfaceHolder { + // see documentation above about keeping hold of pdf due to the garbage collector! + private final ParcelFileDescriptor pfd; + private final ExifInterface exif; + + ExifInterfaceHolder(ParcelFileDescriptor pfd, ExifInterface exif) { + this.pfd = pfd; + this.exif = exif; + } + + ExifInterface getExif() { + return this.exif; + } + + void close() { + if( this.pfd != null ) { + try { + this.pfd.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close parcelfiledescriptor", e); + } + } + } + } + + /** Creates a new exif interface for reading and writing. + * If picFile==null, then saveUri must be non-null, and will be used instead to write the exif + * tags too. + * The returned ExifInterfaceHolder will always be non-null, but the contained getExif() may + * return null if this method was unable to create the exif interface. 
+ * The caller should call close() on the returned ExifInterfaceHolder when no longer required. + */ + private ExifInterfaceHolder createExifInterface(File picFile, Uri saveUri) throws IOException { + ParcelFileDescriptor parcelFileDescriptor = null; + ExifInterface exif = null; + if( picFile != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "write to picFile: " + picFile); + exif = new ExifInterface(picFile.getAbsolutePath()); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "write direct to saveUri: " + saveUri); + parcelFileDescriptor = main_activity.getContentResolver().openFileDescriptor(saveUri, "rw"); + if( parcelFileDescriptor != null ) { + FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor(); + exif = new ExifInterface(fileDescriptor); + } + else { + Log.e(TAG, "failed to create ParcelFileDescriptor for saveUri: " + saveUri); + } + } + return new ExifInterfaceHolder(parcelFileDescriptor, exif); + } + + /** Makes various modifications to the saved image file, according to the preferences in request. + * This method is used when saving directly from the JPEG data rather than a bitmap. + * If picFile==null, then saveUri must be non-null, and will be used instead to write the exif + * tags too. 
     */
    private void updateExif(Request request, File picFile, Uri saveUri) throws IOException {
        if( MyDebug.LOG )
            Log.d(TAG, "updateExif: " + picFile);
        // only open/rewrite the exif if at least one option actually requires a modification -
        // creating an ExifInterface and saving attributes is expensive
        if( request.store_geo_direction || request.store_ypr || hasCustomExif(request.custom_tag_artist, request.custom_tag_copyright) ||
                request.using_camera_extensions || // when using camera extensions, we need to call modifyExif() to fix up various missing tags
                needGPSExifFix(request.type == Request.Type.JPEG, request.using_camera2, request.store_location) ) {
            long time_s = System.currentTimeMillis();
            if( MyDebug.LOG )
                Log.d(TAG, "add additional exif info");
            try {
                ExifInterfaceHolder exif_holder = createExifInterface(picFile, saveUri);
                if( MyDebug.LOG )
                    Log.d(TAG, "*** time after create exif: " + (System.currentTimeMillis() - time_s));
                try {
                    ExifInterface exif = exif_holder.getExif();
                    // exif may be null if createExifInterface() failed to obtain a file descriptor
                    if( exif != null ) {
                        modifyExif(exif, request.remove_device_exif, request.type == Request.Type.JPEG, request.using_camera2, request.using_camera_extensions, request.current_date, request.store_location, request.location, request.store_geo_direction, request.geo_direction, request.custom_tag_artist, request.custom_tag_copyright, request.level_angle, request.pitch_angle, request.store_ypr);

                        if( MyDebug.LOG )
                            Log.d(TAG, "*** time after modifyExif: " + (System.currentTimeMillis() - time_s));
                        exif.saveAttributes();
                        if( MyDebug.LOG )
                            Log.d(TAG, "*** time after saveAttributes: " + (System.currentTimeMillis() - time_s));
                    }
                }
                finally {
                    // always release the ParcelFileDescriptor (if any)
                    exif_holder.close();
                }
            }
            catch(NoClassDefFoundError e) {
                // have had Google Play crashes from new ExifInterface() elsewhere for Galaxy Ace4 (vivalto3g), Galaxy S Duos3 (vivalto3gvn), so also catch here just in case
                MyDebug.logStackTrace(TAG, "exif orientation NoClassDefFoundError", e);
            }
            if( MyDebug.LOG )
                Log.d(TAG, "*** time to add additional exif info: " + (System.currentTimeMillis() - time_s));
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "no exif data to update for: " + picFile);
        }
    }

    /** Makes various modifications to the exif data, if necessary.
     * Any fix-ups should respect the setting of RemoveDeviceExif!
     */
    private void modifyExif(ExifInterface exif, Request.RemoveDeviceExif remove_device_exif, boolean is_jpeg, boolean using_camera2, boolean using_camera_extensions, Date current_date, boolean store_location, Location location, boolean store_geo_direction, double geo_direction, String custom_tag_artist, String custom_tag_copyright, double level_angle, double pitch_angle, boolean store_ypr) {
        if( MyDebug.LOG )
            Log.d(TAG, "modifyExif");
        setGPSDirectionExif(exif, store_geo_direction, geo_direction);
        if( store_ypr ) {
            // store yaw/pitch/roll in the UserComment tag; yaw is normalised to [0, 360)
            float geo_angle = (float)Math.toDegrees(geo_direction);
            if( geo_angle < 0.0f ) {
                geo_angle += 360.0f;
            }
            // UserComment requires an 8-byte character-code prefix; "ASCII\0\0\0" declares ASCII encoding
            String encoding = "ASCII\0\0\0";
            // fine to ignore request.remove_device_exif, as this is a separate user option
            //exif.setAttribute(ExifInterface.TAG_USER_COMMENT,"Yaw:" + geo_angle + ",Pitch:" + pitch_angle + ",Roll:" + level_angle);
            exif.setAttribute(ExifInterface.TAG_USER_COMMENT,encoding + "Yaw:" + geo_angle + ",Pitch:" + pitch_angle + ",Roll:" + level_angle);
            if( MyDebug.LOG )
                Log.d(TAG, "UserComment: " + exif.getAttribute(ExifInterface.TAG_USER_COMMENT));
        }
        setCustomExif(exif, custom_tag_artist, custom_tag_copyright);

        boolean force_location = false; // whether we need to add location data
        if( store_location ) {
            // Normally if geotagging is enabled, location should have already been added via the Camera API.
            // But we need this when using camera extensions (since Camera API doesn't support location for camera extensions).
            // And some devices (e.g., Pixel 6 Pro with Camera2 API) seem to not store location data, so we always check if we need to add it.
            // Similarly Fairphone 5 always has longitude stored as 0.0.
            // fine to ignore request.remove_device_exif, as this is a separate user option
            if( !exif.hasAttribute(ExifInterface.TAG_GPS_LATITUDE) || !exif.hasAttribute(ExifInterface.TAG_GPS_LONGITUDE) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "force location as not present in exif");
                force_location = true;
            }
            else {
                double [] lat_long = exif.getLatLong();
                if( lat_long == null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "force location as not present in exif");
                    force_location = true;
                }
                else if( lat_long[0] == 0.0 || lat_long[1] == 0.0 ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "force location as longitude or latitude is 0.0");
                    force_location = true;
                }
            }
        }
        if( force_location ) {
            if( MyDebug.LOG )
                Log.d(TAG, "force store location"); // don't log location for privacy reasons!
            exif.setGpsInfo(location);
        }

        if( using_camera_extensions ) {
            // camera extensions don't write datetime tags, so add them ourselves - but only when
            // the RemoveDeviceExif option allows datetime tags to be kept
            if( remove_device_exif == Request.RemoveDeviceExif.OFF || remove_device_exif == Request.RemoveDeviceExif.KEEP_DATETIME ) {
                addDateTimeExif(exif, current_date);
            }
        }
        else if( needGPSExifFix(is_jpeg, using_camera2, store_location) ) {
            // fine to ignore request.remove_device_exif, as this is a separate user option
            fixGPSTimestamp(exif, current_date);
        }
    }

    private void setGPSDirectionExif(ExifInterface exif, boolean store_geo_direction, double geo_direction) {
        if( MyDebug.LOG )
            Log.d(TAG, "setGPSDirectionExif");
        if( store_geo_direction ) {
            float geo_angle = (float)Math.toDegrees(geo_direction);
            if( geo_angle < 0.0f ) {
                geo_angle += 360.0f;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "save geo_angle: " + geo_angle);
            // see http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/GPS.html
            String GPSImgDirection_string = Math.round(geo_angle*100) + "/100";
            if( MyDebug.LOG )
                Log.d(TAG, "GPSImgDirection_string: " + GPSImgDirection_string);
            // fine to ignore request.remove_device_exif, as this is a separate user option
            exif.setAttribute(ExifInterface.TAG_GPS_IMG_DIRECTION, GPSImgDirection_string);
exif.setAttribute(ExifInterface.TAG_GPS_IMG_DIRECTION_REF, "M"); + } + } + + /** Whether custom exif tags need to be applied to the image file. + */ + private boolean hasCustomExif(String custom_tag_artist, String custom_tag_copyright) { + if( custom_tag_artist != null && !custom_tag_artist.isEmpty() ) + return true; + if( custom_tag_copyright != null && !custom_tag_copyright.isEmpty() ) + return true; + return false; + } + + /** Applies the custom exif tags to the ExifInterface. + */ + private void setCustomExif(ExifInterface exif, String custom_tag_artist, String custom_tag_copyright) { + if( MyDebug.LOG ) + Log.d(TAG, "setCustomExif"); + if( custom_tag_artist != null && !custom_tag_artist.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "apply TAG_ARTIST: " + custom_tag_artist); + // fine to ignore request.remove_device_exif, as this is a separate user option + exif.setAttribute(ExifInterface.TAG_ARTIST, custom_tag_artist); + } + if( custom_tag_copyright != null && !custom_tag_copyright.isEmpty()) { + if( MyDebug.LOG ) + Log.d(TAG, "apply TAG_COPYRIGHT: " + custom_tag_copyright); + // fine to ignore request.remove_device_exif, as this is a separate user option + exif.setAttribute(ExifInterface.TAG_COPYRIGHT, custom_tag_copyright); + } + } + + /** Adds exif tags for datetime from the supplied date, if not present. Needed for camera vendor + * extensions which (at least on Galaxy S10e) don't seem to have these tags set at all! 
+ */ + private void addDateTimeExif(ExifInterface exif, Date current_date) { + if( MyDebug.LOG ) + Log.d(TAG, "addDateTimeExif"); + String exif_datetime = exif.getAttribute(ExifInterface.TAG_DATETIME); + if( MyDebug.LOG ) + Log.d(TAG, "existing exif TAG_DATETIME: " + exif_datetime); + if( exif_datetime == null ) { + SimpleDateFormat date_fmt = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss", Locale.US); + date_fmt.setTimeZone(TimeZone.getDefault()); // need local timezone for TAG_DATETIME + exif_datetime = date_fmt.format(current_date); + if( MyDebug.LOG ) + Log.d(TAG, "new TAG_DATETIME: " + exif_datetime); + + exif.setAttribute(ExifInterface.TAG_DATETIME, exif_datetime); + // set these tags too (even if already present, overwrite to be consistent) + exif.setAttribute(ExifInterface.TAG_DATETIME_ORIGINAL, exif_datetime); + exif.setAttribute(ExifInterface.TAG_DATETIME_DIGITIZED, exif_datetime); + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + // XXX requires Android 7 + // needs to be -/+HH:mm format, which is given by XXX + date_fmt = new SimpleDateFormat("XXX", Locale.US); + date_fmt.setTimeZone(TimeZone.getDefault()); + String timezone = date_fmt.format(current_date); + if( MyDebug.LOG ) + Log.d(TAG, "timezone: " + timezone); + exif.setAttribute(ExifInterface.TAG_OFFSET_TIME, timezone); + exif.setAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL, timezone); + exif.setAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED, timezone); + } + } + } + + private void fixGPSTimestamp(ExifInterface exif, Date current_date) { + if( MyDebug.LOG ) { + Log.d(TAG, "fixGPSTimestamp"); + Log.d(TAG, "current datestamp: " + exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP)); + Log.d(TAG, "current timestamp: " + exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP)); + Log.d(TAG, "current datetime: " + exif.getAttribute(ExifInterface.TAG_DATETIME)); + } + // Hack: Problem on Camera2 API (at least on Nexus 6) that if geotagging is enabled, then the resultant image has incorrect Exif 
TAG_GPS_DATESTAMP and TAG_GPS_TIMESTAMP (GPSDateStamp) set (date tends to be around 2038 - possibly a driver bug of casting long to int?). + // This causes problems when viewing with Gallery apps (e.g., Gallery ICS; Google Photos seems fine however), as they show this incorrect date. + // Update: Before v1.34 this was "fixed" by calling: exif.setAttribute(ExifInterface.TAG_GPS_TIMESTAMP, Long.toString(System.currentTimeMillis())); + // However this stopped working on or before 20161006. This wasn't a change in Open Camera (whilst this was working fine in + // 1.33 when I released it, the bug had come back when I retested that version) and I'm not sure how this ever worked, since + // TAG_GPS_TIMESTAMP is meant to be a string such "21:45:23", and not the number of ms since 1970 - possibly it wasn't really + // working , and was simply invalidating it such that Gallery then fell back to looking elsewhere for the datetime? + // So now hopefully fixed properly... + // Note, this problem also occurs on OnePlus 3T and Gallery ICS, if we don't have this function called + SimpleDateFormat date_fmt = new SimpleDateFormat("yyyy:MM:dd", Locale.US); + date_fmt.setTimeZone(TimeZone.getTimeZone("UTC")); // needs to be UTC time for the GPS datetime tags + String datestamp = date_fmt.format(current_date); + + SimpleDateFormat time_fmt = new SimpleDateFormat("HH:mm:ss", Locale.US); + time_fmt.setTimeZone(TimeZone.getTimeZone("UTC")); + String timestamp = time_fmt.format(current_date); + + if( MyDebug.LOG ) { + Log.d(TAG, "datestamp: " + datestamp); + Log.d(TAG, "timestamp: " + timestamp); + } + exif.setAttribute(ExifInterface.TAG_GPS_DATESTAMP, datestamp); + exif.setAttribute(ExifInterface.TAG_GPS_TIMESTAMP, timestamp); + + if( MyDebug.LOG ) + Log.d(TAG, "fixGPSTimestamp exit"); + } + + /** Whether we need to fix up issues with location. + * See comments in fixGPSTimestamp(), where some devices with Camera2 need fixes for TAG_GPS_DATESTAMP and TAG_GPS_TIMESTAMP. 
+ * Also some devices (e.g. Pixel 6 Pro) have problem that location is not stored in images with Camera2 API, so we need to + * enter modifyExif() to add it if not present; similarly Fairphone 5 needs correcting due to storing longitude as 0.0. + */ + private boolean needGPSExifFix(boolean is_jpeg, boolean using_camera2, boolean store_location) { + if( is_jpeg && using_camera2 ) { + return store_location; + } + return false; + } + + // for testing: + + HDRProcessor getHDRProcessor() { + return hdrProcessor; + } + + public PanoramaProcessor getPanoramaProcessor() { + return panoramaProcessor; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/JavaImageFunctions.java b/app/src/main/java/net/sourceforge/opencamera/JavaImageFunctions.java new file mode 100644 index 0000000..a7c7838 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/JavaImageFunctions.java @@ -0,0 +1,4934 @@ +package net.sourceforge.opencamera; + +import android.graphics.Bitmap; +//import android.util.Log; + +import java.util.List; + +public class JavaImageFunctions { + private static final String TAG = "JavaImageFunctions"; + + static class CreateMTBApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final boolean use_mtb; + private final int median_value; + + CreateMTBApplyFunction(boolean use_mtb, int median_value) { + this.use_mtb = use_mtb; + this.median_value = median_value; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + if( use_mtb ) { + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int 
b = color & 0xFF; + + int value = Math.max(r, g); + value = Math.max(value, b); + + // ignore small differences to reduce effect of noise - this helps testHDR22 + int diff; + if( value > median_value ) + diff = value - median_value; + else + diff = median_value - value; + + if( diff <= 4 ) // should be same value as min_diff_c in HDRProcessor.autoAlignment() + pixels_out[c] = 127 << 24; + else if( value <= median_value ) + pixels_out[c] = 0; + else + pixels_out[c] = 255 << 24; + } + } + } + else { + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + int value = Math.max(r, g); + value = Math.max(value, b); + + pixels_out[c] = value << 24; + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class AlignMTBApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private int [][] errors = null; + private final boolean use_mtb; + private final Bitmap bitmap0; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap0; + private final Bitmap bitmap1; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap1; + private final int offset_x, offset_y; + private final int step_size; + + AlignMTBApplyFunction(boolean use_mtb, Bitmap bitmap0, Bitmap bitmap1, int offset_x, int offset_y, int step_size) { + this.use_mtb = use_mtb; + this.bitmap0 = bitmap0; + this.bitmap1 = bitmap1; + this.offset_x = offset_x; + this.offset_y = offset_y; + this.step_size = step_size; + } + + @Override + public void init(int n_threads) { + errors = new int[n_threads][]; + fast_bitmap0 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + fast_bitmap1 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + for(int i=0;i= bitmap1_height-step_size ) + ey--; + for(int cy=sy;cy= bitmap1_width-step_size ) + ex--; + for(int 
cx=sx;cx= step_size && x_plus_offset < bitmap1_width-step_size && y_plus_offset >= step_size && y_plus_offset < bitmap1_height-step_size ) + { + //int pixel0 = fast_bitmap0[thread_index].getPixel(x, y) >>> 24; + int pixel0 = bitmap0_cache_pixels[y_rel_bitmap0_cache*bitmap0_width+x] >>> 24; + + /*int c=0; + for(int dy=-1;dy<=1;dy++) { + for(int dx=-1;dx<=1;dx++) { + int pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset+dx*step_size, y_plus_offset+dy*step_size) >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + // check against 127 to ignore noise - see CreateMTBApplyFunction + errors[thread_index][c]++; + } + c++; + } + }*/ + + // unroll loops + // check against 127 to ignore noise - see CreateMTBApplyFunction + int pixel1; + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset-step_size, y_plus_offset-step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][0]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset, y_plus_offset-step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][1]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset+step_size, y_plus_offset-step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][2]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset-step_size, y_plus_offset) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) 
{ + errors[thread_index][3]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset, y_plus_offset) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][4]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset+step_size, y_plus_offset) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][5]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset-step_size, y_plus_offset+step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][6]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset, y_plus_offset+step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][7]++; + } + + //pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset+step_size, y_plus_offset+step_size) >>> 24; + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + if( pixel0 != pixel1 && pixel0 != 127 && pixel1 != 127 ) { + errors[thread_index][8]++; + } + } + } + } + } + else { + int sy = off_y, ey = off_y+this_height; + while( sy*step_size+offset_y < step_size ) + sy++; + while( (ey-1)*step_size+offset_y >= bitmap1_height-step_size ) + ey--; + for(int cy=sy;cy= bitmap1_width-step_size ) + ex--; + for(int cx=sx;cx= step_size && x_plus_offset < bitmap1_width-step_size && y_plus_offset >= step_size && y_plus_offset < bitmap1_height-step_size ) + { + 
//int pixel0 = fast_bitmap0[thread_index].getPixel(x, y) >>> 24; + int pixel0 = bitmap0_cache_pixels[y_rel_bitmap0_cache*bitmap0_width+x] >>> 24; + /*if( MyDebug.LOG ) { + Log.d(TAG, "int = " + fast_bitmap0[thread_index].getPixel(x, y)); + Log.d(TAG, "pixel0 = " + pixel0); + }*/ + + /*int c=0; + for(int dy=-1;dy<=1;dy++) { + for(int dx=-1;dx<=1;dx++) { + //int pixel1 = fast_bitmap1[thread_index].getPixel(x_plus_offset+dx*step_size, y_plus_offset+dy*step_size) >>> 24; + int pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+dy*step_size)*bitmap1_width+(x_plus_offset+dx*step_size)] >>> 24; + int diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + int diff2 = diff*diff; + //diff2 = pixel0; + //if( MyDebug.LOG ) + // Log.d(TAG, "diff = " + diff); + if( errors[thread_index][c] < 2000000000 ) { // avoid risk of overflow + errors[thread_index][c] += diff2; + } + c++; + } + }*/ + + // unroll loops + int pixel1; + int diff; + final int overflow_check_c = 2000000000; + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][0] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][0] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][1] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][1] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset-step_size)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too 
high: " + diff); + if( errors[thread_index][2] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][2] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][3] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][3] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][4] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][4] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][5] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][5] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset-step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][6] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][6] += diff*diff; + } + + pixel1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][7] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][7] += diff*diff; + } + + pixel1 = 
bitmap1_cache_pixels[(y_rel_bitmap1_cache_plus_offset+step_size)*bitmap1_width+(x_plus_offset+step_size)] >>> 24; + diff = pixel1 - pixel0; + //if( Math.abs(diff) > 255 ) + // throw new RuntimeException("diff too high: " + diff); + if( errors[thread_index][8] < overflow_check_c ) { // avoid risk of overflow + errors[thread_index][8] += diff*diff; + } + + } + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + int [] getErrors() { + int [] total_errors = new int[errors[0].length]; + // for each errors, add its entries to the total errors + for(int [] error : errors) { + for (int j=0;j low_x ) { + this.gain_A = (gain * mid_x - low_x) / (mid_x - low_x); + this.gain_B = low_x*mid_x*(1.0f-gain)/ (mid_x - low_x); + } + else { + this.gain_A = 1.0f; + this.gain_B = 0.0f; + } + + for(int value=0;value<256;value++) { + float new_value = (float)Math.pow(value/max_x, gamma) * 255.0f; + value_to_gamma_scale_lut[value] = new_value / value; + } + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + float fr = r, fg = g, fb = b; + float value = 
Math.max(fr, fg); + value = Math.max(value, fb); + + // apply piecewise function of gain vs gamma + if( value <= low_x ) { + // don't scale + } + else if( value <= mid_x ) { + //float alpha = (value-low_x)/(mid_x-low_x); + //float new_value = (1.0-alpha)*low_x + alpha*gain*mid_x; + // gain_A and gain_B should be set so that new_value meets the commented out code above + // This code is critical for performance! + + fr *= (gain_A + gain_B/value); + fg *= (gain_A + gain_B/value); + fb *= (gain_A + gain_B/value); + } + else { + // use LUT for performance + /*float new_value = (float)Math.pow(value/max_x, gamma) * 255.0f; + float gamma_scale = new_value / value;*/ + float gamma_scale = value_to_gamma_scale_lut[(int)(value+0.5f)]; + + fr *= gamma_scale; + fg *= gamma_scale; + fb *= gamma_scale; + + } + + r = (int)(fr+0.5f); + g = (int)(fg+0.5f); + b = (int)(fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + byte [] pixels_out = output.getCachedPixelsB(); + for(int y=off_y,c=0;y> 10) & 0x3FF) / 4.0f; + this.fb = (float)((rgb >> 20) & 0x3FF) / 4.0f; + this.lum = Math.max(Math.max(fr, fg), fb); + }*/ + + void setRGB(float fr, float fg, float fb) { + this.fr = fr; + this.fg = fg; + this.fb = fb; + this.lum = Math.max(Math.max(fr, fg), fb); + } + + void setRGB(final float [] pixels_in_rgbf, int x, int y, int width) { + int indx = (y*width+x)*3; + setRGB(pixels_in_rgbf[indx], pixels_in_rgbf[indx+1], pixels_in_rgbf[indx+2]); + } + } + + static class AvgApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float [] pixels_rgbf; // output + private final 
Bitmap bitmap_new; // new bitmap being added to the input + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_new; + private final Bitmap bitmap_orig; // original bitmap (first image) + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_orig; + private final int width, height; + private final int offset_x_new, offset_y_new; + private final float avg_factor; + private final float wiener_C; + private final float wiener_C_cutoff; + + final int radius = 2; // must be less than the radius we actually read from below + //final int n_pixels_c = 5; // number of pixels we read from + //final int [] sample_x = new int[]{-2, 2, 0, -2, 2}; + //final int [] sample_y = new int[]{-2, -2, 0, 2, 2}; + + /*final float [] pixels_avg_fr; + final float [] pixels_avg_fg; + final float [] pixels_avg_fb;*/ + + AvgApplyFunction(float [] pixels_rgbf, Bitmap bitmap_new, Bitmap bitmap_orig, int width, int height, int offset_x_new, int offset_y_new, float avg_factor, float wiener_C, float wiener_C_cutoff) { + this.pixels_rgbf = pixels_rgbf; + this.bitmap_new = bitmap_new; + this.bitmap_orig = bitmap_orig; + this.width = width; + this.height = height; + this.offset_x_new = offset_x_new; + this.offset_y_new = offset_y_new; + this.avg_factor = avg_factor; + this.wiener_C = wiener_C; + this.wiener_C_cutoff = wiener_C_cutoff; + /*this.pixels_avg_fr = new float[width]; + this.pixels_avg_fg = new float[width]; + this.pixels_avg_fb = new float[width];*/ + } + + @Override + public void init(int n_threads) { + fast_bitmap_new = new JavaImageProcessing.FastAccessBitmap[n_threads]; + fast_bitmap_orig = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i> 10) & 0x3FF) / 4.0f; + float fb = (float)((color >> 20) & 0x3FF) / 4.0f;*/ + float fr = pixels_in_rgbf[indx++]; + float fg = pixels_in_rgbf[indx++]; + float fb = pixels_in_rgbf[indx++]; + + /*int r = (int)(fr+0.5f); + int g = (int)(fg+0.5f); + int b = (int)(fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = 
Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b));*/ + + if( x > 0 && x < width-1 && y > 0 && y < height-1 ) + { + // median filter for noise reduction + // performs better than spatial filter; reduces black/white speckles in: testAvg23, + // testAvg28, testAvg31, testAvg33 + // note that one has to typically zoom to 400% to see the improvement + + /*int color0 = pixels_in[(y-1)*width+(x)]; + int color1 = pixels_in[(y)*width+(x-1)]; + int color2 = color; + int color3 = pixels_in[(y)*width+(x+1)]; + int color4 = pixels_in[(y+1)*width+(x)]; + + rgbf_luminances[0].setRGB101010(color0); + rgbf_luminances[1].setRGB101010(color1); + rgbf_luminances[2].setRGB101010(color2); + rgbf_luminances[3].setRGB101010(color3); + rgbf_luminances[4].setRGB101010(color4);*/ + + rgbf_luminances[0].setRGB(pixels_in_rgbf, x, y-1, width); + rgbf_luminances[1].setRGB(pixels_in_rgbf, x-1, y, width); + rgbf_luminances[2].setRGB(fr, fg, fb); + rgbf_luminances[3].setRGB(pixels_in_rgbf, x+1, y, width); + rgbf_luminances[4].setRGB(pixels_in_rgbf, x, y+1, width); + + // if changing this code, see if the test code in UnitTest.findMedian() should be updated + + // new faster version: + if( rgbf_luminances[0].lum > rgbf_luminances[1].lum ) { + RGBf_luminance temp_p = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[1]; + rgbf_luminances[1] = temp_p; + } + if( rgbf_luminances[3].lum > rgbf_luminances[4].lum ) { + RGBf_luminance temp_p = rgbf_luminances[3]; + rgbf_luminances[3] = rgbf_luminances[4]; + rgbf_luminances[4] = temp_p; + } + if( rgbf_luminances[0].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp_p = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[3]; + rgbf_luminances[3] = temp_p; + + temp_p = rgbf_luminances[1]; + rgbf_luminances[1] = rgbf_luminances[4]; + rgbf_luminances[4] = temp_p; + } + if( rgbf_luminances[1].lum > rgbf_luminances[2].lum ) { + if( rgbf_luminances[2].lum > rgbf_luminances[3].lum ) { + if( rgbf_luminances[2].lum > 
rgbf_luminances[4].lum ) { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[4]; + rgbf_luminances[4] = temp_p; + } + // else median is rgbf_luminances[2] + } + else { + if( rgbf_luminances[1].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[3]; + rgbf_luminances[3] = temp_p; + } + else { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[1]; + rgbf_luminances[1] = temp_p; + } + } + } + else { + if( rgbf_luminances[1].lum > rgbf_luminances[3].lum ) { + if( rgbf_luminances[1].lum > rgbf_luminances[4].lum ) { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[4]; + rgbf_luminances[4] = temp_p; + } + else { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[1]; + rgbf_luminances[1] = temp_p; + } + } + else { + if( rgbf_luminances[2].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp_p = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[3]; + rgbf_luminances[3] = temp_p; + } + // else median is rgbf_luminances[2] + } + } + + // original slower version: + /*if( rgbf_luminances[0].lum > rgbf_luminances[1].lum ) { + RGBf_luminance temp = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[1]; + rgbf_luminances[1] = temp; + } + if( rgbf_luminances[0].lum > rgbf_luminances[2].lum ) { + RGBf_luminance temp = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[2]; + rgbf_luminances[2] = temp; + } + if( rgbf_luminances[0].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[3]; + rgbf_luminances[3] = temp; + } + if( rgbf_luminances[0].lum > rgbf_luminances[4].lum ) { + RGBf_luminance temp = rgbf_luminances[0]; + rgbf_luminances[0] = rgbf_luminances[4]; + rgbf_luminances[4] = temp; + } + // + if( rgbf_luminances[1].lum > rgbf_luminances[2].lum ) { + RGBf_luminance temp = 
rgbf_luminances[1]; + rgbf_luminances[1] = rgbf_luminances[2]; + rgbf_luminances[2] = temp; + } + if( rgbf_luminances[1].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp = rgbf_luminances[1]; + rgbf_luminances[1] = rgbf_luminances[3]; + rgbf_luminances[3] = temp; + } + if( rgbf_luminances[1].lum > rgbf_luminances[4].lum ) { + RGBf_luminance temp = rgbf_luminances[1]; + rgbf_luminances[1] = rgbf_luminances[4]; + rgbf_luminances[4] = temp; + } + // + if( rgbf_luminances[2].lum > rgbf_luminances[3].lum ) { + RGBf_luminance temp = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[3]; + rgbf_luminances[3] = temp; + } + if( rgbf_luminances[2].lum > rgbf_luminances[4].lum ) { + RGBf_luminance temp = rgbf_luminances[2]; + rgbf_luminances[2] = rgbf_luminances[4]; + rgbf_luminances[4] = temp; + } + // don't care about sorting p3 and p4 + */ + + fr = (1.0f - median_filter_strength) * fr + median_filter_strength * rgbf_luminances[2].fr; + fg = (1.0f - median_filter_strength) * fg + median_filter_strength * rgbf_luminances[2].fg; + fb = (1.0f - median_filter_strength) * fb + median_filter_strength * rgbf_luminances[2].fb; + } + + { + // spatial noise reduction filter, colour only + // if making changes to this (especially radius, C), run AvgTests - in particular, pay close + // attention to: + // testAvg6: don't want to make the postcard too blurry + // testAvg8: zoom in to 600%, ensure still appears reasonably sharp + // testAvg23: ensure we do reduce the noise, e.g., view around "vicks", without making the + // text blurry + // testAvg24: want to reduce the colour noise near the wall, but don't blur out detail, e.g. + // at the flowers + // testAvg31 + // Also need to be careful of performance. 
+ //float old_value = Math.max(fr, fg); + //old_value = Math.max(old_value, fb); + float old_value = fg; // use only green component for performance + float sum_fr = 0.0f; + float sum_fg = 0.0f; + float sum_fb = 0.0f; + //int radius = 3; + int radius = 2; + int count = 0; + int sx = (x >= radius) ? x-radius : 0; + int ex = (x < width-radius) ? x+radius : width-1; + int sy = (y >= radius) ? y-radius : 0; + int ey = (y < height-radius) ? y+radius : height-1; + for(int cy=sy;cy<=ey;cy++) { + int this_indx = (cy*width+sx)*3; + for(int cx=sx;cx<=ex;cx++) { + //if( cx >= 0 && cx < width && cy >= 0 && cy < height ) + { + /*int this_pixel = pixels_in[cy*width+cx]; + float this_fr = (float)((this_pixel) & 0x3FF) / 4.0f; + float this_fg = (float)((this_pixel >> 10) & 0x3FF) / 4.0f; + float this_fb = (float)((this_pixel >> 20) & 0x3FF) / 4.0f;*/ + float this_fr = pixels_in_rgbf[this_indx++]; + float this_fg = pixels_in_rgbf[this_indx++]; + float this_fb = pixels_in_rgbf[this_indx++]; + { + //float this_value = Math.max(this_fr, this_fg); + //this_value = Math.max(this_value, this_fb); + float this_value = this_fg; // use only green component for performance + if( this_value > 0.5f ) { + float scale = old_value/this_value; + this_fr *= scale; + this_fg *= scale; + this_fb *= scale; + } + /*if( this_fg > 0.5f ) { + float scale = fg/this_fg; + this_fr *= scale; + this_fg *= scale; + this_fb *= scale; + }*/ + // use a wiener filter, so that more similar pixels have greater contribution + // smaller value of C means stronger filter (i.e., less averaging) + // for now set at same value as standard spatial filter above + //final float C = 64.0f*64.0f/8.0f; + //final float C = 512.0f; + //final float C = 16.0f*16.0f/8.0f; + final float C = 32.0f; + + float diff_r = fr - this_fr; + float diff_g = fg - this_fg; + float diff_b = fb - this_fb; + + float L = diff_r*diff_r + diff_g*diff_g + diff_b*diff_b; + //L = 0.0f; // test no wiener filter + float weight = L/(L+C); + + /*float weight1 
= 1.0f-weight; + this_fr = weight * fr + weight1 * this_fr; + this_fg = weight * fg + weight1 * this_fg; + this_fb = weight * fb + weight1 * this_fb;*/ + + // faster version: + this_fr = this_fr + weight * diff_r; + this_fg = this_fg + weight * diff_g; + this_fb = this_fb + weight * diff_b; + } + sum_fr += this_fr; + sum_fg += this_fg; + sum_fb += this_fb; + count++; + } + } + } + + fr = sum_fr / count; + fg = sum_fg / count; + fb = sum_fb / count; + } + + { + // sharpen + // helps: testAvg12, testAvg16, testAvg23, testAvg30, testAvg32 + if( x >= 1 && x < width-1 && y >= 1 && y < height-1 ) { + /*int color00 = pixels_in[(y-1)*width+(x-1)]; + int color10 = pixels_in[(y-1)*width+(x)]; + int color20 = pixels_in[(y-1)*width+(x+1)]; + + int color01 = pixels_in[(y)*width+(x-1)]; + int color21 = pixels_in[(y)*width+(x+1)]; + + int color02 = pixels_in[(y+1)*width+(x-1)]; + int color12 = pixels_in[(y+1)*width+(x)]; + int color22 = pixels_in[(y+1)*width+(x+1)]; + + float fr00 = (float)((color00) & 0x3FF) / 4.0f; + float fg00 = (float)((color00 >> 10) & 0x3FF) / 4.0f; + float fb00 = (float)((color00 >> 20) & 0x3FF) / 4.0f; + float fr10 = (float)((color10) & 0x3FF) / 4.0f; + float fg10 = (float)((color10 >> 10) & 0x3FF) / 4.0f; + float fb10 = (float)((color10 >> 20) & 0x3FF) / 4.0f; + float fr20 = (float)((color20) & 0x3FF) / 4.0f; + float fg20 = (float)((color20 >> 10) & 0x3FF) / 4.0f; + float fb20 = (float)((color20 >> 20) & 0x3FF) / 4.0f; + + float fr01 = (float)((color01) & 0x3FF) / 4.0f; + float fg01 = (float)((color01 >> 10) & 0x3FF) / 4.0f; + float fb01 = (float)((color01 >> 20) & 0x3FF) / 4.0f; + float fr21 = (float)((color21) & 0x3FF) / 4.0f; + float fg21 = (float)((color21 >> 10) & 0x3FF) / 4.0f; + float fb21 = (float)((color21 >> 20) & 0x3FF) / 4.0f; + + float fr02 = (float)((color02) & 0x3FF) / 4.0f; + float fg02 = (float)((color02 >> 10) & 0x3FF) / 4.0f; + float fb02 = (float)((color02 >> 20) & 0x3FF) / 4.0f; + float fr12 = (float)((color12) & 0x3FF) / 4.0f; + 
float fg12 = (float)((color12 >> 10) & 0x3FF) / 4.0f; + float fb12 = (float)((color12 >> 20) & 0x3FF) / 4.0f; + float fr22 = (float)((color22) & 0x3FF) / 4.0f; + float fg22 = (float)((color22 >> 10) & 0x3FF) / 4.0f; + float fb22 = (float)((color22 >> 20) & 0x3FF) / 4.0f;*/ + + int indx00 = ((y-1)*width+(x-1))*3; + int indx10 = ((y-1)*width+(x))*3; + int indx20 = ((y-1)*width+(x+1))*3; + + int indx01 = ((y)*width+(x-1))*3; + int indx21 = ((y)*width+(x+1))*3; + + int indx02 = ((y+1)*width+(x-1))*3; + int indx12 = ((y+1)*width+(x))*3; + int indx22 = ((y+1)*width+(x+1))*3; + + float fr00 = pixels_in_rgbf[indx00]; + float fg00 = pixels_in_rgbf[indx00+1]; + float fb00 = pixels_in_rgbf[indx00+2]; + float fr10 = pixels_in_rgbf[indx10]; + float fg10 = pixels_in_rgbf[indx10+1]; + float fb10 = pixels_in_rgbf[indx10+2]; + float fr20 = pixels_in_rgbf[indx20]; + float fg20 = pixels_in_rgbf[indx20+1]; + float fb20 = pixels_in_rgbf[indx20+2]; + + float fr01 = pixels_in_rgbf[indx01]; + float fg01 = pixels_in_rgbf[indx01+1]; + float fb01 = pixels_in_rgbf[indx01+2]; + float fr21 = pixels_in_rgbf[indx21]; + float fg21 = pixels_in_rgbf[indx21+1]; + float fb21 = pixels_in_rgbf[indx21+2]; + + float fr02 = pixels_in_rgbf[indx02]; + float fg02 = pixels_in_rgbf[indx02+1]; + float fb02 = pixels_in_rgbf[indx02+2]; + float fr12 = pixels_in_rgbf[indx12]; + float fg12 = pixels_in_rgbf[indx12+1]; + float fb12 = pixels_in_rgbf[indx12+2]; + float fr22 = pixels_in_rgbf[indx22]; + float fg22 = pixels_in_rgbf[indx22+1]; + float fb22 = pixels_in_rgbf[indx22+2]; + + float blurred_fr = (fr00 + fr10 + fr20 + fr01 + 8.0f*fr + fr21 + fr02 + fr12 + fr22)/16.0f; + float blurred_fg = (fg00 + fg10 + fg20 + fg01 + 8.0f*fg + fg21 + fg02 + fg12 + fg22)/16.0f; + float blurred_fb = (fb00 + fb10 + fb20 + fb01 + 8.0f*fb + fb21 + fb02 + fb12 + fb22)/16.0f; + float shift_fr = 1.5f * (fr-blurred_fr); + float shift_fg = 1.5f * (fg-blurred_fg); + float shift_fb = 1.5f * (fb-blurred_fb); + final float threshold2 = 8*8; + 
if( shift_fr*shift_fr + shift_fg*shift_fg + shift_fb*shift_fb > threshold2 ) + { + fr += shift_fr; + fg += shift_fg; + fb += shift_fb; + } + + fr = Math.max(0.0f, Math.min(255.0f, fr)); + fg = Math.max(0.0f, Math.min(255.0f, fg)); + fb = Math.max(0.0f, Math.min(255.0f, fb)); + } + } + + fr = fr - black_level; + fg = fg - black_level; + fb = fb - black_level; + fr *= white_level; + fg *= white_level; + fb *= white_level; + fr = Math.max(0.0f, Math.min(255.0f, fr)); + fg = Math.max(0.0f, Math.min(255.0f, fg)); + fb = Math.max(0.0f, Math.min(255.0f, fb)); + + float value = Math.max(fr, fg); + value = Math.max(value, fb); + + // apply piecewise function of gain vs gamma + if( value <= brighten.low_x ) { + // don't scale + } + else if( value <= brighten.mid_x ) { + //float alpha = (value-low_x)/(mid_x-low_x); + //float new_value = (1.0-alpha)*low_x + alpha*gain*mid_x; + // gain_A and gain_B should be set so that new_value meets the commented out code above + // This code is critical for performance! 
+ + float scale = (brighten.gain_A + brighten.gain_B/value); + fr *= scale; + fg *= scale; + fb *= scale; + } + else { + // use LUT for performance + /*float new_value = (float)Math.pow(value/brighten.max_x, brighten.gamma) * 255.0f; + float gamma_scale = new_value / value;*/ + float gamma_scale = value_to_gamma_scale_lut[(int)(value+0.5f)]; + + fr *= gamma_scale; + fg *= gamma_scale; + fb *= gamma_scale; + } + + int r = (int)(fr+0.5f); + int g = (int)(fg+0.5f); + int b = (int)(fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + /*int [] pixels_out = output.getCachedPixelsI(); + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + }*/ + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class HDRApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final HDRProcessor.TonemappingAlgorithm tonemap_algorithm; + private final float tonemap_scale; // for Reinhard + private final float W; // for FU2 + private final float linear_scale; + private final Bitmap bitmap0; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap0; + private final Bitmap bitmap2; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap2; + final 
int offset_x0; + final int offset_y0; + final int offset_x2; + final int offset_y2; + final int width; + final int height; + float [] parameter_A; + float [] parameter_B; + + HDRApplyFunction(HDRProcessor.TonemappingAlgorithm tonemap_algorithm, float tonemap_scale, float W, float linear_scale, Bitmap bitmap0, Bitmap bitmap2, int offset_x0, int offset_y0, int offset_x2, int offset_y2, int width, int height, float [] parameter_A, float [] parameter_B) { + this.tonemap_algorithm = tonemap_algorithm; + this.tonemap_scale = tonemap_scale; + this.W = W; + this.linear_scale = linear_scale; + this.bitmap0 = bitmap0; + this.bitmap2 = bitmap2; + this.offset_x0 = offset_x0; + this.offset_y0 = offset_y0; + this.offset_x2 = offset_x2; + this.offset_y2 = offset_y2; + this.width = width; + this.height = height; + + if( parameter_A.length != parameter_B.length ) { + throw new RuntimeException("unequal parameter lengths"); + } + this.parameter_A = new float[parameter_A.length]; + System.arraycopy(parameter_A, 0, this.parameter_A, 0, parameter_A.length); + this.parameter_B = new float[parameter_B.length]; + System.arraycopy(parameter_B, 0, this.parameter_B, 0, parameter_B.length); + } + + @Override + public void init(int n_threads) { + fast_bitmap0 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + if( bitmap2 != null ) + fast_bitmap2 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + for(int i=0;i 255 || test_g > 255 || test_b > 255 ) { + out.r = 255; + out.g = 0; + out.b = 255; + }*/ + break; + } + case TONEMAPALGORITHM_FU2: + { + // FU2 (Filmic) + // for FU2; should match setting in HDRProcessor.java: + final float fu2_exposure_bias = 2.0f / 255.0f; + float white_scale = 255.0f / FU2Tonemap(W); + float curr_r = FU2Tonemap(fu2_exposure_bias * hdr_r); + float curr_g = FU2Tonemap(fu2_exposure_bias * hdr_g); + float curr_b = FU2Tonemap(fu2_exposure_bias * hdr_b); + curr_r *= white_scale; + curr_g *= white_scale; + curr_b *= white_scale; + out[0] = 
(int)Math.max(Math.min(curr_r+0.5f, 255.0f), 0.0f); + out[1] = (int)Math.max(Math.min(curr_g+0.5f, 255.0f), 0.0f); + out[2] = (int)Math.max(Math.min(curr_b+0.5f, 255.0f), 0.0f); + break; + } + case TONEMAPALGORITHM_ACES: + { + // https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/ (released under public domain cc0) + final float a = 2.51f; + final float b = 0.03f; + final float c = 2.43f; + final float d = 0.59f; + final float e = 0.14f; + float xr = hdr_r/255.0f; + float xg = hdr_g/255.0f; + float xb = hdr_b/255.0f; + float out_fr = 255.0f * (xr*(a*xr+b))/(xr*(c*xr+d)+e); + float out_fg = 255.0f * (xg*(a*xg+b))/(xg*(c*xg+d)+e); + float out_fb = 255.0f * (xb*(a*xb+b))/(xb*(c*xb+d)+e); + out[0] = (int)Math.max(Math.min(out_fr+0.5f, 255.0f), 0.0f); + out[1] = (int)Math.max(Math.min(out_fg+0.5f, 255.0f), 0.0f); + out[2] = (int)Math.max(Math.min(out_fb+0.5f, 255.0f), 0.0f); + break; + } + } + //return out; + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + + // although we could move temp_rgb to a class member for performance, remember we'd have to have a version per-thread + final int [] temp_rgb = new int[3]; + + //final int max_bitmaps_c = 3; + //int n_bitmaps = 3; + //final int mid_indx = (n_bitmaps-1)/2; + //int pixels_r[max_bitmaps_c]; + //int pixels_g[max_bitmaps_c]; + //int pixels_b[max_bitmaps_c]; + int pixel0_r, pixel0_g, pixel0_b; + int pixel1_r, pixel1_g, pixel1_b; + int pixel2_r, pixel2_g, pixel2_b; + + for(int y=off_y,c=0;y> 16) & 0xFF; + pixel1_g = (pixel1 >> 8) & 0xFF; + pixel1_b = pixel1 & 0xFF; + + if( x+offset_x0 >= 0 && y+offset_y0 >= 0 && x+offset_x0 < width && y+offset_y0 < height ) { + //int pixel0 = fast_bitmap0[thread_index].getPixel(x+offset_x0, y+offset_y0); + int pixel0 = bitmap0_cache_pixels[(y_rel_bitmap0_cache+offset_y0)*width+(x+offset_x0)]; + 
pixel0_r = (pixel0 >> 16) & 0xFF; + pixel0_g = (pixel0 >> 8) & 0xFF; + pixel0_b = pixel0 & 0xFF; + } + else { + pixel0_r = pixel1_r; + pixel0_g = pixel1_g; + pixel0_b = pixel1_b; + this_parameter_A0 = this_parameter_A1; + this_parameter_B0 = this_parameter_B1; + } + + if( x+offset_x2 >= 0 && y+offset_y2 >= 0 && x+offset_x2 < width && y+offset_y2 < height ) { + //int pixel2 = fast_bitmap2[thread_index].getPixel(x+offset_x2, y+offset_y2); + int pixel2 = bitmap2_cache_pixels[(y_rel_bitmap2_cache+offset_y2)*width+(x+offset_x2)]; + pixel2_r = (pixel2 >> 16) & 0xFF; + pixel2_g = (pixel2 >> 8) & 0xFF; + pixel2_b = pixel2 & 0xFF; + } + else { + pixel2_r = pixel1_r; + pixel2_g = pixel1_g; + pixel2_b = pixel1_b; + this_parameter_A2 = this_parameter_A1; + this_parameter_B2 = this_parameter_B1; + } + + float hdr_r = 0.0f; + float hdr_g = 0.0f; + float hdr_b = 0.0f; + float sum_weight = 0.0f; + + // assumes 3 bitmaps, with middle bitmap being the "base" exposure, and first image being darker, third image being brighter + { + final float safe_range_c = 96.0f; + float rgb_r = pixel1_r; + float rgb_g = pixel1_g; + float rgb_b = pixel1_b; + float avg = (rgb_r+rgb_g+rgb_b) / 3.0f; + // avoid Math.abs as this line seems costly for performance: + //float diff = Math.abs( avg - 127.5f ); + float weight = 1.0f; + if( avg <= 127.5f ) { + // We now intentionally have the weights be non-symmetric, and have the weight fall to 0 + // faster for dark pixels than bright pixels. This fixes ghosting problems of testHDR62, + // where we have very dark regions where we get ghosting between the middle and bright + // images, and the image is too dark for the deghosting algorithm below to resolve this. + // We're better off using smaller weight, so that more of the pixel comes from the + // bright image. + // This also gives improved lighting/colour in: testHDR1, testHDR2, testHDR11, + // testHDR12, testHDR21, testHDR52. 
+ final float range_low_c = 32.0f; + final float range_high_c = 48.0f; + if( avg <= range_low_c ) { + weight = 0.0f; + } + else if( avg <= range_high_c ) { + weight = (avg - range_low_c) / (range_high_c - range_low_c); + } + } + else if( (avg - 127.5f) > safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight = 1.0f - 0.99f * ((avg - 127.5f) - safe_range_c) / (127.5f - safe_range_c); + } + + // response function + rgb_r = this_parameter_A1 * rgb_r + this_parameter_B1; + rgb_g = this_parameter_A1 * rgb_g + this_parameter_B1; + rgb_b = this_parameter_A1 * rgb_b + this_parameter_B1; + + hdr_r += weight * rgb_r; + hdr_g += weight * rgb_g; + hdr_b += weight * rgb_b; + sum_weight += weight; + + if( weight < 1.0 ) { + float base_rgb_r = rgb_r; + float base_rgb_g = rgb_g; + float base_rgb_b = rgb_b; + + // now look at a neighbour image + weight = 1.0f - weight; + + if( avg <= 127.5f ) { + rgb_r = pixel2_r; + rgb_g = pixel2_g; + rgb_b = pixel2_b; + /* In some cases it can be that even on the neighbour image, the brightness is too + dark/bright - but it should still be a better choice than the base image. + If we change this (including say for handling more than 3 images), need to be + careful of unpredictable effects. In particular, consider a pixel that is brightness + 255 on the base image. As the brightness on the neighbour image increases, we + should expect that the resultant image also increases (or at least, doesn't + decrease). See testHDR36 for such an example. 
+ */ + /*avg = (rgb.r+rgb.g+rgb.b) / 3.0f; + diff = fabs( avg - 127.5f ); + if( diff > safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight *= 1.0f - 0.99f * (diff - safe_range_c) / (127.5f - safe_range_c); + }*/ + + rgb_r = this_parameter_A2 * rgb_r + this_parameter_B2; + rgb_g = this_parameter_A2 * rgb_g + this_parameter_B2; + rgb_b = this_parameter_A2 * rgb_b + this_parameter_B2; + } + else { + rgb_r = pixel0_r; + rgb_g = pixel0_g; + rgb_b = pixel0_b; + // see note above for why this is commented out + /*avg = (rgb.r+rgb.g+rgb.b) / 3.0f; + diff = fabs( avg - 127.5f ); + if( diff > safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight *= 1.0f - 0.99f * (diff - safe_range_c) / (127.5f - safe_range_c); + }*/ + + rgb_r = this_parameter_A0 * rgb_r + this_parameter_B0; + rgb_g = this_parameter_A0 * rgb_g + this_parameter_B0; + rgb_b = this_parameter_A0 * rgb_b + this_parameter_B0; + } + + float value = Math.max(rgb_r, rgb_g); + value = Math.max(value, rgb_b); + if( value <= 250.0f ) + { + // deghosting + // for overexposed pixels, we don't have a reliable value for that pixel, so we can't distinguish between + // pixels that are overexposed, and those that need deghosting, so we limit to value <= 250.0f + // tests that benefit from deghosting for dark pixels: testHDR2, testHDR9, testHDR19, testHDR21, testHDR30, + // testHDR35, testHDR37, testHDR40, testHDR41, testHDR42, testHDR44 + // tests that benefit from deghosting for bright pixels: testHDR2, testHDR41, testHDR42 + // for 127.5-avg = 96.0, we want wiener_C = wiener_C_lo + // for 127.5-avg = 127.5f, we want wiener_C = wiener_C_hi + final float wiener_C_lo = 2000.0f; + final float wiener_C_hi = 8000.0f; + float wiener_C = wiener_C_lo; // higher value means more HDR but less ghosting + float xx = Math.abs( value - 127.5f ) - 96.0f; + if( xx > 0.0f ) { + final float scale = (wiener_C_hi-wiener_C_lo)/(127.5f-96.0f); + wiener_C = 
wiener_C_lo + xx*scale; + } + float diff_r = base_rgb_r - rgb_r; + float diff_g = base_rgb_g - rgb_g; + float diff_b = base_rgb_b - rgb_b; + float L = (diff_r*diff_r) + (diff_g*diff_g) + (diff_b*diff_b); + float ghost_weight = L/(L+wiener_C); + rgb_r = ghost_weight * base_rgb_r + (1.0f-ghost_weight) * rgb_r; + rgb_g = ghost_weight * base_rgb_g + (1.0f-ghost_weight) * rgb_g; + rgb_b = ghost_weight * base_rgb_b + (1.0f-ghost_weight) * rgb_b; + } + + hdr_r += weight * rgb_r; + hdr_g += weight * rgb_g; + hdr_b += weight * rgb_b; + sum_weight += weight; + + // testing: make all non-safe images purple: + //hdr_r = 255; + //hdr_g = 0; + //hdr_b = 255; + } + } + + hdr_r /= sum_weight; + hdr_g /= sum_weight; + hdr_b /= sum_weight; + + tonemap(temp_rgb, hdr_r, hdr_g, hdr_b); + /*{ + float value = Math.max(hdr_r, hdr_g); + value = Math.max(value, hdr_b); + float scale = 255.0f / ( tonemap_scale + value ); + scale *= linear_scale; + // shouldn't need to clamp - linear_scale should be such that values don't map to more than 255 + temp_rgb[0] = (int)(scale * hdr_r + 0.5f); + temp_rgb[1] = (int)(scale * hdr_g + 0.5f); + temp_rgb[2] = (int)(scale * hdr_b + 0.5f); + }*/ + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (temp_rgb[0] << 16) | (temp_rgb[1] << 8) | temp_rgb[2]; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class HDRNApplyFunction extends HDRApplyFunction { + private final int n_bitmaps; + private final Bitmap bitmap1; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap1; + private final Bitmap bitmap3; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap3; + private final Bitmap bitmap4; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap4; + private final Bitmap bitmap5; + 
JavaImageProcessing.FastAccessBitmap [] fast_bitmap5; + private final Bitmap bitmap6; + JavaImageProcessing.FastAccessBitmap [] fast_bitmap6; + final int offset_x1; + final int offset_y1; + final int offset_x3; + final int offset_y3; + final int offset_x4; + final int offset_y4; + final int offset_x5; + final int offset_y5; + final int offset_x6; + final int offset_y6; + + HDRNApplyFunction(HDRProcessor.TonemappingAlgorithm tonemap_algorithm, float tonemap_scale, float W, float linear_scale, List bitmaps, int [] offsets_x, int [] offsets_y, int width, int height, float [] parameter_A, float [] parameter_B) { + super(tonemap_algorithm, tonemap_scale, W, linear_scale, bitmaps.get(0), bitmaps.size() > 2 ? bitmaps.get(2) : null, offsets_x[0], offsets_y[0], offsets_x.length > 2 ? offsets_x[2] : 0, offsets_y.length > 2 ? offsets_y[2] : 0, width, height, parameter_A, parameter_B); + + this.n_bitmaps = bitmaps.size(); + if( n_bitmaps < 2 || n_bitmaps > 7 ) { + throw new RuntimeException("n_bitmaps not supported: " + n_bitmaps); + } + else if( offsets_x.length != n_bitmaps ) { + throw new RuntimeException("offsets_x unexpected length: " + offsets_x.length); + } + else if( offsets_y.length != n_bitmaps ) { + throw new RuntimeException("offsets_y unexpected length: " + offsets_y.length); + } + + this.bitmap1 = bitmaps.get(1); + this.bitmap3 = n_bitmaps > 3 ? bitmaps.get(3) : null; + this.bitmap4 = n_bitmaps > 4 ? bitmaps.get(4) : null; + this.bitmap5 = n_bitmaps > 5 ? bitmaps.get(5) : null; + this.bitmap6 = n_bitmaps > 6 ? bitmaps.get(6) : null; + + this.offset_x1 = offsets_x[1]; + this.offset_y1 = offsets_y[1]; + this.offset_x3 = n_bitmaps > 3 ? offsets_x[3] : 0; + this.offset_y3 = n_bitmaps > 3 ? offsets_y[3] : 0; + this.offset_x4 = n_bitmaps > 4 ? offsets_x[4] : 0; + this.offset_y4 = n_bitmaps > 4 ? offsets_y[4] : 0; + this.offset_x5 = n_bitmaps > 5 ? offsets_x[5] : 0; + this.offset_y5 = n_bitmaps > 5 ? offsets_y[5] : 0; + this.offset_x6 = n_bitmaps > 6 ? 
offsets_x[6] : 0; + this.offset_y6 = n_bitmaps > 6 ? offsets_y[6] : 0; + + if( parameter_A.length != n_bitmaps || parameter_B.length != n_bitmaps ) { + throw new RuntimeException("unexpected parameter lengths"); + } + } + + @Override + public void init(int n_threads) { + super.init(n_threads); + + if( bitmap1 != null ) + fast_bitmap1 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + if( bitmap3 != null ) + fast_bitmap3 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + if( bitmap4 != null ) + fast_bitmap4 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + if( bitmap5 != null ) + fast_bitmap5 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + if( bitmap6 != null ) + fast_bitmap6 = new JavaImageProcessing.FastAccessBitmap[n_threads]; + for(int i=0;i 2 ) { + fast_bitmap2[thread_index].ensureCache(y+offset_y2, y+offset_y2); // force cache to cover rows needed by this row + int bitmap2_cache_y = fast_bitmap2[thread_index].getCacheY(); + y_rel_bitmap2_cache = y-bitmap2_cache_y; + bitmap2_cache_pixels = fast_bitmap2[thread_index].getCachedPixelsI(); + + if( n_bitmaps > 3 ) { + fast_bitmap3[thread_index].ensureCache(y+offset_y3, y+offset_y3); // force cache to cover rows needed by this row + int bitmap3_cache_y = fast_bitmap3[thread_index].getCacheY(); + y_rel_bitmap3_cache = y-bitmap3_cache_y; + bitmap3_cache_pixels = fast_bitmap3[thread_index].getCachedPixelsI(); + + if( n_bitmaps > 4 ) { + fast_bitmap4[thread_index].ensureCache(y+offset_y4, y+offset_y4); // force cache to cover rows needed by this row + int bitmap4_cache_y = fast_bitmap4[thread_index].getCacheY(); + y_rel_bitmap4_cache = y-bitmap4_cache_y; + bitmap4_cache_pixels = fast_bitmap4[thread_index].getCachedPixelsI(); + + if( n_bitmaps > 5 ) { + fast_bitmap5[thread_index].ensureCache(y+offset_y5, y+offset_y5); // force cache to cover rows needed by this row + int bitmap5_cache_y = fast_bitmap5[thread_index].getCacheY(); + y_rel_bitmap5_cache = y-bitmap5_cache_y; + bitmap5_cache_pixels = 
fast_bitmap5[thread_index].getCachedPixelsI(); + + if( n_bitmaps > 6 ) { + fast_bitmap6[thread_index].ensureCache(y+offset_y6, y+offset_y6); // force cache to cover rows needed by this row + int bitmap6_cache_y = fast_bitmap6[thread_index].getCacheY(); + y_rel_bitmap6_cache = y-bitmap6_cache_y; + bitmap6_cache_pixels = fast_bitmap6[thread_index].getCachedPixelsI(); + } + } + } + } + } + + for(int x=off_x;x> 16) & 0xFF; + base_pixel_g = (base_pixel >> 8) & 0xFF; + base_pixel_b = base_pixel & 0xFF; + + if( x+offset_x0 >= 0 && y+offset_y0 >= 0 && x+offset_x0 < width && y+offset_y0 < height ) { + //int pixel = fast_bitmap0[thread_index].getPixel(x+offset_x0, y+offset_y0); + int pixel = bitmap0_cache_pixels[(y_rel_bitmap0_cache+offset_y0)*width+(x+offset_x0)]; + pixels_r[0] = (pixel >> 16) & 0xFF; + pixels_g[0] = (pixel >> 8) & 0xFF; + pixels_b[0] = pixel & 0xFF; + } + else { + pixels_r[0] = base_pixel_r; + pixels_g[0] = base_pixel_g; + pixels_b[0] = base_pixel_b; + this_parameter_A[0] = this_parameter_A[mid_indx]; + this_parameter_B[0] = this_parameter_B[mid_indx]; + } + + if( x+offset_x1 >= 0 && y+offset_y1 >= 0 && x+offset_x1 < width && y+offset_y1 < height ) { + //int pixel = fast_bitmap1[thread_index].getPixel(x+offset_x1, y+offset_y1); + int pixel = bitmap1_cache_pixels[(y_rel_bitmap1_cache+offset_y1)*width+(x+offset_x1)]; + pixels_r[1] = (pixel >> 16) & 0xFF; + pixels_g[1] = (pixel >> 8) & 0xFF; + pixels_b[1] = pixel & 0xFF; + } + else { + pixels_r[1] = base_pixel_r; + pixels_g[1] = base_pixel_g; + pixels_b[1] = base_pixel_b; + this_parameter_A[1] = this_parameter_A[mid_indx]; + this_parameter_B[1] = this_parameter_B[mid_indx]; + } + + if( n_bitmaps > 2 ) { + if( x+offset_x2 >= 0 && y+offset_y2 >= 0 && x+offset_x2 < width && y+offset_y2 < height ) { + //int pixel = fast_bitmap2[thread_index].getPixel(x+offset_x2, y+offset_y2); + int pixel = bitmap2_cache_pixels[(y_rel_bitmap2_cache+offset_y2)*width+(x+offset_x2)]; + pixels_r[2] = (pixel >> 16) & 0xFF; + 
pixels_g[2] = (pixel >> 8) & 0xFF; + pixels_b[2] = pixel & 0xFF; + } + else { + pixels_r[2] = base_pixel_r; + pixels_g[2] = base_pixel_g; + pixels_b[2] = base_pixel_b; + this_parameter_A[2] = this_parameter_A[mid_indx]; + this_parameter_B[2] = this_parameter_B[mid_indx]; + } + + if( n_bitmaps > 3 ) { + if( x+offset_x3 >= 0 && y+offset_y3 >= 0 && x+offset_x3 < width && y+offset_y3 < height ) { + //int pixel = fast_bitmap3[thread_index].getPixel(x+offset_x3, y+offset_y3); + int pixel = bitmap3_cache_pixels[(y_rel_bitmap3_cache+offset_y3)*width+(x+offset_x3)]; + pixels_r[3] = (pixel >> 16) & 0xFF; + pixels_g[3] = (pixel >> 8) & 0xFF; + pixels_b[3] = pixel & 0xFF; + } + else { + pixels_r[3] = base_pixel_r; + pixels_g[3] = base_pixel_g; + pixels_b[3] = base_pixel_b; + this_parameter_A[3] = this_parameter_A[mid_indx]; + this_parameter_B[3] = this_parameter_B[mid_indx]; + } + + if( n_bitmaps > 4 ) { + if( x+offset_x4 >= 0 && y+offset_y4 >= 0 && x+offset_x4 < width && y+offset_y4 < height ) { + //int pixel = fast_bitmap4[thread_index].getPixel(x+offset_x4, y+offset_y4); + int pixel = bitmap4_cache_pixels[(y_rel_bitmap4_cache+offset_y4)*width+(x+offset_x4)]; + pixels_r[4] = (pixel >> 16) & 0xFF; + pixels_g[4] = (pixel >> 8) & 0xFF; + pixels_b[4] = pixel & 0xFF; + } + else { + pixels_r[4] = base_pixel_r; + pixels_g[4] = base_pixel_g; + pixels_b[4] = base_pixel_b; + this_parameter_A[4] = this_parameter_A[mid_indx]; + this_parameter_B[4] = this_parameter_B[mid_indx]; + } + + if( n_bitmaps > 5 ) { + if( x+offset_x5 >= 0 && y+offset_y5 >= 0 && x+offset_x5 < width && y+offset_y5 < height ) { + //int pixel = fast_bitmap5[thread_index].getPixel(x+offset_x5, y+offset_y5); + int pixel = bitmap5_cache_pixels[(y_rel_bitmap5_cache+offset_y5)*width+(x+offset_x5)]; + pixels_r[5] = (pixel >> 16) & 0xFF; + pixels_g[5] = (pixel >> 8) & 0xFF; + pixels_b[5] = pixel & 0xFF; + } + else { + pixels_r[5] = base_pixel_r; + pixels_g[5] = base_pixel_g; + pixels_b[5] = base_pixel_b; + 
this_parameter_A[5] = this_parameter_A[mid_indx]; + this_parameter_B[5] = this_parameter_B[mid_indx]; + } + + if( n_bitmaps > 6 ) { + if( x+offset_x6 >= 0 && y+offset_y6 >= 0 && x+offset_x6 < width && y+offset_y6 < height ) { + //int pixel = fast_bitmap6[thread_index].getPixel(x+offset_x6, y+offset_y6); + int pixel = bitmap6_cache_pixels[(y_rel_bitmap6_cache+offset_y6)*width+(x+offset_x6)]; + pixels_r[6] = (pixel >> 16) & 0xFF; + pixels_g[6] = (pixel >> 8) & 0xFF; + pixels_b[6] = pixel & 0xFF; + } + else { + pixels_r[6] = base_pixel_r; + pixels_g[6] = base_pixel_g; + pixels_b[6] = base_pixel_b; + this_parameter_A[6] = this_parameter_A[mid_indx]; + this_parameter_B[6] = this_parameter_B[mid_indx]; + } + } + } + } + } + } + + float hdr_r = 0.0f; + float hdr_g = 0.0f; + float hdr_b = 0.0f; + float sum_weight = 0.0f; + + // assumes from 2 to 7 bitmaps, with middle bitmap being the "base" exposure, and first images being darker, last images being brighter + { + final float safe_range_c = 96.0f; + float rgb_r = pixels_r[mid_indx]; + float rgb_g = pixels_g[mid_indx]; + float rgb_b = pixels_b[mid_indx]; + float avg = (rgb_r+rgb_g+rgb_b) / 3.0f; + // avoid Math.abs as this line seems costly for performance: + //float diff = Math.abs( avg - 127.5f ); + float weight = 1.0f; + if( avg <= 127.5f ) { + // see comment for corresponding code in HDRApplyFunction + final float range_low_c = 32.0f; + final float range_high_c = 48.0f; + if( avg <= range_low_c ) { + weight = 0.0f; + } + else if( avg <= range_high_c ) { + weight = (avg - range_low_c) / (range_high_c - range_low_c); + } + } + else if( (avg - 127.5f) > safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight = 1.0f - 0.99f * ((avg - 127.5f) - safe_range_c) / (127.5f - safe_range_c); + } + + // response function + rgb_r = this_parameter_A[mid_indx] * rgb_r + this_parameter_B[mid_indx]; + rgb_g = this_parameter_A[mid_indx] * rgb_g + this_parameter_B[mid_indx]; + rgb_b = 
this_parameter_A[mid_indx] * rgb_b + this_parameter_B[mid_indx]; + + hdr_r += weight * rgb_r; + hdr_g += weight * rgb_g; + hdr_b += weight * rgb_b; + sum_weight += weight; + + if( even ) { + float rgb1_r = pixels_r[mid_indx+1]; + float rgb1_g = pixels_g[mid_indx+1]; + float rgb1_b = pixels_b[mid_indx+1]; + float avg1 = (rgb1_r+rgb1_g+rgb1_b) / 3.0f; + float diff1 = Math.abs( avg1 - 127.5f ); + float weight1 = 1.0f; + if( diff1 > safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight1 = 1.0f - 0.99f * (diff1 - safe_range_c) / (127.5f - safe_range_c); + } + rgb1_r = this_parameter_A[mid_indx+1] * rgb1_r + this_parameter_B[mid_indx+1]; + rgb1_g = this_parameter_A[mid_indx+1] * rgb1_g + this_parameter_B[mid_indx+1]; + rgb1_b = this_parameter_A[mid_indx+1] * rgb1_b + this_parameter_B[mid_indx+1]; + + hdr_r += weight1 * rgb1_r; + hdr_g += weight1 * rgb1_g; + hdr_b += weight1 * rgb1_b; + sum_weight += weight1; + + avg = (avg+avg1)/2.0f; + weight = (weight+weight1)/2.0f; + } + + if( weight < 1.0 ) { + float base_rgb_r = rgb_r; + float base_rgb_g = rgb_g; + float base_rgb_b = rgb_b; + + int adj_indx = mid_indx; + int step_dir = avg <= 127.5f ? 
1 : -1; + if( even && step_dir == 1 ) { + adj_indx++; // so we move one beyond the middle pair of images (since mid_indx will be the darker of the pair) + } + + float diff = 0.0f; + int n_adj = (n_bitmaps-1)/2; + for(int k=0;k safe_range_c ) { + // scaling chosen so that 0 and 255 map to a non-zero weight of 0.01 + weight *= 1.0f - 0.99f * (diff - safe_range_c) / (127.5f - safe_range_c); + } + } + + rgb_r = this_parameter_A[adj_indx] * rgb_r + this_parameter_B[adj_indx]; + rgb_g = this_parameter_A[adj_indx] * rgb_g + this_parameter_B[adj_indx]; + rgb_b = this_parameter_A[adj_indx] * rgb_b + this_parameter_B[adj_indx]; + + float value = Math.max(rgb_r, rgb_g); + value = Math.max(value, rgb_b); + if( value <= 250.0f ) + { + // deghosting + // for overexposed pixels, we don't have a reliable value for that pixel, so we can't distinguish between + // pixels that are overexposed, and those that need deghosting, so we limit to value <= 250.0f + // tests that benefit from deghosting for dark pixels: testHDR2, testHDR9, testHDR19, testHDR21, testHDR30, + // testHDR35, testHDR37, testHDR40, testHDR41, testHDR42, testHDR44 + // tests that benefit from deghosting for bright pixels: testHDR2, testHDR41, testHDR42 + // for 127.5-avg = 96.0, we want wiener_C = wiener_C_lo + // for 127.5-avg = 127.5f, we want wiener_C = wiener_C_hi + final float wiener_C_lo = 2000.0f; + final float wiener_C_hi = 8000.0f; + float wiener_C = wiener_C_lo; // higher value means more HDR but less ghosting + float xx = Math.abs( value - 127.5f ) - 96.0f; + if( xx > 0.0f ) { + final float scale = (wiener_C_hi-wiener_C_lo)/(127.5f-96.0f); + wiener_C = wiener_C_lo + xx*scale; + } + float diff_r = base_rgb_r - rgb_r; + float diff_g = base_rgb_g - rgb_g; + float diff_b = base_rgb_b - rgb_b; + float L = (diff_r*diff_r) + (diff_g*diff_g) + (diff_b*diff_b); + float ghost_weight = L/(L+wiener_C); + rgb_r = ghost_weight * base_rgb_r + (1.0f-ghost_weight) * rgb_r; + rgb_g = ghost_weight * base_rgb_g + 
(1.0f-ghost_weight) * rgb_g; + rgb_b = ghost_weight * base_rgb_b + (1.0f-ghost_weight) * rgb_b; + } + + hdr_r += weight * rgb_r; + hdr_g += weight * rgb_g; + hdr_b += weight * rgb_b; + sum_weight += weight; + + if( diff <= safe_range_c ) { + break; + } + + // testing: make all non-safe images purple: + //hdr_r = 255; + //hdr_g = 0; + //hdr_b = 255; + + } + } + } + + hdr_r /= sum_weight; + hdr_g /= sum_weight; + hdr_b /= sum_weight; + + tonemap(temp_rgb, hdr_r, hdr_g, hdr_b); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (temp_rgb[0] << 16) | (temp_rgb[1] << 8) | temp_rgb[2]; + } + } + } + } + + static class AdjustHistogramApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float hdr_alpha; // 0.0 means no change, 1.0 means fully equalise + private final int n_tiles; + private final int width; + private final int height; + private final int [] c_histogram; + + AdjustHistogramApplyFunction(float hdr_alpha, int n_tiles, int width, int height, int [] c_histogram) { + this.hdr_alpha = hdr_alpha; + this.n_tiles = n_tiles; + this.width = width; + this.height = height; + this.c_histogram = c_histogram; + } + + private int getEqualValue(int histogram_offset, int value) { + int cdf_v = c_histogram[histogram_offset+value]; + int cdf_0 = c_histogram[histogram_offset]; + int n_pixels = c_histogram[histogram_offset+255]; + float num = (float)(cdf_v - cdf_0); + float den = (float)(n_pixels - cdf_0); + int equal_value = (int)( 255.0f * (num/den) ); // value that we should choose to fully equalise the histogram + return equal_value; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, 
int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + int value = Math.max(r, g); + value = Math.max(value, b); + + float tx = ((float)x*n_tiles)/(float)width - 0.5f; + float ty = ((float)y*n_tiles)/(float)height - 0.5f; + + // inline floor for performance + //int ix = (int)Math.floor(tx); + //int iy = (int)Math.floor(ty); + int ix = tx >= 0.0 ? (int)tx : (int)tx - 1; + int iy = ty >= 0.0 ? (int)ty : (int)ty - 1; + /*if( ix != (int)Math.floor(tx) || iy != (int)Math.floor(ty) ) { + throw new RuntimeException("floor error"); + }*/ + int equal_value; + if( ix >= 0 && ix < n_tiles-1 && iy >= 0 && iy < n_tiles-1 ) { + int histogram_offset00 = 256*(ix*n_tiles+iy); + int histogram_offset10 = 256*((ix+1)*n_tiles+iy); + int histogram_offset01 = 256*(ix*n_tiles+iy+1); + int histogram_offset11 = 256*((ix+1)*n_tiles+iy+1); + int equal_value00 = getEqualValue(histogram_offset00, value); + int equal_value10 = getEqualValue(histogram_offset10, value); + int equal_value01 = getEqualValue(histogram_offset01, value); + int equal_value11 = getEqualValue(histogram_offset11, value); + float alpha = tx - ix; + float beta = ty - iy; + + float equal_value0 = (1.0f-alpha)*equal_value00 + alpha*equal_value10; + float equal_value1 = (1.0f-alpha)*equal_value01 + alpha*equal_value11; + equal_value = (int)((1.0f-beta)*equal_value0 + beta*equal_value1); + } + else if( ix >= 0 && ix < n_tiles-1 ) { + int this_y = (iy<0) ? 
iy+1 : iy; + int histogram_offset0 = 256*(ix*n_tiles+this_y); + int histogram_offset1 = 256*((ix+1)*n_tiles+this_y); + int equal_value0 = getEqualValue(histogram_offset0, value); + int equal_value1 = getEqualValue(histogram_offset1, value); + float alpha = tx - ix; + equal_value = (int)((1.0f-alpha)*equal_value0 + alpha*equal_value1); + } + else if( iy >= 0 && iy < n_tiles-1 ) { + int this_x = (ix<0) ? ix+1 : ix; + int histogram_offset0 = 256*(this_x*n_tiles+iy); + int histogram_offset1 = 256*(this_x*n_tiles+iy+1); + int equal_value0 = getEqualValue(histogram_offset0, value); + int equal_value1 = getEqualValue(histogram_offset1, value); + float beta = ty - iy; + equal_value = (int)((1.0f-beta)*equal_value0 + beta*equal_value1); + } + else { + int this_x = (ix<0) ? ix+1 : ix; + int this_y = (iy<0) ? iy+1 : iy; + int histogram_offset = 256*(this_x*n_tiles+this_y); + equal_value = getEqualValue(histogram_offset, value); + } + + int new_value = (int)( (1.0f-hdr_alpha) * value + hdr_alpha * equal_value ); + + //float use_hdr_alpha = smart_contrast_enhancement ? hdr_alpha*((float)value/255.0f) : hdr_alpha; + //float use_hdr_alpha = smart_contrast_enhancement ? hdr_alpha*pow(((float)value/255.0f), 0.5f) : hdr_alpha; + //int new_value = (int)( (1.0f-use_hdr_alpha) * value + use_hdr_alpha * equal_value ); + + float scale = ((float)new_value) / (float)value; + + // need to add +0.5 so that we round to nearest - particularly important as due to floating point rounding, we + // can end up with incorrect behaviour even when new_value==value! 
+ r = Math.min(255, (int)(r * scale + 0.5f)); + g = Math.min(255, (int)(g * scale + 0.5f)); + b = Math.min(255, (int)(b * scale + 0.5f)); + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + byte [] pixels_out = output.getCachedPixelsB(); + for(int y=off_y,c=0;y= 0.0 ? (int)tx : (int)tx - 1; + int iy = ty >= 0.0 ? (int)ty : (int)ty - 1; + /*if( ix != (int)Math.floor(tx) || iy != (int)Math.floor(ty) ) { + throw new RuntimeException("floor error"); + }*/ + int equal_value; + if( ix >= 0 && ix < n_tiles-1 && iy >= 0 && iy < n_tiles-1 ) { + int histogram_offset00 = 256*(ix*n_tiles+iy); + int histogram_offset10 = 256*((ix+1)*n_tiles+iy); + int histogram_offset01 = 256*(ix*n_tiles+iy+1); + int histogram_offset11 = 256*((ix+1)*n_tiles+iy+1); + int equal_value00 = getEqualValue(histogram_offset00, value); + int equal_value10 = getEqualValue(histogram_offset10, value); + int equal_value01 = getEqualValue(histogram_offset01, value); + int equal_value11 = getEqualValue(histogram_offset11, value); + float alpha = tx - ix; + float beta = ty - iy; + + float equal_value0 = (1.0f-alpha)*equal_value00 + alpha*equal_value10; + float equal_value1 = (1.0f-alpha)*equal_value01 + alpha*equal_value11; + equal_value = (int)((1.0f-beta)*equal_value0 + beta*equal_value1); + } + else if( ix >= 0 && ix < n_tiles-1 ) { + int this_y = (iy<0) ? 
iy+1 : iy; + int histogram_offset0 = 256*(ix*n_tiles+this_y); + int histogram_offset1 = 256*((ix+1)*n_tiles+this_y); + int equal_value0 = getEqualValue(histogram_offset0, value); + int equal_value1 = getEqualValue(histogram_offset1, value); + float alpha = tx - ix; + equal_value = (int)((1.0f-alpha)*equal_value0 + alpha*equal_value1); + } + else if( iy >= 0 && iy < n_tiles-1 ) { + int this_x = (ix<0) ? ix+1 : ix; + int histogram_offset0 = 256*(this_x*n_tiles+iy); + int histogram_offset1 = 256*(this_x*n_tiles+iy+1); + int equal_value0 = getEqualValue(histogram_offset0, value); + int equal_value1 = getEqualValue(histogram_offset1, value); + float beta = ty - iy; + equal_value = (int)((1.0f-beta)*equal_value0 + beta*equal_value1); + } + else { + int this_x = (ix<0) ? ix+1 : ix; + int this_y = (iy<0) ? iy+1 : iy; + int histogram_offset = 256*(this_x*n_tiles+this_y); + equal_value = getEqualValue(histogram_offset, value); + } + + int new_value = (int)( (1.0f-hdr_alpha) * value + hdr_alpha * equal_value ); + + //float use_hdr_alpha = smart_contrast_enhancement ? hdr_alpha*((float)value/255.0f) : hdr_alpha; + //float use_hdr_alpha = smart_contrast_enhancement ? hdr_alpha*pow(((float)value/255.0f), 0.5f) : hdr_alpha; + //int new_value = (int)( (1.0f-use_hdr_alpha) * value + use_hdr_alpha * equal_value ); + + float scale = ((float)new_value) / (float)value; + + // need to add +0.5 so that we round to nearest - particularly important as due to floating point rounding, we + // can end up with incorrect behaviour even when new_value==value! 
+ pixels_out[c] = (byte)Math.min(255, (int)(r * scale + 0.5f)); + pixels_out[c+1] = (byte)Math.min(255, (int)(g * scale + 0.5f)); + pixels_out[c+2] = (byte)Math.min(255, (int)(b * scale + 0.5f)); + pixels_out[c+3] = (byte)255; + } + } + } + } + + public static class ComputeHistogramApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private int [][] histograms = null; + private float [] pixels_rgb_f; + private int pixels_width; + public enum Type { + TYPE_RGB, // returns array of length 3*256, containing the red histogram, followed by green, then blue + TYPE_LUMINANCE, // 0.299f*r + 0.587f*g + 0.114f*b + TYPE_VALUE, // max(r,g,b) + TYPE_INTENSITY, // mean(r, g, b) + TYPE_LIGHTNESS // mean( min(r,g,b), max(r,g,b) ) + } + private final Type type; + + public ComputeHistogramApplyFunction(Type type) { + this.type = type; + } + + /** For use when we want to operate over a full pixel array, instead of an input supplied to applyFunction(). + */ + void setPixelsRGBf(float [] pixels_rgb_f, int pixels_width) { + this.pixels_rgb_f = pixels_rgb_f; + this.pixels_width = pixels_width; + } + + @Override + public void init(int n_threads) { + histograms = new int[n_threads][]; + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // version for operating on the supplied floating point array in rgb format + if( type != Type.TYPE_VALUE ) + throw new RuntimeException("type not supported: " + type); + if( histograms[thread_index] == null ) + histograms[thread_index] = new int[256]; + for(int y=off_y;y> 16) & 0xFF)]++; // red + histograms[thread_index][256 + ((color >> 8) & 0xFF)]++; // green + histograms[thread_index][512 + (color & 0xFF)]++; // blue + } + break; + case TYPE_LUMINANCE: + for(int c=0;c> 16) & 0xFF); + float fg = (float)((color >> 8) & 0xFF); + float fb = (float)(color & 0xFF); + float avg = (0.299f*fr + 0.587f*fg + 0.114f*fb); + int value = 
(int)(avg+0.5); // round to nearest + value = Math.min(value, 255); // just in case + histograms[thread_index][value]++; + } + break; + case TYPE_VALUE: + for(int c=0;c> 16) & 0xFF, (color >> 8) & 0xFF ); + value = Math.max( value, color & 0xFF ); + histograms[thread_index][value]++; + } + break; + case TYPE_INTENSITY: + for(int c=0;c> 16) & 0xFF); + float fg = (float)((color >> 8) & 0xFF); + float fb = (float)(color & 0xFF); + float avg = (fr + fg + fb)/3.0f; + int value = (int)(avg+0.5); // round to nearest + value = Math.min(value, 255); // just in case + histograms[thread_index][value]++; + } + break; + case TYPE_LIGHTNESS: + for(int c=0;c> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + int max_value = Math.max( r, g ); + max_value = Math.max( max_value, b ); + int min_value = Math.min( r, g ); + min_value = Math.min( min_value, b ); + float avg = (min_value + max_value)/2.0f; + int value = (int)(avg+0.5); // round to nearest + value = Math.min(value, 255); // just in case + histograms[thread_index][value]++; + } + break; + default: + throw new RuntimeException("unknown: " + type); + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + /*if( MyDebug.LOG ) + Log.d(TAG, "ComputeHistogramApplyFunction.apply [byte vector array]");*/ + if( histograms[thread_index] == null ) + histograms[thread_index] = new int[256]; + for(int c=0;c<4*this_width*this_height;) { // n.b., we increment c inside the loop + int r = pixels[c++]; + int g = pixels[c++]; + int b = pixels[c++]; + // bytes are signed! 
+ if( r < 0 ) + r += 256; + if( g < 0 ) + g += 256; + if( b < 0 ) + b += 256; + c++; // skip padding + int value = Math.max(r, g); + value = Math.max(value, b); + value = Math.min(value, 255); + value = Math.max(value, 0); + histograms[thread_index][value]++; + } + } + + public int [] getHistogram() { + int [] total_histogram = new int[histograms[0].length]; + // for each histogram, add its entries to the total histogram + for(int [] histogram : histograms) { + for (int j=0;j> 16) & 0xFF, (color >> 8) & 0xFF ); + value = Math.max( value, color & 0xFF ); + + if( value >= zebra_stripes_threshold ) { + int stripe = (x+y)/zebra_stripes_width; + if( stripe % 2 == 0 ) { + pixels_out[c] = zebra_stripes_background; + } + else { + pixels_out[c] = zebra_stripes_foreground; + } + } + else { + pixels_out[c] = 0; // transparent (zero alpha) + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + public static class FocusPeakingApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap; + + public FocusPeakingApplyFunction(Bitmap bitmap) { + this.bitmap = bitmap; + this.width = bitmap.getWidth(); + this.height = bitmap.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 1 && x < width-1 && y >= 1 && y < height-1 ) { + fast_bitmap[thread_index].ensureCache(y-1, y+1); // force cache to cover rows needed by this row + int bitmap_cache_y = fast_bitmap[thread_index].getCacheY(); + int y_rel_bitmap_cache = y-bitmap_cache_y; + int [] bitmap_cache_pixels = fast_bitmap[thread_index].getCachedPixelsI(); + + //int pixel0c = 
fast_bitmap[thread_index].getPixel(x-1, y-1); + int pixel0c = bitmap_cache_pixels[(y_rel_bitmap_cache-1)*width+(x-1)]; + int pixel1c = bitmap_cache_pixels[(y_rel_bitmap_cache-1)*width+(x)]; + int pixel2c = bitmap_cache_pixels[(y_rel_bitmap_cache-1)*width+(x+1)]; + int pixel3c = bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x-1)]; + int pixel4c = pixels[c]; + /*if( pixels[c] != bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x)] ) { + throw new RuntimeException("pixel4c incorrect"); + }*/ + int pixel5c = bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x+1)]; + int pixel6c = bitmap_cache_pixels[(y_rel_bitmap_cache+1)*width+(x-1)]; + int pixel7c = bitmap_cache_pixels[(y_rel_bitmap_cache+1)*width+(x)]; + int pixel8c = bitmap_cache_pixels[(y_rel_bitmap_cache+1)*width+(x+1)]; + + int pixel0r = (pixel0c >> 16) & 0xFF; + int pixel0g = (pixel0c >> 8) & 0xFF; + int pixel0b = pixel0c & 0xFF; + + int pixel1r = (pixel1c >> 16) & 0xFF; + int pixel1g = (pixel1c >> 8) & 0xFF; + int pixel1b = pixel1c & 0xFF; + + int pixel2r = (pixel2c >> 16) & 0xFF; + int pixel2g = (pixel2c >> 8) & 0xFF; + int pixel2b = pixel2c & 0xFF; + + int pixel3r = (pixel3c >> 16) & 0xFF; + int pixel3g = (pixel3c >> 8) & 0xFF; + int pixel3b = pixel3c & 0xFF; + + int pixel4r = (pixel4c >> 16) & 0xFF; + int pixel4g = (pixel4c >> 8) & 0xFF; + int pixel4b = pixel4c & 0xFF; + + int pixel5r = (pixel5c >> 16) & 0xFF; + int pixel5g = (pixel5c >> 8) & 0xFF; + int pixel5b = pixel5c & 0xFF; + + int pixel6r = (pixel6c >> 16) & 0xFF; + int pixel6g = (pixel6c >> 8) & 0xFF; + int pixel6b = pixel6c & 0xFF; + + int pixel7r = (pixel7c >> 16) & 0xFF; + int pixel7g = (pixel7c >> 8) & 0xFF; + int pixel7b = pixel7c & 0xFF; + + int pixel8r = (pixel8c >> 16) & 0xFF; + int pixel8g = (pixel8c >> 8) & 0xFF; + int pixel8b = pixel8c & 0xFF; + + int value_r = ( 8*pixel4r - pixel0r - pixel1r - pixel2r - pixel3r - pixel5r - pixel6r - pixel7r - pixel8r ); + int value_g = ( 8*pixel4g - pixel0g - pixel1g - pixel2g - pixel3g - pixel5g - 
pixel6g - pixel7g - pixel8g ); + int value_b = ( 8*pixel4b - pixel0b - pixel1b - pixel2b - pixel3b - pixel5b - pixel6b - pixel7b - pixel8b ); + strength = value_r*value_r + value_g*value_g + value_b*value_b; + } + + if( strength > 256*256 ) { + pixels_out[c] = (255 << 24) | (255 << 16) | (255 << 8) | 255; + } + else { + pixels_out[c] = 0; // transparent (zero alpha) + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + public static class FocusPeakingFilteredApplyFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap; + + public FocusPeakingFilteredApplyFunction(Bitmap bitmap) { + this.bitmap = bitmap; + this.width = bitmap.getWidth(); + this.height = bitmap.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 1 && x < width-1 && y >= 1 && y < height-1 ) { + fast_bitmap[thread_index].ensureCache(y-1, y+1); // force cache to cover rows needed by this row + int bitmap_cache_y = fast_bitmap[thread_index].getCacheY(); + int y_rel_bitmap_cache = y-bitmap_cache_y; + int [] bitmap_cache_pixels = fast_bitmap[thread_index].getCachedPixelsI(); + + // only need to read one component, as input image is now greyscale + int pixel1 = bitmap_cache_pixels[(y_rel_bitmap_cache-1)*width+(x)] & 0xFF; + int pixel3 = bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x-1)] & 0xFF; + int pixel4 = pixels[c] & 0xFF; + /*if( pixels[c] != bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x)] ) { + throw new RuntimeException("pixel4c incorrect"); + }*/ + int pixel5 = bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x+1)] & 0xFF; + int pixel7 = 
bitmap_cache_pixels[(y_rel_bitmap_cache+1)*width+(x)] & 0xFF; + + if( pixel1 == 255 ) + count++; + if( pixel3 == 255 ) + count++; + if( pixel4 == 255 ) + count++; + if( pixel5 == 255 ) + count++; + if( pixel7 == 255 ) + count++; + + } + + if( count >= 3 ) { + pixels_out[c] = (255 << 24) | (255 << 16) | (255 << 8) | 255; + } + else { + pixels_out[c] = 0; // transparent (zero alpha) + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class ConvertToGreyscaleFunction implements JavaImageProcessing.ApplyFunctionInterface { + + ConvertToGreyscaleFunction() { + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + for(int y=off_y,c=0;y> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + int value = (int)(0.3* (float) r + 0.59* (float) g + 0.11* (float) b); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = value << 24; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class ComputeDerivativesFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap_Ix; // output for x derivatives + private final Bitmap bitmap_Iy; // output 
for y derivatives + private final Bitmap bitmap_in; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_in; + + ComputeDerivativesFunction(Bitmap bitmap_Ix, Bitmap bitmap_Iy, Bitmap bitmap_in) { + this.bitmap_Ix = bitmap_Ix; + this.bitmap_Iy = bitmap_Iy; + this.bitmap_in = bitmap_in; + this.width = bitmap_in.getWidth(); + this.height = bitmap_in.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_in = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 1 && x < width-1 && y >= 1 && y < height-1 ) { + // use Sobel operator + + //int pixel0 = fast_bitmap_in[thread_index].getPixel(x-1, y-1) >>> 24; + //int pixel0 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x-1)] >>> 24; + int pixel1 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x)] >>> 24; + //int pixel2 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x+1)] >>> 24; + int pixel3 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x-1)] >>> 24; + int pixel5 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x+1)] >>> 24; + //int pixel6 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x-1)] >>> 24; + int pixel7 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x)] >>> 24; + //int pixel8 = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x+1)] >>> 24; + + //int iIx = (pixel2 + 2*pixel5 + pixel8) - (pixel0 + 2*pixel3 + pixel6); + //int iIy = (pixel6 + 2*pixel7 + pixel8) - (pixel0 + 2*pixel1 + pixel2); + //iIx /= 8; + //iIy /= 8; + int iIx = (pixel5 - pixel3)/2; + int iIy = (pixel7 - pixel1)/2; + + // convert so we can store in range 0-255 + + iIx = Math.max(iIx, -127); + iIx = Math.min(iIx, 128); + iIx += 127; // iIx now runs from 0 to 255 + + iIy = Math.max(iIy, -127); + iIy = Math.min(iIy, 128); + iIy += 127; // iIy now runs from 0 to 255 + + Ix = iIx; + Iy = iIy; + } + + //bitmap_Ix.setPixel(x, y, Ix << 24); + //bitmap_Iy.setPixel(x, y, Iy << 24); + 
cache_bitmap_Ix[c] = Ix << 24; + cache_bitmap_Iy[c] = Iy << 24; + } + } + + bitmap_Ix.setPixels(cache_bitmap_Ix, 0, this_width, off_x, off_y, this_width, this_height); + bitmap_Iy.setPixels(cache_bitmap_Iy, 0, this_width, off_x, off_y, this_width, this_height); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class CornerDetectorFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float [] pixels_f; // output + private final Bitmap bitmap_Ix; + private final Bitmap bitmap_Iy; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_Ix; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_Iy; + + CornerDetectorFunction(float [] pixels_f, Bitmap bitmap_Ix, Bitmap bitmap_Iy) { + this.pixels_f = pixels_f; + this.bitmap_Ix = bitmap_Ix; + this.bitmap_Iy = bitmap_Iy; + this.width = bitmap_Ix.getWidth(); + this.height = bitmap_Ix.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_Ix = new JavaImageProcessing.FastAccessBitmap[n_threads]; + fast_bitmap_Iy = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= radius+1 && x < width-radius-1 && y >= radius+1 && y < height-radius-1 ) { + float h00 = 0.0f; + float h01 = 0.0f; + float h11 = 0.0f; + for(int cy=y-radius;cy<=y+radius;cy++) { + for(int cx=x-radius;cx<=x+radius;cx++) { + int dx = cx - x; + int dy = cy - y; + + int Ix = bitmap_Ix_cache_pixels[(y_rel_bitmap_Ix_cache+dy)*width+(cx)] >>> 24; + int Iy = bitmap_Iy_cache_pixels[(y_rel_bitmap_Iy_cache+dy)*width+(cx)] >>> 24; + + 
// convert from 0-255 to -127 - +128: + Ix -= 127; + Iy -= 127; + + /*float dist2 = dx*dx + dy*dy; + const float sigma2 = 0.25f; + float weight = exp(-dist2/(2.0f*sigma2)) / (6.28318530718f*sigma2); + //float weight = 1.0; + weight /= 65025.0f; // scale from (0, 255) to (0, 1) + */ + float weight = weights[2+dx] * weights[2+dy]; + //weight = 36; + + h00 += weight*Ix*Ix; + h01 += weight*Ix*Iy; + h11 += weight*Iy*Iy; + } + } + + float det_H = h00*h11 - h01*h01; + float tr_H = h00 + h11; + //out = det_H - 0.1f*tr_H*tr_H; + out = det_H - 0.06f*tr_H*tr_H; + } + + pixels_f[y*width+x] = out; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class LocalMaximumFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float [] pixels_f; // input + private final byte [] bytes; // output + private final int width, height; + private final float corner_threshold; + + LocalMaximumFunction(float [] pixels_f, byte [] bytes, int width, int height, float corner_threshold) { + this.pixels_f = pixels_f; + this.bytes = bytes; + this.width = width; + this.height = height; + this.corner_threshold = corner_threshold; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + for(int y=off_y,c=0;y= corner_threshold ) { + //out = 255; + // best of 3x3: + /*if( x >= 1 && x < width-1 && y >= 1 && y < height-1 ) { + if( in > rsGetElementAt_float(bitmap, x-1, y-1) && + in > 
rsGetElementAt_float(bitmap, x, y-1) && + in > rsGetElementAt_float(bitmap, x+1, y-1) && + + in > rsGetElementAt_float(bitmap, x-1, y) && + in > rsGetElementAt_float(bitmap, x+1, y) && + + in > rsGetElementAt_float(bitmap, x-1, y+1) && + in > rsGetElementAt_float(bitmap, x, y+1) && + in > rsGetElementAt_float(bitmap, x+1, y+1) + ) { + out = 255; + } + }*/ + // best of 5x5: + if( x >= 2 && x < width-2 && y >= 2 && y < height-2 ) { + if( in > pixels_f[(y-2)*width+(x-2)] && + in > pixels_f[(y-2)*width+(x-1)] && + in > pixels_f[(y-2)*width+(x)] && + in > pixels_f[(y-2)*width+(x+1)] && + in > pixels_f[(y-2)*width+(x+2)] && + + in > pixels_f[(y-1)*width+(x-2)] && + in > pixels_f[(y-1)*width+(x-1)] && + in > pixels_f[(y-1)*width+(x)] && + in > pixels_f[(y-1)*width+(x+1)] && + in > pixels_f[(y-1)*width+(x+2)] && + + in > pixels_f[(y)*width+(x-2)] && + in > pixels_f[(y)*width+(x-1)] && + in > pixels_f[(y)*width+(x+1)] && + in > pixels_f[(y)*width+(x+2)] && + + in > pixels_f[(y+1)*width+(x-2)] && + in > pixels_f[(y+1)*width+(x-1)] && + in > pixels_f[(y+1)*width+(x)] && + in > pixels_f[(y+1)*width+(x+1)] && + in > pixels_f[(y+1)*width+(x+2)] && + + in > pixels_f[(y+2)*width+(x-2)] && + in > pixels_f[(y+2)*width+(x-1)] && + in > pixels_f[(y+2)*width+(x)] && + in > pixels_f[(y+2)*width+(x+1)] && + in > pixels_f[(y+2)*width+(x+2)] + ) { + out = 255; + } + } + } + + bytes[y*width+x] = (byte)out; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + public static class PyramidBlendingComputeErrorFunction implements JavaImageProcessing.ApplyFunctionInterface { + 
private int [] errors; // error per thread + private final Bitmap bitmap; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap; + private final int width; + + public PyramidBlendingComputeErrorFunction(Bitmap bitmap) { + this.bitmap = bitmap; + this.width = bitmap.getWidth(); + } + + @Override + public void init(int n_threads) { + errors = new int[n_threads]; + fast_bitmap = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i> 16) & 0xFF; + int g0 = (color0 >> 8) & 0xFF; + int b0 = color0 & 0xFF; + + int color1 = bitmap_cache_pixels[(y_rel_bitmap_cache)*width+(x)]; + int r1 = (color1 >> 16) & 0xFF; + int g1 = (color1 >> 8) & 0xFF; + int b1 = color1 & 0xFF; + + int dr = r0 - r1; + int dg = g0 - g1; + int db = b0 - b1; + int diff2 = dr*dr + dg*dg + db*db; + if( errors[thread_index] < 2000000000 ) { // avoid risk of overflow + errors[thread_index] += diff2; + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + public int getError() { + int total_error = 0; + for(int error : errors) { + total_error += error; + } + return total_error; + } + } + + private static final float [] pyramid_blending_weights = new float[]{0.05f, 0.25f, 0.4f, 0.25f, 0.05f}; + + static class ReduceBitmapFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap_in; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_in; + + ReduceBitmapFunction(Bitmap bitmap_in) { + this.bitmap_in = bitmap_in; + this.width = bitmap_in.getWidth(); + this.height = bitmap_in.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_in = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 2 && sx < width-2 && sy >= 2 & sy < height-2 ) { + + float sum_fr = 0.0f; + float 
sum_fg = 0.0f; + float sum_fb = 0.0f; + + for(int dy=-2;dy<=2;dy++) { + for(int dx=-2;dx<=2;dx++) { + + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+dy)*width+(sx+dx)]; + //int color = bitmap_in.getPixel(sx+dx, sy+dy); + int r = (color >> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + // commented out version might be faster, but needs to be tested as gives slightly different results due to numerical wobble + /*float fr = r, fg = g, fb = b; + float weight = pyramid_blending_weights[2+dx] * pyramid_blending_weights[2+dy]; + fr *= weight; + fg *= weight; + fb *= weight;*/ + float fr = ((float)r) * pyramid_blending_weights[2+dx] * pyramid_blending_weights[2+dy]; + float fg = ((float)g) * pyramid_blending_weights[2+dx] * pyramid_blending_weights[2+dy]; + float fb = ((float)b) * pyramid_blending_weights[2+dx] * pyramid_blending_weights[2+dy]; + sum_fr += fr; + sum_fg += fg; + sum_fb += fb; + } + } + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + else { + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(sx)]; + //int color = bitmap_in.getPixel(sx, sy); + pixels_out[c] = color; + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class ReduceBitmapXFunction implements 
JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap_in; + private final int width; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_in; + + ReduceBitmapXFunction(Bitmap bitmap_in) { + this.bitmap_in = bitmap_in; + this.width = bitmap_in.getWidth(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_in = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 2 && sx < width-2 ) { + + float sum_fr = 0.0f; + float sum_fg = 0.0f; + float sum_fb = 0.0f; + + /*for(int dx=-2;dx<=2;dx++) { + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(sx+dx)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2+dx]; + }*/ + + // unroll loops + + int offset = (y_rel_bitmap_in_cache)*width+(sx); + int color; + + color = bitmap_in_cache_pixels[offset-2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + color = bitmap_in_cache_pixels[offset-1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + color = bitmap_in_cache_pixels[offset]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + color = bitmap_in_cache_pixels[offset+1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + 
+ color = bitmap_in_cache_pixels[offset+2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + + // end unroll loops + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + /*r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b));*/ + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + else { + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(sx)]; + //int color = bitmap_in.getPixel(sx, y); + pixels_out[c] = color; + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class ReduceBitmapYFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap_in; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_in; + + ReduceBitmapYFunction(Bitmap bitmap_in) { + this.bitmap_in = bitmap_in; + this.width = bitmap_in.getWidth(); + this.height = bitmap_in.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_in = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 2 & sy < height-2 ) { + for(int x=off_x;x> 16) & 0xFF)) * pyramid_blending_weights[2+dy]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2+dy]; + sum_fb += ((float)(color & 
0xFF)) * pyramid_blending_weights[2+dy]; + }*/ + + // unroll loops + + int color; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + + // end unroll loops + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + /*r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b));*/ + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + else { + for(int x=off_x;x= 2 && sx < (2*width)-2 ) { + float sum_fr = 0.0f; + 
float sum_fg = 0.0f; + float sum_fb = 0.0f; + + /*for(int dx=-2;dx<=2;dx++) { + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(sx+dx)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2+dx]; + }*/ + + // unroll loops + + int offset; + + offset = pixel_index-8; + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[0]; + + offset = pixel_index-4; + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[1]; + + offset = pixel_index; + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[2]; + + offset = pixel_index+4; + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[3]; + + offset = pixel_index+8; + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[4]; + + // end unroll loops + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + /*r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, 
Math.min(255, b));*/ + + bitmap_out[c] = (byte)255; + bitmap_out[c+1] = (byte)r; + bitmap_out[c+2] = (byte)g; + bitmap_out[c+3] = (byte)b; + } + else { + bitmap_out[c] = (byte)255; + bitmap_out[c+1] = bitmap_in[pixel_index+1]; + bitmap_out[c+2] = bitmap_in[pixel_index+2]; + bitmap_out[c+3] = bitmap_in[pixel_index+3]; + } + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class ReduceBitmapYFullFunction implements JavaImageProcessing.ApplyFunctionInterface { + // bitmaps in ARGB format + private final byte [] bitmap_in; + private final byte [] bitmap_out; + private final int width; // width of bitmap_out (bitmap_in should be the same width) + private final int height; // width of bitmap_out (bitmap_in should be twice the height) + + ReduceBitmapYFullFunction(byte [] bitmap_in, byte [] bitmap_out, int width, int height) { + this.bitmap_in = bitmap_in; + this.bitmap_out = bitmap_out; + this.width = width; + this.height = height; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + for(int y=off_y;y= 2 & sy < (2*height)-2 ) { + for(int x=off_x;x> 16) & 0xFF)) * pyramid_blending_weights[2+dy]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2+dy]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2+dy]; + }*/ + + // unroll loops + + int offset; + + offset = 4*((sy-2)*(width)+(x)); + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * 
pyramid_blending_weights[0]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[0]; + + offset = 4*((sy-1)*(width)+(x)); + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[1]; + + offset = 4*((sy)*(width)+(x)); + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[2]; + + offset = 4*((sy+1)*(width)+(x)); + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[3]; + + offset = 4*((sy+2)*(width)+(x)); + sum_fr += ((float)(bitmap_in[offset+1] & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)(bitmap_in[offset+2] & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(bitmap_in[offset+3] & 0xFF)) * pyramid_blending_weights[4]; + + // end unroll loops + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + /*r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b));*/ + + bitmap_out[c] = (byte)255; + bitmap_out[c+1] = (byte)r; + bitmap_out[c+2] = (byte)g; + bitmap_out[c+3] = (byte)b; + } + } + else { + for(int x=off_x;x= 2 && x < width-2 ) + { + float sum_fr = 0.0f; + float sum_fg = 0.0f; + float sum_fb = 0.0f; + + /*for(int dx=-2;dx<=2;dx++) { + int color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x+dx)]; + int r = (color >> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + float fr 
= ((float)r) * pyramid_blending_weights[2+dx]; + float fg = ((float)g) * pyramid_blending_weights[2+dx]; + float fb = ((float)b) * pyramid_blending_weights[2+dx]; + sum_fr += fr; + sum_fg += fg; + sum_fb += fb; + }*/ + + // unroll loop + + int color; + int pixel_index = (y_rel_bitmap_in_cache)*width+x; + + // when blending, we can take advantage of the fact that pixels will be 0 at odd x coordinates (due to the result of ExpandBitmapFunction) + if( x % 2 == 1 ) { + // odd coordinate: so only immediately adjacent coordinates will be non-0 + + // pixel_index-2 is zero + + color = bitmap_in_cache_pixels[pixel_index-1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + // pixel_index is zero + + color = bitmap_in_cache_pixels[pixel_index+1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + + // pixel_index+2 is zero + } + else { + // even coordinate: so adjacent coordinates will be 0 + color = bitmap_in_cache_pixels[pixel_index-2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + // pixel_index-1 is zero + + color = bitmap_in_cache_pixels[pixel_index]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + // pixel_index+1 is zero + + color = bitmap_in_cache_pixels[pixel_index+2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * 
pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + } + /* + color = bitmap_in_cache_pixels[pixel_index-2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + color = bitmap_in_cache_pixels[pixel_index-1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + color = bitmap_in_cache_pixels[pixel_index]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + color = bitmap_in_cache_pixels[pixel_index+1]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + + color = bitmap_in_cache_pixels[pixel_index+2]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + */ + + // end unrolled loop + + sum_fr *= 2.0f; + sum_fg *= 2.0f; + sum_fb *= 2.0f; + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + /*else { + pixels_out[c] = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x)]; + }*/ + } + + for(int x=ex;x= 
width-2 + pixels_out[c] = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x)]; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + /** Note that this is optimised for being called on a result of ExpandBitmapFunction (where only + * the top-left pixel in each group of 2x2 will be non-zero), that was then processed with + * Blur1dXFunction, rather than being a general blur function. + */ + static class Blur1dYFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final Bitmap bitmap_in; + private final int width, height; + private JavaImageProcessing.FastAccessBitmap [] fast_bitmap_in; + + Blur1dYFunction(Bitmap bitmap_in) { + this.bitmap_in = bitmap_in; + this.width = bitmap_in.getWidth(); + this.height = bitmap_in.getHeight(); + } + + @Override + public void init(int n_threads) { + fast_bitmap_in = new JavaImageProcessing.FastAccessBitmap[n_threads]; + + for(int i=0;i= 2 && y < height-2 ) { + for(int x=off_x;x> 16) & 0xFF; + int g = (color >> 8) & 0xFF; + int b = color & 0xFF; + + float fr = ((float)r) * pyramid_blending_weights[2+dy]; + float fg = ((float)g) * pyramid_blending_weights[2+dy]; + float fb = ((float)b) * pyramid_blending_weights[2+dy]; + sum_fr += fr; + sum_fg += fg; + sum_fb += fb; + }*/ + + // unroll loop: + + int color; + + // when blending, due to having blurred X the result of ExpandBitmapFunction, we will now have odd-y lines being zero, even-y lines being non-zero + if( y % 2 == 1 ) { + // odd coordinate: so only immediately adjacent coordinates will be non-0 + + // pixel_index-2 is zero + + color 
= bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + // pixel_index is zero + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + + // pixel_index+2 is zero + } + else { + // even coordinate: so adjacent coordinates will be 0 + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + // pixel_index-1 is zero + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + // pixel_index+1 is zero + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + } + + /* + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[0]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache-1)*width+(x)]; + sum_fr += 
((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[1]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[2]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+1)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[3]; + + color = bitmap_in_cache_pixels[(y_rel_bitmap_in_cache+2)*width+(x)]; + sum_fr += ((float)((color >> 16) & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)((color >> 8) & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(color & 0xFF)) * pyramid_blending_weights[4]; + */ + + // end unrolled loop + + sum_fr *= 2.0f; + sum_fg *= 2.0f; + sum_fb *= 2.0f; + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + else { + for(int x=off_x;x= 2 && x < width-2 ) + { + float sum_fr = 0.0f; + float sum_fg = 0.0f; + float sum_fb = 0.0f; + + /*for(int dx=-2;dx<=2;dx++) { + int index = 4*((y)*width+(x+dx)); + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[2+dx]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[2+dx]; + }*/ + + // unroll loop + + int 
pixel_index = 4*((y)*width+(x)), index; + + // when blending, we can take advantage of the fact that pixels will be 0 at odd x coordinates (due to the result of ExpandBitmapFunction) + if( x % 2 == 1 ) { + // odd coordinate: so only immediately adjacent coordinates will be non-0 + + // pixel_index-2 is zero + + index = pixel_index-4; + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[1]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[1]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[1]; + + // pixel_index is zero + + index = pixel_index+4; + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[3]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[3]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[3]; + + // pixel_index+2 is zero + } + else { + // even coordinate: so adjacent coordinates will be 0 + index = pixel_index-8; + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[0]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[0]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[0]; + + // pixel_index-1 is zero + + index = pixel_index; + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[2]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[2]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[2]; + + // pixel_index+1 is zero + + index = pixel_index+8; + sum_fr += ((float)(bitmap_in[index+1] & 0xFF)) * pyramid_blending_weights[4]; + sum_fg += ((float)(bitmap_in[index+2] & 0xFF)) * pyramid_blending_weights[4]; + sum_fb += ((float)(bitmap_in[index+3] & 0xFF)) * pyramid_blending_weights[4]; + } + + // end unrolled loop + + sum_fr *= 2.0f; + sum_fg *= 2.0f; + sum_fb *= 2.0f; + + int r = (int)(sum_fr+0.5f); + int g = (int)(sum_fg+0.5f); + int b = (int)(sum_fb+0.5f); 
+ + //r = Math.max(0, Math.min(255, r)); + //g = Math.max(0, Math.min(255, g)); + //b = Math.max(0, Math.min(255, b)); + + //bitmap_out[c] = (byte)255; + bitmap_out[c+1] = (byte)r; + bitmap_out[c+2] = (byte)g; + bitmap_out[c+3] = (byte)b; + } + /*else { + bitmap_out[c] = (byte)255; + bitmap_out[c+1] = bitmap_in[c+1]; + bitmap_out[c+2] = bitmap_in[c+2]; + bitmap_out[c+3] = bitmap_in[c+3]; + }*/ + } + + for(int x=ex;x= width-2 + //bitmap_out[c] = bitmap_in[c]; + bitmap_out[c+1] = bitmap_in[c+1]; + bitmap_out[c+2] = bitmap_in[c+2]; + bitmap_out[c+3] = bitmap_in[c+3]; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + /** Note that this is optimised for being called on a result of ExpandBitmapFunction (where only + * the top-left pixel in each group of 2x2 will be non-zero), that was then processed with + * Blur1dXFunction, rather than being a general blur function. + * Alpha isn't written as 255, rather than being based on input alpha. 
+ */ + static class Blur1dYFullFunction implements JavaImageProcessing.ApplyFunctionInterface { + // bitmaps in ARGB format + private final byte [] bitmap_in; + private final byte [] bitmap_out; + private final int width, height; + + Blur1dYFullFunction(byte [] bitmap_in, byte [] bitmap_out, int width, int height) { + this.bitmap_in = bitmap_in; + this.bitmap_out = bitmap_out; + this.width = width; + this.height = height; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + for(int y=off_y;y= 2 && y < height-2 ) { + for(int x=off_x;x> 16) & 0xFF); + float pixel0_fg = (float)((color0 >> 8) & 0xFF); + float pixel0_fb = (float)(color0 & 0xFF); + + //int color1 = fast_bitmap1[thread_index].getPixel(x, y); + int color1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache)*width+(x)]; + float pixel1_fr = (float)((color1 >> 16) & 0xFF); + float pixel1_fg = (float)((color1 >> 8) & 0xFF); + float pixel1_fb = (float)(color1 & 0xFF); + + float fr = pixel0_fr - pixel1_fr; + float fg = pixel0_fg - pixel1_fg; + float fb = pixel0_fb - pixel1_fb; + + this.pixels_rgbf[pixels_rgbf_indx] = fr; + this.pixels_rgbf[pixels_rgbf_indx+1] = fg; + this.pixels_rgbf[pixels_rgbf_indx+2] = fb; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class MergefFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float [] pixels_rgbf0; // input + private final float [] pixels_rgbf1; // input + private final int width; + private final int [] interpolated_best_path; + private final int merge_blend_width; + //private final int start_blend_x; + + MergefFunction(float [] pixels_rgbf0, float [] pixels_rgbf1, int blend_width, int 
width, int [] interpolated_best_path) { + this.pixels_rgbf0 = pixels_rgbf0; + this.pixels_rgbf1 = pixels_rgbf1; + this.width = width; + this.interpolated_best_path = interpolated_best_path; + + merge_blend_width = blend_width; + //start_blend_x = (full_width - merge_blend_width)/2; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + for(int y=off_y;y> 16) & 0xFF); + float pixel0_fg = (float)((color0 >> 8) & 0xFF); + float pixel0_fb = (float)(color0 & 0xFF); + + //int color1 = fast_bitmap1[thread_index].getPixel(x, y); + int color1 = bitmap1_cache_pixels[(y_rel_bitmap1_cache)*width+(x)]; + float pixel1_fr = (float)((color1 >> 16) & 0xFF); + float pixel1_fg = (float)((color1 >> 8) & 0xFF); + float pixel1_fb = (float)(color1 & 0xFF); + + float alpha = ((float)( x-(mid_x-merge_blend_width/2) )) / (float)merge_blend_width; + alpha = Math.max(alpha, 0.0f); + alpha = Math.min(alpha, 1.0f); + + float fr = (1.0f-alpha)*pixel0_fr + alpha*pixel1_fr; + float fg = (1.0f-alpha)*pixel0_fg + alpha*pixel1_fg; + float fb = (1.0f-alpha)*pixel0_fb + alpha*pixel1_fb; + + int r = (int)(fr+0.5f); + int g = (int)(fg+0.5f); + int b = (int)(fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } + + static class AddBitmapFunction implements JavaImageProcessing.ApplyFunctionInterface { + private final float [] pixels_rgbf1; + private final int width; + + AddBitmapFunction(float [] 
pixels_rgbf1, int width) { + this.pixels_rgbf1 = pixels_rgbf1; + this.width = width; + } + + @Override + public void init(int n_threads) { + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height) { + int [] pixels_out = output.getCachedPixelsI(); + + for(int y=off_y,c=0;y> 16) & 0xFF); + float pixel0_fg = (float)((color0 >> 8) & 0xFF); + float pixel0_fb = (float)(color0 & 0xFF); + + float pixel1_fr = pixels_rgbf1[pixels_rgbf_indx]; + float pixel1_fg = pixels_rgbf1[pixels_rgbf_indx+1]; + float pixel1_fb = pixels_rgbf1[pixels_rgbf_indx+2]; + + float fr = pixel0_fr + pixel1_fr; + float fg = pixel0_fg + pixel1_fg; + float fb = pixel0_fb + pixel1_fb; + + int r = (int)(fr+0.5f); + int g = (int)(fg+0.5f); + int b = (int)(fb+0.5f); + + r = Math.max(0, Math.min(255, r)); + g = Math.max(0, Math.min(255, g)); + b = Math.max(0, Math.min(255, b)); + + // this code is performance critical; note it's faster to avoid calls to Color.argb() + pixels_out[c] = (255 << 24) | (r << 16) | (g << 8) | b; + } + } + } + + @Override + public void apply(JavaImageProcessing.CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height) { + // unused + throw new RuntimeException("not implemented"); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/JavaImageProcessing.java b/app/src/main/java/net/sourceforge/opencamera/JavaImageProcessing.java new file mode 100644 index 0000000..6dff2d6 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/JavaImageProcessing.java @@ -0,0 +1,269 @@ +package net.sourceforge.opencamera; + +import android.graphics.Bitmap; +import android.util.Log; + +public class 
JavaImageProcessing { + private static final String TAG = "JavaImageProcessing"; + + public interface ApplyFunctionInterface { + void init(int n_threads); + void apply(CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height); // version with no input + + /** + * @param pixels An array of pixels for the subset being operated on. I.e., pixels[0] represents the input pixel at (off_x, off_y), and + * the pixels array is of size this_width*this_height. + */ + void apply(CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height); + /** + * @param pixels An array of pixels for the subset being operated on. I.e., pixels[0] represents the input pixel at (off_x, off_y), and + * the pixels array is of size 4*this_width*this_height. + */ + void apply(CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height); + } + + /** Encapsulates a Bitmap, but optimised for reading individual pixels. + * This differs to CachedBitmap in that FastAccessBitmap automatically decides which to cache, + * based on the requested pixels. 
 */
    static class FastAccessBitmap {
        private final Bitmap bitmap;
        private final int bitmap_width; // cached bitmap.getWidth(), avoids repeated calls in hot loops
        private final int cache_height; // number of rows held in the cache (at most 128)
        private final int [] cache_pixels_i; // row cache, bitmap_width*cache_height ARGB ints
        private int cache_y = -1; // first bitmap row currently held in cache_pixels_i

        /** Wraps the supplied bitmap and pre-fills the row cache from row 0.
         */
        FastAccessBitmap(Bitmap bitmap) {
            this.bitmap = bitmap;
            this.bitmap_width = bitmap.getWidth();
            this.cache_height = Math.min(128, bitmap.getHeight());
            this.cache_pixels_i = new int[bitmap_width*cache_height];
            // better for performance to initialise the cache, rather than having to keep checking if it's initialised
            cache(0);
        }

        /** Refills the cache so that it contains rows starting a few rows above y.
         *  The cache is anchored 4 rows before the requested row (clamped to the bitmap), so
         *  callers that read a small neighbourhood around y (e.g. a 5-tap blur) stay in cache.
         */
        private void cache(int y) {
            /*if( MyDebug.LOG )
                Log.d(TAG, ">>> cache: " + y + " [ " + this + " ]");*/
            y = Math.max(0, y-4);
            // clamp so the cache window never runs past the bottom of the bitmap
            this.cache_y = Math.min(y, bitmap.getHeight()-cache_height);
            this.bitmap.getPixels(cache_pixels_i, 0, bitmap_width, 0, cache_y, bitmap_width, cache_height);
        }

        /** Returns the ARGB pixel at (x, y), refilling the row cache on a miss.
         *  x must be within the bitmap width; only y is checked against the cache window.
         */
        int getPixel(int x, int y) {
            if( y < cache_y || y >= cache_y+cache_height ) {
                // update cache
                cache(y);
            }
            // read from cache
            return cache_pixels_i[(y-cache_y)*bitmap_width+x];
        }

        /** Ensures rows sy..ey are resident in the cache, refilling it if necessary.
         *  Throws if the requested span is larger than the cache itself.
         *  NOTE(review): after a refill the cache is anchored at sy-4, so a span with
         *  ey - sy > cache_height - 5 (with sy >= 5) could still leave ey uncovered; callers
         *  appear to request only a few rows at a time, but confirm against the call sites.
         */
        void ensureCache(int sy, int ey) {
            if( ey - sy > cache_height ) {
                throw new RuntimeException("can't cache this many rows: " + sy + " to " + ey + " vs cache_height: " + cache_height);
            }
            if( sy < cache_y || ey >= cache_y+cache_height ) {
                cache(sy);
            }
        }

        /** Returns the first bitmap row currently held in the cache; callers combine this with
         *  getCachedPixelsI() to index the cache directly.
         */
        int getCacheY() {
            return this.cache_y;
        }

        /** Returns the backing row cache (bitmap_width*cache_height ARGB ints); callers must
         *  call ensureCache()/getCacheY() first to know which rows it holds.
         */
        int [] getCachedPixelsI() {
            return this.cache_pixels_i;
        }
    }

    /** Encapsulates a Bitmap, together with caching of pixels.
     *  This differs to FastAccessBitmap in that CachedBitmap requires the caller to actually do the
     *  caching.
 */
    public static class CachedBitmap {
        private final Bitmap bitmap; // may be null when the caller works purely on pixel arrays
        private final int [] cache_pixels_i; // caller-managed pixel cache, cache_width*cache_height ARGB ints
        // NOTE(review): cache_pixels_b is hard-coded to null by the only visible constructor, so
        // getCachedPixelsB() always returns null here — byte-array callers must not rely on it
        // unless another constructor (not visible in this chunk) initialises it; confirm.
        private final byte [] cache_pixels_b;

        /** Wraps the supplied bitmap (may be null) and allocates an int pixel cache of the
         *  requested dimensions. The cache contents are NOT filled here — the caller copies
         *  pixels in and out itself.
         */
        CachedBitmap(Bitmap bitmap, int cache_width, int cache_height) {
            this.bitmap = bitmap;
            this.cache_pixels_i = new int[cache_width*cache_height];
            this.cache_pixels_b = null;
        }

        /** Returns the caller-managed int pixel cache. */
        int [] getCachedPixelsI() {
            return this.cache_pixels_i;
        }

        /** Returns the byte pixel cache — always null in this class as written (see field note). */
        byte [] getCachedPixelsB() {
            return this.cache_pixels_b;
        }
    }

    /** Generic thread to apply a Java function to a bunch of pixels.
     */
    private static class ApplyFunctionThread extends Thread {
        private final int thread_index; // index of this thread, passed through to the apply function
        private final ApplyFunctionInterface function;
        private final CachedBitmap input; // null when the function takes no input bitmap
        private final int start_x, start_y, stop_x, stop_y; // region of the input this thread processes (stop_* exclusive)
        private int chunk_size; // number of lines to process at a time
        private CachedBitmap output; // optional
        private int output_start_x, output_start_y; // where this thread's region lands in the output bitmap

        /** Chooses how many rows to process per chunk: at most 64, capped by the region height.
         *  The commented-out alternatives are previous tuning experiments.
         */
        private static int getChunkSize(int start_y, int stop_y) {
            int height = stop_y - start_y;
            //return height;
            //return (int)Math.ceil(height/4.0);
            //return Math.min(512, height);
            return Math.min(64, height);
            //return Math.min(32, height);
        }

        /** Sets up a worker for rows start_y..stop_y of columns start_x..stop_x.
         *  If bitmap is non-null, an input cache sized for one chunk is allocated up front.
         */
        ApplyFunctionThread(int thread_index, ApplyFunctionInterface function, Bitmap bitmap, int start_x, int start_y, int stop_x, int stop_y) {
            super("ApplyFunctionThread");
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "    thread_index: " + thread_index);
                Log.d(TAG, "    start_x: " + start_x);
                Log.d(TAG, "    start_y: " + start_y);
                Log.d(TAG, "    stop_x: " + stop_x);
                Log.d(TAG, "    stop_y: " + stop_y);
            }*/
            this.thread_index = thread_index;
            this.function = function;
            this.start_x = start_x;
            this.start_y = start_y;
            this.stop_x = stop_x;
            this.stop_y = stop_y;
            this.chunk_size = getChunkSize(start_y, stop_y);
            /*if( MyDebug.LOG )
                Log.d(TAG, "    chunk_size: " + chunk_size);*/
            if( bitmap != null )
                this.input = new CachedBitmap(bitmap, stop_x-start_x, chunk_size);
            else
                this.input = null;
} + + void setOutput(Bitmap bitmap, int output_start_x, int output_start_y) { + /*if( MyDebug.LOG ) { + Log.d(TAG, " output_start_x: " + output_start_x); + Log.d(TAG, " output_start_y: " + output_start_y); + }*/ + this.output = new CachedBitmap(bitmap, stop_x-start_x, chunk_size); + this.output_start_x = output_start_x; + this.output_start_y = output_start_y; + } + + public void run() { + /*if( MyDebug.LOG ) + Log.d(TAG, "ApplyFunctionThread.run");*/ + int width = stop_x-start_x; + int this_start_y = start_y; + int output_shift_y = output_start_y - start_y; + /*if( MyDebug.LOG ) { + Log.d(TAG, "start_y: " + start_y); + Log.d(TAG, "output_start_y: " + output_start_y); + Log.d(TAG, "output_shift_y: " + output_shift_y); + }*/ + if( input == null && output == null ) { + this.chunk_size = stop_y-start_y; + /*if( MyDebug.LOG ) + Log.d(TAG, "reset chunk_size to: " + chunk_size);*/ + } + + final int chunk_size_f = chunk_size; + while(this_start_y < stop_y) { + int this_stop_y = Math.min(this_start_y+chunk_size_f, stop_y); + int this_height = this_stop_y-this_start_y; + //if( MyDebug.LOG ) + // Log.d(TAG, "chunks from " + this_start_y + " to " + this_stop_y); + + //long time_s = System.currentTimeMillis(); + if( input == null ) { + // nothing to copy to cache + function.apply(output, thread_index, start_x, this_start_y, width, this_height); + } + else if( input.bitmap != null ) { + input.bitmap.getPixels(input.cache_pixels_i, 0, width, start_x, this_start_y, width, this_height); + /*if( MyDebug.LOG ) + Log.d(TAG, "### ApplyFunctionThread: time after reading pixels: " + (System.currentTimeMillis() - time_s));*/ + function.apply(output, thread_index, input.cache_pixels_i, start_x, this_start_y, width, this_height); + } + /*if( MyDebug.LOG ) + Log.d(TAG, "### ApplyFunctionThread: time after apply: " + (System.currentTimeMillis() - time_s));*/ + + if( output != null ) { + // write cached pixels back to output bitmap + if( output.bitmap != null ) { + /*if( MyDebug.LOG ) { + 
Log.d(TAG, "this_start_y: " + this_start_y); + Log.d(TAG, "output_shift_y: " + output_shift_y); + Log.d(TAG, "this_height: " + this_height); + Log.d(TAG, "height: " + output.bitmap.getHeight()); + }*/ + output.bitmap.setPixels(output.cache_pixels_i, 0, width, output_start_x, this_start_y+output_shift_y, width, this_height); + } + } + + this_start_y = this_stop_y; + } + } + } + + /** Applies a function to the specified pixels of the supplied bitmap. + */ + public static void applyFunction(ApplyFunctionInterface function, Bitmap bitmap, Bitmap output, int start_x, int start_y, int stop_x, int stop_y) { + applyFunction(function, bitmap, output, start_x, start_y, stop_x, stop_y, start_x, start_y); + } + + /** Applies a function to the specified pixels of the supplied bitmap. + */ + static void applyFunction(ApplyFunctionInterface function, Bitmap bitmap, Bitmap output, int start_x, int start_y, int stop_x, int stop_y, int output_start_x, int output_start_y) { + if( MyDebug.LOG ) + Log.d(TAG, "applyFunction [bitmap]"); + long time_s = System.currentTimeMillis(); + + int height = stop_y-start_y; + if( MyDebug.LOG ) + Log.d(TAG, "height: " + height); + //final int n_threads = 1; + final int n_threads = height >= 16 ? 4 : 1; + //final int n_threads = height >= 16 ? 
8 : 1; + function.init(n_threads); + ApplyFunctionThread [] threads = new ApplyFunctionThread[n_threads]; + int st_indx = 0; + for(int i=0;i= Build.VERSION_CODES.O) { + KeyguardManager keyguardManager = (KeyguardManager) activity.getSystemService(KEYGUARD_SERVICE); + if (keyguardManager == null || !keyguardManager.isKeyguardLocked()) { + callback.run(); + return; + } + keyguardManager.requestDismissKeyguard(activity, new KeyguardManager.KeyguardDismissCallback() { + @Override + public void onDismissSucceeded() { + if( MyDebug.LOG ) + Log.d(TAG, "onDismissSucceeded"); + callback.run(); + if( MyDebug.LOG ) + Log.d(TAG, "onDismissSucceeded: after callback run"); + } + }); + } else { + callback.run(); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/LocationSupplier.java b/app/src/main/java/net/sourceforge/opencamera/LocationSupplier.java new file mode 100644 index 0000000..8c7d2df --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/LocationSupplier.java @@ -0,0 +1,357 @@ +package net.sourceforge.opencamera; + +import android.Manifest; +import android.content.Context; +import android.content.SharedPreferences; +import android.content.pm.PackageManager; +import android.location.Location; +import android.location.LocationListener; +import android.location.LocationManager; +import android.location.LocationProvider; +import android.os.Build; +import android.os.Bundle; +import android.preference.PreferenceManager; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import android.util.Log; + +/** Handles listening for GPS location (both coarse and fine). 
+ */ +public class LocationSupplier { + private static final String TAG = "LocationSupplier"; + + private final Context context; + private final LocationManager locationManager; + private MyLocationListener [] locationListeners; + private volatile boolean test_force_no_location; // if true, always return null location; must be volatile for test project setting the state + + private Location cached_location; + private long cached_location_ms; + + LocationSupplier(Context context) { + this.context = context; + locationManager = (LocationManager)context.getSystemService(Context.LOCATION_SERVICE); + } + + private Location getCachedLocation() { + if( cached_location != null ) { + long time_ms = System.currentTimeMillis(); + if( time_ms <= cached_location_ms + 20000 ) { + return cached_location; + } + else { + cached_location = null; + } + } + return null; + } + + /** Cache the current best location. Note that we intentionally call getLocation() from this + * method rather than passing it a location from onLocationChanged(), as we don't want a + * coarse location overriding a better fine location. + */ + private void cacheLocation() { + if( MyDebug.LOG ) + Log.d(TAG, "cacheLocation"); + Location location = getLocation(); + if( location == null ) { + // this isn't an error as it can happen that we receive a call to onLocationChanged() after + // having freed the location listener (possibly because LocationManager had already queued + // a call to onLocationChanged? 
+ // we should not set cached_location to null in such cases + Log.d(TAG, "### asked to cache location when location not available"); + } + else { + cached_location = new Location(location); + cached_location_ms = System.currentTimeMillis(); + } + } + + public static class LocationInfo { + private boolean location_was_cached; + + public boolean LocationWasCached() { + return location_was_cached; + } + } + + /** If adding extra calls to this, consider whether explicit user permission is required, and whether + * privacy policy or data privacy section needs updating. + * @return Returns null if location not available. + */ + public Location getLocation() { + return getLocation(null); + } + + /** If adding extra calls to this, consider whether explicit user permission is required, and whether + * privacy policy or data privacy section needs updating. + * @param locationInfo Optional class to return additional information about the location. + * @return Returns null if location not available. 
+ */ + public Location getLocation(LocationInfo locationInfo) { + if( locationInfo != null ) + locationInfo.location_was_cached = false; // init + + if( locationListeners == null ) { + // if we have disabled location listening, then don't return a cached location anyway - + // in theory, callers should have already checked for user permission/setting before calling + // getLocation(), but just in case we didn't, don't want to return a cached location + return null; + } + if( test_force_no_location ) + return null; + // location listeners should be stored in order best to worst + for(MyLocationListener locationListener : locationListeners) { + Location location = locationListener.getLocation(); + if( location != null ) + return location; + } + Location location = getCachedLocation(); + if( location != null && locationInfo != null ) + locationInfo.location_was_cached = true; + return location; + } + + private class MyLocationListener implements LocationListener { + private Location location; + volatile boolean test_has_received_location; // must be volatile for test project reading the state + + Location getLocation() { + return location; + } + + public void onLocationChanged(@NonNull Location location) { + if( MyDebug.LOG ) + Log.d(TAG, "onLocationChanged"); + this.test_has_received_location = true; + // Android camera source claims we need to check lat/long != 0.0d + // also check for not being null just in case - had a nullpointerexception on Google Play! + if( location != null && ( location.getLatitude() != 0.0d || location.getLongitude() != 0.0d ) ) { + if( MyDebug.LOG ) { + Log.d(TAG, "received location"); + // don't log location, in case of privacy! 
+ } + this.location = location; + cacheLocation(); + } + } + + public void onStatusChanged(String provider, int status, Bundle extras) { + switch( status ) { + case LocationProvider.OUT_OF_SERVICE: + case LocationProvider.TEMPORARILY_UNAVAILABLE: + { + if( MyDebug.LOG ) { + if( status == LocationProvider.OUT_OF_SERVICE ) + Log.d(TAG, "location provider out of service"); + else if( status == LocationProvider.TEMPORARILY_UNAVAILABLE ) + Log.d(TAG, "location provider temporarily unavailable"); + } + this.location = null; + this.test_has_received_location = false; + cached_location = null; + break; + } + default: + break; + } + } + + public void onProviderEnabled(@NonNull String provider) { + } + + public void onProviderDisabled(@NonNull String provider) { + if( MyDebug.LOG ) + Log.d(TAG, "onProviderDisabled"); + this.location = null; + this.test_has_received_location = false; + cached_location = null; + } + } + + /** Best to only call this from MainActivity.initLocation(). + * @return Returns false if location permission not available for either coarse or fine. + * Important to only return false if we actually want/need to ask the user for location + * permission! + */ + boolean setupLocationListener() { + if( MyDebug.LOG ) + Log.d(TAG, "setupLocationListener"); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + // Define a listener that responds to location updates + // we only set it up if store_location is true, important for privacy and unnecessary battery use + boolean store_location = sharedPreferences.getBoolean(PreferenceKeys.LocationPreferenceKey, false); + if( store_location && locationListeners == null ) { + // Note, ContextCompat.checkSelfPermission is meant to handle being called on any Android version, i.e., pre + // Android Marshmallow it should return true as permissions are set an installation, and can't be switched off by + // the user. 
However on Galaxy Nexus Android 4.3 and Nexus 7 (2013) Android 5.1.1, ACCESS_COARSE_LOCATION returns + // PERMISSION_DENIED! So we keep the checks to Android Marshmallow or later (where we need them), and avoid + // checking behaviour for earlier devices. + boolean has_coarse_location_permission; + boolean has_fine_location_permission; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.d(TAG, "check for location permissions"); + has_coarse_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED; + has_fine_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED; + if( MyDebug.LOG ) { + Log.d(TAG, "has_coarse_location_permission? " + has_coarse_location_permission); + Log.d(TAG, "has_fine_location_permission? " + has_fine_location_permission); + } + //has_coarse_location_permission = false; // test + //has_fine_location_permission = false; // test + // require at least one permission to be present + // will be important for Android 12+ where user can grant only coarse permission - we still + // want to support geotagging in such cases + if( !has_coarse_location_permission && !has_fine_location_permission ) { + if( MyDebug.LOG ) + Log.d(TAG, "location permission not available"); + // return false, which tells caller to request permission - we'll call this function again if permission is granted + return false; + } + } + else { + // permissions always available pre-Android 6 + has_coarse_location_permission = true; + has_fine_location_permission = true; + } + + locationListeners = new MyLocationListener[2]; + locationListeners[0] = new MyLocationListener(); + locationListeners[1] = new MyLocationListener(); + + // location listeners should be stored in order best to worst + // also see https://sourceforge.net/p/opencamera/tickets/1/ - need to check provider is 
available + // now also need to check for permissions - need to support devices that might have one but not both of fine and coarse permissions supplied + if( has_coarse_location_permission && locationManager.getAllProviders().contains(LocationManager.NETWORK_PROVIDER) ) { + locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000, 0, locationListeners[1]); + if( MyDebug.LOG ) + Log.d(TAG, "created coarse (network) location listener"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "don't have a NETWORK_PROVIDER"); + } + if( has_fine_location_permission && locationManager.getAllProviders().contains(LocationManager.GPS_PROVIDER) ) { + locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, locationListeners[0]); + if( MyDebug.LOG ) + Log.d(TAG, "created fine (gps) location listener"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "don't have a GPS_PROVIDER"); + } + } + else if( !store_location ) { + freeLocationListeners(); + } + // important to return true even if we didn't set up decide the location listeners - as + // returning false indicates to ask user for location permission (which we don't want to + // do if PreferenceKeys.LocationPreferenceKey preference isn't true) + return true; + } + + void freeLocationListeners() { + if( MyDebug.LOG ) + Log.d(TAG, "freeLocationListeners"); + if( locationListeners != null ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + // Android Lint claims we need location permission for LocationManager.removeUpdates(). 
+ // also see http://stackoverflow.com/questions/32715189/location-manager-remove-updates-permission + if( MyDebug.LOG ) + Log.d(TAG, "check for location permissions"); + boolean has_coarse_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED; + boolean has_fine_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED; + if( MyDebug.LOG ) { + Log.d(TAG, "has_coarse_location_permission? " + has_coarse_location_permission); + Log.d(TAG, "has_fine_location_permission? " + has_fine_location_permission); + } + // require at least one permission to be present + if( !has_coarse_location_permission && !has_fine_location_permission ) { + if( MyDebug.LOG ) + Log.d(TAG, "location permission not available"); + return; + } + } + for(int i=0;i preloaded_bitmap_resources = new Hashtable<>(); + private ValueAnimator gallery_save_anim; + private boolean last_continuous_fast_burst; // whether the last photo operation was a continuous_fast_burst + private Future update_gallery_future; + + private TextToSpeech textToSpeech; + private boolean textToSpeechSuccess; + + private AudioListener audio_listener; // may be null - created when needed + + //private boolean ui_placement_right = true; + + //private final boolean edge_to_edge_mode = false; // whether running always in edge-to-edge mode + //private final boolean edge_to_edge_mode = true; // whether running always in edge-to-edge mode + private final boolean edge_to_edge_mode = Build.VERSION.SDK_INT >= Build.VERSION_CODES.VANILLA_ICE_CREAM; // whether running always in edge-to-edge mode + private boolean want_no_limits; // whether we want to run with FLAG_LAYOUT_NO_LIMITS + private boolean set_window_insets_listener; // whether we've enabled a setOnApplyWindowInsetsListener() + private int navigation_gap; // gap for navigation bar along bottom (portrait) or 
right (landscape) + private int navigation_gap_landscape; // gap for navigation bar along left (portrait) or bottom (landscape); only set for edge_to_edge_mode==true + private int navigation_gap_reverse_landscape; // gap for navigation bar along right (portrait) or top (landscape); only set for edge_to_edge_mode==true + public static volatile boolean test_preview_want_no_limits; // test flag, if set to true then instead use test_preview_want_no_limits_value; needs to be static, as it needs to be set before activity is created to take effect + public static volatile boolean test_preview_want_no_limits_value; + public volatile boolean test_set_show_under_navigation; // test flag, the value of enable for the last call of showUnderNavigation() (or false if not yet called) + public static volatile boolean test_force_system_orientation; // test flag, if set to true, that getSystemOrientation() returns test_system_orientation + public static volatile SystemOrientation test_system_orientation = SystemOrientation.PORTRAIT; + public static volatile boolean test_force_window_insets; // test flag, if set to true, then the OnApplyWindowInsetsListener will read from the following flags + public static volatile Insets test_insets; // test insets for WindowInsets.Type.navigationBars() | WindowInsets.Type.displayCutout() + public static volatile Insets test_cutout_insets; // test insets for WindowInsets.Type.displayCutout() + + // whether this is a multi-camera device (note, this isn't simply having more than 1 camera, but also having more than one with the same facing) + // note that in most cases, code should check the MultiCamButtonPreferenceKey preference as well as the is_multi_cam flag, + // this can be done via isMultiCamEnabled(). + private boolean is_multi_cam; + // These lists are lists of camera IDs with the same "facing" (front, back or external). + // Only initialised if is_multi_cam==true. 
+ private List back_camera_ids; + private List front_camera_ids; + private List other_camera_ids; + + private final ToastBoxer switch_video_toast = new ToastBoxer(); + private final ToastBoxer screen_locked_toast = new ToastBoxer(); + private final ToastBoxer stamp_toast = new ToastBoxer(); + private final ToastBoxer changed_auto_stabilise_toast = new ToastBoxer(); + private final ToastBoxer white_balance_lock_toast = new ToastBoxer(); + private final ToastBoxer exposure_lock_toast = new ToastBoxer(); + private final ToastBoxer audio_control_toast = new ToastBoxer(); + private final ToastBoxer store_location_toast = new ToastBoxer(); + private boolean block_startup_toast = false; // used when returning from Settings/Popup - if we're displaying a toast anyway, don't want to display the info toast too + private String push_info_toast_text; // can be used to "push" extra text to the info text for showPhotoVideoToast() + private boolean push_switched_camera = false; // whether to display animation for switching front/back cameras + + // application shortcuts: + static private final String ACTION_SHORTCUT_CAMERA = "net.sourceforge.opencamera.SHORTCUT_CAMERA"; + static private final String ACTION_SHORTCUT_SELFIE = "net.sourceforge.opencamera.SHORTCUT_SELFIE"; + static private final String ACTION_SHORTCUT_VIDEO = "net.sourceforge.opencamera.SHORTCUT_VIDEO"; + static private final String ACTION_SHORTCUT_GALLERY = "net.sourceforge.opencamera.SHORTCUT_GALLERY"; + static private final String ACTION_SHORTCUT_SETTINGS = "net.sourceforge.opencamera.SHORTCUT_SETTINGS"; + + private static final int CHOOSE_SAVE_FOLDER_SAF_CODE = 42; + private static final int CHOOSE_GHOST_IMAGE_SAF_CODE = 43; + private static final int CHOOSE_LOAD_SETTINGS_SAF_CODE = 44; + + // for testing; must be volatile for test project reading the state + // n.b., avoid using static, as static variables are shared between different instances of an application, + // and won't be reset in subsequent tests in a 
suite! + public boolean is_test; // whether called from OpenCamera.test testing + public volatile Bitmap gallery_bitmap; + public volatile boolean test_low_memory; + public volatile boolean test_have_angle; + public volatile float test_angle; + public volatile Uri test_last_saved_imageuri; // uri of last image; set if using scoped storage OR using SAF + public volatile String test_last_saved_image; // filename (including full path) of last image; set if not using scoped storage nor using SAF (i.e., writing using File API) + public static boolean test_force_supports_camera2; // okay to be static, as this is set for an entire test suite + public volatile String test_save_settings_file; + + // update: notifications now removed due to needing permissions on Android 13+ + //private boolean has_notification; + //private final String CHANNEL_ID = "open_camera_channel"; + //private final int image_saving_notification_id = 1; + + private static final float WATER_DENSITY_FRESHWATER = 1.0f; + private static final float WATER_DENSITY_SALTWATER = 1.03f; + private float mWaterDensity = 1.0f; + + // whether to lock to landscape orientation, or allow switching between portrait and landscape orientations + //public static final boolean lock_to_landscape = true; + public static final boolean lock_to_landscape = false; + + // handling for lock_to_landscape==false: + + public enum SystemOrientation { + LANDSCAPE, + PORTRAIT, + REVERSE_LANDSCAPE + } + + private MyDisplayListener displayListener; + + private boolean has_cached_system_orientation; + private SystemOrientation cached_system_orientation; + + private boolean hasOldSystemOrientation; + private SystemOrientation oldSystemOrientation; + + private boolean has_cached_display_rotation; + private long cached_display_rotation_time_ms; + private int cached_display_rotation; + + List exposure_seekbar_values; // mapping from exposure_seekbar progress value to preview exposure compensation + private int exposure_seekbar_values_zero; // 
index in exposure_seekbar_values that maps to zero preview exposure compensation + + @Override + protected void onCreate(Bundle savedInstanceState) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "onCreate: " + this); + debug_time = System.currentTimeMillis(); + } + activity_count++; + if( MyDebug.LOG ) + Log.d(TAG, "activity_count: " + activity_count); + //EdgeToEdge.enable(this, SystemBarStyle.auto(Color.TRANSPARENT, Color.TRANSPARENT), SystemBarStyle.dark(Color.TRANSPARENT)); // test edge-to-edge on pre-Android 15 + super.onCreate(savedInstanceState); + + setContentView(R.layout.activity_main); + PreferenceManager.setDefaultValues(this, R.xml.preferences, false); // initialise any unset preferences to their default values + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting default preference values: " + (System.currentTimeMillis() - debug_time)); + + if( getIntent() != null && getIntent().getExtras() != null ) { + // whether called from testing + is_test = getIntent().getExtras().getBoolean("test_project"); + if( MyDebug.LOG ) + Log.d(TAG, "is_test: " + is_test); + } + /*if( getIntent() != null && getIntent().getExtras() != null ) { + // whether called from Take Photo widget + if( MyDebug.LOG ) + Log.d(TAG, "take_photo?: " + getIntent().getExtras().getBoolean(TakePhoto.TAKE_PHOTO)); + }*/ + if( MyDebug.LOG ) { + // whether called from Take Photo widget + Log.d(TAG, "take_photo?: " + TakePhoto.TAKE_PHOTO); + } + if( getIntent() != null && getIntent().getAction() != null ) { + // invoked via the manifest shortcut? 
+ if( MyDebug.LOG ) + Log.d(TAG, "shortcut: " + getIntent().getAction()); + } + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + + // determine whether we should support "auto stabilise" feature + // risk of running out of memory on lower end devices, due to manipulation of large bitmaps + ActivityManager activityManager = (ActivityManager) getSystemService(ACTIVITY_SERVICE); + if( MyDebug.LOG ) { + Log.d(TAG, "large max memory = " + activityManager.getLargeMemoryClass() + "MB"); + } + large_heap_memory = activityManager.getLargeMemoryClass(); + if( large_heap_memory >= 128 ) { + supports_auto_stabilise = true; + } + if( MyDebug.LOG ) + Log.d(TAG, "supports_auto_stabilise? " + supports_auto_stabilise); + + // hack to rule out phones unlikely to have 4K video, so no point even offering the option! + // both S5 and Note 3 have 128MB standard and 512MB large heap (tested via Samsung RTL), as does Galaxy K Zoom + if( activityManager.getLargeMemoryClass() >= 512 ) { + supports_force_video_4k = true; + } + if( MyDebug.LOG ) + Log.d(TAG, "supports_force_video_4k? 
" + supports_force_video_4k); + + // set up components + bluetoothRemoteControl = new BluetoothRemoteControl(this); + permissionHandler = new PermissionHandler(this); + settingsManager = new SettingsManager(this); + mainUI = new MainUI(this); + manualSeekbars = new ManualSeekbars(); + applicationInterface = new MyApplicationInterface(this, savedInstanceState); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after creating application interface: " + (System.currentTimeMillis() - debug_time)); + textFormatter = new TextFormatter(this); + soundPoolManager = new SoundPoolManager(this); + magneticSensor = new MagneticSensor(this); + //speechControl = new SpeechControl(this); + + // determine whether we support Camera2 API + // must be done before setDeviceDefaults() + initCamera2Support(); + + // set some per-device defaults + // must be done before creating the Preview (as setDeviceDefaults() may set Camera2 API) + boolean has_done_first_time = sharedPreferences.contains(PreferenceKeys.FirstTimePreferenceKey); + if( MyDebug.LOG ) + Log.d(TAG, "has_done_first_time: " + has_done_first_time); + if( !has_done_first_time ) { + // must be done after initCamera2Support() + setDeviceDefaults(); + } + + boolean settings_is_open = settingsIsOpen(); + if( MyDebug.LOG ) + Log.d(TAG, "settings_is_open?: " + settings_is_open); + // settings_is_open==true can happen if application is recreated when settings is open + // to reproduce: go to settings, then turn screen off and on (and unlock) + if( !settings_is_open ) { + // set up window flags for normal operation + setWindowFlagsForCamera(); + } + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting window flags: " + (System.currentTimeMillis() - debug_time)); + + save_location_history = new SaveLocationHistory(this, PreferenceKeys.SaveLocationHistoryBasePreferenceKey, getStorageUtils().getSaveLocation()); + checkSaveLocations(); + if( applicationInterface.getStorageUtils().isUsingSAF() ) { + if( MyDebug.LOG ) + Log.d(TAG, 
"create new SaveLocationHistory for SAF"); + save_location_history_saf = new SaveLocationHistory(this, PreferenceKeys.SaveLocationHistorySAFBasePreferenceKey, getStorageUtils().getSaveLocationSAF()); + } + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after updating folder history: " + (System.currentTimeMillis() - debug_time)); + + // set up sensors + mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE); + + // accelerometer sensor (for device orientation) + if( mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER) != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "found accelerometer"); + mSensorAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "no support for accelerometer"); + } + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after creating accelerometer sensor: " + (System.currentTimeMillis() - debug_time)); + + // magnetic sensor (for compass direction) + magneticSensor.initSensor(mSensorManager); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after creating magnetic sensor: " + (System.currentTimeMillis() - debug_time)); + + // clear any seek bars (just in case??) 
+ mainUI.closeExposureUI(); + + // set up the camera and its preview + preview = new Preview(applicationInterface, (this.findViewById(R.id.preview))); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after creating preview: " + (System.currentTimeMillis() - debug_time)); + + if( settings_is_open ) { + // must be done after creating preview + setWindowFlagsForSettings(); + } + + { + // don't show orientation animations + // must be done after creating Preview (so we know if Camera2 API or not) + WindowManager.LayoutParams layout = getWindow().getAttributes(); + // If locked to landscape, ROTATION_ANIMATION_SEAMLESS/JUMPCUT has the problem that when going to + // Settings in portrait, we briefly see the UI change - this is because we set the flag + // to no longer lock to landscape, and that change happens too quickly. + // This isn't a problem when lock_to_landscape==false, and we want + // ROTATION_ANIMATION_SEAMLESS so that there is no/minimal pause from the preview when + // rotating the device. However if using old camera API, we get an ugly transition with + // ROTATION_ANIMATION_SEAMLESS (probably related to not using TextureView?) + if( lock_to_landscape || !preview.usingCamera2API() ) + layout.rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_CROSSFADE; + else if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) + layout.rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_SEAMLESS; + else + layout.rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_JUMPCUT; + getWindow().setAttributes(layout); + } + + // Setup multi-camera buttons (must be done after creating preview so we know which Camera API is being used, + // and before initialising on-screen visibility). + // We only allow the separate icon for switching cameras if: + // - there are at least 2 types of "facing" camera, and + // - there are at least 2 cameras with the same "facing". 
+ // If there are multiple cameras but all with different "facing", then the switch camera + // icon is used to iterate over all cameras. + // If there are more than two cameras, but all cameras have the same "facing, we still stick + // with using the switch camera icon to iterate over all cameras. + int n_cameras = preview.getCameraControllerManager().getNumberOfCameras(); + if( n_cameras > 2 ) { + this.back_camera_ids = new ArrayList<>(); + this.front_camera_ids = new ArrayList<>(); + this.other_camera_ids = new ArrayList<>(); + for(int i=0;i= 2 || front_camera_ids.size() >= 2 || other_camera_ids.size() >= 2; + int n_facing = 0; + if( !back_camera_ids.isEmpty() ) + n_facing++; + if( !front_camera_ids.isEmpty() ) + n_facing++; + if( !other_camera_ids.isEmpty() ) + n_facing++; + this.is_multi_cam = multi_same_facing && n_facing >= 2; + //this.is_multi_cam = false; // test + if( MyDebug.LOG ) { + Log.d(TAG, "multi_same_facing: " + multi_same_facing); + Log.d(TAG, "n_facing: " + n_facing); + Log.d(TAG, "is_multi_cam: " + is_multi_cam); + } + + if( !is_multi_cam ) { + this.back_camera_ids = null; + this.front_camera_ids = null; + this.other_camera_ids = null; + } + } + + // initialise on-screen button visibility + View switchCameraButton = findViewById(R.id.switch_camera); + switchCameraButton.setVisibility(n_cameras > 1 ? 
View.VISIBLE : View.GONE); + // switchMultiCameraButton visibility updated below in mainUI.updateOnScreenIcons(), as it also depends on user preference + View speechRecognizerButton = findViewById(R.id.audio_control); + speechRecognizerButton.setVisibility(View.GONE); // disabled by default, until the speech recognizer is created + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting button visibility: " + (System.currentTimeMillis() - debug_time)); + View pauseVideoButton = findViewById(R.id.pause_video); + pauseVideoButton.setVisibility(View.GONE); + View takePhotoVideoButton = findViewById(R.id.take_photo_when_video_recording); + takePhotoVideoButton.setVisibility(View.GONE); + View cancelPanoramaButton = findViewById(R.id.cancel_panorama); + cancelPanoramaButton.setVisibility(View.GONE); + + // We initialise optional controls to invisible/gone, so they don't show while the camera is opening - the actual visibility is + // set in cameraSetup(). + // Note that ideally we'd set this in the xml, but doing so for R.id.zoom causes a crash on Galaxy Nexus startup beneath + // setContentView()! + // To be safe, we also do so for take_photo and zoom_seekbar (we already know we've had no reported crashes for focus_seekbar, + // however). 
+ View takePhotoButton = findViewById(R.id.take_photo); + takePhotoButton.setVisibility(View.INVISIBLE); + View zoomSeekbar = findViewById(R.id.zoom_seekbar); + zoomSeekbar.setVisibility(View.INVISIBLE); + + // initialise state of on-screen icons + mainUI.updateOnScreenIcons(); + + if( MainActivity.lock_to_landscape ) { + // listen for orientation event change (only required if lock_to_landscape==true + // (MainUI.onOrientationChanged() does nothing if lock_to_landscape==false) + orientationEventListener = new OrientationEventListener(this) { + @Override + public void onOrientationChanged(int orientation) { + MainActivity.this.mainUI.onOrientationChanged(orientation); + } + }; + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting orientation event listener: " + (System.currentTimeMillis() - debug_time)); + } + + layoutChangeListener = new View.OnLayoutChangeListener() { + @Override + public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) { + if( MyDebug.LOG ) + Log.d(TAG, "onLayoutChange"); + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && isInMultiWindowMode() ) { + Point display_size = new Point(); + applicationInterface.getDisplaySize(display_size, true); + if( MyDebug.LOG ) { + Log.d(TAG, " display width: " + display_size.x); + Log.d(TAG, " display height: " + display_size.y); + Log.d(TAG, " layoutUI display width: " + mainUI.layoutUI_display_w); + Log.d(TAG, " layoutUI display height: " + mainUI.layoutUI_display_h); + } + // We need to call layoutUI when the window is resized without an orientation change - + // this can happen in split-screen or multi-window mode, where onConfigurationChanged + // is not guaranteed to be called. 
+ // We check against the size of when layoutUI was last called, to avoid repeated calls + // when the resize is due to the device rotating and onConfigurationChanged is called - + // in fact we'd have a problem of repeatedly calling layoutUI, since doing layoutUI + // causes onLayoutChange() to be called again. + if( display_size.x != mainUI.layoutUI_display_w || display_size.y != mainUI.layoutUI_display_h ) { + if( MyDebug.LOG ) + Log.d(TAG, "call layoutUI due to resize"); + mainUI.layoutUI(); + } + } + } + }; + + // set up take photo long click + takePhotoButton.setOnLongClickListener(new View.OnLongClickListener() { + @Override + public boolean onLongClick(View v) { + if( !allowLongPress() ) { + // return false, so a regular click will still be triggered when the user releases the touch + return false; + } + return longClickedTakePhoto(); + } + }); + // set up on touch listener so we can detect if we've released from a long click + takePhotoButton.setOnTouchListener(new View.OnTouchListener() { + // the suppressed warning ClickableViewAccessibility suggests calling view.performClick for ACTION_UP, but this + // results in an additional call to clickedTakePhoto() - that is, if there is no long press, we get two calls to + // clickedTakePhoto instead one one; and if there is a long press, we get one call to clickedTakePhoto where + // there should be none. 
+ @SuppressLint("ClickableViewAccessibility") + @Override + public boolean onTouch(View view, MotionEvent motionEvent) { + if( motionEvent.getAction() == MotionEvent.ACTION_UP ) { + if( MyDebug.LOG ) + Log.d(TAG, "takePhotoButton ACTION_UP"); + takePhotoButtonLongClickCancelled(); + if( MyDebug.LOG ) + Log.d(TAG, "takePhotoButton ACTION_UP done"); + } + return false; + } + }); + + // set up gallery button long click + View galleryButton = findViewById(R.id.gallery); + galleryButton.setOnLongClickListener(new View.OnLongClickListener() { + @Override + public boolean onLongClick(View v) { + if( !allowLongPress() ) { + // return false, so a regular click will still be triggered when the user releases the touch + return false; + } + //preview.showToast(null, "Long click"); + longClickedGallery(); + return true; + } + }); + + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting long click listeners: " + (System.currentTimeMillis() - debug_time)); + + // listen for gestures + gestureDetector = new GestureDetector(this, new MyGestureDetector()); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after creating gesture detector: " + (System.currentTimeMillis() - debug_time)); + + setupSystemUiVisibilityListener(); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after setting system ui visibility listener: " + (System.currentTimeMillis() - debug_time)); + + // show "about" dialog for first time use + if( !has_done_first_time ) { + if( !is_test ) { + AlertDialog.Builder alertDialog = new AlertDialog.Builder(this); + alertDialog.setTitle(R.string.app_name); + alertDialog.setMessage(R.string.intro_text); + alertDialog.setPositiveButton(android.R.string.ok, null); + alertDialog.setNegativeButton(R.string.preference_online_help, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "online help"); + launchOnlineHelp(); + } + }); + alertDialog.show(); + } + + setFirstTimeFlag(); + } + 
+ { + // handle What's New dialog + int version_code = -1; + try { + PackageInfo pInfo = getPackageManager().getPackageInfo(getPackageName(), 0); + version_code = pInfo.versionCode; + } + catch(PackageManager.NameNotFoundException e) { + MyDebug.logStackTrace(TAG, "NameNotFoundException exception trying to get version number", e); + } + if( version_code != -1 ) { + int latest_version = sharedPreferences.getInt(PreferenceKeys.LatestVersionPreferenceKey, 0); + if( MyDebug.LOG ) { + Log.d(TAG, "version_code: " + version_code); + Log.d(TAG, "latest_version: " + latest_version); + } + //final boolean whats_new_enabled = false; + final boolean whats_new_enabled = true; + if( whats_new_enabled ) { + // whats_new_version is the version code that the What's New text is written for. Normally it will equal the + // current release (version_code), but it some cases we may want to leave it unchanged. + // E.g., we have a "What's New" for 1.44 (64), but then push out a quick fix for 1.44.1 (65). We don't want to + // show the dialog again to people who already received 1.44 (64), but we still want to show the dialog to people + // upgrading from earlier versions. + int whats_new_version = 93; // 1.55 + whats_new_version = Math.min(whats_new_version, version_code); // whats_new_version should always be <= version_code, but just in case! 
+ if( MyDebug.LOG ) { + Log.d(TAG, "whats_new_version: " + whats_new_version); + } + final boolean force_whats_new = false; + //final boolean force_whats_new = true; // for testing + boolean allow_show_whats_new = sharedPreferences.getBoolean(PreferenceKeys.ShowWhatsNewPreferenceKey, true); + if( MyDebug.LOG ) + Log.d(TAG, "allow_show_whats_new: " + allow_show_whats_new); + // don't show What's New if this is the first time the user has run + if( has_done_first_time && allow_show_whats_new && ( force_whats_new || whats_new_version > latest_version ) ) { + AlertDialog.Builder alertDialog = new AlertDialog.Builder(this); + alertDialog.setTitle(R.string.whats_new); + alertDialog.setMessage(R.string.whats_new_text); + alertDialog.setPositiveButton(android.R.string.ok, null); + alertDialog.show(); + } + } + // We set the latest_version whether or not the dialog is shown - if we showed the first time dialog, we don't + // want to then show the What's New dialog next time we run! Similarly if the user had disabled showing the dialog, + // but then enables it, we still shouldn't show the dialog until the new time Open Camera upgrades. 
+ SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putInt(PreferenceKeys.LatestVersionPreferenceKey, version_code); + editor.apply(); + } + } + + setModeFromIntents(savedInstanceState); + + // load icons + preloadIcons(R.array.flash_icons); + preloadIcons(R.array.focus_mode_icons); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: time after preloading icons: " + (System.currentTimeMillis() - debug_time)); + + // initialise text to speech engine + textToSpeechSuccess = false; + // run in separate thread so as to not delay startup time + new Thread(new Runnable() { + public void run() { + textToSpeech = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() { + @Override + public void onInit(int status) { + if( MyDebug.LOG ) + Log.d(TAG, "TextToSpeech initialised"); + if( status == TextToSpeech.SUCCESS ) { + textToSpeechSuccess = true; + if( MyDebug.LOG ) + Log.d(TAG, "TextToSpeech succeeded"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "TextToSpeech failed"); + } + } + }); + } + }).start(); + + // handle on back behaviour + popupOnBackPressedCallback = new PopupOnBackPressedCallback(false); + this.getOnBackPressedDispatcher().addCallback(this, popupOnBackPressedCallback); + pausePreviewOnBackPressedCallback = new PausePreviewOnBackPressedCallback(false); + this.getOnBackPressedDispatcher().addCallback(this, pausePreviewOnBackPressedCallback); + screenLockOnBackPressedCallback = new ScreenLockOnBackPressedCallback(false); + this.getOnBackPressedDispatcher().addCallback(this, screenLockOnBackPressedCallback); + + // create notification channel - only needed on Android 8+ + // update: notifications now removed due to needing permissions on Android 13+ + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) { + CharSequence name = "Open Camera Image Saving"; + String description = "Notification channel for processing and saving images in the background"; + int importance = NotificationManager.IMPORTANCE_LOW; + NotificationChannel 
channel = new NotificationChannel(CHANNEL_ID, name, importance); + channel.setDescription(description); + // Register the channel with the system; you can't change the importance + // or other notification behaviors after this + NotificationManager notificationManager = getSystemService(NotificationManager.class); + notificationManager.createNotificationChannel(channel); + }*/ + + // so we get the icons rotation even when rotating for the first time - see onSystemOrientationChanged + this.hasOldSystemOrientation = true; + this.oldSystemOrientation = getSystemOrientation(); + + if( MyDebug.LOG ) + Log.d(TAG, "onCreate: total time for Activity startup: " + (System.currentTimeMillis() - debug_time)); + } + + /** Whether to use codepaths that are compatible with scoped storage. + */ + public static boolean useScopedStorage() { + //return false; + //return true; + return Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q; + } + + /** Whether this is a multi camera device, and the user preference is set to enable the multi-camera button. + */ + public boolean isMultiCamEnabled() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + return is_multi_cam && sharedPreferences.getBoolean(PreferenceKeys.MultiCamButtonPreferenceKey, true); + } + + /** Whether this is a multi camera device, whether or not the user preference is set to enable + * the multi-camera button. + */ + public boolean isMultiCam() { + return is_multi_cam; + } + + /* Returns the camera Id in use by the preview - or the one we requested, if the camera failed + * to open. + * Needed as Preview.getCameraId() returns 0 if camera_controller==null, but if the camera + * fails to open, we want the switch camera icons to still work as expected! 
+ */ + private int getActualCameraId() { + if( preview.getCameraController() == null ) + return applicationInterface.getCameraIdPref(); + else + return preview.getCameraId(); + } + + /** Whether the icon switch_multi_camera should be displayed. This is if the following are all + * true: + * - The device is a multi camera device (MainActivity.is_multi_cam==true). + * - The user preference for using the separate icons is enabled + * (PreferenceKeys.MultiCamButtonPreferenceKey). + * - For the current camera ID, there are at least two cameras with the same front/back/external + * "facing" (e.g., imagine a device with two back cameras, but only one front camera - no point + * showing the multi-cam icon for just a single logical front camera). + * OR there are physical cameras for the current camera, and again the user preference + * PreferenceKeys.MultiCamButtonPreferenceKey is enabled. + */ + public boolean showSwitchMultiCamIcon() { + if( preview.hasPhysicalCameras() ) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + if( sharedPreferences.getBoolean(PreferenceKeys.MultiCamButtonPreferenceKey, true) ) + return true; + } + if( isMultiCamEnabled() ) { + int cameraId = getActualCameraId(); + switch( preview.getCameraControllerManager().getFacing(cameraId) ) { + case FACING_BACK: + if( back_camera_ids.size() > 1 ) + return true; + break; + case FACING_FRONT: + if( front_camera_ids.size() > 1 ) + return true; + break; + default: + if( other_camera_ids.size() > 1 ) + return true; + break; + } + } + return false; + } + + /** Whether user preference is set to allow long press actions. + */ + private boolean allowLongPress() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + return sharedPreferences.getBoolean(PreferenceKeys.AllowLongPressPreferenceKey, true); + } + + /* This method sets the preference defaults which are set specific for a particular device. 
+ * This method should be called when Open Camera is run for the very first time after installation, + * or when the user has requested to "Reset settings". + */ + void setDeviceDefaults() { + if( MyDebug.LOG ) + Log.d(TAG, "setDeviceDefaults"); + boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + //SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + //boolean is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + //boolean is_oneplus = Build.MANUFACTURER.toLowerCase(Locale.US).contains("oneplus"); + //boolean is_nexus = Build.MODEL.toLowerCase(Locale.US).contains("nexus"); + //boolean is_nexus6 = Build.MODEL.toLowerCase(Locale.US).contains("nexus 6"); + //boolean is_pixel_phone = Build.DEVICE != null && Build.DEVICE.equals("sailfish"); + //boolean is_pixel_xl_phone = Build.DEVICE != null && Build.DEVICE.equals("marlin"); + /*if( MyDebug.LOG ) { + //Log.d(TAG, "is_samsung? " + is_samsung); + //Log.d(TAG, "is_oneplus? " + is_oneplus); + //Log.d(TAG, "is_nexus? " + is_nexus); + //Log.d(TAG, "is_nexus6? " + is_nexus6); + //Log.d(TAG, "is_pixel_phone? " + is_pixel_phone); + //Log.d(TAG, "is_pixel_xl_phone? " + is_pixel_xl_phone); + }*/ + /*if( is_samsung || is_oneplus ) { + // The problems we used to have on Samsung Galaxy devices are now fixed, by setting + // TEMPLATE_PREVIEW for the precaptureBuilder in CameraController2. 
This also fixes the + // problems with OnePlus 3T having blue tinge if flash is on, and the scene is bright + // enough not to need it + if( MyDebug.LOG ) + Log.d(TAG, "set fake flash for camera2"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.Camera2FakeFlashPreferenceKey, true); + editor.apply(); + }*/ + /*if( is_nexus6 ) { + // Nexus 6 captureBurst() started having problems with Android 7 upgrade - images appeared in wrong order (and with wrong order of shutter speeds in exif info), as well as problems with the camera failing with serious errors + // we set this even for Nexus 6 devices not on Android 7, as at some point they'll likely be upgraded to Android 7 + // Update: now fixed in v1.37, this was due to bug where we set RequestTag.CAPTURE for all captures in takePictureBurstExpoBracketing(), rather than just the last! + if( MyDebug.LOG ) + Log.d(TAG, "disable fast burst for camera2"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.Camera2FastBurstPreferenceKey, false); + editor.apply(); + }*/ + if( is_samsung && !is_test ) { + // Samsung Galaxy devices (including S10e, S24) have problems with HDR/expo - base images come out with wrong exposures. + // This can be fixed by not using fast bast, allowing us to adjust the preview exposure to match. 
+ if( MyDebug.LOG ) + Log.d(TAG, "disable fast burst for camera2"); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.Camera2FastBurstPreferenceKey, false); + editor.apply(); + } + if( supports_camera2 && !is_test ) { + // n.b., when testing, we explicitly decide whether to run with Camera2 API or not + CameraControllerManager2 manager2 = new CameraControllerManager2(this); + int n_cameras = manager2.getNumberOfCameras(); + boolean all_supports_camera2 = true; // whether all cameras have at least LIMITED support for Camera2 (risky to default to Camera2 if any cameras are LEGACY, as not easy to test such devices) + for(int i=0;i= Build.VERSION_CODES.S ) + default_to_camera2 = true; + else if( is_nokia && Build.VERSION.SDK_INT >= Build.VERSION_CODES.P ) + default_to_camera2 = true; + else if( is_samsung && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) + default_to_camera2 = true; + else if( is_oneplus && Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) + default_to_camera2 = true; + + if( default_to_camera2 ) { + if( MyDebug.LOG ) + Log.d(TAG, "default to camera2 API"); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.CameraAPIPreferenceKey, "preference_camera_api_camera2"); + editor.apply(); + } + } + } + } + + /** Switches modes if required, if called from a relevant intent/tile. 
+ */ + private void setModeFromIntents(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "setModeFromIntents"); + if( savedInstanceState != null ) { + // If we're restoring from a saved state, we shouldn't be resetting any modes + if( MyDebug.LOG ) + Log.d(TAG, "restoring from saved state"); + return; + } + boolean done_facing = false; + String action = this.getIntent().getAction(); + if( MediaStore.INTENT_ACTION_VIDEO_CAMERA.equals(action) || MediaStore.ACTION_VIDEO_CAPTURE.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from video intent"); + applicationInterface.setVideoPref(true); + } + else if( MediaStore.ACTION_IMAGE_CAPTURE.equals(action) || MediaStore.ACTION_IMAGE_CAPTURE_SECURE.equals(action) || MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA.equals(action) || MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from photo intent"); + applicationInterface.setVideoPref(false); + } + else if( (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && MyTileService.TILE_ID.equals(action)) || ACTION_SHORTCUT_CAMERA.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from quick settings tile or application shortcut for Open Camera: photo mode"); + applicationInterface.setVideoPref(false); + } + else if( (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && MyTileServiceVideo.TILE_ID.equals(action)) || ACTION_SHORTCUT_VIDEO.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from quick settings tile or application shortcut for Open Camera: video mode"); + applicationInterface.setVideoPref(true); + } + else if( (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && MyTileServiceFrontCamera.TILE_ID.equals(action)) || ACTION_SHORTCUT_SELFIE.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from quick settings tile or application shortcut for Open Camera: selfie mode"); + done_facing = true; + applicationInterface.switchToCamera(true); + } + else if( 
ACTION_SHORTCUT_GALLERY.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from application shortcut for Open Camera: gallery"); + openGallery(); + } + else if( ACTION_SHORTCUT_SETTINGS.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "launching from application shortcut for Open Camera: settings"); + openSettings(); + } + + Bundle extras = this.getIntent().getExtras(); + if( extras != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "handle intent extra information"); + if( !done_facing ) { + int camera_facing = extras.getInt("android.intent.extras.CAMERA_FACING", -1); + if( camera_facing == 0 || camera_facing == 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "found android.intent.extras.CAMERA_FACING: " + camera_facing); + applicationInterface.switchToCamera(camera_facing==1); + done_facing = true; + } + } + if( !done_facing ) { + if( extras.getInt("android.intent.extras.LENS_FACING_FRONT", -1) == 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "found android.intent.extras.LENS_FACING_FRONT"); + applicationInterface.switchToCamera(true); + done_facing = true; + } + } + if( !done_facing ) { + if( extras.getInt("android.intent.extras.LENS_FACING_BACK", -1) == 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "found android.intent.extras.LENS_FACING_BACK"); + applicationInterface.switchToCamera(false); + done_facing = true; + } + } + if( !done_facing ) { + if( extras.getBoolean("android.intent.extra.USE_FRONT_CAMERA", false) ) { + if( MyDebug.LOG ) + Log.d(TAG, "found android.intent.extra.USE_FRONT_CAMERA"); + applicationInterface.switchToCamera(true); + done_facing = true; + } + } + } + + // N.B., in practice the hasSetCameraId() check is pointless as we don't save the camera ID in shared preferences, so it will always + // be false when application is started from onCreate(), unless resuming from saved instance (in which case we shouldn't be here anyway) + if( !done_facing && !applicationInterface.hasSetCameraId() ) { + if( MyDebug.LOG ) + Log.d(TAG, "initialise to back camera"); + 
// most devices have first camera as back camera anyway so this wouldn't be needed, but some (e.g., LG G6) have first camera + // as front camera, so we should explicitly switch to back camera + applicationInterface.switchToCamera(false); + } + } + + /** Determine whether we support Camera2 API. + */ + private void initCamera2Support() { + if( MyDebug.LOG ) + Log.d(TAG, "initCamera2Support"); + supports_camera2 = false; + { + // originally we allowed Camera2 if all cameras support at least LIMITED + // as of 1.45, we allow Camera2 if at least one camera supports at least LIMITED - this + // is to support devices that might have a camera with LIMITED or better support, but + // also a LEGACY camera + CameraControllerManager2 manager2 = new CameraControllerManager2(this); + supports_camera2 = false; + int n_cameras = manager2.getNumberOfCameras(); + if( n_cameras == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "Camera2 reports 0 cameras"); + supports_camera2 = false; + } + for(int i=0;i=0;i--) { + String this_location = save_location_history.get(i); + res = checkSaveLocation(this_location); + if( !res.res ) { + if( MyDebug.LOG ) + Log.d(TAG, "save_location in history " + i + " not valid with scoped storage: " + this_location); + if( res.alt == null ) { + // no alternative, remove + save_location_history.remove(i); + } + else { + // replace with the alternative + if( MyDebug.LOG ) + Log.d(TAG, "alternative: " + res.alt); + save_location_history.set(i, res.alt); + } + any_changes = true; + } + } + + if( any_changes ) { + this.save_location_history.updateFolderHistory(this.getStorageUtils().getSaveLocation(), false); + } + } + } + + /** Result from checkSaveLocation. Ideally we'd just use android.util.Pair, but that's not mocked + * for use in unit tests. + * See checkSaveLocation() for documentation. 
+ */ + public static class CheckSaveLocationResult { + final boolean res; + final String alt; + + public CheckSaveLocationResult(boolean res, String alt) { + this.res = res; + this.alt = alt; + } + + @Override + public boolean equals(Object o) { + if( !(o instanceof CheckSaveLocationResult) ) { + return false; + } + CheckSaveLocationResult that = (CheckSaveLocationResult)o; + // stop dumb inspection that suggests replacing warning with an error(!) (Objects class is not available on all API versions) + // and the other inspection suggests replacing with code that would cause a nullpointerexception + //noinspection EqualsReplaceableByObjectsCall,StringEquality + return that.res == this.res && ( (that.alt == this.alt) || (that.alt != null && that.alt.equals(this.alt) ) ); + //return that.res == this.res && ( (that.alt == this.alt) || (that.alt != null && that.alt.equals(this.alt) ) ); + } + + @Override + public int hashCode() { + return (res ? 1249 : 1259) ^ (alt == null ? 0 : alt.hashCode()); + } + + @NonNull + @Override + public String toString() { + return "CheckSaveLocationResult{" + res + " , " + alt + "}"; + } + } + + public static CheckSaveLocationResult checkSaveLocation(final String folder) { + return checkSaveLocation(folder, null); + } + + /** Checks to see if the supplied folder (in the format as used by our preferences) is supported + * with scoped storage. + * @return The Boolean is always non-null, and returns whether the save location is valid. + * If the return is false, then if the String is non-null, this stores an alternative + * form that is valid. If null, there is no valid alternative. + * @param base_folder This should normally be null, but can be used to specify manually the + * folder instead of using StorageUtils.getBaseFolder() - needed for unit + * tests as Environment class (for Environment.getExternalStoragePublicDirectory()) + * is not mocked. 
+ */ + public static CheckSaveLocationResult checkSaveLocation(final String folder, String base_folder) { + /*if( MyDebug.LOG ) + Log.d(TAG, "DCIM path: " + Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath());*/ + if( StorageUtils.saveFolderIsFull(folder) ) { + if( MyDebug.LOG ) + Log.d(TAG, "checkSaveLocation for full path: " + folder); + // But still check to see if the full path is part of DCIM. Since when using the + // file dialog method with non-scoped storage, if the user specifies multiple subfolders + // e.g. DCIM/blah_a/blah_b, we don't spot that in FolderChooserDialog.useFolder(), and + // instead still store that as the full path. + + if( base_folder == null ) + base_folder = StorageUtils.getBaseFolder().getAbsolutePath(); + // strip '/' as last character - makes it easier to also spot cases where the folder is the + // DCIM folder, but doesn't have a '/' last character + if( !base_folder.isEmpty() && base_folder.charAt(base_folder.length()-1) == '/' ) + base_folder = base_folder.substring(0, base_folder.length()-1); + if( MyDebug.LOG ) + Log.d(TAG, " compare to base_folder: " + base_folder); + String alt_folder = null; + if( folder.startsWith(base_folder) ) { + alt_folder = folder.substring(base_folder.length()); + // also need to strip the first '/' if it exists + if( !alt_folder.isEmpty() && alt_folder.charAt(0) == '/' ) + alt_folder = alt_folder.substring(1); + } + + return new CheckSaveLocationResult(false, alt_folder); + } + else { + // already in expected format (indicates a sub-folder of DCIM) + return new CheckSaveLocationResult(true, null); + } + } + + private void preloadIcons(int icons_id) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "preloadIcons: " + icons_id); + debug_time = System.currentTimeMillis(); + } + String [] icons = getResources().getStringArray(icons_id); + for(String icon : icons) { + int resource = getResources().getIdentifier(icon, null, 
this.getApplicationContext().getPackageName()); + if( MyDebug.LOG ) + Log.d(TAG, "load resource: " + resource); + Bitmap bm = BitmapFactory.decodeResource(getResources(), resource); + this.preloaded_bitmap_resources.put(resource, bm); + } + if( MyDebug.LOG ) { + Log.d(TAG, "preloadIcons: total time for preloadIcons: " + (System.currentTimeMillis() - debug_time)); + Log.d(TAG, "size of preloaded_bitmap_resources: " + preloaded_bitmap_resources.size()); + } + } + + @Override + protected void onStop() { + if( MyDebug.LOG ) + Log.d(TAG, "onStop"); + super.onStop(); + + // we stop location listening in onPause, but done here again just to be certain! + applicationInterface.getLocationSupplier().freeLocationListeners(); + } + + @Override + protected void onDestroy() { + if( MyDebug.LOG ) { + Log.d(TAG, "onDestroy"); + Log.d(TAG, "size of preloaded_bitmap_resources: " + preloaded_bitmap_resources.size()); + } + activity_count--; + if( MyDebug.LOG ) + Log.d(TAG, "activity_count: " + activity_count); + + // should do asap before waiting for images to be saved - as risk the application will be killed whilst waiting for that to happen, + // and we want to avoid notifications hanging around + cancelImageSavingNotification(); + + if( want_no_limits && navigation_gap != 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "clear FLAG_LAYOUT_NO_LIMITS"); + // it's unclear why this matters - but there is a bug when exiting split-screen mode, if the split-screen mode had set want_no_limits: + // even though the application is created when leaving split-screen mode, we still end up with the window flags for showing + // under the navigation bar! 
+ // update: this issue is also fixed by not allowing want_no_limits mode in multi-window mode, but still good to reset things here + // just in case + showUnderNavigation(false); + } + + // reduce risk of losing any images + // we don't do this in onPause or onStop, due to risk of ANRs + // note that even if we did call this earlier in onPause or onStop, we'd still want to wait again here: as it can happen + // that a new image appears after onPause/onStop is called, in which case we want to wait until images are saved, + waitUntilImageQueueEmpty(); + + preview.onDestroy(); + if( applicationInterface != null ) { + applicationInterface.onDestroy(); + } + // Need to recycle to avoid out of memory when running tests - probably good practice to do anyway + for(Map.Entry entry : preloaded_bitmap_resources.entrySet()) { + if( MyDebug.LOG ) + Log.d(TAG, "recycle: " + entry.getKey()); + entry.getValue().recycle(); + } + preloaded_bitmap_resources.clear(); + if( textToSpeech != null ) { + // http://stackoverflow.com/questions/4242401/tts-error-leaked-serviceconnection-android-speech-tts-texttospeech-solved + if( MyDebug.LOG ) + Log.d(TAG, "free textToSpeech"); + textToSpeech.stop(); + textToSpeech.shutdown(); + textToSpeech = null; + } + + // we stop location listening in onPause, but done here again just to be certain! + applicationInterface.getLocationSupplier().freeLocationListeners(); + + super.onDestroy(); + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy done"); + } + + @Override + public boolean onCreateOptionsMenu(Menu menu) { + // Inflate the menu; this adds items to the action bar if it is present. 
+ getMenuInflater().inflate(R.menu.main, menu); + return true; + } + + private void setFirstTimeFlag() { + if( MyDebug.LOG ) + Log.d(TAG, "setFirstTimeFlag"); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.FirstTimePreferenceKey, true); + editor.apply(); + } + + private static String getOnlineHelpUrl(String append) { + if( MyDebug.LOG ) + Log.d(TAG, "getOnlineHelpUrl: " + append); + // if we change this, remember that any page linked to must abide by Google Play developer policies! + // also if we change this method name or where it's located, remember to update the mention in + // opencamera_source.txt + //return "https://opencamera.sourceforge.io/" + append; + return "https://opencamera.org.uk/" + append; + } + + void launchOnlineHelp() { + if( MyDebug.LOG ) + Log.d(TAG, "launchOnlineHelp"); + // if we change this, remember that any page linked to must abide by Google Play developer policies! + Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getOnlineHelpUrl(""))); + startActivity(browserIntent); + } + + void launchOnlinePrivacyPolicy() { + if( MyDebug.LOG ) + Log.d(TAG, "launchOnlinePrivacyPolicy"); + // if we change this, remember that any page linked to must abide by Google Play developer policies! + //Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getOnlineHelpUrl("index.html#privacy"))); + Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getOnlineHelpUrl("privacy_oc.html"))); + startActivity(browserIntent); + } + + void launchOnlineLicences() { + if( MyDebug.LOG ) + Log.d(TAG, "launchOnlineLicences"); + // if we change this, remember that any page linked to must abide by Google Play developer policies! 
+ Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getOnlineHelpUrl("#licence"))); + startActivity(browserIntent); + } + + /* Audio trigger - either loud sound, or speech recognition. + * This performs some additional checks before taking a photo: the trigger is ignored if a + * popup is open, the camera is in background, a photo is already being taken (or timer is + * running), or video is recording; otherwise a photo is scheduled on the UI thread. + */ + void audioTrigger() { + if( MyDebug.LOG ) + Log.d(TAG, "audioTrigger"); + if( popupIsOpen() ) { + if( MyDebug.LOG ) + Log.d(TAG, "ignore audio trigger due to popup open"); + } + else if( camera_in_background ) { + if( MyDebug.LOG ) + Log.d(TAG, "ignore audio trigger due to camera in background"); + } + else if( preview.isTakingPhotoOrOnTimer() ) { + if( MyDebug.LOG ) + Log.d(TAG, "ignore audio trigger due to already taking photo or on timer"); + } + else if( preview.isVideoRecording() ) { + if( MyDebug.LOG ) + Log.d(TAG, "ignore audio trigger due to already recording video"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "schedule take picture due to loud noise"); + //takePicture(); + this.runOnUiThread(new Runnable() { + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "taking picture due to audio trigger"); + takePicture(false); + } + }); + } + } + + public boolean onKeyDown(int keyCode, KeyEvent event) { + if( MyDebug.LOG ) + Log.d(TAG, "onKeyDown: " + keyCode); + if( camera_in_background ) { + // don't allow keys such as volume keys for taking photo when camera in background! + if( MyDebug.LOG ) + Log.d(TAG, "camera is in background"); + } + else { + boolean handled = mainUI.onKeyDown(keyCode, event); + if( handled ) + return true; + } + return super.onKeyDown(keyCode, event); + } + + public boolean onKeyUp(int keyCode, KeyEvent event) { + if( MyDebug.LOG ) + Log.d(TAG, "onKeyUp: " + keyCode); + if( camera_in_background ) { + // don't allow keys such as volume keys for taking photo when camera in background!
+ if( MyDebug.LOG ) + Log.d(TAG, "camera is in background"); + } + else { + mainUI.onKeyUp(keyCode, event); + } + return super.onKeyUp(keyCode, event); + } + + private void zoomByStep(int change) { + if( MyDebug.LOG ) + Log.d(TAG, "zoomByStep: " + change); + if( preview.supportsZoom() && change != 0 ) { + if( preview.getCameraController() != null ) { + // If the minimum zoom is < 1.0, the seekbar will have repeated entries for 1x zoom + // (so it's easier for the user to zoom to exactly 1.0x). But if using the -/+ buttons, + // volume keys etc to zoom, we want to skip over these repeated values. + int zoom_factor = preview.getCameraController().getZoom(); + int new_zoom_factor = zoom_factor + change; + if( MyDebug.LOG ) + Log.d(TAG, "new_zoom_factor: " + new_zoom_factor); + while( new_zoom_factor > 0 && new_zoom_factor < preview.getMaxZoom() && preview.getZoomRatio(new_zoom_factor) == preview.getZoomRatio() ) { + if( change > 0 ) + change++; + else + change--; + new_zoom_factor = zoom_factor + change; + if( MyDebug.LOG ) + Log.d(TAG, "skip over constant region: " + new_zoom_factor); + } + } + + mainUI.changeSeekbar(R.id.zoom_seekbar, -change); // seekbar is opposite direction to zoom array + } + } + + public void zoomIn() { + zoomByStep(1); + } + + public void zoomOut() { + zoomByStep(-1); + } + + public void changeExposure(int change) { + if( preview.supportsExposures() ) { + if( exposure_seekbar_values != null ) { + SeekBar seekBar = this.findViewById(R.id.exposure_seekbar); + int progress = seekBar.getProgress(); + int new_progress = progress + change; + int current_exposure = getExposureSeekbarValue(progress); + if( new_progress < 0 || new_progress > exposure_seekbar_values.size()-1 ) { + // skip + } + else if( getExposureSeekbarValue(new_progress) == 0 && current_exposure != 0 ) { + // snap to the central repeated zero + new_progress = exposure_seekbar_values_zero; + change = new_progress - progress; + } + else { + // skip over the repeated zeroes + while( 
new_progress > 0 && new_progress < exposure_seekbar_values.size()-1 && getExposureSeekbarValue(new_progress) == current_exposure ) { + if( change > 0 ) + change++; + else + change--; + new_progress = progress + change; + if( MyDebug.LOG ) + Log.d(TAG, "skip over constant region: " + new_progress); + } + } + } + mainUI.changeSeekbar(R.id.exposure_seekbar, change); + } + } + + public int getExposureSeekbarProgressZero() { + return exposure_seekbar_values_zero; + } + + /** Returns the exposure compensation corresponding to a progress on the seekbar. + * Caller is responsible for checking that progress is within valid range. + */ + public int getExposureSeekbarValue(int progress) { + return exposure_seekbar_values.get(progress); + } + + public void changeISO(int change) { + if( preview.supportsISORange() ) { + mainUI.changeSeekbar(R.id.iso_seekbar, change); + } + } + + public void changeFocusDistance(int change, boolean is_target_distance) { + mainUI.changeSeekbar(is_target_distance ? R.id.focus_bracketing_target_seekbar : R.id.focus_seekbar, change); + } + + private final SensorEventListener accelerometerListener = new SensorEventListener() { + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + } + + @Override + public void onSensorChanged(SensorEvent event) { + preview.onAccelerometerSensorChanged(event); + } + }; + + public float getWaterDensity() { + return this.mWaterDensity; + } + + @Override + protected void onResume() { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "onResume"); + debug_time = System.currentTimeMillis(); + } + super.onResume(); + this.app_is_paused = false; // must be set before initLocation() at least + + // this is intentionally true, not false, as the uncovering happens in DrawPreview when we receive frames from the camera after it's opened + // (this should already have been set from the call in onPause(), but we set it here again just in case) + applicationInterface.getDrawPreview().setCoverPreview(true); 
+ + applicationInterface.getDrawPreview().clearDimPreview(); // shouldn't be needed, but just in case the dim preview flag got set somewhere + + cancelImageSavingNotification(); + + // Set black window background; also needed if we hide the virtual buttons in immersive mode + // Note that we do it here rather than customising the theme's android:windowBackground, so this doesn't affect other views - in particular, the MyPreferenceFragment settings + getWindow().getDecorView().getRootView().setBackgroundColor(Color.BLACK); + + if( edge_to_edge_mode && Build.VERSION.SDK_INT >= Build.VERSION_CODES.VANILLA_ICE_CREAM ) { + // needed on Android 15, otherwise the navigation bar is not transparent + getWindow().setNavigationBarContrastEnforced(false); + } + + registerDisplayListener(); + + mSensorManager.registerListener(accelerometerListener, mSensorAccelerometer, SensorManager.SENSOR_DELAY_NORMAL); + magneticSensor.registerMagneticListener(mSensorManager); + if( orientationEventListener != null ) { + orientationEventListener.enable(); + } + getWindow().getDecorView().addOnLayoutChangeListener(layoutChangeListener); + + // if BLE remote control is enabled, then start the background BLE service + bluetoothRemoteControl.startRemoteControl(); + + //speechControl.initSpeechRecognizer(); + initLocation(); + initGyroSensors(); + applicationInterface.getImageSaver().onResume(); + soundPoolManager.initSound(); + soundPoolManager.loadSound(R.raw.mybeep); + soundPoolManager.loadSound(R.raw.mybeep_hi); + + resetCachedSystemOrientation(); // just in case? + mainUI.layoutUI(); + + // If the cached last media has exif datetime info, it's fine to just call updateGalleryIcon(), + // which will find the most recent media (and takes care of if the cached last image may have + // been deleted). + // If it doesn't have exif datetime tags, updateGalleryIcon() may not be able to find the most + // recent media, so we stick with the cached uri if we can test that it's still accessible. 
+ if( !getStorageUtils().getLastMediaScannedHasNoExifDateTime() ) { + updateGalleryIcon(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "last media has no exif datetime, so check it still exists"); + boolean uri_exists = false; + InputStream inputStream = null; + Uri check_uri = getStorageUtils().getLastMediaScannedCheckUri(); + if( MyDebug.LOG ) + Log.d(TAG, "check_uri: " + check_uri); + try { + inputStream = this.getContentResolver().openInputStream(check_uri); + if( inputStream != null ) + uri_exists = true; + } + catch(Exception ignored) { + } + finally { + if( inputStream != null ) { + try { + inputStream.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close inputStream", e); + } + } + } + + if( uri_exists ) { + if( MyDebug.LOG ) + Log.d(TAG, " most recent uri exists"); + // also re-allow ghost image again in case that option is set (since we won't be + // doing this via updateGalleryIcon()) + applicationInterface.getDrawPreview().allowGhostImage(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, " most recent uri no longer valid"); + updateGalleryIcon(); + } + } + + applicationInterface.reset(false); // should be called before opening the camera in preview.onResume() + + if( !camera_in_background ) { + // don't restart camera if we're showing a dialog or settings + preview.onResume(); + } + + { + // show a toast for the camera if it's not the first for front of back facing (otherwise on multi-front/back camera + // devices, it's easy to forget if set to a different camera) + // but we only show this when resuming, not every time the camera opens + // OR show the toast for the camera if it's a physical camera + int cameraId = applicationInterface.getCameraIdPref(); + String cameraIdSPhysical = applicationInterface.getCameraIdSPhysicalPref(); + if( cameraId > 0 || cameraIdSPhysical != null ) { + CameraControllerManager camera_controller_manager = preview.getCameraControllerManager(); + CameraController.Facing front_facing = 
camera_controller_manager.getFacing(cameraId); + if( MyDebug.LOG ) + Log.d(TAG, "front_facing: " + front_facing); + if( camera_controller_manager.getNumberOfCameras() > 2 || cameraIdSPhysical != null ) { + boolean camera_is_default = true; + if( cameraIdSPhysical != null ) + camera_is_default = false; + for(int i=0;i 0) { + createImageSavingNotification(); + } + + if( update_gallery_future != null ) { + update_gallery_future.cancel(true); + } + + // intentionally do this again, just in case something turned location on since - keep this right at the end: + applicationInterface.getLocationSupplier().freeLocationListeners(); + + // don't want to enter immersive mode when in background + // needs to be last in case anything above indirectly called initImmersiveMode() + cancelImmersiveTimer(); + + if( MyDebug.LOG ) { + Log.d(TAG, "onPause: total time to pause: " + (System.currentTimeMillis() - debug_time)); + } + } + + private class MyDisplayListener implements DisplayManager.DisplayListener { + private int old_rotation; + + private MyDisplayListener() { + int rotation = MainActivity.this.getWindowManager().getDefaultDisplay().getRotation(); + if( MyDebug.LOG ) { + Log.d(TAG, "MyDisplayListener"); + Log.d(TAG, "rotation: " + rotation); + } + old_rotation = rotation; + } + + @Override + public void onDisplayAdded(int displayId) { + } + + @Override + public void onDisplayRemoved(int displayId) { + } + + @Override + public void onDisplayChanged(int displayId) { + int rotation = MainActivity.this.getWindowManager().getDefaultDisplay().getRotation(); + if( MyDebug.LOG ) { + Log.d(TAG, "onDisplayChanged: " + displayId); + Log.d(TAG, "rotation: " + rotation); + Log.d(TAG, "old_rotation: " + old_rotation); + } + if( ( rotation == Surface.ROTATION_0 && old_rotation == Surface.ROTATION_180 ) || + ( rotation == Surface.ROTATION_180 && old_rotation == Surface.ROTATION_0 ) || + ( rotation == Surface.ROTATION_90 && old_rotation == Surface.ROTATION_270 ) || + ( rotation == 
Surface.ROTATION_270 && old_rotation == Surface.ROTATION_90 ) + ) { + if( MyDebug.LOG ) + Log.d(TAG, "onDisplayChanged: switched between landscape and reverse orientation"); + onSystemOrientationChanged(); + } + + old_rotation = rotation; + } + } + + /** Creates and registers a display listener, needed to handle switches between landscape and + * reverse landscape (without going via portrait) when lock_to_landscape==false. + */ + private void registerDisplayListener() { + if( MyDebug.LOG ) + Log.d(TAG, "registerDisplayListener"); + if( !lock_to_landscape ) { + displayListener = new MyDisplayListener(); + DisplayManager displayManager = (DisplayManager) this.getSystemService(Context.DISPLAY_SERVICE); + displayManager.registerDisplayListener(displayListener, null); + } + } + + private void unregisterDisplayListener() { + if( MyDebug.LOG ) + Log.d(TAG, "unregisterDisplayListener"); + if( displayListener != null ) { + DisplayManager displayManager = (DisplayManager) this.getSystemService(Context.DISPLAY_SERVICE); + displayManager.unregisterDisplayListener(displayListener); + displayListener = null; + } + } + + @Override + public void onConfigurationChanged(@NonNull Configuration newConfig) { + if( MyDebug.LOG ) + Log.d(TAG, "onConfigurationChanged(): " + newConfig.orientation); + // configuration change can include screen orientation (landscape/portrait) when not locked (when settings is open) + // needed if app is paused/resumed when settings is open and device is in portrait mode + // update: need this all the time when lock_to_landscape==false + onSystemOrientationChanged(); + super.onConfigurationChanged(newConfig); + } + + private void onSystemOrientationChanged() { + if( MyDebug.LOG ) + Log.d(TAG, "onSystemOrientationChanged"); + + // n.b., need to call this first, before preview.setCameraDisplayOrientation(), since + // preview.setCameraDisplayOrientation() will call getDisplayRotation() and we don't want + // to be using the outdated cached value now that the 
rotation has changed! + // update: no longer relevant, as preview.setCameraDisplayOrientation() now sets + // prefer_later to true to avoid using cached value. But might as well call it first anyway. + resetCachedSystemOrientation(); + + preview.setCameraDisplayOrientation(); + if( !lock_to_landscape ) { + SystemOrientation newSystemOrientation = getSystemOrientation(); + if( hasOldSystemOrientation && oldSystemOrientation == newSystemOrientation ) { + if( MyDebug.LOG ) + Log.d(TAG, "onSystemOrientationChanged: orientation hasn't changed"); + } + else { + if( hasOldSystemOrientation ) { + // handle rotation animation + int start_rotation = getRotationFromSystemOrientation(oldSystemOrientation) - getRotationFromSystemOrientation(newSystemOrientation); + if( MyDebug.LOG ) + Log.d(TAG, "start_rotation: " + start_rotation); + if( start_rotation < -180 ) + start_rotation += 360; + else if( start_rotation > 180 ) + start_rotation -= 360; + mainUI.layoutUIWithRotation(start_rotation); + } + else { + mainUI.layoutUI(); + } + applicationInterface.getDrawPreview().updateSettings(); + + hasOldSystemOrientation = true; + oldSystemOrientation = newSystemOrientation; + } + } + } + + /** Returns the current system orientation. + * Note if lock_to_landscape is true, this always returns LANDSCAPE even if called when we're + * allowing configuration changes (e.g., in Settings or a dialog is showing). (This method, + * and hence calls to it, were added to support lock_to_landscape==false behaviour, and we + * want to avoid changing behaviour for lock_to_landscape==true behaviour.) + * Note that this also caches the orientation: firstly for performance (as this is called from + * DrawPreview), secondly to support REVERSE_LANDSCAPE, we don't want a sudden change if + * getDefaultDisplay().getRotation() changes after the configuration changes. 
+ */ + public SystemOrientation getSystemOrientation() { + if( test_force_system_orientation ) { + return test_system_orientation; + } + if( lock_to_landscape ) { + return SystemOrientation.LANDSCAPE; + } + if( has_cached_system_orientation ) { + return cached_system_orientation; + } + SystemOrientation result; + int system_orientation = getResources().getConfiguration().orientation; + if( MyDebug.LOG ) + Log.d(TAG, "system orientation: " + system_orientation); + switch( system_orientation ) { + case Configuration.ORIENTATION_LANDSCAPE: + result = SystemOrientation.LANDSCAPE; + // now try to distinguish between landscape and reverse landscape + { + int rotation = getWindowManager().getDefaultDisplay().getRotation(); + if( MyDebug.LOG ) + Log.d(TAG, "rotation: " + rotation); + switch( rotation ) { + case Surface.ROTATION_0: + case Surface.ROTATION_90: + // landscape + if( MyDebug.LOG ) + Log.d(TAG, "landscape"); + break; + case Surface.ROTATION_180: + case Surface.ROTATION_270: + // reverse landscape + if( MyDebug.LOG ) + Log.d(TAG, "reverse landscape"); + result = SystemOrientation.REVERSE_LANDSCAPE; + break; + default: + if( MyDebug.LOG ) + Log.e(TAG, "unknown rotation: " + rotation); + break; + } + } + break; + case Configuration.ORIENTATION_PORTRAIT: + result = SystemOrientation.PORTRAIT; + break; + case Configuration.ORIENTATION_UNDEFINED: + default: + if( MyDebug.LOG ) + Log.e(TAG, "unknown system orientation: " + system_orientation); + result = SystemOrientation.LANDSCAPE; + break; + } + if( MyDebug.LOG ) + Log.d(TAG, "system orientation is now: " + result); + this.has_cached_system_orientation = true; + this.cached_system_orientation = result; + return result; + } + + /** Returns rotation in degrees (as a multiple of 90 degrees) corresponding to the supplied + * system orientation. 
+ */ + public static int getRotationFromSystemOrientation(SystemOrientation system_orientation) { + int rotation; + if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) + rotation = 270; + else if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) + rotation = 180; + else + rotation = 0; + return rotation; + } + + private void resetCachedSystemOrientation() { + this.has_cached_system_orientation = false; + this.has_cached_display_rotation = false; + } + + /** A wrapper for getWindowManager().getDefaultDisplay().getRotation(), except if + * lock_to_landscape==false && prefer_later==false, this uses a cached value. + */ + public int getDisplayRotation(boolean prefer_later) { + /*if( MyDebug.LOG ) { + Log.d(TAG, "getDisplayRotationDegrees"); + Log.d(TAG, "prefer_later: " + prefer_later); + }*/ + if( lock_to_landscape || prefer_later ) { + return getWindowManager().getDefaultDisplay().getRotation(); + } + // we cache to reduce effect of annoying problem where rotation changes shortly before the + // configuration actually changes (several frames), so on-screen elements would briefly show + // in wrong location when device rotates from/to portrait and landscape; also not a bad idea + // to cache for performance anyway, to avoid calling + // getWindowManager().getDefaultDisplay().getRotation() every frame + long time_ms = System.currentTimeMillis(); + if( has_cached_display_rotation && time_ms < cached_display_rotation_time_ms + 1000 ) { + return cached_display_rotation; + } + has_cached_display_rotation = true; + int rotation = getWindowManager().getDefaultDisplay().getRotation(); + cached_display_rotation = rotation; + cached_display_rotation_time_ms = time_ms; + return rotation; + } + + public void waitUntilImageQueueEmpty() { + if( MyDebug.LOG ) + Log.d(TAG, "waitUntilImageQueueEmpty"); + applicationInterface.getImageSaver().waitUntilDone(); + } + + /** + * @return True if the long-click is handled, otherwise return false to 
indicate that regular + * click should still be triggered when the user releases the touch. + */ + private boolean longClickedTakePhoto() { + if( MyDebug.LOG ) + Log.d(TAG, "longClickedTakePhoto"); + if( preview.isVideo() ) { + // no long-click action for video mode + } + else if( supportsFastBurst() ) { + // need to check whether fast burst is supported (including for the current resolution), + // in case we're in Standard photo mode + CameraController.Size current_size = preview.getCurrentPictureSize(); + if( current_size != null && current_size.supports_burst ) { + MyApplicationInterface.PhotoMode photo_mode = applicationInterface.getPhotoMode(); + if( photo_mode == MyApplicationInterface.PhotoMode.Standard && + applicationInterface.isRawOnly(photo_mode) ) { + if( MyDebug.LOG ) + Log.d(TAG, "fast burst not supported in RAW-only mode"); + // in JPEG+RAW mode, a continuous fast burst will only produce JPEGs which is fine; but in RAW only mode, + // no images at all would be saved! (Or we could switch to produce JPEGs anyway, but this seems misleading + // in RAW only mode.) + } + else if( photo_mode == MyApplicationInterface.PhotoMode.Standard || + photo_mode == MyApplicationInterface.PhotoMode.FastBurst ) { + this.takePicturePressed(false, true); + return true; + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "fast burst not supported for this resolution"); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "fast burst not supported"); + } + // return false, so a regular click will still be triggered when the user releases the touch + return false; + } + + public void clickedTakePhoto(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedTakePhoto"); + this.takePicture(false); + } + + /** User has clicked button to take a photo snapshot whilst video recording. 
+ */ + public void clickedTakePhotoVideoSnapshot(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedTakePhotoVideoSnapshot"); + this.takePicture(true); + } + + public void clickedPauseVideo(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedPauseVideo"); + pauseVideo(); + } + + public void pauseVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "pauseVideo"); + if( preview.isVideoRecording() ) { // just in case + preview.pauseVideo(); + mainUI.setPauseVideoContentDescription(); + } + } + + public void clickedCancelPanorama(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedCancelPanorama"); + applicationInterface.stopPanorama(true); + } + + public void clickedCycleRaw(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedCycleRaw"); + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String new_value = null; + switch( sharedPreferences.getString(PreferenceKeys.RawPreferenceKey, "preference_raw_no") ) { + case "preference_raw_no": + new_value = "preference_raw_yes"; + break; + case "preference_raw_yes": + new_value = "preference_raw_only"; + break; + case "preference_raw_only": + new_value = "preference_raw_no"; + break; + default: + Log.e(TAG, "unrecognised raw preference"); + break; + } + if( new_value != null ) { + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.RawPreferenceKey, new_value); + editor.apply(); + + mainUI.updateCycleRawIcon(); + applicationInterface.getDrawPreview().updateSettings(); + preview.reopenCamera(); // needed for RAW options to take effect + } + } + + public void clickedStoreLocation(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedStoreLocation"); + boolean value = applicationInterface.getGeotaggingPref(); + value = !value; + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + 
editor.putBoolean(PreferenceKeys.LocationPreferenceKey, value); + editor.apply(); + + mainUI.updateStoreLocationIcon(); + applicationInterface.getDrawPreview().updateSettings(); // because we cache the geotagging setting + initLocation(); // required to enable or disable GPS, also requests permission if necessary + this.closePopup(); + + String message = getResources().getString(R.string.preference_location) + ": " + getResources().getString(value ? R.string.on : R.string.off); + preview.showToast(store_location_toast, message, true); + } + + public void clickedTextStamp(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedTextStamp"); + this.closePopup(); + + AlertDialog.Builder alertDialog = new AlertDialog.Builder(this); + alertDialog.setTitle(R.string.preference_textstamp); + + final View dialog_view = LayoutInflater.from(this).inflate(R.layout.alertdialog_edittext, null); + final EditText editText = dialog_view.findViewById(R.id.edit_text); + // set hint instead of content description for EditText, see https://support.google.com/accessibility/android/answer/6378120 + editText.setHint(getResources().getString(R.string.preference_textstamp)); + editText.setText(applicationInterface.getTextStampPref()); + alertDialog.setView(dialog_view); + alertDialog.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialogInterface, int i) { + if( MyDebug.LOG ) + Log.d(TAG, "custom text stamp clicked okay"); + + String custom_text = editText.getText().toString(); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(MainActivity.this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.TextStampPreferenceKey, custom_text); + editor.apply(); + + mainUI.updateTextStampIcon(); + } + }); + alertDialog.setNegativeButton(android.R.string.cancel, null); + + final AlertDialog alert = alertDialog.create(); + 
alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "custom stamp text dialog dismissed"); + setWindowFlagsForCamera(); + showPreview(true); + } + }); + + showPreview(false); + setWindowFlagsForSettings(); + showAlert(alert); + } + + public void clickedStamp(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedStamp"); + + this.closePopup(); + + boolean value = applicationInterface.getStampPref().equals("preference_stamp_yes"); + value = !value; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.StampPreferenceKey, value ? "preference_stamp_yes" : "preference_stamp_no"); + editor.apply(); + + mainUI.updateStampIcon(); + applicationInterface.getDrawPreview().updateSettings(); + preview.showToast(stamp_toast, value ? R.string.stamp_enabled : R.string.stamp_disabled, true); + } + + public void clickedFocusPeaking(View view) { + clickedFocusPeaking(); + } + + public void clickedFocusPeaking() { + if( MyDebug.LOG ) + Log.d(TAG, "clickedFocusPeaking"); + boolean value = applicationInterface.getFocusPeakingPref(); + value = !value; + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.FocusPeakingPreferenceKey, value ? 
"preference_focus_peaking_on" : "preference_focus_peaking_off"); + editor.apply(); + + mainUI.updateFocusPeakingIcon(); + applicationInterface.getDrawPreview().updateSettings(); // needed to update focus peaking + } + + public void clickedAutoLevel(View view) { + clickedAutoLevel(); + } + + public void clickedAutoLevel() { + if( MyDebug.LOG ) + Log.d(TAG, "clickedAutoLevel"); + boolean value = applicationInterface.getAutoStabilisePref(); + value = !value; + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, value); + editor.apply(); + + boolean done_dialog = false; + if( value ) { + boolean done_auto_stabilise_info = sharedPreferences.contains(PreferenceKeys.AutoStabiliseInfoPreferenceKey); + if( !done_auto_stabilise_info ) { + mainUI.showInfoDialog(R.string.preference_auto_stabilise, R.string.auto_stabilise_info, PreferenceKeys.AutoStabiliseInfoPreferenceKey); + done_dialog = true; + } + } + + if( !done_dialog ) { + String message = getResources().getString(R.string.preference_auto_stabilise) + ": " + getResources().getString(value ? 
R.string.on : R.string.off); + preview.showToast(this.getChangedAutoStabiliseToastBoxer(), message, true); + } + + mainUI.updateAutoLevelIcon(); + applicationInterface.getDrawPreview().updateSettings(); // because we cache the auto-stabilise setting + this.closePopup(); + } + + public void clickedCycleFlash(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedCycleFlash"); + + preview.cycleFlash(true, true); + mainUI.updateCycleFlashIcon(); + } + + public void clickedFaceDetection(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedFaceDetection"); + + this.closePopup(); + + boolean value = applicationInterface.getFaceDetectionPref(); + value = !value; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.FaceDetectionPreferenceKey, value); + editor.apply(); + + mainUI.updateFaceDetectionIcon(); + preview.showToast(stamp_toast, value ? R.string.face_detection_enabled : R.string.face_detection_disabled, true); + block_startup_toast = true; // so the toast from reopening camera is suppressed, otherwise it conflicts with the face detection toast + preview.reopenCamera(); + } + + public void clickedAudioControl(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedAudioControl"); + // check hasAudioControl just in case! 
+ if( !hasAudioControl() ) { + if( MyDebug.LOG ) + Log.e(TAG, "clickedAudioControl, but hasAudioControl returns false!"); + return; + } + this.closePopup(); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String audio_control = sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none"); + /*if( audio_control.equals("voice") && speechControl.hasSpeechRecognition() ) { + if( speechControl.isStarted() ) { + speechControl.stopListening(); + } + else { + boolean has_audio_permission = true; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + // we restrict the checks to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener() + if( MyDebug.LOG ) + Log.d(TAG, "check for record audio permission"); + if( ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "record audio permission not available"); + applicationInterface.requestRecordAudioPermission(); + has_audio_permission = false; + } + } + if( has_audio_permission ) { + speechControl.showToast(true); + speechControl.startSpeechRecognizerIntent(); + speechControl.speechRecognizerStarted(); + } + } + } + else*/ if( audio_control.equals("noise") ){ + if( audio_listener != null ) { + freeAudioListener(false); + } + else { + startAudioListener(); + } + } + } + + /* Returns the cameraId that the "Switch camera" button will switch to. + * Note that this may not necessarily be the next camera ID, on multi camera devices (if + * isMultiCamEnabled() returns true). 
+ */ + public int getNextCameraId() { + if( MyDebug.LOG ) + Log.d(TAG, "getNextCameraId"); + int cameraId = getActualCameraId(); + if( MyDebug.LOG ) + Log.d(TAG, "current cameraId: " + cameraId); + if( this.preview.canSwitchCamera() ) { + if( isMultiCamEnabled() ) { + // don't use preview.getCameraController(), as it may be null if user quickly switches between cameras + switch( preview.getCameraControllerManager().getFacing(cameraId) ) { + case FACING_BACK: + if( !front_camera_ids.isEmpty() ) + cameraId = front_camera_ids.get(0); + else if( !other_camera_ids.isEmpty() ) + cameraId = other_camera_ids.get(0); + break; + case FACING_FRONT: + if( !other_camera_ids.isEmpty() ) + cameraId = other_camera_ids.get(0); + else if( !back_camera_ids.isEmpty() ) + cameraId = back_camera_ids.get(0); + break; + default: + if( !back_camera_ids.isEmpty() ) + cameraId = back_camera_ids.get(0); + else if( !front_camera_ids.isEmpty() ) + cameraId = front_camera_ids.get(0); + break; + } + } + else { + int n_cameras = preview.getCameraControllerManager().getNumberOfCameras(); + cameraId = (cameraId+1) % n_cameras; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "next cameraId: " + cameraId); + return cameraId; + } + + /* Returns the next cameraId with the same-facing as current camera. + * Should only be called if isMultiCamEnabled() returns true. + * Only used for testing, now that we bring up a menu instead of cycling. 
+ */ + /*public int testGetNextMultiCameraId() { + if( MyDebug.LOG ) + Log.d(TAG, "testGetNextMultiCameraId"); + if( !isMultiCamEnabled() ) { + Log.e(TAG, "testGetNextMultiCameraId() called but not in multi-cam mode"); + throw new RuntimeException("testGetNextMultiCameraId() called but not in multi-cam mode"); + } + List camera_set; + // don't use preview.getCameraController(), as it may be null if user quickly switches between cameras + int currCameraId = getActualCameraId(); + switch( preview.getCameraControllerManager().getFacing(currCameraId) ) { + case FACING_BACK: + camera_set = back_camera_ids; + break; + case FACING_FRONT: + camera_set = front_camera_ids; + break; + default: + camera_set = other_camera_ids; + break; + } + int cameraId; + int indx = camera_set.indexOf(currCameraId); + if( indx == -1 ) { + Log.e(TAG, "camera id not in current camera set"); + // this shouldn't happen, but if it does, revert to the first camera id in the set + // update: oddly had reports of IndexOutOfBoundsException crashes from Google Play from camera_set.get(0) + // because of camera_set having length 0, so stick with currCameraId in such cases + if( camera_set.size() == 0 ) { + Log.e(TAG, "camera_set is empty"); + cameraId = currCameraId; + } + else + cameraId = camera_set.get(0); + } + else { + indx = (indx+1) % camera_set.size(); + cameraId = camera_set.get(indx); + } + if( MyDebug.LOG ) + Log.d(TAG, "next multi cameraId: " + cameraId); + return cameraId; + }*/ + + private void pushCameraIdToast(int cameraId, String cameraIdSPhysical) { + if( MyDebug.LOG ) + Log.d(TAG, "pushCameraIdToast: " + cameraId); + if( preview.getCameraControllerManager().getNumberOfCameras() > 2 || cameraIdSPhysical != null ) { + // telling the user which camera is pointless for only two cameras, but on devices that now + // expose many cameras it can be confusing, so show a toast to at least display the id + // similarly we want to show a toast if using a physical camera, so user doesn't forget + 
String description = cameraIdSPhysical != null ? + preview.getCameraControllerManager().getDescription(null, this, cameraIdSPhysical, true, true) : + preview.getCameraControllerManager().getDescription(this, cameraId); + if( description != null ) { + String toast_string = description; + if( cameraIdSPhysical == null ) // only add the ID if not a physical camera + toast_string += ": " + getResources().getString(R.string.camera_id) + " " + cameraId; + //preview.showToast(null, toast_string); + this.push_info_toast_text = toast_string; + } + } + } + + public void userSwitchToCamera(int cameraId, String cameraIdSPhysical) { + if( MyDebug.LOG ) + Log.d(TAG, "userSwitchToCamera: " + cameraId + " / " + cameraIdSPhysical); + View switchCameraButton = findViewById(R.id.switch_camera); + View switchMultiCameraButton = findViewById(R.id.switch_multi_camera); + // prevent slowdown if user repeatedly clicks: + switchCameraButton.setEnabled(false); + switchMultiCameraButton.setEnabled(false); + applicationInterface.reset(true); + this.getApplicationInterface().getDrawPreview().setDimPreview(true); + if( this.switch_multi_camera_dialog != null ) { + // only clear if switching to a different camera ID (switching between lenses is fine) + int curr_camera_id = getActualCameraId(); + if( MyDebug.LOG ) + Log.d(TAG, "curr_camera_id: " + curr_camera_id); + if( cameraId != curr_camera_id ) { + if( MyDebug.LOG ) + Log.d(TAG, "clear switch_multi_camera_dialog"); + this.switch_multi_camera_dialog = null; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "keep switch_multi_camera_dialog"); + } + } + this.preview.setCamera(cameraId, cameraIdSPhysical); + switchCameraButton.setEnabled(true); + switchMultiCameraButton.setEnabled(true); + // no need to call mainUI.setSwitchCameraContentDescription - this will be called from Preview.cameraSetup when the + // new camera is opened + this.announceCameraForAccessibility(cameraId, cameraIdSPhysical); + } + + /** + * Selects the next camera on the phone - 
in practice, switches between + * front and back cameras + */ + public void clickedSwitchCamera(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedSwitchCamera"); + if( preview.isOpeningCamera() ) { + if( MyDebug.LOG ) + Log.d(TAG, "already opening camera in background thread"); + return; + } + this.closePopup(); + if( this.preview.canSwitchCamera() ) { + int cameraId = getNextCameraId(); + if( !isMultiCamEnabled() ) { + pushCameraIdToast(cameraId, null); + } + else { + // In multi-cam mode, no need to show the toast when just switching between front and back cameras. + // But it is useful to clear an active fake toast, otherwise have issue if the user uses + // clickedSwitchMultiCamera() (which displays a fake toast for the camera via the info toast), then + // immediately uses clickedSwitchCamera() - the toast for the wrong camera will still be lingering + // until it expires, which looks a bit strange. + // (If using non-fake toasts, this isn't an issue, at least on Android 10+, as now toasts seem to + // disappear when the user touches the screen anyway.) + preview.clearActiveFakeToast(); + } + userSwitchToCamera(cameraId, null); + + push_switched_camera = true; + } + } + + /** Returns list of logical cameras with same facing as the supplied camera_id. 
+ */ + public List getSameFacingLogicalCameras(int camera_id) { + List logical_camera_ids = new ArrayList<>(); + CameraController.Facing this_facing = preview.getCameraControllerManager().getFacing(camera_id); + for(int i=0;i logical_camera_ids = getSameFacingLogicalCameras(curr_camera_id); + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after logical_camera_ids: " + (System.currentTimeMillis() - debug_time)); + + int n_logical_cameras = logical_camera_ids.size(); + int n_cameras = n_logical_cameras; + if( preview.hasPhysicalCameras() ) { + n_cameras += preview.getPhysicalCameras().size(); + //n_cameras++; // for the info message + } + CharSequence [] items = new CharSequence[n_cameras]; + int [] items_logical_camera_id = new int[n_cameras]; + String [] items_physical_camera_id = new String[n_cameras]; + int index=0; + int selected=-1; + String curr_physical_camera_id = applicationInterface.getCameraIdSPhysicalPref(); + for(int i=0;i"; + String html_camera_name = "" + camera_name + ""; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + items[index] = Html.fromHtml(html_camera_name, Html.FROM_HTML_MODE_LEGACY); + } + else { + items[index] = Html.fromHtml(html_camera_name); + } + } + else { + // a physical camera is in use, so don't bold this entry + items[index] = camera_name; + } + items_logical_camera_id[index] = logical_camera_id; + items_physical_camera_id[index] = null; + index++; + + if( preview.hasPhysicalCameras() ) { + // also add the physical cameras that underlie the current logical camera + Set physical_camera_ids = preview.getPhysicalCameras(); + + // sort by view angle + class PhysicalCamera { + private final String id; + private final String description; + private final SizeF view_angle; + + private PhysicalCamera(String id) { + this.id = id; + CameraControllerManager.CameraInfo info = new CameraControllerManager.CameraInfo(); + this.description = preview.getCameraControllerManager().getDescription(info, 
MainActivity.this, id, false, true); + this.view_angle = info.view_angle; + } + } + ArrayList physical_cameras = new ArrayList<>(); + for(String physical_id : physical_camera_ids) { + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time before getDescription: " + (System.currentTimeMillis() - debug_time)); + physical_cameras.add(new PhysicalCamera(physical_id)); + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after getDescription: " + (System.currentTimeMillis() - debug_time)); + } + { + Collections.sort(physical_cameras, new Comparator<>() { + @Override + public int compare(PhysicalCamera o1, PhysicalCamera o2) { + float diff = o2.view_angle.getWidth() - o1.view_angle.getWidth(); + if( Math.abs(diff) < 1.0e-5f ) + return 0; + else if( diff > 0.0f ) + return 1; + else + return -1; + } + }); + } + + int j=0; + String indent = "    "; + for(PhysicalCamera physical_camera : physical_cameras) { + String physical_id = physical_camera.id; + camera_name = getResources().getString(R.string.lens) + " " + j + ": " + physical_camera.description; + String html_camera_name; + if( curr_physical_camera_id != null && curr_physical_camera_id.equals(physical_id) ) { + // this is the current physical camera + selected = index; + //html_camera_name = indent + "[" + camera_name + "]"; + html_camera_name = indent + "" + camera_name + ""; + } + else { + html_camera_name = indent + camera_name; + } + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + items[index] = Html.fromHtml(html_camera_name, Html.FROM_HTML_MODE_LEGACY); + } + else { + items[index] = Html.fromHtml(html_camera_name); + } + items_logical_camera_id[index] = logical_camera_id; + items_physical_camera_id[index] = physical_id; + index++; + + j++; + } + } + } + else { + items[index] = camera_name; + items_logical_camera_id[index] = logical_camera_id; + items_physical_camera_id[index] = null; + index++; + } + } + /*if( preview.hasPhysicalCameras() ) { + items[index] = 
getResources().getString(R.string.physical_cameras_info); + items_logical_camera_id[index] = -1; + items_physical_camera_id[index] = null; + //index++; + }*/ + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after building menu: " + (System.currentTimeMillis() - debug_time)); + + //alertDialog.setItems(items, new DialogInterface.OnClickListener() { + alertDialog.setSingleChoiceItems(items, selected, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "selected: " + which); + int logical_camera = items_logical_camera_id[which]; + String physical_camera = items_physical_camera_id[which]; + if( MyDebug.LOG ) { + Log.d(TAG, "logical_camera: " + logical_camera); + Log.d(TAG, "physical_camera: " + physical_camera); + } + int n_cameras = preview.getCameraControllerManager().getNumberOfCameras(); + if( logical_camera >= 0 && logical_camera < n_cameras ) { + if( preview.isOpeningCamera() ) { + if( MyDebug.LOG ) + Log.d(TAG, "already opening camera in background thread"); + return; + } + MainActivity.this.closePopup(); + if( MainActivity.this.preview.canSwitchCamera() ) { + pushCameraIdToast(logical_camera, physical_camera); + userSwitchToCamera(logical_camera, physical_camera); + } + } + //setWindowFlagsForCamera(); + //showPreview(true); + dialog.dismiss(); // need to explicitly dismiss for setSingleChoiceItems + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after setting items: " + (System.currentTimeMillis() - debug_time)); + /*alertDialog.setOnCancelListener(new DialogInterface.OnCancelListener() { + @Override + public void onCancel(DialogInterface arg0) { + setWindowFlagsForCamera(); + showPreview(true); + } + });*/ + //setWindowFlagsForSettings(false); // set set_lock_protect to false - no need to protect this dialog with lock screen (fine to run above lock screen if that option is set) + //showAlert(alertDialog.create()); + 
AlertDialog dialog = alertDialog.create(); + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after dialog create: " + (System.currentTimeMillis() - debug_time)); + if( preview.hasPhysicalCameras() ) { + TextView footer = new TextView(this); + footer.setText(R.string.physical_cameras_info); + final float scale = getResources().getDisplayMetrics().density; + final int padding = (int) (5 * scale + 0.5f); // convert dps to pixels + footer.setPadding(padding, padding, padding, padding); + dialog.getListView().addFooterView(footer, null, false); + if( MyDebug.LOG ) + Log.d(TAG, "createSwitchMultiCameraDialog: time after adding footer: " + (System.currentTimeMillis() - debug_time)); + } + if( dialog.getWindow() != null ) { + dialog.getWindow().setWindowAnimations(R.style.DialogAnimation); + } + return dialog; + } + + /** User can long-click on switch multi cam icon to bring up a menu to switch to any camera. + * Update: from v1.53 onwards with support for exposing physical lens, we always call this with + * a regular click on the switch multi cam icon. 
+ */ + public void clickedSwitchMultiCamera(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedSwitchMultiCamera"); + + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + //showPreview(false); + //AlertDialog dialog = createSwitchMultiCameraDialog(); + if( switch_multi_camera_dialog == null ) { + switch_multi_camera_dialog = createSwitchMultiCameraDialog(); + } + AlertDialog dialog = switch_multi_camera_dialog; + if( MyDebug.LOG ) + Log.d(TAG, "clickedSwitchMultiCamera: time before showing dialog: " + (System.currentTimeMillis() - debug_time)); + dialog.show(); + if( MyDebug.LOG ) + Log.d(TAG, "clickedSwitchMultiCamera: total time: " + (System.currentTimeMillis() - debug_time)); + } + + /** + * Toggles Photo/Video mode + */ + public void clickedSwitchVideo(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedSwitchVideo"); + this.closePopup(); + mainUI.destroyPopup(); // important as we don't want to use a cached popup, as we can show different options depending on whether we're in photo or video mode + + // In practice stopping the gyro sensor shouldn't be needed as (a) we don't show the switch + // photo/video icon when recording, (b) at the time of writing switching to video mode + // reopens the camera, which will stop panorama recording anyway, but we do this just to be + // safe. 
+ applicationInterface.stopPanorama(true); + + View switchVideoButton = findViewById(R.id.switch_video); + switchVideoButton.setEnabled(false); // prevent slowdown if user repeatedly clicks + applicationInterface.reset(false); + this.getApplicationInterface().getDrawPreview().setDimPreview(true); + this.preview.switchVideo(false, true); + switchVideoButton.setEnabled(true); + + mainUI.setTakePhotoIcon(); + mainUI.setPopupIcon(); // needed as turning to video mode or back can turn flash mode off or back on + + // ensure icons invisible if they're affected by being in video mode or not (e.g., on-screen RAW icon) + // (if enabling them, we'll make the icon visible later on) + checkDisableGUIIcons(); + + if( !block_startup_toast ) { + this.showPhotoVideoToast(true); + } + } + + public void clickedWhiteBalanceLock(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedWhiteBalanceLock"); + this.preview.toggleWhiteBalanceLock(); + mainUI.updateWhiteBalanceLockIcon(); + preview.showToast(white_balance_lock_toast, preview.isWhiteBalanceLocked() ? R.string.white_balance_locked : R.string.white_balance_unlocked, true); + } + + public void clickedExposureLock(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedExposureLock"); + this.preview.toggleExposureLock(); + mainUI.updateExposureLockIcon(); + preview.showToast(exposure_lock_toast, preview.isExposureLocked() ? 
R.string.exposure_locked : R.string.exposure_unlocked, true); + } + + public void clickedExposure(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedExposure"); + mainUI.toggleExposureUI(); + } + + public void clickedSettings(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedSettings"); + KeyguardUtils.requireKeyguard(this, this::openSettings); + } + + public boolean popupIsOpen() { + return mainUI.popupIsOpen(); + } + + // for testing + public View getUIButton(String key) { + return mainUI.getUIButton(key); + } + + public void closePopup() { + mainUI.closePopup(); + } + + public Bitmap getPreloadedBitmap(int resource) { + return this.preloaded_bitmap_resources.get(resource); + } + + public void clickedPopupSettings(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clickedPopupSettings"); + mainUI.togglePopupSettings(); + } + + private final PreferencesListener preferencesListener = new PreferencesListener(); + + /** Keeps track of changes to SharedPreferences. + */ + class PreferencesListener implements SharedPreferences.OnSharedPreferenceChangeListener { + private static final String TAG = "PreferencesListener"; + + private boolean any_significant_change; // whether any changes that require updateForSettings have been made since startListening() + private boolean any_change; // whether any changes have been made since startListening() + + void startListening() { + if( MyDebug.LOG ) + Log.d(TAG, "startListening"); + any_significant_change = false; + any_change = false; + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(MainActivity.this); + // n.b., registerOnSharedPreferenceChangeListener warns that we must keep a reference to the listener (which + // is this class) as long as we want to listen for changes, otherwise the listener may be garbage collected! 
+ sharedPreferences.registerOnSharedPreferenceChangeListener(this); + } + + void stopListening() { + if( MyDebug.LOG ) + Log.d(TAG, "stopListening"); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(MainActivity.this); + sharedPreferences.unregisterOnSharedPreferenceChangeListener(this); + } + + @Override + public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { + if( MyDebug.LOG ) + Log.d(TAG, "onSharedPreferenceChanged: " + key); + + if( key == null ) { + // on Android 11+, when targetting Android 11+, this method is called with key==null + // if preferences are cleared (see testSettings(), or when doing "Reset settings") + return; + } + + any_change = true; + + switch( key ) { + // we whitelist preferences where we're sure that we don't need to call updateForSettings() if they've changed + //case "preference_face_detection": // need to update camera controller + case "preference_timer": + case "preference_burst_mode": + case "preference_burst_interval": + case "preference_touch_capture": + case "preference_pause_preview": + case "preference_shutter_sound": + case "preference_timer_beep": + case "preference_timer_speak": + case "preference_volume_keys": + //case "preference_audio_control": // need to update the UI + case "preference_audio_noise_control_sensitivity": + //case "preference_enable_remote": // handled below + //case "preference_remote_type": + //case "preference_remote_device_name": // handled below + //case "preference_remote_disconnect_screen_dim": + //case "preference_water_type": // handled below + case "preference_lock_orientation": + //case "preference_save_location": // we could probably whitelist this, but accessed it a lot of places... 
+ case "preference_using_saf": + case "preference_save_photo_prefix": + case "preference_save_video_prefix": + case "preference_save_zulu_time": + case "preference_show_when_locked": + case "preference_startup_focus": + //case "preference_preview_size": // need to update preview + //case "preference_ghost_image": // don't whitelist this, as may need to reload ghost image (at fullscreen resolution) if "last" is enabled + case "ghost_image_alpha": + case "preference_focus_assist": + case "preference_show_zoom": + case "preference_show_angle": + case "preference_show_angle_line": + case "preference_show_pitch_lines": + case "preference_angle_highlight_color": + //case "preference_show_geo_direction": // don't whitelist these, as if enabled we need to call checkMagneticAccuracy() + //case "preference_show_geo_direction_lines": // as above + case "preference_show_battery": + case "preference_show_time": + case "preference_free_memory": + case "preference_show_iso": + case "preference_histogram": + case "preference_zebra_stripes": + case "preference_zebra_stripes_foreground_color": + case "preference_zebra_stripes_background_color": + case "preference_focus_peaking": + case "preference_focus_peaking_color": + case "preference_show_video_max_amp": + case "preference_grid": + case "preference_crop_guide": + case "preference_thumbnail_animation": + case "preference_take_photo_border": + //case "preference_rotate_preview": // need to update the Preview + //case "preference_ui_placement": // need to update the UI + //case "preference_immersive_mode": // probably could whitelist? 
+ //case "preference_show_face_detection": // need to update the UI + //case "preference_show_cycle_flash": // need to update the UI + //case "preference_show_auto_level": // need to update the UI + //case "preference_show_stamp": // need to update the UI + //case "preference_show_textstamp": // need to update the UI + //case "preference_show_store_location": // need to update the UI + //case "preference_show_cycle_raw": // need to update the UI + //case "preference_show_white_balance_lock": // need to update the UI + //case "preference_show_exposure_lock": // need to update the UI + //case "preference_show_zoom_slider_controls": // need to update the UI + //case "preference_show_take_photo": // need to update the UI + case "preference_show_toasts": + case "preference_show_whats_new": + //case "preference_multi_cam_button": // need to update the UI + case "preference_keep_display_on": + case "preference_max_brightness": + //case "preference_resolution": // need to set up camera controller and preview + //case "preference_quality": // need to set up camera controller + //case "preference_image_format": // need to set up camera controller (as it can affect the image quality that we set) + //case "preference_raw": // need to update as it affects how we set up camera controller + //case "preference_raw_expo_bracketing": // as above + //case "preference_raw_focus_bracketing": // as above + //case "preference_nr_save": // we could probably whitelist this, but have not done so in case in future we allow RAW to be saved for the base image + //case "preference_hdr_save_expo": // we need to update if this is changed, as it affects whether we request RAW or not in HDR mode when RAW is enabled + case "preference_hdr_tonemapping": + case "preference_hdr_contrast_enhancement": + //case "preference_expo_bracketing_n_images": // need to set up camera controller + //case "preference_expo_bracketing_stops": // need to set up camera controller + case "preference_panorama_crop": + 
//case "preference_panorama_save": // we could probably whitelist this, but have not done so in case in future we allow RAW to be saved for the base images + case "preference_front_camera_mirror": + case "preference_exif_artist": + case "preference_exif_copyright": + case "preference_stamp": + case "preference_stamp_dateformat": + case "preference_stamp_timeformat": + case "preference_stamp_gpsformat": + case "preference_stamp_geo_address": + case "preference_units_distance": + case "preference_textstamp": + case "preference_stamp_fontsize": + case "preference_stamp_font_color": + case "preference_stamp_style": + //case "preference_camera2_fake_flash": // need to update camera controller + //case "preference_camera2_fast_burst": // could probably whitelist? + //case "preference_camera2_photo_video_recording": // need to update camera controller + case "preference_background_photo_saving": + //case "preference_video_quality": // need to update camera controller and preview + //case "preference_video_stabilization": // need to update camera controller + //case "preference_video_output_format": // could probably whitelist, but safest to restart camera + //case "preference_video_log": // need to update camera controller + //case "preference_video_profile_gamma": // as above + //case "preference_video_max_duration": // could probably whitelist, but safest to restart camera + //case "preference_video_restart": // could probably whitelist, but safest to restart camera + //case "preference_video_max_filesize": // could probably whitelist, but safest to restart camera + //case "preference_video_restart_max_filesize": // could probably whitelist, but safest to restart camera + case "preference_record_audio": + case "preference_record_audio_src": + case "preference_record_audio_channels": + case "preference_lock_video": + case "preference_video_subtitle": + //case "preference_video_bitrate": // could probably whitelist, but safest to restart camera + //case 
"preference_video_fps": // could probably whitelist, but safest to restart camera
                    //case "preference_force_video_4k": // could probably whitelist, but safest to restart camera
                    case "preference_video_low_power_check":
                    case "preference_video_flash":
                    //case "preference_location": // need to enable/disable gps listeners etc
                    //case "preference_gps_direction": // need to update listeners
                    case "preference_require_location":
                    //case "preference_antibanding": // need to set up camera controller
                    //case "preference_edge_mode": // need to set up camera controller
                    //case "preference_noise_reduction_mode": // need to set up camera controller
                    //case "preference_camera_api": // no point whitelisting as we restart anyway
                        // all whitelisted keys above fall through to here: these changes don't require restarting the camera
                        if( MyDebug.LOG )
                            Log.d(TAG, "this change doesn't require update");
                        break;
                    case PreferenceKeys.EnableRemote:
                        bluetoothRemoteControl.startRemoteControl();
                        break;
                    case PreferenceKeys.RemoteName:
                        // The remote address changed, restart the service
                        if (bluetoothRemoteControl.remoteEnabled())
                            bluetoothRemoteControl.stopRemoteControl();
                        bluetoothRemoteControl.startRemoteControl();
                        break;
                    case PreferenceKeys.WaterType:
                        // recompute the cached water density from the salt/fresh water preference
                        boolean wt = sharedPreferences.getBoolean(PreferenceKeys.WaterType, true);
                        mWaterDensity = wt ? WATER_DENSITY_SALTWATER : WATER_DENSITY_FRESHWATER;
                        break;
                    default:
                        // any key not whitelisted above is treated as significant (requires an update)
                        if( MyDebug.LOG )
                            Log.d(TAG, "this change does require update");
                        any_significant_change = true;
                        break;
                }
            }

            // returns whether any preference change at all was recorded
            boolean anyChange() {
                return any_change;
            }

            // returns whether a change was recorded that requires an update (i.e., not whitelisted above)
            boolean anySignificantChange() {
                return any_significant_change;
            }
        }

        /** Entry point for opening the settings UI. First closes the popup and cancels any
         *  photo timer, auto-repeat, video recording, panorama and audio listeners, since
         *  camera parameters may change while the settings window is open.
         */
        public void openSettings() {
            if( MyDebug.LOG )
                Log.d(TAG, "openSettings");
            closePopup(); // important to close the popup to avoid confusing with back button callbacks
            preview.cancelTimer(); // best to cancel any timer, in case we take a photo while settings window is open, or when changing settings
            preview.cancelRepeat(); // similarly cancel the auto-repeat mode!
+ preview.stopVideo(false); // important to stop video, as we'll be changing camera parameters when the settings window closes + applicationInterface.stopPanorama(true); // important to stop panorama recording, as we might end up as we'll be changing camera parameters when the settings window closes + stopAudioListeners(); + // close back handler callbacks (so back button is enabled again when going to settings) - in theory shouldn't be needed as all of these should + // be disabled now, but just in case: + this.enablePopupOnBackPressedCallback(false); + this.enablePausePreviewOnBackPressedCallback(false); + this.enableScreenLockOnBackPressedCallback(false); + + Bundle bundle = new Bundle(); + bundle.putBoolean("edge_to_edge_mode", edge_to_edge_mode); + bundle.putInt("cameraId", this.preview.getCameraId()); + bundle.putString("cameraIdSPhysical", this.applicationInterface.getCameraIdSPhysicalPref()); + bundle.putInt("nCameras", preview.getCameraControllerManager().getNumberOfCameras()); + bundle.putBoolean("camera_open", this.preview.getCameraController() != null); + bundle.putString("camera_api", this.preview.getCameraAPI()); + bundle.putBoolean("using_android_l", this.preview.usingCamera2API()); + if( this.preview.getCameraController() != null ) { + bundle.putInt("camera_orientation", this.preview.getCameraController().getCameraOrientation()); + } + bundle.putString("photo_mode_string", getPhotoModeString(applicationInterface.getPhotoMode(), true)); + bundle.putBoolean("supports_auto_stabilise", this.supports_auto_stabilise); + bundle.putBoolean("supports_flash", this.preview.supportsFlash()); + bundle.putBoolean("supports_force_video_4k", this.supports_force_video_4k); + bundle.putBoolean("supports_camera2", this.supports_camera2); + bundle.putBoolean("supports_face_detection", this.preview.supportsFaceDetection()); + bundle.putBoolean("supports_jpeg_r", this.preview.supportsJpegR()); + bundle.putBoolean("supports_raw", this.preview.supportsRaw()); + 
bundle.putBoolean("supports_burst_raw", this.supportsBurstRaw()); + bundle.putBoolean("supports_optimise_focus_latency", this.supportsOptimiseFocusLatency()); + bundle.putBoolean("supports_preshots", this.supportsPreShots()); + bundle.putBoolean("supports_hdr", this.supportsHDR()); + bundle.putBoolean("supports_nr", this.supportsNoiseReduction()); + bundle.putBoolean("supports_panorama", this.supportsPanorama()); + bundle.putBoolean("has_gyro_sensors", applicationInterface.getGyroSensor().hasSensors()); + bundle.putBoolean("supports_expo_bracketing", this.supportsExpoBracketing()); + bundle.putBoolean("supports_preview_bitmaps", this.supportsPreviewBitmaps()); + bundle.putInt("max_expo_bracketing_n_images", this.maxExpoBracketingNImages()); + bundle.putBoolean("supports_exposure_compensation", this.preview.supportsExposures()); + bundle.putInt("exposure_compensation_min", this.preview.getMinimumExposure()); + bundle.putInt("exposure_compensation_max", this.preview.getMaximumExposure()); + bundle.putBoolean("supports_iso_range", this.preview.supportsISORange()); + bundle.putInt("iso_range_min", this.preview.getMinimumISO()); + bundle.putInt("iso_range_max", this.preview.getMaximumISO()); + bundle.putBoolean("supports_exposure_time", this.preview.supportsExposureTime()); + bundle.putBoolean("supports_exposure_lock", this.preview.supportsExposureLock()); + bundle.putBoolean("supports_white_balance_lock", this.preview.supportsWhiteBalanceLock()); + bundle.putLong("exposure_time_min", this.preview.getMinimumExposureTime()); + bundle.putLong("exposure_time_max", this.preview.getMaximumExposureTime()); + bundle.putBoolean("supports_white_balance_temperature", this.preview.supportsWhiteBalanceTemperature()); + bundle.putInt("white_balance_temperature_min", this.preview.getMinimumWhiteBalanceTemperature()); + bundle.putInt("white_balance_temperature_max", this.preview.getMaximumWhiteBalanceTemperature()); + bundle.putBoolean("is_multi_cam", this.is_multi_cam); + 
bundle.putBoolean("has_physical_cameras", this.preview.hasPhysicalCameras()); + bundle.putBoolean("supports_optical_stabilization", this.preview.supportsOpticalStabilization()); + bundle.putBoolean("optical_stabilization_enabled", this.preview.getOpticalStabilization()); + bundle.putBoolean("supports_video_stabilization", this.preview.supportsVideoStabilization()); + bundle.putBoolean("video_stabilization_enabled", this.preview.getVideoStabilization()); + bundle.putBoolean("can_disable_shutter_sound", this.preview.canDisableShutterSound()); + bundle.putInt("tonemap_max_curve_points", this.preview.getTonemapMaxCurvePoints()); + bundle.putBoolean("supports_tonemap_curve", this.preview.supportsTonemapCurve()); + bundle.putBoolean("supports_photo_video_recording", this.preview.supportsPhotoVideoRecording()); + bundle.putFloat("camera_view_angle_x", preview.getViewAngleX(false)); + bundle.putFloat("camera_view_angle_y", preview.getViewAngleY(false)); + bundle.putFloat("min_zoom_factor", preview.getMinZoomRatio()); + bundle.putFloat("max_zoom_factor", preview.getMaxZoomRatio()); + + putBundleExtra(bundle, "color_effects", this.preview.getSupportedColorEffects()); + putBundleExtra(bundle, "scene_modes", this.preview.getSupportedSceneModes()); + putBundleExtra(bundle, "white_balances", this.preview.getSupportedWhiteBalances()); + putBundleExtra(bundle, "isos", this.preview.getSupportedISOs()); + bundle.putInt("magnetic_accuracy", magneticSensor.getMagneticAccuracy()); + bundle.putString("iso_key", this.preview.getISOKey()); + if( this.preview.getCameraController() != null ) { + bundle.putString("parameters_string", preview.getCameraController().getParametersString()); + } + List antibanding = this.preview.getSupportedAntiBanding(); + putBundleExtra(bundle, "antibanding", antibanding); + if( antibanding != null ) { + String [] entries_arr = new String[antibanding.size()]; + int i=0; + for(String value: antibanding) { + entries_arr[i] = 
getMainUI().getEntryForAntiBanding(value); + i++; + } + bundle.putStringArray("antibanding_entries", entries_arr); + } + List edge_modes = this.preview.getSupportedEdgeModes(); + putBundleExtra(bundle, "edge_modes", edge_modes); + if( edge_modes != null ) { + String [] entries_arr = new String[edge_modes.size()]; + int i=0; + for(String value: edge_modes) { + entries_arr[i] = getMainUI().getEntryForNoiseReductionMode(value); + i++; + } + bundle.putStringArray("edge_modes_entries", entries_arr); + } + List noise_reduction_modes = this.preview.getSupportedNoiseReductionModes(); + putBundleExtra(bundle, "noise_reduction_modes", noise_reduction_modes); + if( noise_reduction_modes != null ) { + String [] entries_arr = new String[noise_reduction_modes.size()]; + int i=0; + for(String value: noise_reduction_modes) { + entries_arr[i] = getMainUI().getEntryForNoiseReductionMode(value); + i++; + } + bundle.putStringArray("noise_reduction_modes_entries", entries_arr); + } + + List preview_sizes = this.preview.getSupportedPreviewSizes(); + if( preview_sizes != null ) { + int [] widths = new int[preview_sizes.size()]; + int [] heights = new int[preview_sizes.size()]; + int i=0; + for(CameraController.Size size: preview_sizes) { + widths[i] = size.width; + heights[i] = size.height; + i++; + } + bundle.putIntArray("preview_widths", widths); + bundle.putIntArray("preview_heights", heights); + } + bundle.putInt("preview_width", preview.getCurrentPreviewSize().width); + bundle.putInt("preview_height", preview.getCurrentPreviewSize().height); + + // Note that we set check_burst to false, as the Settings always displays all supported resolutions (along with the "saved" + // resolution preference, even if that doesn't support burst and we're in a burst mode). + // This is to be consistent with other preferences, e.g., we still show RAW settings even though that might not be supported + // for the current photo mode. 
+ List sizes = this.preview.getSupportedPictureSizes(false); + if( sizes != null ) { + int [] widths = new int[sizes.size()]; + int [] heights = new int[sizes.size()]; + boolean [] supports_burst = new boolean[sizes.size()]; + int i=0; + for(CameraController.Size size: sizes) { + widths[i] = size.width; + heights[i] = size.height; + supports_burst[i] = size.supports_burst; + i++; + } + bundle.putIntArray("resolution_widths", widths); + bundle.putIntArray("resolution_heights", heights); + bundle.putBooleanArray("resolution_supports_burst", supports_burst); + } + if( preview.getCurrentPictureSize() != null ) { + bundle.putInt("resolution_width", preview.getCurrentPictureSize().width); + bundle.putInt("resolution_height", preview.getCurrentPictureSize().height); + } + + //List video_quality = this.preview.getVideoQualityHander().getSupportedVideoQuality(); + String fps_value = applicationInterface.getVideoFPSPref(); // n.b., this takes into account slow motion mode putting us into a high frame rate + if( MyDebug.LOG ) + Log.d(TAG, "fps_value: " + fps_value); + List video_quality = this.preview.getSupportedVideoQuality(fps_value); + if( video_quality == null || video_quality.isEmpty() ) { + Log.e(TAG, "can't find any supported video sizes for current fps!"); + // fall back to unfiltered list + video_quality = this.preview.getVideoQualityHander().getSupportedVideoQuality(); + } + if( video_quality != null && this.preview.getCameraController() != null ) { + String [] video_quality_arr = new String[video_quality.size()]; + String [] video_quality_string_arr = new String[video_quality.size()]; + int i=0; + for(String value: video_quality) { + video_quality_arr[i] = value; + video_quality_string_arr[i] = this.preview.getCamcorderProfileDescription(value); + i++; + } + bundle.putStringArray("video_quality", video_quality_arr); + bundle.putStringArray("video_quality_string", video_quality_string_arr); + + boolean is_high_speed = this.preview.fpsIsHighSpeed(fps_value); + 
bundle.putBoolean("video_is_high_speed", is_high_speed); + String video_quality_preference_key = PreferenceKeys.getVideoQualityPreferenceKey(this.preview.getCameraId(), applicationInterface.getCameraIdSPhysicalPref(), is_high_speed); + if( MyDebug.LOG ) + Log.d(TAG, "video_quality_preference_key: " + video_quality_preference_key); + bundle.putString("video_quality_preference_key", video_quality_preference_key); + } + + if( preview.getVideoQualityHander().getCurrentVideoQuality() != null ) { + bundle.putString("current_video_quality", preview.getVideoQualityHander().getCurrentVideoQuality()); + } + VideoProfile camcorder_profile = preview.getVideoProfile(); + bundle.putInt("video_frame_width", camcorder_profile.videoFrameWidth); + bundle.putInt("video_frame_height", camcorder_profile.videoFrameHeight); + bundle.putInt("video_bit_rate", camcorder_profile.videoBitRate); + bundle.putInt("video_frame_rate", camcorder_profile.videoFrameRate); + bundle.putDouble("video_capture_rate", camcorder_profile.videoCaptureRate); + bundle.putBoolean("video_high_speed", preview.isVideoHighSpeed()); + bundle.putFloat("video_capture_rate_factor", applicationInterface.getVideoCaptureRateFactor()); + + List video_sizes = this.preview.getVideoQualityHander().getSupportedVideoSizes(); + if( video_sizes != null ) { + int [] widths = new int[video_sizes.size()]; + int [] heights = new int[video_sizes.size()]; + int i=0; + for(CameraController.Size size: video_sizes) { + widths[i] = size.width; + heights[i] = size.height; + i++; + } + bundle.putIntArray("video_widths", widths); + bundle.putIntArray("video_heights", heights); + } + + // set up supported fps values + if( preview.usingCamera2API() ) { + // with Camera2, we know what frame rates are supported + int [] candidate_fps = {15, 24, 25, 30, 60, 96, 100, 120, 240}; + List video_fps = new ArrayList<>(); + List video_fps_high_speed = new ArrayList<>(); + for(int fps : candidate_fps) { + if( preview.fpsIsHighSpeed(String.valueOf(fps)) ) { 
+ video_fps.add(fps); + video_fps_high_speed.add(true); + } + else if( this.preview.getVideoQualityHander().videoSupportsFrameRate(fps) ) { + video_fps.add(fps); + video_fps_high_speed.add(false); + } + } + int [] video_fps_array = new int[video_fps.size()]; + for(int i=0;i= Build.VERSION_CODES.R ) { + // take opportunity to use non-deprecated versions; also for edge_to_edge_mode==true, we need to use getInsetsIgnoringVisibility for + // immersive mode (since for edge_to_edge_mode==true, we are not using setSystemUiVisibility() / SYSTEM_UI_FLAG_LAYOUT_STABLE in setImmersiveMode()) + // also compare with MyApplicationInterface.getDisplaySize() - in particular we don't care about caption/system bar that is returned on e.g. + // OnePlus Pad for insets.top when in landscape orientation (since the system bar isn't shown); however we also need to subtract any from the cutout - + // since this code is for finding what margins we need to set to avoid navigation bars; avoiding the cutout is done below for the entire + // Open Camera view + Insets insets = windowInsets.getInsetsIgnoringVisibility(WindowInsets.Type.navigationBars() | WindowInsets.Type.displayCutout()); + Insets cutout_insets = windowInsets.getInsetsIgnoringVisibility(WindowInsets.Type.displayCutout()); + if( test_force_window_insets ) { + insets = test_insets; + cutout_insets = test_cutout_insets; + } + inset_left = insets.left - cutout_insets.left; + inset_top = insets.top - cutout_insets.top; + inset_right = insets.right - cutout_insets.right; + inset_bottom = insets.bottom - cutout_insets.bottom; + } + else { + inset_left = windowInsets.getSystemWindowInsetLeft(); + inset_top = windowInsets.getSystemWindowInsetTop(); + inset_right = windowInsets.getSystemWindowInsetRight(); + inset_bottom = windowInsets.getSystemWindowInsetBottom(); + } + if( MyDebug.LOG ) { + Log.d(TAG, "inset left: " + inset_left); + Log.d(TAG, "inset top: " + inset_top); + Log.d(TAG, "inset right: " + inset_right); + Log.d(TAG, "inset 
bottom: " + inset_bottom); + } + + if( edge_to_edge_mode && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R ) { + // easier to ensure the entire activity avoids display cutouts - for the preview, we still support + // it showing under the navigation bar + Insets insets = windowInsets.getInsets(WindowInsets.Type.displayCutout()); + if( test_force_window_insets ) { + insets = test_cutout_insets; + } + v.setPadding(insets.left, insets.top, insets.right, insets.bottom); + + // also handle change of immersive mode (instead of using deprecated setOnSystemUiVisibilityChangeListener below + immersiveModeChanged( windowInsets.isVisible(WindowInsets.Type.navigationBars()) ); + } + + resetCachedSystemOrientation(); // don't want to get cached result - this can sometimes happen e.g. on Pixel 6 Pro when switching between landscape and reverse landscape + SystemOrientation system_orientation = getSystemOrientation(); + int new_navigation_gap, new_navigation_gap_landscape, new_navigation_gap_reverse_landscape; + switch ( system_orientation ) { + case PORTRAIT: + if( MyDebug.LOG ) + Log.d(TAG, "portrait"); + new_navigation_gap = inset_bottom; + new_navigation_gap_landscape = inset_left; + new_navigation_gap_reverse_landscape = inset_right; + break; + case LANDSCAPE: + if( MyDebug.LOG ) + Log.d(TAG, "landscape"); + new_navigation_gap = inset_right; + new_navigation_gap_landscape = inset_bottom; + new_navigation_gap_reverse_landscape = inset_top; + break; + case REVERSE_LANDSCAPE: + if( MyDebug.LOG ) + Log.d(TAG, "reverse landscape"); + new_navigation_gap = inset_left; + new_navigation_gap_landscape = inset_top; + new_navigation_gap_reverse_landscape = inset_bottom; + break; + default: + Log.e(TAG, "unknown system_orientation?!: " + system_orientation); + new_navigation_gap = 0; + new_navigation_gap_landscape = 0; + new_navigation_gap_reverse_landscape = 0; + break; + } + if( !edge_to_edge_mode ) { + // we only care about avoiding a landscape navigation bar (e.g., large tablets in 
landscape orientation) for edge_to_edge_mode==true + // in theory this could be useful when edge_to_edge_mode==false, but in practice we will never enter edge-to-edge-mode if the + // navigation bar is along the landscape-edge, so restrict behaviour change to edge_to_edge_mode==true + new_navigation_gap_landscape = 0; + new_navigation_gap_reverse_landscape = 0; + } + + // for edge_to_edge_mode==false, we only enter this case if system orientation changes, due to issues where this callback may be called first with 0 navigation gap + // (see notes below) + // for edge_to_edge_mode==true, simpler to always react to updated insets - in particular, in split-window mode, the navigation gaps can + // change when device rotates, even though the application remains in the same orientation + if( (edge_to_edge_mode || (has_last_system_orientation && system_orientation != last_system_orientation)) && (new_navigation_gap != navigation_gap || new_navigation_gap_landscape != navigation_gap_landscape || new_navigation_gap_reverse_landscape != navigation_gap_reverse_landscape ) ) { + if( MyDebug.LOG ) + Log.d(TAG, "navigation_gap changed from " + navigation_gap + " to " + new_navigation_gap); + + navigation_gap = new_navigation_gap; + navigation_gap_landscape = new_navigation_gap_landscape; + navigation_gap_reverse_landscape = new_navigation_gap_reverse_landscape; + + if( MyDebug.LOG ) + Log.d(TAG, "want_no_limits: " + want_no_limits); + if( want_no_limits || edge_to_edge_mode ) { + // If we want no_limits mode, then need to take care in case of device orientation + // in cases where that changes the navigation_gap: + // - Need to set showUnderNavigation() (in case navigation_gap when from zero to non-zero or vice versa). + // - Need to call layoutUI() (for different value of navigation_gap) + + // Need to call showUnderNavigation() from handler for it to take effect. 
+ // Similarly we have problems if we call layoutUI without post-ing it - + // sometimes when rotating a device, we get a call to OnApplyWindowInsetsListener + // with 0 navigation_gap followed by the call with the correct non-zero values - + // posting the call to layoutUI means it runs after the second call, so we have the + // correct navigation_gap. + Handler handler = new Handler(); + handler.post(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "runnable for change in navigation_gap due to orientation change"); + if( navigation_gap != 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "set FLAG_LAYOUT_NO_LIMITS"); + showUnderNavigation(true); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "clear FLAG_LAYOUT_NO_LIMITS"); + showUnderNavigation(false); + } + // needed for OnePlus Pad when rotating, to avoid delay in updating last_take_photo_top_time (affects placement of on-screen text e.g. zoom) + // need to do this from handler for this to take effect (otherwise last_take_photo_top_time won't update to new value) + applicationInterface.getDrawPreview().onNavigationGapChanged(); + + if( MyDebug.LOG ) + Log.d(TAG, "layout UI due to changing navigation_gap"); + mainUI.layoutUI(); + } + }); + } + } + else if( !edge_to_edge_mode && navigation_gap == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "navigation_gap changed from zero to " + new_navigation_gap); + navigation_gap = new_navigation_gap; + // Sometimes when this callback is called, the navigation_gap may still be 0 even if + // the device doesn't have physical navigation buttons - we need to wait + // until we have found a non-zero value before switching to no limits. 
+ // On devices with physical navigation bar, navigation_gap should remain 0 + // (and there's no point setting FLAG_LAYOUT_NO_LIMITS) + if( want_no_limits && navigation_gap != 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "set FLAG_LAYOUT_NO_LIMITS"); + showUnderNavigation(true); + } + } + + if( has_last_system_orientation && ( + ( system_orientation == SystemOrientation.LANDSCAPE && last_system_orientation == SystemOrientation.REVERSE_LANDSCAPE ) || + ( system_orientation == SystemOrientation.REVERSE_LANDSCAPE && last_system_orientation == SystemOrientation.LANDSCAPE ) + ) ) { + // hack - this should be done via MyDisplayListener.onDisplayChanged(), but that doesn't work on Galaxy S24+ (either MyDisplayListener.onDisplayChanged() + // isn't called, or getDefaultDisplay().getRotation() is still returning the old rotation) + if( MyDebug.LOG ) + Log.d(TAG, "onApplyWindowInsets: switched between landscape and reverse orientation"); + onSystemOrientationChanged(); + } + + has_last_system_orientation = true; + last_system_orientation = system_orientation; + + // see comments in MainUI.layoutUI() for why we don't use this + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && getSystemOrientation() == SystemOrientation.LANDSCAPE ) { + Rect privacy_indicator_rect = windowInsets.getPrivacyIndicatorBounds(); + if( privacy_indicator_rect != null ) { + Rect window_bounds = getWindowManager().getCurrentWindowMetrics().getBounds(); + if( MyDebug.LOG ) { + Log.d(TAG, "privacy_indicator_rect: " + privacy_indicator_rect); + Log.d(TAG, "window_bounds: " + window_bounds); + } + privacy_indicator_gap = window_bounds.right - privacy_indicator_rect.left; + if( privacy_indicator_gap < 0 ) + privacy_indicator_gap = 0; // just in case?? 
+ if( MyDebug.LOG ) + Log.d(TAG, "privacy_indicator_gap: " + privacy_indicator_gap); + } + } + else { + privacy_indicator_gap = 0; + }*/ + return getWindow().getDecorView().getRootView().onApplyWindowInsets(windowInsets); + } + }); + } + + if( edge_to_edge_mode && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R ) { + // already handled by the setOnApplyWindowInsetsListener above + } + else { + decorView.setOnSystemUiVisibilityChangeListener + (new View.OnSystemUiVisibilityChangeListener() { + @Override + public void onSystemUiVisibilityChange(int visibility) { + // Note that system bars will only be "visible" if none of the + // LOW_PROFILE, HIDE_NAVIGATION, or FULLSCREEN flags are set. + + if( MyDebug.LOG ) + Log.d(TAG, "onSystemUiVisibilityChange: " + visibility); + + // Note that Android example code says to test against SYSTEM_UI_FLAG_FULLSCREEN, + // but this stopped working on Android 11, as when calling setSystemUiVisibility(0) + // to exit immersive mode, when we arrive here the flag SYSTEM_UI_FLAG_FULLSCREEN + // is still set. Fixed by checking for SYSTEM_UI_FLAG_HIDE_NAVIGATION instead - + // which makes some sense since we run in fullscreen mode all the time anyway. 
+ //if( (visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0 ) { + if( (visibility & View.SYSTEM_UI_FLAG_HIDE_NAVIGATION) == 0 ) { + immersiveModeChanged(true); + } + else { + immersiveModeChanged(false); + } + } + }); + } + } + + public boolean usingKitKatImmersiveMode() { + // whether we are using a Kit Kat style immersive mode (either hiding navigation bar, GUI, or everything) + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String immersive_mode = sharedPreferences.getString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_off"); + if( immersive_mode.equals("immersive_mode_navigation") || immersive_mode.equals("immersive_mode_gui") || immersive_mode.equals("immersive_mode_everything") ) + return true; + return false; + } + + public boolean usingKitKatImmersiveModeEverything() { + // whether we are using a Kit Kat style immersive mode for everything + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String immersive_mode = sharedPreferences.getString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_off"); + if( immersive_mode.equals("immersive_mode_everything") ) + return true; + return false; + } + + + private Handler immersive_timer_handler = null; + private Runnable immersive_timer_runnable = null; + + private void cancelImmersiveTimer() { + if( immersive_timer_handler != null && immersive_timer_runnable != null ) { + immersive_timer_handler.removeCallbacks(immersive_timer_runnable); + immersive_timer_handler = null; + immersive_timer_runnable = null; + } + } + + private void setImmersiveTimer() { + cancelImmersiveTimer(); + if( app_is_paused ) { + // don't want to enter immersive mode from background + // problem that even after onPause, we can end up here via various callbacks + return; + } + immersive_timer_handler = new Handler(); + immersive_timer_handler.postDelayed(immersive_timer_runnable = new Runnable(){ + @Override + public void run(){ + if( 
MyDebug.LOG )
                    Log.d(TAG, "setImmersiveTimer: run");
                // even though timer should have been cancelled when in background, check app_is_paused just in case
                if( !app_is_paused && !camera_in_background && !popupIsOpen() && usingKitKatImmersiveMode() )
                    setImmersiveMode(true);
            }
        }, 5000);
    }

    /** Sets up immersive mode on startup: immediately when not using a Kit Kat style immersive
     *  mode, otherwise only after the timer set by setImmersiveTimer().
     */
    public void initImmersiveMode() {
        if( !usingKitKatImmersiveMode() ) {
            setImmersiveMode(true);
        }
        else {
            // don't start in immersive mode, only after a timer
            setImmersiveTimer();
        }
    }

    /** Enters or exits immersive mode (hiding navigation bars).
     *  @param on Whether immersive mode is requested; it is only actually enabled when the
     *            user preference allows it and we're not in panorama mode.
     */
    void setImmersiveMode(boolean on) {
        if( MyDebug.LOG )
            Log.d(TAG, "setImmersiveMode: " + on);
        // n.b., preview.setImmersiveMode() is called from onSystemUiVisibilityChange()

        // don't allow the kitkat-style immersive mode for panorama mode (problem that in "full" immersive mode, the gyro spot can't be seen - we could fix this, but simplest to just disallow)
        boolean enable_immersive = on && usingKitKatImmersiveMode() && applicationInterface.getPhotoMode() != MyApplicationInterface.PhotoMode.Panorama;
        if( MyDebug.LOG )
            Log.d(TAG, "enable_immersive?: " + enable_immersive);

        if( edge_to_edge_mode ) {
            // take opportunity to avoid deprecated setSystemUiVisibility
            WindowInsetsControllerCompat windowInsetsController = WindowCompat.getInsetsController(getWindow(), getWindow().getDecorView());
            int type = WindowInsetsCompat.Type.navigationBars(); // only show/hide navigation bars, as we run with system bars always hidden
            if( enable_immersive ) {
                windowInsetsController.hide(type);
            }
            else {
                windowInsetsController.show(type);
            }
        }
        else {
            // save whether we set SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION - since this flag might be enabled for showUnderNavigation(true), at least indirectly by setDecorFitsSystemWindows() on old versions of Android
            int saved_flags = getWindow().getDecorView().getSystemUiVisibility() & View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION;
            if( MyDebug.LOG )
                Log.d(TAG, "saved_flags?: " + saved_flags);
            if( enable_immersive ) {
                getWindow().getDecorView().setSystemUiVisibility(saved_flags | View.SYSTEM_UI_FLAG_IMMERSIVE | View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN);
            }
            else {
                getWindow().getDecorView().setSystemUiVisibility(saved_flags);
            }
        }
    }

    /** Sets the brightness level for normal operation (when camera preview is visible).
     * If force_max is true, this always forces maximum brightness; otherwise this depends on user preference.
     */
    public void setBrightnessForCamera(boolean force_max) {
        if( MyDebug.LOG )
            Log.d(TAG, "setBrightnessForCamera");
        // set screen to max brightness - see http://stackoverflow.com/questions/11978042/android-screen-brightness-max-value
        // done here rather than onCreate, so that changing it in preferences takes effect without restarting app
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        final WindowManager.LayoutParams layout = getWindow().getAttributes();
        if( force_max || sharedPreferences.getBoolean(PreferenceKeys.MaxBrightnessPreferenceKey, false) ) {
            layout.screenBrightness = WindowManager.LayoutParams.BRIGHTNESS_OVERRIDE_FULL;
        }
        else {
            // BRIGHTNESS_OVERRIDE_NONE means the normal user/system brightness setting applies
            layout.screenBrightness = WindowManager.LayoutParams.BRIGHTNESS_OVERRIDE_NONE;
        }

        // this must be called from the ui thread
        // sometimes this method may be called not on UI thread, e.g., Preview.takePhotoWhenFocused->CameraController2.takePicture
        // ->CameraController2.runFakePrecapture->Preview/onFrontScreenTurnOn->MyApplicationInterface.turnFrontScreenFlashOn
        // -> this.setBrightnessForCamera
        this.runOnUiThread(new Runnable() {
            public void run() {
                getWindow().setAttributes(layout);
            }
        });
    }

    /**
     * Set the brightness to minimal in case the preference key is set to do it
     */
    public void setBrightnessToMinimumIfWanted() {
        if( MyDebug.LOG )
            Log.d(TAG, "setBrightnessToMinimum");
        SharedPreferences sharedPreferences =
    /** Sets the window flags for normal operation (when camera preview is visible).
     *  Counterpart of setWindowFlagsForSettings(): (re)applies orientation lock, screen-on,
     *  show-when-locked, no-limits and immersive-mode state, marks the camera as being in the
     *  foreground (camera_in_background = false), and - if the app isn't paused - restarts
     *  location listening and reopens the camera via preview.onResume().
     */
    public void setWindowFlagsForCamera() {
        if( MyDebug.LOG )
            Log.d(TAG, "setWindowFlagsForCamera");
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);

        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) {
            // we set this to prevent what's on the preview being used to show under the "recent apps" view - potentially useful
            // for privacy reasons
            setRecentsScreenshotEnabled(false);
        }

        if( lock_to_landscape ) {
            // force to landscape mode
            setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
            //setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE); // testing for devices with unusual sensor orientation (e.g., Nexus 5X)
        }
        else {
            // allow orientation to change for camera, even if user has locked orientation
            setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR);
        }
        if( preview != null ) {
            // also need to call preview.setCameraDisplayOrientation, as this handles if the user switched from portrait to reverse landscape whilst in settings/etc
            // as switching from reverse landscape back to landscape isn't detected in onConfigurationChanged
            // update: now probably irrelevant now that we close/reopen the camera, but keep it here anyway
            preview.setCameraDisplayOrientation();
        }
        if( preview != null && mainUI != null ) {
            // layoutUI() is needed because even though we call layoutUI from MainUI.onOrientationChanged(), certain things
            // (ui_rotation) depend on the system orientation too.
            // Without this, going to Settings, then changing orientation, then exiting settings, would show the icons with the
            // wrong orientation.
            // We put this here instead of onConfigurationChanged() as onConfigurationChanged() isn't called when switching from
            // reverse landscape to landscape orientation: so it's needed to fix if the user starts in portrait, goes to settings
            // or a dialog, then switches to reverse landscape, then exits settings/dialog - the system orientation will switch
            // to landscape (which Open Camera is forced to).
            mainUI.layoutUI();
        }

        // keep screen active - see http://stackoverflow.com/questions/2131948/force-screen-on
        if( sharedPreferences.getBoolean(PreferenceKeys.KeepDisplayOnPreferenceKey, true) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "do keep screen on");
            this.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "don't keep screen on");
            this.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        }
        if( sharedPreferences.getBoolean(PreferenceKeys.ShowWhenLockedPreferenceKey, false) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "do show when locked");
            // keep Open Camera on top of screen-lock (will still need to unlock when going to gallery or settings)
            showWhenLocked(true);
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "don't show when locked");
            showWhenLocked(false);
        }

        if( want_no_limits && navigation_gap != 0 ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set FLAG_LAYOUT_NO_LIMITS");
            showUnderNavigation(true);
        }

        setBrightnessForCamera(false);

        initImmersiveMode();
        // note: camera_in_background must be cleared before initLocation() below is called
        camera_in_background = false;

        magneticSensor.clearDialog(); // if the magnetic accuracy was opened, it must have been closed now
        if( !app_is_paused ) {
            // Needs to be called after camera_in_background is set to false.
            // Note that the app_is_paused guard is in some sense unnecessary, as initLocation tests for that too,
            // but useful for error tracking - ideally we want to make sure that initLocation is never called when
            // app is paused. It can happen here because setWindowFlagsForCamera() is called from
            // onCreate()
            initLocation();

            // Similarly only want to reopen the camera if no longer paused
            if( preview != null ) {
                preview.onResume();
            }
        }
    }
WindowManager.LayoutParams.BRIGHTNESS_OVERRIDE_NONE; + getWindow().setAttributes(layout); + } + + setImmersiveMode(false); + camera_in_background = true; + + // we disable location listening when showing settings or a dialog etc - saves battery life, also better for privacy + applicationInterface.getLocationSupplier().freeLocationListeners(); + + // similarly we close the camera + preview.onPause(false); + + push_switched_camera = false; // just in case + } + + private void showWhenLocked(boolean show) { + if( MyDebug.LOG ) + Log.d(TAG, "showWhenLocked: " + show); + // although FLAG_SHOW_WHEN_LOCKED is deprecated, setShowWhenLocked(false) does not work + // correctly: if we turn screen off and on when camera is open (so we're now running above + // the lock screen), going to settings does not show the lock screen, i.e., + // setShowWhenLocked(false) does not take effect! + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) { + if( MyDebug.LOG ) + Log.d(TAG, "use setShowWhenLocked"); + setShowWhenLocked(show); + } + else*/ { + if( show ) { + getWindow().addFlags(WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED); + } + else { + getWindow().clearFlags(WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED); + } + } + } + + /** Use this is place of simply alert.show(), if the orientation has just been set to allow + * rotation via setWindowFlagsForSettings(). On some devices (e.g., OnePlus 3T with Android 8), + * the dialog doesn't show properly if the phone is held in portrait. A workaround seems to be + * to use postDelayed. Note that postOnAnimation() doesn't work. 
+ */ + public void showAlert(final AlertDialog alert) { + if( MyDebug.LOG ) + Log.d(TAG, "showAlert"); + Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + public void run() { + alert.show(); + } + }, 20); + // note that 1ms usually fixes the problem, but not always; 10ms seems fine, have set 20ms + // just in case + } + + public void showPreview(boolean show) { + if( MyDebug.LOG ) + Log.d(TAG, "showPreview: " + show); + final ViewGroup container = findViewById(R.id.hide_container); + container.setVisibility(show ? View.GONE : View.VISIBLE); + } + + /** Rotates the supplied bitmap according to the orientation tag stored in the exif data. If no + * rotation is required, the input bitmap is returned. If rotation is required, the input + * bitmap is recycled. + * @param uri Uri containing the JPEG with Exif information to use. + */ + public Bitmap rotateForExif(Bitmap bitmap, Uri uri) throws IOException { + ExifInterface exif; + InputStream inputStream = null; + try { + inputStream = this.getContentResolver().openInputStream(uri); + exif = new ExifInterface(inputStream); + } + finally { + if( inputStream != null ) + inputStream.close(); + } + + if( exif != null ) { + int exif_orientation_s = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED); + boolean needs_tf = false; + int exif_orientation = 0; + // see http://jpegclub.org/exif_orientation.html + // and http://stackoverflow.com/questions/20478765/how-to-get-the-correct-orientation-of-the-image-selected-from-the-default-image + if( exif_orientation_s == ExifInterface.ORIENTATION_UNDEFINED || exif_orientation_s == ExifInterface.ORIENTATION_NORMAL ) { + // leave unchanged + } + else if( exif_orientation_s == ExifInterface.ORIENTATION_ROTATE_180 ) { + needs_tf = true; + exif_orientation = 180; + } + else if( exif_orientation_s == ExifInterface.ORIENTATION_ROTATE_90 ) { + needs_tf = true; + exif_orientation = 90; + } + else if( exif_orientation_s == 
ExifInterface.ORIENTATION_ROTATE_270 ) { + needs_tf = true; + exif_orientation = 270; + } + else { + // just leave unchanged for now + if( MyDebug.LOG ) + Log.e(TAG, " unsupported exif orientation: " + exif_orientation_s); + } + if( MyDebug.LOG ) + Log.d(TAG, " exif orientation: " + exif_orientation); + + if( needs_tf ) { + if( MyDebug.LOG ) + Log.d(TAG, " need to rotate bitmap due to exif orientation tag"); + Matrix m = new Matrix(); + m.setRotate(exif_orientation, bitmap.getWidth() * 0.5f, bitmap.getHeight() * 0.5f); + Bitmap rotated_bitmap = Bitmap.createBitmap(bitmap, 0, 0,bitmap.getWidth(), bitmap.getHeight(), m, true); + if( rotated_bitmap != bitmap ) { + bitmap.recycle(); + bitmap = rotated_bitmap; + } + } + } + return bitmap; + } + + /** Loads a thumbnail from the supplied image uri (not videos). Note this loads from the bitmap + * rather than reading from MediaStore. Therefore this works with SAF uris as well as + * MediaStore uris, as well as allowing control over the resolution of the thumbnail. + * If sample_factor is 1, this returns a bitmap scaled to match the display resolution. If + * sample_factor is greater than 1, it will be scaled down to a lower resolution. + * We now use this for photos in preference to APIs like + * MediaStore.Images.Thumbnails.getThumbnail(). Advantages are simplifying the code, reducing + * number of different codepaths, but also seems to help against device specific bugs + * in getThumbnail() e.g. Pixel 6 Pro with x-night in portrait. 
+ */ + private Bitmap loadThumbnailFromUri(Uri uri, int sample_factor) { + Bitmap thumbnail = null; + try { + //thumbnail = MediaStore.Images.Media.getBitmap(getContentResolver(), media.uri); + // only need to load a bitmap as large as the screen size + BitmapFactory.Options options = new BitmapFactory.Options(); + InputStream is = getContentResolver().openInputStream(uri); + // get dimensions + options.inJustDecodeBounds = true; + BitmapFactory.decodeStream(is, null, options); + int bitmap_width = options.outWidth; + int bitmap_height = options.outHeight; + Point display_size = new Point(); + applicationInterface.getDisplaySize(display_size, true); + if( MyDebug.LOG ) { + Log.d(TAG, "bitmap_width: " + bitmap_width); + Log.d(TAG, "bitmap_height: " + bitmap_height); + Log.d(TAG, "display width: " + display_size.x); + Log.d(TAG, "display height: " + display_size.y); + } + // align dimensions + if( display_size.x < display_size.y ) { + //noinspection SuspiciousNameCombination + display_size.set(display_size.y, display_size.x); + } + if( bitmap_width < bitmap_height ) { + int dummy = bitmap_width; + //noinspection SuspiciousNameCombination + bitmap_width = bitmap_height; + bitmap_height = dummy; + } + if( MyDebug.LOG ) { + Log.d(TAG, "bitmap_width: " + bitmap_width); + Log.d(TAG, "bitmap_height: " + bitmap_height); + Log.d(TAG, "display width: " + display_size.x); + Log.d(TAG, "display height: " + display_size.y); + } + // only care about height, to save worrying about different aspect ratios + options.inSampleSize = 1; + while( bitmap_height / (2*options.inSampleSize) >= display_size.y ) { + options.inSampleSize *= 2; + } + options.inSampleSize *= sample_factor; + if( MyDebug.LOG ) { + Log.d(TAG, "inSampleSize: " + options.inSampleSize); + } + options.inJustDecodeBounds = false; + // need a new inputstream, see https://stackoverflow.com/questions/2503628/bitmapfactory-decodestream-returning-null-when-options-are-set + is.close(); + is = 
getContentResolver().openInputStream(uri); + thumbnail = BitmapFactory.decodeStream(is, null, options); + if( thumbnail == null ) { + Log.e(TAG, "decodeStream returned null bitmap for ghost image last"); + } + is.close(); + + thumbnail = rotateForExif(thumbnail, uri); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to load bitmap for ghost image last", e); + } + return thumbnail; + } + + /** Shows the default "blank" gallery icon, when we don't have a thumbnail available. + */ + private void updateGalleryIconToBlank() { + if( MyDebug.LOG ) + Log.d(TAG, "updateGalleryIconToBlank"); + ImageButton galleryButton = this.findViewById(R.id.gallery); + int bottom = galleryButton.getPaddingBottom(); + int top = galleryButton.getPaddingTop(); + int right = galleryButton.getPaddingRight(); + int left = galleryButton.getPaddingLeft(); + /*if( MyDebug.LOG ) + Log.d(TAG, "padding: " + bottom);*/ + galleryButton.setImageBitmap(null); + galleryButton.setImageResource(R.drawable.baseline_photo_library_white_48); + // workaround for setImageResource also resetting padding, Android bug + galleryButton.setPadding(left, top, right, bottom); + gallery_bitmap = null; + } + + /** Shows a thumbnail for the gallery icon. + */ + void updateGalleryIcon(Bitmap thumbnail) { + if( MyDebug.LOG ) + Log.d(TAG, "updateGalleryIcon: " + thumbnail); + // If we're currently running the background task to update the gallery (see updateGalleryIcon()), we should cancel that! + // Otherwise if user takes a photo whilst the background task is still running, the thumbnail from the latest photo will + // be overridden when the background task completes. This is more likely when using SAF on Android 10+ with scoped storage, + // due to SAF's poor performance for folders with large number of files. 
    /** Updates the gallery icon by searching for the most recent photo.
     * Launches the task in a separate thread (a single-thread executor); the resulting
     * thumbnail is then applied back on the UI thread via a Handler. The Future for the
     * background task is kept in update_gallery_future so it can be cancelled if a new
     * photo is taken whilst the task is still running.
     */
    public void updateGalleryIcon() {
        long debug_time = 0;
        if( MyDebug.LOG ) {
            Log.d(TAG, "updateGalleryIcon");
            debug_time = System.currentTimeMillis();
        }
        // only one update task at a time - if one is already in flight, let it finish
        if( update_gallery_future != null ) {
            Log.d(TAG, "previous updateGalleryIcon task already running");
            return;
        }

        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        String ghost_image_pref = sharedPreferences.getString(PreferenceKeys.GhostImagePreferenceKey, "preference_ghost_image_off");
        final boolean ghost_image_last = ghost_image_pref.equals("preference_ghost_image_last");

        // handler on the main looper, used to deliver the result back to the UI thread
        final Handler handler = new Handler(Looper.getMainLooper());

        //new AsyncTask() {
        Runnable runnable = new Runnable() {
            private static final String TAG = "updateGalleryIcon";
            // results computed on the background thread, read in onPostExecute on the UI thread
            private Uri uri;
            private boolean is_raw;
            private boolean is_video;

            @Override
            //protected Bitmap doInBackground(Void... params) {
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "doInBackground");
                StorageUtils.Media media = applicationInterface.getStorageUtils().getLatestMedia();
                Bitmap thumbnail = null;
                KeyguardManager keyguard_manager = (KeyguardManager)MainActivity.this.getSystemService(Context.KEYGUARD_SERVICE);
                boolean is_locked = keyguard_manager != null && keyguard_manager.inKeyguardRestrictedInputMode();
                if( MyDebug.LOG )
                    Log.d(TAG, "is_locked?: " + is_locked);
                if( media != null && getContentResolver() != null && !is_locked ) {
                    // check for getContentResolver() != null, as have had reported Google Play crashes

                    uri = media.getMediaStoreUri(MainActivity.this);
                    is_raw = media.filename != null && StorageUtils.filenameIsRaw(media.filename);
                    is_video = media.video;

                    if( ghost_image_last && !media.video ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "load full size bitmap for ghost image last photo");
                        // use sample factor of 1 so that it's full size for ghost image
                        thumbnail = loadThumbnailFromUri(media.uri, 1);
                    }
                    if( thumbnail == null ) {
                        try {
                            if( !media.video ) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "load thumbnail for photo");
                                // use sample factor as this image is only used for thumbnail; and
                                // unlike code in MyApplicationInterface.saveImage() we don't need to
                                // worry about the thumbnail animation when taking/saving a photo
                                thumbnail = loadThumbnailFromUri(media.uri, 8);
                            }
                            else if( !media.mediastore ) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "load thumbnail for video from SAF uri");
                                ParcelFileDescriptor pfd_saf = null; // keep a reference to this as long as retriever, to avoid risk of pfd_saf being garbage collected
                                MediaMetadataRetriever retriever = new MediaMetadataRetriever();
                                try {
                                    pfd_saf = getContentResolver().openFileDescriptor(media.uri, "r");
                                    retriever.setDataSource(pfd_saf.getFileDescriptor());
                                    thumbnail = retriever.getFrameAtTime(-1);
                                }
                                catch(Exception e) {
                                    MyDebug.logStackTrace(TAG, "failed to load video thumbnail", e);
                                }
                                finally {
                                    try {
                                        retriever.release();
                                    }
                                    catch(RuntimeException ex) {
                                        // ignore
                                    }
                                    try {
                                        if( pfd_saf != null ) {
                                            pfd_saf.close();
                                        }
                                    }
                                    catch(IOException e) {
                                        MyDebug.logStackTrace(TAG, "failed to close pfd_saf", e);
                                    }
                                }
                            }
                            else {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "load thumbnail for video");
                                if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                                    final Size size = new Size(512, 384); // same as MediaStore.ThumbnailConstants.MINI_SIZE, which is used for MediaStore.Video.Thumbnails.MINI_KIND
                                    thumbnail = getContentResolver().loadThumbnail(media.uri, size, new CancellationSignal());
                                }
                                else {
                                    // non-deprecated getContentResolver().loadThumbnail requires Android Q
                                    //noinspection deprecation
                                    thumbnail = MediaStore.Video.Thumbnails.getThumbnail(getContentResolver(), media.id, MediaStore.Video.Thumbnails.MINI_KIND, null);
                                }
                            }
                        }
                        catch(Throwable e) {
                            // have had Google Play NoClassDefFoundError crashes from getThumbnail() for Galaxy Ace4 (vivalto3g), Galaxy S Duos3 (vivalto3gvn)
                            // also NegativeArraySizeException - best to catch everything
                            if( MyDebug.LOG )
                                Log.e(TAG, "thumbnail exception");
                            MyDebug.logStackTrace(TAG, "thumbnail exception", e);
                        }
                    }
                }
                //return thumbnail;

                // hand the result back to the UI thread
                final Bitmap thumbnail_f = thumbnail;
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        onPostExecute(thumbnail_f);
                    }
                });
            }

            /** Runs on UI thread, after background work is complete.
             */
            private void onPostExecute(Bitmap thumbnail) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onPostExecute");
                // if cancelled (e.g., because a photo was just taken), discard this stale result
                if( update_gallery_future != null && update_gallery_future.isCancelled() ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "was cancelled");
                    update_gallery_future = null;
                    return;
                }
                // since we're now setting the thumbnail to the latest media on disk, we need to make sure clicking the Gallery goes to this
                applicationInterface.getStorageUtils().clearLastMediaScanned();
                if( uri != null ) {
                    if( MyDebug.LOG ) {
                        Log.d(TAG, "found media uri: " + uri);
                        Log.d(TAG, " is_raw?: " + is_raw);
                    }
                    applicationInterface.getStorageUtils().setLastMediaScanned(uri, is_raw, false, null);
                }
                if( thumbnail != null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "set gallery button to thumbnail");
                    updateGalleryIcon(thumbnail);
                    applicationInterface.getDrawPreview().updateThumbnail(thumbnail, is_video, false); // needed in case last ghost image is enabled
                }
                else {
                    if( MyDebug.LOG )
                        Log.d(TAG, "set gallery button to blank");
                    updateGalleryIconToBlank();
                }

                // mark the task as complete so a new update can start
                update_gallery_future = null;
            }
            //}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        };

        ExecutorService executor = Executors.newSingleThreadExecutor();
        //executor.execute(runnable);
        update_gallery_future = executor.submit(runnable);

        if( MyDebug.LOG )
            Log.d(TAG, "updateGalleryIcon: total time to update gallery icon: " + (System.currentTimeMillis() - debug_time));
    }
gallery_save_anim.setRepeatCount(ValueAnimator.INFINITE); + gallery_save_anim.setRepeatMode(ValueAnimator.REVERSE); + gallery_save_anim.setDuration(500); + } + gallery_save_anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(@NonNull ValueAnimator animation) { + galleryButton.setColorFilter((Integer)animation.getAnimatedValue(), PorterDuff.Mode.MULTIPLY); + } + }); + gallery_save_anim.start(); + } + else + if( gallery_save_anim != null ) { + gallery_save_anim.cancel(); + } + galleryButton.setColorFilter(null); + } + }); + } + + /** Called when the number of images being saved in ImageSaver changes (or otherwise something + * that changes our calculation of whether we can take a new photo, e.g., changing photo mode). + */ + void imageQueueChanged() { + if( MyDebug.LOG ) + Log.d(TAG, "imageQueueChanged"); + applicationInterface.getDrawPreview().setImageQueueFull( !applicationInterface.canTakeNewPhoto() ); + + /*if( applicationInterface.getImageSaver().getNImagesToSave() == 0) { + cancelImageSavingNotification(); + } + else if( has_notification ) { + // call again to update the text of remaining images + createImageSavingNotification(); + }*/ + } + + /** Creates a notification to indicate still saving images (or updates an existing one). + * Update: notifications now removed due to needing permissions on Android 13+. 
    /** Creates a notification to indicate still saving images (or updates an existing one).
     * Update: notifications now removed due to needing permissions on Android 13+.
     * The method is kept (as a no-op apart from logging) so call sites remain unchanged;
     * the previous implementation is preserved below for reference.
     */
    private void createImageSavingNotification() {
        if( MyDebug.LOG )
            Log.d(TAG, "createImageSavingNotification");
        /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
            int n_images_to_save = applicationInterface.getImageSaver().getNRealImagesToSave();
            Notification.Builder builder = new Notification.Builder(this, CHANNEL_ID)
                    .setSmallIcon(R.drawable.ic_stat_notify_take_photo)
                    .setContentTitle(getString(R.string.app_name))
                    .setContentText(getString(R.string.image_saving_notification) + " " + n_images_to_save + " " + getString(R.string.remaining))
                    //.setStyle(new Notification.BigTextStyle()
                    //        .bigText("Much longer text that cannot fit one line..."))
                    //.setPriority(Notification.PRIORITY_DEFAULT)
                    ;
            NotificationManager notificationManager = getSystemService(NotificationManager.class);
            notificationManager.notify(image_saving_notification_id, builder.build());
            has_notification = true;
        }*/
    }
    /** Launches an external viewer for the most recent photo/video.
     *  Strategy: use the cached last-scanned uri if available, else search for the latest media;
     *  for non-scoped-storage, verify the uri still exists (falling back to the MediaStore
     *  gallery uri if not); then try the REVIEW action first (non-RAW only), falling back to
     *  ACTION_VIEW.
     */
    private void openGallery() {
        if( MyDebug.LOG )
            Log.d(TAG, "openGallery");
        //Intent intent = new Intent(Intent.ACTION_VIEW, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
        Uri uri = applicationInterface.getStorageUtils().getLastMediaScanned();
        boolean is_raw = uri != null && applicationInterface.getStorageUtils().getLastMediaScannedIsRaw();
        if( MyDebug.LOG && uri != null ) {
            Log.d(TAG, "found cached most recent uri: " + uri);
            Log.d(TAG, " is_raw: " + is_raw);
        }
        if( uri == null ) {
            // no cached uri - search storage for the most recent media instead
            if( MyDebug.LOG )
                Log.d(TAG, "go to latest media");
            StorageUtils.Media media = applicationInterface.getStorageUtils().getLatestMedia();
            if( media != null ) {
                if( MyDebug.LOG ) {
                    Log.d(TAG, "latest uri:" + media.uri);
                    Log.d(TAG, "filename: " + media.filename);
                }
                uri = media.getMediaStoreUri(this);
                if( MyDebug.LOG )
                    Log.d(TAG, "media uri:" + uri);
                is_raw = media.filename != null && StorageUtils.filenameIsRaw(media.filename);
                if( MyDebug.LOG )
                    Log.d(TAG, "is_raw:" + is_raw);
            }
        }

        if( uri != null && !MainActivity.useScopedStorage() ) {
            // check uri exists
            // note, with scoped storage this isn't reliable when using SAF - since we don't actually have permission to access mediastore URIs that
            // were created via Storage Access Framework, even though Open Camera was the application that saved them(!)
            try {
                ContentResolver cr = getContentResolver();
                ParcelFileDescriptor pfd = cr.openFileDescriptor(uri, "r");
                if( pfd == null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "uri no longer exists (1): " + uri);
                    uri = null;
                    is_raw = false;
                }
                else {
                    pfd.close();
                }
            }
            catch(IOException e) {
                if( MyDebug.LOG )
                    Log.d(TAG, "uri no longer exists (2): " + uri);
                uri = null;
                is_raw = false;
            }
        }
        if( uri == null ) {
            // last resort: open the general images gallery
            uri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
            is_raw = false;
        }
        if( !is_test ) {
            // don't do if testing, as unclear how to exit activity to finish test (for testGallery())
            if( MyDebug.LOG )
                Log.d(TAG, "launch uri:" + uri);
            final String REVIEW_ACTION = "com.android.camera.action.REVIEW";
            boolean done = false;
            if( !is_raw ) {
                // REVIEW_ACTION means we can view video files without autoplaying.
                // However, Google Photos at least has problems with going to a RAW photo (in RAW only mode),
                // unless we first pause and resume Open Camera.
                // Update: on Galaxy S10e with Android 11 at least, no longer seem to have problems, but leave
                // the check for is_raw just in case for older devices.
                if( MyDebug.LOG )
                    Log.d(TAG, "try REVIEW_ACTION");
                try {
                    Intent intent = new Intent(REVIEW_ACTION, uri);
                    this.startActivity(intent);
                    done = true;
                }
                catch(ActivityNotFoundException e) {
                    MyDebug.logStackTrace(TAG, "failed to start REVIEW_ACTION intent", e);
                }
            }
            if( !done ) {
                // fall back to the standard view intent
                if( MyDebug.LOG )
                    Log.d(TAG, "try ACTION_VIEW");
                try {
                    Intent intent = new Intent(Intent.ACTION_VIEW, uri);
                    this.startActivity(intent);
                }
                catch(ActivityNotFoundException e) {
                    MyDebug.logStackTrace(TAG, "failed to start ACTION_VIEW intent", e);
                    preview.showToast(null, R.string.no_gallery_app);
                }
                catch(SecurityException e) {
                    // have received this crash from Google Play - don't display a toast, simply do nothing
                    MyDebug.logStackTrace(TAG, "SecurityException from ACTION_VIEW startActivity", e);
                }
            }
        }
    }
+ * @param from_preferences Whether called from the Preferences + */ + void openGhostImageChooserDialogSAF(boolean from_preferences) { + if( MyDebug.LOG ) + Log.d(TAG, "openGhostImageChooserDialogSAF: " + from_preferences); + this.saf_dialog_from_preferences = from_preferences; + Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT); + intent.addCategory(Intent.CATEGORY_OPENABLE); + intent.setType("image/*"); + try { + startActivityForResult(intent, CHOOSE_GHOST_IMAGE_SAF_CODE); + } + catch(ActivityNotFoundException e) { + // see https://stackoverflow.com/questions/34021039/action-open-document-not-working-on-miui/34045627 + preview.showToast(null, R.string.open_files_saf_exception_ghost); + MyDebug.logStackTrace(TAG, "ActivityNotFoundException from startActivityForResult", e); + } + } + + /** Opens the Storage Access Framework dialog to select a file for loading settings. + * @param from_preferences Whether called from the Preferences + */ + void openLoadSettingsChooserDialogSAF(boolean from_preferences) { + if( MyDebug.LOG ) + Log.d(TAG, "openLoadSettingsChooserDialogSAF: " + from_preferences); + this.saf_dialog_from_preferences = from_preferences; + Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT); + intent.addCategory(Intent.CATEGORY_OPENABLE); + intent.setType("text/xml"); // note that application/xml doesn't work (can't select the xml files)! + try { + startActivityForResult(intent, CHOOSE_LOAD_SETTINGS_SAF_CODE); + } + catch(ActivityNotFoundException e) { + // see https://stackoverflow.com/questions/34021039/action-open-document-not-working-on-miui/34045627 + preview.showToast(null, R.string.open_files_saf_exception_generic); + MyDebug.logStackTrace(TAG, "ActivityNotFoundException from startActivityForResult", e); + } + } + + /** Call when the SAF save history has been updated. + * This is only public so we can call from testing. + * @param save_folder The new SAF save folder Uri. 
+ */ + public void updateFolderHistorySAF(String save_folder) { + if( MyDebug.LOG ) + Log.d(TAG, "updateSaveHistorySAF"); + if( save_location_history_saf == null ) { + save_location_history_saf = new SaveLocationHistory(this, "save_location_history_saf", save_folder); + } + save_location_history_saf.updateFolderHistory(save_folder, true); + } + + /** Listens for the response from the Storage Access Framework dialog to select a folder + * (as opened with openFolderChooserDialogSAF()). + */ + public void onActivityResult(int requestCode, int resultCode, Intent resultData) { + if( MyDebug.LOG ) + Log.d(TAG, "onActivityResult: " + requestCode); + + super.onActivityResult(requestCode, resultCode, resultData); + + switch( requestCode ) { + case CHOOSE_SAVE_FOLDER_SAF_CODE: + if( resultCode == RESULT_OK && resultData != null ) { + Uri treeUri = resultData.getData(); + if( MyDebug.LOG ) + Log.d(TAG, "returned treeUri: " + treeUri); + // see https://developer.android.com/training/data-storage/shared/documents-files#persist-permissions : + final int takeFlags = resultData.getFlags() & (Intent.FLAG_GRANT_READ_URI_PERMISSION | Intent.FLAG_GRANT_WRITE_URI_PERMISSION); + try { + /*if( true ) + throw new SecurityException(); // test*/ + getContentResolver().takePersistableUriPermission(treeUri, takeFlags); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, treeUri.toString()); + editor.apply(); + + if( MyDebug.LOG ) + Log.d(TAG, "update folder history for saf"); + updateFolderHistorySAF(treeUri.toString()); + + String file = applicationInterface.getStorageUtils().getImageFolderPath(); + if( file != null ) { + preview.showToast(null, getResources().getString(R.string.changed_save_location) + "\n" + file); + } + } + catch(SecurityException e) { + MyDebug.logStackTrace(TAG, "SecurityException failed to take 
permission", e); + preview.showToast(null, R.string.saf_permission_failed); + // failed - if the user had yet to set a save location, make sure we switch SAF back off + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String uri = sharedPreferences.getString(PreferenceKeys.SaveLocationSAFPreferenceKey, ""); + if( uri.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no SAF save location was set"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, false); + editor.apply(); + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "SAF dialog cancelled"); + // cancelled - if the user had yet to set a save location, make sure we switch SAF back off + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String uri = sharedPreferences.getString(PreferenceKeys.SaveLocationSAFPreferenceKey, ""); + if( uri.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no SAF save location was set"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.UsingSAFPreferenceKey, false); + editor.apply(); + preview.showToast(null, R.string.saf_cancelled); + } + } + + if( !saf_dialog_from_preferences ) { + setWindowFlagsForCamera(); + showPreview(true); + } + break; + case CHOOSE_GHOST_IMAGE_SAF_CODE: + if( resultCode == RESULT_OK && resultData != null ) { + Uri fileUri = resultData.getData(); + if( MyDebug.LOG ) + Log.d(TAG, "returned single fileUri: " + fileUri); + // persist permission just in case? + final int takeFlags = resultData.getFlags() + & (Intent.FLAG_GRANT_READ_URI_PERMISSION); + try { + /*if( true ) + throw new SecurityException(); // test*/ + // Check for the freshest data. 
+ getContentResolver().takePersistableUriPermission(fileUri, takeFlags); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.GhostSelectedImageSAFPreferenceKey, fileUri.toString()); + editor.apply(); + } + catch(SecurityException e) { + MyDebug.logStackTrace(TAG, "SecurityException failed to take permission", e); + preview.showToast(null, R.string.saf_permission_failed_open_image); + // failed - if the user had yet to set a ghost image + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String uri = sharedPreferences.getString(PreferenceKeys.GhostSelectedImageSAFPreferenceKey, ""); + if( uri.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no SAF ghost image was set"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.GhostImagePreferenceKey, "preference_ghost_image_off"); + editor.apply(); + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "SAF dialog cancelled"); + // cancelled - if the user had yet to set a ghost image, make sure we switch the option back off + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String uri = sharedPreferences.getString(PreferenceKeys.GhostSelectedImageSAFPreferenceKey, ""); + if( uri.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no SAF ghost image was set"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.GhostImagePreferenceKey, "preference_ghost_image_off"); + editor.apply(); + } + } + + if( !saf_dialog_from_preferences ) { + setWindowFlagsForCamera(); + showPreview(true); + } + break; + case CHOOSE_LOAD_SETTINGS_SAF_CODE: + if( resultCode == RESULT_OK && resultData != null ) { + Uri fileUri = resultData.getData(); + if( MyDebug.LOG ) + Log.d(TAG, "returned single fileUri: " + fileUri); + // persist permission 
just in case? + final int takeFlags = resultData.getFlags() + & (Intent.FLAG_GRANT_READ_URI_PERMISSION); + try { + /*if( true ) + throw new SecurityException(); // test*/ + // Check for the freshest data. + getContentResolver().takePersistableUriPermission(fileUri, takeFlags); + + settingsManager.loadSettings(fileUri); + } + catch(SecurityException e) { + MyDebug.logStackTrace(TAG, "SecurityException failed to take permission", e); + preview.showToast(null, R.string.restore_settings_failed); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "SAF dialog cancelled"); + } + + if( !saf_dialog_from_preferences ) { + setWindowFlagsForCamera(); + showPreview(true); + } + break; + } + } + + /** Update the save folder (for non-SAF methods). + */ + void updateSaveFolder(String new_save_location) { + if( MyDebug.LOG ) + Log.d(TAG, "updateSaveFolder: " + new_save_location); + if( new_save_location != null ) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String orig_save_location = this.applicationInterface.getStorageUtils().getSaveLocation(); + + if( !orig_save_location.equals(new_save_location) ) { + if( MyDebug.LOG ) + Log.d(TAG, "changed save_folder to: " + this.applicationInterface.getStorageUtils().getSaveLocation()); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.SaveLocationPreferenceKey, new_save_location); + editor.apply(); + + this.save_location_history.updateFolderHistory(this.getStorageUtils().getSaveLocation(), true); + String save_folder_name = getHumanReadableSaveFolder(this.applicationInterface.getStorageUtils().getSaveLocation()); + this.preview.showToast(null, getResources().getString(R.string.changed_save_location) + "\n" + save_folder_name); + } + } + } + + public static class MyFolderChooserDialog extends FolderChooserDialog { + @Override + public void onDismiss(DialogInterface dialog) { + if( MyDebug.LOG ) + Log.d(TAG, "FolderChooserDialog dismissed"); + // 
n.b., fragments have to be static (as they might be inserted into a new Activity - see http://stackoverflow.com/questions/15571010/fragment-inner-class-should-be-static),
        // so we access the MainActivity via the fragment's getActivity().
        MainActivity main_activity = (MainActivity)this.getActivity();
        // activity may be null, see https://stackoverflow.com/questions/13116104/best-practice-to-reference-the-parent-activity-of-a-fragment
        // have had Google Play crashes from this
        if( main_activity != null ) {
            main_activity.setWindowFlagsForCamera();
            main_activity.showPreview(true);
            String new_save_location = this.getChosenFolder();
            main_activity.updateSaveFolder(new_save_location);
        }
        else {
            if( MyDebug.LOG )
                Log.e(TAG, "activity no longer exists!");
        }
        super.onDismiss(dialog);
    }
}

/** Processes a user specified save folder. This should be used with the non-SAF scoped storage
 * method, where the user types a folder directly.
 * Collapses repeated '/' characters, then strips a leading '/' (absolute paths are not
 * allowed with scoped storage) and a trailing '/'.
 * @param folder The folder string as typed by the user.
 * @return The sanitised folder string; may be empty.
 */
public static String processUserSaveLocation(String folder) {
    // filter repeated '/', e.g., replace // with /:
    // (replaceAll can leave new adjacent pairs behind, e.g. "///" -> "//", hence the loop)
    String strip = "//";
    while( !folder.isEmpty() && folder.contains(strip) ) {
        folder = folder.replaceAll(strip, "/");
    }

    if( !folder.isEmpty() && folder.charAt(0) == '/' ) {
        // strip '/' as first character - as absolute paths not allowed with scoped storage
        // whilst we do block entering a '/' as first character in the InputFilter, users could
        // get around this (e.g., put a '/' as second character, then delete the first character)
        folder = folder.substring(1);
    }

    if( !folder.isEmpty() && folder.charAt(folder.length()-1) == '/' ) {
        // strip '/' as last character - MediaStore will ignore it, but seems cleaner to strip it out anyway
        // (we still need to allow '/' as last character in the InputFilter, otherwise users won't be able to type it whilst writing a subfolder)
        folder = folder.substring(0, folder.length()-1);
    }

    return folder;
}

/** Creates a dialog
builder for specifying a save folder dialog (used when not using SAF,
 * and on scoped storage, as an alternative to using FolderChooserDialog).
 */
public AlertDialog.Builder createSaveFolderDialog() {
    final AlertDialog.Builder alertDialog = new AlertDialog.Builder(this);
    alertDialog.setTitle(R.string.preference_save_location);

    final View dialog_view = LayoutInflater.from(this).inflate(R.layout.alertdialog_edittext, null);
    final EditText editText = dialog_view.findViewById(R.id.edit_text);

    // set hint instead of content description for EditText, see https://support.google.com/accessibility/android/answer/6378120
    editText.setHint(getResources().getString(R.string.preference_save_location));
    editText.setInputType(InputType.TYPE_CLASS_TEXT);
    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
    editText.setText(sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera"));
    InputFilter filter = new InputFilter() {
        // whilst Android seems to allow any characters on internal memory, SD cards are typically formatted with FAT32
        final String disallowed = "|\\?*<\":>";
        public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
            // NOTE(review): the source text is garbled/truncated here - it reads
            // "for(int i=start;i= 0 && which < history.size()", which looks like a '<'-stripping
            // corruption (probably originally "for(int i=start;i<end;i++)" followed by a lost span
            // covering the rest of createSaveFolderDialog() and the start of a folder-history
            // dialog's onClick handler, e.g. "if( which >= 0 ...") - TODO recover from upstream source
            for(int i=start;i= 0 && which < history.size() ) {
                String save_folder = history.get(history.size() - 1 - which);
                if( MyDebug.LOG )
                    Log.d(TAG, "changed save_folder from history to: " + save_folder);
                String save_folder_name = getHumanReadableSaveFolder(save_folder);
                preview.showToast(null, getResources().getString(R.string.changed_save_location) + "\n" + save_folder_name);
                SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(MainActivity.this);
                SharedPreferences.Editor editor = sharedPreferences.edit();
                // the history dialog serves both SAF and non-SAF modes - write to whichever
                // preference is currently in effect
                if( applicationInterface.getStorageUtils().isUsingSAF() )
                    editor.putString(PreferenceKeys.SaveLocationSAFPreferenceKey, save_folder);
                else
                    editor.putString(PreferenceKeys.SaveLocationPreferenceKey, save_folder);
                editor.apply();
                history.updateFolderHistory(save_folder, true); // to move new selection to most recent
            }
            setWindowFlagsForCamera();
            showPreview(true);
        }

        dialog.dismiss(); // need to explicitly dismiss for setSingleChoiceItems
    }
});
alertDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
    @Override
    public void onCancel(DialogInterface arg0) {
        // treat cancel like dismissal: restore the camera UI
        setWindowFlagsForCamera();
        showPreview(true);
    }
});
//getWindow().setLayout(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
setWindowFlagsForSettings();
showAlert(alertDialog.create());
}

/** Clears the non-SAF folder history.
 */
public void clearFolderHistory() {
    if( MyDebug.LOG )
        Log.d(TAG, "clearFolderHistory");
    // keep the current save location as the sole remaining entry
    save_location_history.clearFolderHistory(getStorageUtils().getSaveLocation());
}

/** Clears the SAF folder history.
 */
public void clearFolderHistorySAF() {
    if( MyDebug.LOG )
        Log.d(TAG, "clearFolderHistorySAF");
    // keep the current SAF save location as the sole remaining entry
    save_location_history_saf.clearFolderHistory(getStorageUtils().getSaveLocationSAF());
}

/** Copies a list of strings into the bundle as a String array under the given key.
 *  Does nothing if values is null.
 *  NOTE(review): declared as raw "List values" in this chunk - the generic parameter appears
 *  to have been stripped by the same '<'-eating corruption (likely List<String>) - TODO confirm
 */
static private void putBundleExtra(Bundle bundle, String key, List values) {
    if( values != null ) {
        String [] values_arr = new String[values.size()];
        int i=0;
        for(String value: values) {
            values_arr[i] = value;
            i++;
        }
        bundle.putStringArray(key, values_arr);
    }
}

/** Handler for the share button: shares the last captured image. */
public void clickedShare(View view) {
    if( MyDebug.LOG )
        Log.d(TAG, "clickedShare");
    applicationInterface.shareLastImage();
}

/** Handler for the trash button: deletes the last captured image. */
public void clickedTrash(View view) {
    if( MyDebug.LOG )
        Log.d(TAG, "clickedTrash");
    applicationInterface.trashLastImage();
}

/** User has pressed the take picture button, or done an equivalent action to request this (e.g.,
 * volume buttons, audio trigger).
 * @param photo_snapshot If true, then the user has requested taking a photo whilst video
 *                       recording.
If false, either take a photo or start/stop video depending
 *                       on the current mode.
 */
public void takePicture(boolean photo_snapshot) {
    if( MyDebug.LOG )
        Log.d(TAG, "takePicture");

    // panorama mode needs special handling: the button both starts and finishes a panorama
    if( applicationInterface.getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama ) {
        if( preview.isTakingPhoto() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "ignore whilst taking panorama photo");
        }
        else if( applicationInterface.getGyroSensor().isRecording() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "panorama complete");
            applicationInterface.finishPanorama();
            return; // only this branch returns; all other panorama branches fall through to takePicturePressed()
        }
        else if( !applicationInterface.canTakeNewPhoto() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "can't start new panoroma, still saving in background"); // NOTE(review): "panoroma" typo in log text (runtime string, left unchanged here)
            // we need to test here, otherwise the Preview won't take a new photo - but we'll think we've
            // started the panorama!
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "start panorama");
            applicationInterface.startPanorama();
        }
    }

    this.takePicturePressed(photo_snapshot, false);
}

/** Returns whether the last photo operation was a continuous fast burst.
 */
boolean lastContinuousFastBurst() {
    return this.last_continuous_fast_burst;
}

/** Forwards a take-picture request to the Preview, closing any open popup first.
 * @param photo_snapshot If true, then the user has requested taking a photo whilst video
 *                       recording. If false, either take a photo or start/stop video depending
 *                       on the current mode.
 * @param continuous_fast_burst If true, then start a continuous fast burst.
 */
void takePicturePressed(boolean photo_snapshot, boolean continuous_fast_burst) {
    if( MyDebug.LOG )
        Log.d(TAG, "takePicturePressed");

    closePopup();

    // remembered so lastContinuousFastBurst() can report the type of the last operation
    this.last_continuous_fast_burst = continuous_fast_burst;
    this.preview.takePicturePressed(photo_snapshot, continuous_fast_burst);
}

/** Lock the screen - this is Open Camera's own lock to guard against accidental presses,
 * not the standard Android lock.
 */
void lockScreen() {
    // intercept all touches on the full-screen "locker" view, routing them to the gesture
    // detector (which only reacts to a long swipe, to unlock)
    findViewById(R.id.locker).setOnTouchListener(new View.OnTouchListener() {
        @SuppressLint("ClickableViewAccessibility") @Override
        public boolean onTouch(View arg0, MotionEvent event) {
            return gestureDetector.onTouchEvent(event);
            //return true;
        }
    });
    screen_is_locked = true;
    this.enableScreenLockOnBackPressedCallback(true); // also disable back button
}

/** Unlock the screen (see lockScreen()).
 */
void unlockScreen() {
    findViewById(R.id.locker).setOnTouchListener(null);
    screen_is_locked = false;
    this.enableScreenLockOnBackPressedCallback(false); // reenable back button
}

/** Whether the screen is locked (see lockScreen()).
 */
public boolean isScreenLocked() {
    return screen_is_locked;
}

/** Listen for gestures.
 * Doing a swipe will unlock the screen (see lockScreen()).
 */
private class MyGestureDetector extends SimpleOnGestureListener {
    @Override
    public boolean onFling(MotionEvent e1, @NonNull MotionEvent e2, float velocityX, float velocityY) {
        try {
            if( MyDebug.LOG )
                Log.d(TAG, "from " + e1.getX() + " , " + e1.getY() + " to " + e2.getX() + " , " + e2.getY());
            final ViewConfiguration vc = ViewConfiguration.get(MainActivity.this);
            //final int swipeMinDistance = 4*vc.getScaledPagingTouchSlop();
            final float scale = getResources().getDisplayMetrics().density;
            final int swipeMinDistance = (int) (160 * scale + 0.5f); // convert dps to pixels
            final int swipeThresholdVelocity = vc.getScaledMinimumFlingVelocity();
            if( MyDebug.LOG ) {
                // NOTE(review): the from/to log below duplicates the one at the top of this method
                Log.d(TAG, "from " + e1.getX() + " , " + e1.getY() + " to " + e2.getX() + " , " + e2.getY());
                Log.d(TAG, "swipeMinDistance: " + swipeMinDistance);
            }
            float xdist = e1.getX() - e2.getX();
            float ydist = e1.getY() - e2.getY();
            float dist2 = xdist*xdist + ydist*ydist;
            float vel2 = velocityX*velocityX + velocityY*velocityY;
            // compare squared magnitudes to avoid a sqrt; direction of the swipe doesn't matter
            if( dist2 > swipeMinDistance*swipeMinDistance && vel2 > swipeThresholdVelocity*swipeThresholdVelocity ) {
                preview.showToast(screen_locked_toast, R.string.unlocked);
                unlockScreen();
            }
        }
        catch(Exception e) {
            MyDebug.logStackTrace(TAG, "onFling failed", e);
        }
        return false;
    }

    @Override
    public boolean onDown(@NonNull MotionEvent e) {
        // remind the user the screen is locked on any touch-down
        preview.showToast(screen_locked_toast, R.string.screen_is_locked);
        return true;
    }
}

@Override
protected void onSaveInstanceState(@NonNull Bundle state) {
    if( MyDebug.LOG )
        Log.d(TAG, "onSaveInstanceState");
    super.onSaveInstanceState(state);
    // delegate state saving to the preview and application interface (null-checked as this
    // may be called before they are created)
    if( this.preview != null ) {
        preview.onSaveInstanceState(state);
    }
    if( this.applicationInterface != null ) {
        applicationInterface.onSaveInstanceState(state);
    }
}

/** Whether the exposure button/panel should be offered in the current state. */
public boolean supportsExposureButton() {
    if( preview.isVideoHighSpeed() ) {
        // manual ISO/exposure not supported for high speed video mode
        // it's safer not to allow opening the panel at all (otherwise the user could open it, and switch to manual)
        return false;
    }
    if( applicationInterface.isCameraExtensionPref() ) {
        // nothing in this UI (exposure compensation, manual ISO/exposure, manual white balance) is supported for camera extensions
        return false;
    }
    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
    String iso_value = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT);
    boolean manual_iso = !iso_value.equals(CameraController.ISO_DEFAULT);
    return preview.supportsExposures() || (manual_iso && preview.supportsISORange() );
}

// Sets up UI state that depends on the opened camera (zoom/ISO/exposure sliders, 4K options,
// no-limits layout). NOTE: this method continues beyond this chunk.
void cameraSetup() {
    long debug_time = 0;
    if( MyDebug.LOG ) {
        Log.d(TAG, "cameraSetup");
        debug_time = System.currentTimeMillis();
    }
    if( preview.getCameraController() == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "camera controller is null");
        return;
    }

    boolean old_want_no_limits = want_no_limits;
    this.want_no_limits = false;
    if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && isInMultiWindowMode() ) {
        if( MyDebug.LOG )
            Log.d(TAG,
"multi-window mode"); + // don't support want_no_limits mode in multi-window mode - extra complexity that the + // preview size could change from simply resizing the window; also problem that the + // navigation_gap, and whether we'd want want_no_limits, can both change depending on + // device orientation (because application can e.g. be in landscape mode even if device + // has switched to portrait) + } + else if( set_window_insets_listener && !edge_to_edge_mode ) { + Point display_size = new Point(); + applicationInterface.getDisplaySize(display_size, true); + int display_width = Math.max(display_size.x, display_size.y); + int display_height = Math.min(display_size.x, display_size.y); + double display_aspect_ratio = ((double)display_width)/(double)display_height; + double preview_aspect_ratio = preview.getCurrentPreviewAspectRatio(); + if( MyDebug.LOG ) { + Log.d(TAG, "display_aspect_ratio: " + display_aspect_ratio); + Log.d(TAG, "preview_aspect_ratio: " + preview_aspect_ratio); + } + boolean preview_is_wide = preview_aspect_ratio > display_aspect_ratio + 1.0e-5f; + if( test_preview_want_no_limits ) { + preview_is_wide = test_preview_want_no_limits_value; + } + if( preview_is_wide ) { + if( MyDebug.LOG ) + Log.d(TAG, "preview is wide, set want_no_limits"); + this.want_no_limits = true; + + if( !old_want_no_limits ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to change to FLAG_LAYOUT_NO_LIMITS"); + // Ideally we'd just go straight to FLAG_LAYOUT_NO_LIMITS mode, but then all calls to onApplyWindowInsets() + // end up returning a value of 0 for the navigation_gap! So we need to wait until we know the navigation_gap. 
+ if( navigation_gap != 0 ) { + // already have navigation gap, can go straight into no limits mode + if( MyDebug.LOG ) + Log.d(TAG, "set FLAG_LAYOUT_NO_LIMITS"); + showUnderNavigation(true); + // need to layout the UI again due to now taking the navigation gap into account + if( MyDebug.LOG ) + Log.d(TAG, "layout UI due to changing want_no_limits behaviour"); + mainUI.layoutUI(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "but navigation_gap is 0"); + } + } + } + else if( old_want_no_limits && navigation_gap != 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "clear FLAG_LAYOUT_NO_LIMITS"); + showUnderNavigation(false); + // need to layout the UI again due to no longer taking the navigation gap into account + if( MyDebug.LOG ) + Log.d(TAG, "layout UI due to changing want_no_limits behaviour"); + mainUI.layoutUI(); + } + } + + if( this.supportsForceVideo4K() && preview.usingCamera2API() ) { + if( MyDebug.LOG ) + Log.d(TAG, "using Camera2 API, so can disable the force 4K option"); + this.disableForceVideo4K(); + } + if( this.supportsForceVideo4K() && preview.getVideoQualityHander().getSupportedVideoSizes() != null ) { + for(CameraController.Size size : preview.getVideoQualityHander().getSupportedVideoSizes()) { + if( size.width >= 3840 && size.height >= 2160 ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera natively supports 4K, so can disable the force option"); + this.disableForceVideo4K(); + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "cameraSetup: time after handling Force 4K option: " + (System.currentTimeMillis() - debug_time)); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + { + if( MyDebug.LOG ) + Log.d(TAG, "set up zoom"); + if( MyDebug.LOG ) + Log.d(TAG, "has_zoom? 
" + preview.supportsZoom()); + SeekBar zoomSeekBar = findViewById(R.id.zoom_seekbar); + + if( preview.supportsZoom() ) { + zoomSeekBar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + zoomSeekBar.setMax(preview.getMaxZoom()); + zoomSeekBar.setProgress(preview.getMaxZoom()-preview.getCameraController().getZoom()); + zoomSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { + private long last_haptic_time; + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if( MyDebug.LOG ) + Log.d(TAG, "zoom onProgressChanged: " + progress); + // note we zoom even if !fromUser, as various other UI controls (multitouch, volume key zoom) + // indirectly set zoom via this method, from setting the zoom slider + // if hasSmoothZoom()==true, then the preview already handled zooming to the current value + if( !preview.hasSmoothZoom() ) { + int new_zoom_factor = preview.getMaxZoom() - progress; + if( fromUser && preview.getCameraController() != null ) { + float old_zoom_ratio = preview.getZoomRatio(); + float new_zoom_ratio = preview.getZoomRatio(new_zoom_factor); + if( new_zoom_ratio != old_zoom_ratio ) { + last_haptic_time = performHapticFeedback(seekBar, last_haptic_time); + } + } + preview.zoomTo(new_zoom_factor, false, true); + } + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + } + }); + + if( sharedPreferences.getBoolean(PreferenceKeys.ShowZoomSliderControlsPreferenceKey, true) ) { + if( !mainUI.inImmersiveMode() ) { + zoomSeekBar.setVisibility(View.VISIBLE); + } + } + else { + zoomSeekBar.setVisibility(View.INVISIBLE); // should be INVISIBLE not GONE, as the focus_seekbar is aligned to be left to this; in future we might want this similarly for exposure panel + } + } + else { + zoomSeekBar.setVisibility(View.INVISIBLE); 
// should be INVISIBLE not GONE, as the focus_seekbar is aligned to be left to this; in future we might want this similarly for the exposure panel + } + if( MyDebug.LOG ) + Log.d(TAG, "cameraSetup: time after setting up zoom: " + (System.currentTimeMillis() - debug_time)); + + View takePhotoButton = findViewById(R.id.take_photo); + if( sharedPreferences.getBoolean(PreferenceKeys.ShowTakePhotoPreferenceKey, true) ) { + if( !mainUI.inImmersiveMode() ) { + takePhotoButton.setVisibility(View.VISIBLE); + } + } + else { + takePhotoButton.setVisibility(View.INVISIBLE); + } + } + { + if( MyDebug.LOG ) + Log.d(TAG, "set up manual focus"); + setManualFocusSeekbar(false); + setManualFocusSeekbar(true); + } + if( MyDebug.LOG ) + Log.d(TAG, "cameraSetup: time after setting up manual focus: " + (System.currentTimeMillis() - debug_time)); + { + if( preview.supportsISORange()) { + if( MyDebug.LOG ) + Log.d(TAG, "set up iso"); + final SeekBar iso_seek_bar = findViewById(R.id.iso_seekbar); + iso_seek_bar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + //setProgressSeekbarExponential(iso_seek_bar, preview.getMinimumISO(), preview.getMaximumISO(), preview.getCameraController().getISO()); + manualSeekbars.setProgressSeekbarISO(iso_seek_bar, preview.getMinimumISO(), preview.getMaximumISO(), preview.getCameraController().getISO()); + iso_seek_bar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { + private long last_haptic_time; + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if( MyDebug.LOG ) + Log.d(TAG, "iso seekbar onProgressChanged: " + progress); + /*double frac = progress/(double)iso_seek_bar.getMax(); + if( MyDebug.LOG ) + Log.d(TAG, "exposure_time frac: " + frac); + double scaling = MainActivity.seekbarScaling(frac); + if( MyDebug.LOG ) + Log.d(TAG, "exposure_time scaling: " + scaling); + int min_iso = 
preview.getMinimumISO(); + int max_iso = preview.getMaximumISO(); + int iso = min_iso + (int)(scaling * (max_iso - min_iso));*/ + /*int min_iso = preview.getMinimumISO(); + int max_iso = preview.getMaximumISO(); + int iso = (int)exponentialScaling(frac, min_iso, max_iso);*/ + // n.b., important to update even if fromUser==false (e.g., so this works when user changes ISO via clicking + // the ISO buttons rather than moving the slider directly, see MainUI.setupExposureUI()) + preview.setISO( manualSeekbars.getISO(progress) ); + mainUI.updateSelectedISOButton(); + if( fromUser ) { + last_haptic_time = performHapticFeedback(seekBar, last_haptic_time); + } + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + } + }); + if( preview.supportsExposureTime() ) { + if( MyDebug.LOG ) + Log.d(TAG, "set up exposure time"); + final SeekBar exposure_time_seek_bar = findViewById(R.id.exposure_time_seekbar); + exposure_time_seek_bar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + //setProgressSeekbarExponential(exposure_time_seek_bar, preview.getMinimumExposureTime(), preview.getMaximumExposureTime(), preview.getCameraController().getExposureTime()); + manualSeekbars.setProgressSeekbarShutterSpeed(exposure_time_seek_bar, preview.getMinimumExposureTime(), preview.getMaximumExposureTime(), preview.getCameraController().getExposureTime()); + exposure_time_seek_bar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { + private long last_haptic_time; + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if( MyDebug.LOG ) + Log.d(TAG, "exposure_time seekbar onProgressChanged: " + progress); + /*double frac = progress/(double)exposure_time_seek_bar.getMax(); + if( MyDebug.LOG ) + Log.d(TAG, "exposure_time frac: " + frac); + long 
min_exposure_time = preview.getMinimumExposureTime(); + long max_exposure_time = preview.getMaximumExposureTime(); + long exposure_time = exponentialScaling(frac, min_exposure_time, max_exposure_time);*/ + preview.setExposureTime( manualSeekbars.getExposureTime(progress) ); + if( fromUser ) { + last_haptic_time = performHapticFeedback(seekBar, last_haptic_time); + } + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + } + }); + } + } + } + setManualWBSeekbar(); + if( MyDebug.LOG ) + Log.d(TAG, "cameraSetup: time after setting up iso: " + (System.currentTimeMillis() - debug_time)); + { + exposure_seekbar_values = null; + if( preview.supportsExposures() ) { + if( MyDebug.LOG ) + Log.d(TAG, "set up exposure compensation"); + final int min_exposure = preview.getMinimumExposure(); + SeekBar exposure_seek_bar = findViewById(R.id.exposure_seekbar); + exposure_seek_bar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + + final int exposure_seekbar_n_repeated_zero = 3; // how many times to repeat 0 for R.id.exposure_seekbar, so that it "sticks" to zero when changing seekbar + + //exposure_seek_bar.setMax( preview.getMaximumExposure() - min_exposure + exposure_seekbar_n_repeated_zero-1 ); + //exposure_seek_bar.setProgress( preview.getCurrentExposure() - min_exposure ); + + exposure_seekbar_values = new ArrayList<>(); + int current_exposure = preview.getCurrentExposure(); + int current_progress = 0; + for(int i=min_exposure;i<=preview.getMaximumExposure();i++) { + exposure_seekbar_values.add(i); + if( i == 0 ) { + exposure_seekbar_values_zero = exposure_seekbar_values.size()-1; + exposure_seekbar_values_zero += (exposure_seekbar_n_repeated_zero-1)/2; // centre within the region of zeroes + for(int j=0;j last_haptic_time + 16 ) { + last_haptic_time = time_ms; + // SEGMENT_TICK 
or SEGMENT_TICK doesn't work on Galaxy S24+ at least, even though on Android 14! + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + seekBar.performHapticFeedback(HapticFeedbackConstants.SEGMENT_FREQUENT_TICK); + } + else*/ { + seekBar.performHapticFeedback(HapticFeedbackConstants.CLOCK_TICK); + } + } + } + return last_haptic_time; + } + + public void setManualFocusSeekbarProgress(final boolean is_target_distance, float focus_distance) { + final SeekBar focusSeekBar = findViewById(is_target_distance ? R.id.focus_bracketing_target_seekbar : R.id.focus_seekbar); + ManualSeekbars.setProgressSeekbarScaled(focusSeekBar, 0.0, preview.getMinimumFocusDistance(), focus_distance); + } + + private void setManualFocusSeekbar(final boolean is_target_distance) { + if( MyDebug.LOG ) + Log.d(TAG, "setManualFocusSeekbar"); + final SeekBar focusSeekBar = findViewById(is_target_distance ? R.id.focus_bracketing_target_seekbar : R.id.focus_seekbar); + focusSeekBar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + setManualFocusSeekbarProgress(is_target_distance, is_target_distance ? 
preview.getCameraController().getFocusBracketingTargetDistance() : preview.getCameraController().getFocusDistance()); + focusSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { + private boolean has_saved_zoom; + private int saved_zoom_factor; + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if( !is_target_distance && applicationInterface.isFocusBracketingSourceAutoPref() ) { + // source is set from continuous focus, not by changing the seekbar + if( fromUser ) { + // but if user has manually changed, then exit auto mode + applicationInterface.setFocusBracketingSourceAutoPref(false); + mainUI.destroyPopup(); // need to recreate popup + } + else { + return; + } + } + double frac = progress/(double)focusSeekBar.getMax(); + double scaling = ManualSeekbars.seekbarScaling(frac); + float focus_distance = (float)(scaling * preview.getMinimumFocusDistance()); + preview.setFocusDistance(focus_distance, is_target_distance, true); + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + if( MyDebug.LOG ) + Log.d(TAG, "manual focus seekbar: onStartTrackingTouch"); + has_saved_zoom = false; + if( preview.supportsZoom() ) { + int focus_assist = applicationInterface.getFocusAssistPref(); + if( focus_assist > 0 && preview.getCameraController() != null ) { + has_saved_zoom = true; + saved_zoom_factor = preview.getCameraController().getZoom(); + if( MyDebug.LOG ) + Log.d(TAG, "zoom by " + focus_assist + " for focus assist, zoom factor was: " + saved_zoom_factor); + int new_zoom_factor = preview.getScaledZoomFactor(focus_assist); + preview.getCameraController().setZoom(new_zoom_factor); + } + } + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + if( MyDebug.LOG ) + Log.d(TAG, "manual focus seekbar: onStopTrackingTouch"); + if( has_saved_zoom && preview.getCameraController() != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "unzoom for focus assist, zoom factor was: " + 
saved_zoom_factor); + preview.getCameraController().setZoom(saved_zoom_factor); + } + preview.stoppedSettingFocusDistance(is_target_distance); + } + }); + setManualFocusSeekBarVisibility(is_target_distance); + } + + public boolean showManualFocusSeekbar(final boolean is_target_distance) { + if( (applicationInterface.getPhotoMode() == MyApplicationInterface.PhotoMode.FocusBracketing) && !preview.isVideo() ) { + return true; // both seekbars shown in focus bracketing mode + } + if( is_target_distance ) { + return false; // target seekbar only shown in focus bracketing mode + } + boolean is_visible = preview.getCurrentFocusValue() != null && this.getPreview().getCurrentFocusValue().equals("focus_mode_manual2"); + return is_visible; + } + + void setManualFocusSeekBarVisibility(final boolean is_target_distance) { + boolean is_visible = showManualFocusSeekbar(is_target_distance); + SeekBar focusSeekBar = findViewById(is_target_distance ? R.id.focus_bracketing_target_seekbar : R.id.focus_seekbar); + final int visibility = is_visible ? 
View.VISIBLE : View.GONE; + focusSeekBar.setVisibility(visibility); + if( is_visible ) { + applicationInterface.getDrawPreview().updateSettings(); // needed so that we reset focus_seekbars_margin_left, as the focus seekbars can only be updated when visible + } + } + + public void setManualWBSeekbar() { + if( MyDebug.LOG ) + Log.d(TAG, "setManualWBSeekbar"); + if( preview.getSupportedWhiteBalances() != null && preview.supportsWhiteBalanceTemperature() ) { + if( MyDebug.LOG ) + Log.d(TAG, "set up manual white balance"); + SeekBar white_balance_seek_bar = findViewById(R.id.white_balance_seekbar); + white_balance_seek_bar.setOnSeekBarChangeListener(null); // clear an existing listener - don't want to call the listener when setting up the progress bar to match the existing state + final int minimum_temperature = preview.getMinimumWhiteBalanceTemperature(); + final int maximum_temperature = preview.getMaximumWhiteBalanceTemperature(); + /* + // white balance should use linear scaling + white_balance_seek_bar.setMax(maximum_temperature - minimum_temperature); + white_balance_seek_bar.setProgress(preview.getCameraController().getWhiteBalanceTemperature() - minimum_temperature); + */ + manualSeekbars.setProgressSeekbarWhiteBalance(white_balance_seek_bar, minimum_temperature, maximum_temperature, preview.getCameraController().getWhiteBalanceTemperature()); + white_balance_seek_bar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { + private long last_haptic_time; + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if( MyDebug.LOG ) + Log.d(TAG, "white balance seekbar onProgressChanged: " + progress); + //int temperature = minimum_temperature + progress; + //preview.setWhiteBalanceTemperature(temperature); + preview.setWhiteBalanceTemperature( manualSeekbars.getWhiteBalanceTemperature(progress) ); + if( fromUser ) { + last_haptic_time = performHapticFeedback(seekBar, last_haptic_time); + } + } + + @Override + public void 
onStartTrackingTouch(SeekBar seekBar) { + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + } + }); + } + } + + public boolean supportsAutoStabilise() { + if( applicationInterface.isRawOnly() ) + return false; // if not saving JPEGs, no point having auto-stabilise mode, as it won't affect the RAW images + if( applicationInterface.getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama ) + return false; // not supported in panorama mode + return this.supports_auto_stabilise; + } + + /** Returns whether the device supports auto-level at all. Most callers probably want to use + * supportsAutoStabilise() which also checks whether auto-level is allowed with current options. + */ + public boolean deviceSupportsAutoStabilise() { + return this.supports_auto_stabilise; + } + + public boolean supportsDRO() { + if( applicationInterface.isRawOnly(MyApplicationInterface.PhotoMode.DRO) ) + return false; // if not saving JPEGs, no point having DRO mode, as it won't affect the RAW images + return true; + } + + public boolean supportsHDR() { + // we also require the device have sufficient memory to do the processing + return large_heap_memory >= 128 && preview.supportsExpoBracketing(); + } + + public boolean supportsExpoBracketing() { + if( applicationInterface.isImageCaptureIntent() ) + return false; // don't support expo bracketing mode if called from image capture intent + return preview.supportsExpoBracketing(); + } + + public boolean supportsFocusBracketing() { + if( applicationInterface.isImageCaptureIntent() ) + return false; // don't support focus bracketing mode if called from image capture intent + return preview.supportsFocusBracketing(); + } + + /** Whether we support the auto mode for setting source focus distance for focus bracketing mode. + * Note the caller should still separately call supportsFocusBracketing() to see if focus + * bracketing is supported in the first place. 
+ */ + public boolean supportsFocusBracketingSourceAuto() { + return preview.supportsFocus() && preview.getSupportedFocusValues().contains("focus_mode_continuous_picture"); + } + + public boolean supportsPanorama() { + // don't support panorama mode if called from image capture intent + // in theory this works, but problem that currently we'd end up doing the processing on the UI thread, so risk ANR + if( applicationInterface.isImageCaptureIntent() ) + return false; + // require 256MB just to be safe, due to the large number of images that may be created + // remember to update the FAQ "Why isn't Panorama supported on my device?" if this changes + return large_heap_memory >= 256 && applicationInterface.getGyroSensor().hasSensors(); + //return false; // currently blocked for release + } + + public boolean supportsFastBurst() { + if( applicationInterface.isImageCaptureIntent() ) + return false; // don't support burst mode if called from image capture intent + // require 512MB just to be safe, due to the large number of images that may be created + return( preview.usingCamera2API() && large_heap_memory >= 512 && preview.supportsBurst() ); + } + + public boolean supportsNoiseReduction() { + // we require Android 7 to limit to more modern devices (for performance reasons) + return( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && preview.usingCamera2API() && large_heap_memory >= 512 && preview.supportsBurst() && preview.supportsExposureTime() ); + //return false; // currently blocked for release + } + + /** Whether the Camera vendor extension is supported (see + * https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics ). + */ + public boolean supportsCameraExtension(int extension) { + return preview.supportsCameraExtension(extension); + } + + /** Whether RAW mode would be supported for various burst modes (expo bracketing etc). + * Note that caller should still separately check preview.supportsRaw() if required. 
+ */ + public boolean supportsBurstRaw() { + return( large_heap_memory >= 512 ); + } + + public boolean supportsOptimiseFocusLatency() { + // whether to support optimising focus for latency + // in theory this works on any device, as well as old or Camera2 API, but restricting this for now to avoid risk of poor default behaviour + // on older devices + return( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && preview.usingCamera2API() ); + } + + public boolean supportsPreviewBitmaps() { + // In practice we only use TextureView on Android 5+ (with Camera2 API enabled) anyway, but have put an explicit check here - + return preview.getView() instanceof TextureView && large_heap_memory >= 128; + } + + public boolean supportsPreShots() { + // Need at least Android 5+ for TextureView + // Need at least Android 8+ for video encoding classes + return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && preview.getView() instanceof TextureView && large_heap_memory >= 512; + } + + private int maxExpoBracketingNImages() { + return preview.maxExpoBracketingNImages(); + } + + public boolean supportsForceVideo4K() { + return this.supports_force_video_4k; + } + + public boolean supportsCamera2() { + return this.supports_camera2; + } + + private void disableForceVideo4K() { + this.supports_force_video_4k = false; + } + + public Preview getPreview() { + return this.preview; + } + + public boolean isCameraInBackground() { + return this.camera_in_background; + } + + public boolean isAppPaused() { + return this.app_is_paused; + } + + public BluetoothRemoteControl getBluetoothRemoteControl() { + return bluetoothRemoteControl; + } + + public PermissionHandler getPermissionHandler() { + return permissionHandler; + } + + public SettingsManager getSettingsManager() { + return settingsManager; + } + + public MainUI getMainUI() { + return this.mainUI; + } + + public ManualSeekbars getManualSeekbars() { + return this.manualSeekbars; + } + + public MyApplicationInterface getApplicationInterface() 
{ + return this.applicationInterface; + } + + public TextFormatter getTextFormatter() { + return this.textFormatter; + } + + SoundPoolManager getSoundPoolManager() { + return this.soundPoolManager; + } + + public LocationSupplier getLocationSupplier() { + return this.applicationInterface.getLocationSupplier(); + } + + public StorageUtils getStorageUtils() { + return this.applicationInterface.getStorageUtils(); + } + + public File getImageFolder() { + return this.applicationInterface.getStorageUtils().getImageFolder(); + } + + public ToastBoxer getChangedAutoStabiliseToastBoxer() { + return changed_auto_stabilise_toast; + } + + private String getPhotoModeString(MyApplicationInterface.PhotoMode photo_mode, boolean string_for_std) { + String photo_mode_string = null; + switch( photo_mode ) { + case Standard: + if( string_for_std ) + photo_mode_string = getResources().getString(R.string.photo_mode_standard_full); + break; + case DRO: + photo_mode_string = getResources().getString(R.string.photo_mode_dro); + break; + case HDR: + photo_mode_string = getResources().getString(R.string.photo_mode_hdr); + break; + case ExpoBracketing: + photo_mode_string = getResources().getString(R.string.photo_mode_expo_bracketing_full); + break; + case FocusBracketing: { + photo_mode_string = getResources().getString(R.string.photo_mode_focus_bracketing_full); + int n_images = applicationInterface.getFocusBracketingNImagesPref(); + photo_mode_string += " (" + n_images + ")"; + break; + } + case FastBurst: { + photo_mode_string = getResources().getString(R.string.photo_mode_fast_burst_full); + int n_images = applicationInterface.getBurstNImages(); + photo_mode_string += " (" + n_images + ")"; + break; + } + case NoiseReduction: + photo_mode_string = getResources().getString(R.string.photo_mode_noise_reduction_full); + break; + case Panorama: + photo_mode_string = getResources().getString(R.string.photo_mode_panorama_full); + break; + case X_Auto: + photo_mode_string = 
getResources().getString(R.string.photo_mode_x_auto_full); + break; + case X_HDR: + photo_mode_string = getResources().getString(R.string.photo_mode_x_hdr_full); + break; + case X_Night: + photo_mode_string = getResources().getString(R.string.photo_mode_x_night_full); + break; + case X_Bokeh: + photo_mode_string = getResources().getString(R.string.photo_mode_x_bokeh_full); + break; + case X_Beauty: + photo_mode_string = getResources().getString(R.string.photo_mode_x_beauty_full); + break; + } + return photo_mode_string; + } + + /** Displays a toast with information about the current preferences. + * If always_show is true, the toast is always displayed; otherwise, we only display + * a toast if it's important to notify the user (i.e., unusual non-default settings are + * set). We want a balance between not pestering the user too much, whilst also reminding + * them if certain settings are on. + */ + private void showPhotoVideoToast(boolean always_show) { + if( MyDebug.LOG ) { + Log.d(TAG, "showPhotoVideoToast"); + Log.d(TAG, "always_show? 
" + always_show); + } + CameraController camera_controller = preview.getCameraController(); + if( camera_controller == null || this.camera_in_background ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not open or in background"); + return; + } + String toast_string; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + boolean simple = true; + boolean video_high_speed = preview.isVideoHighSpeed(); + MyApplicationInterface.PhotoMode photo_mode = applicationInterface.getPhotoMode(); + if( preview.isVideo() ) { + VideoProfile profile = preview.getVideoProfile(); + + String extension_string = profile.fileExtension; + if( !profile.fileExtension.equals("mp4") ) { + simple = false; + } + + String bitrate_string; + if( profile.videoBitRate >= 10000000 ) + bitrate_string = profile.videoBitRate/1000000 + "Mbps"; + else if( profile.videoBitRate >= 10000 ) + bitrate_string = profile.videoBitRate/1000 + "Kbps"; + else + bitrate_string = profile.videoBitRate + "bps"; + String bitrate_value = applicationInterface.getVideoBitratePref(); + if( !bitrate_value.equals("default") ) { + simple = false; + } + + double capture_rate = profile.videoCaptureRate; + String capture_rate_string = (capture_rate < 9.5f) ? new DecimalFormat("#0.###").format(capture_rate) : String.valueOf((int) (profile.videoCaptureRate + 0.5)); + toast_string = getResources().getString(R.string.video) + ": " + profile.videoFrameWidth + "x" + profile.videoFrameHeight + "\n" + + capture_rate_string + getResources().getString(R.string.fps) + (video_high_speed ? 
" [" + getResources().getString(R.string.high_speed) + "]" : "") + ", " + bitrate_string + " (" + extension_string + ")"; + + String fps_value = applicationInterface.getVideoFPSPref(); + if( !fps_value.equals("default") || video_high_speed ) { + simple = false; + } + + float capture_rate_factor = applicationInterface.getVideoCaptureRateFactor(); + if( Math.abs(capture_rate_factor - 1.0f) > 1.0e-5 ) { + toast_string += "\n" + getResources().getString(R.string.preference_video_capture_rate) + ": " + capture_rate_factor + "x"; + simple = false; + } + + { + CameraController.TonemapProfile tonemap_profile = applicationInterface.getVideoTonemapProfile(); + if( tonemap_profile != CameraController.TonemapProfile.TONEMAPPROFILE_OFF && preview.supportsTonemapCurve() ) { + if( applicationInterface.getVideoTonemapProfile() != CameraController.TonemapProfile.TONEMAPPROFILE_OFF && preview.supportsTonemapCurve() ) { + int string_id = 0; + switch( tonemap_profile ) { + case TONEMAPPROFILE_REC709: + string_id = R.string.preference_video_rec709; + break; + case TONEMAPPROFILE_SRGB: + string_id = R.string.preference_video_srgb; + break; + case TONEMAPPROFILE_LOG: + string_id = R.string.video_log; + break; + case TONEMAPPROFILE_GAMMA: + string_id = R.string.preference_video_gamma; + break; + case TONEMAPPROFILE_JTVIDEO: + string_id = R.string.preference_video_jtvideo; + break; + case TONEMAPPROFILE_JTLOG: + string_id = R.string.preference_video_jtlog; + break; + case TONEMAPPROFILE_JTLOG2: + string_id = R.string.preference_video_jtlog2; + break; + } + if( string_id != 0 ) { + simple = false; + toast_string += "\n" + getResources().getString(string_id); + if( tonemap_profile == CameraController.TonemapProfile.TONEMAPPROFILE_GAMMA ) { + toast_string += " " + applicationInterface.getVideoProfileGamma(); + } + } + else { + Log.e(TAG, "unknown tonemap_profile: " + tonemap_profile); + } + } + } + } + + boolean record_audio = applicationInterface.getRecordAudioPref(); + if( !record_audio ) { 
+ toast_string += "\n" + getResources().getString(R.string.audio_disabled); + simple = false; + } + String max_duration_value = sharedPreferences.getString(PreferenceKeys.VideoMaxDurationPreferenceKey, "0"); + if( !max_duration_value.isEmpty() && !max_duration_value.equals("0") ) { + String [] entries_array = getResources().getStringArray(R.array.preference_video_max_duration_entries); + String [] values_array = getResources().getStringArray(R.array.preference_video_max_duration_values); + int index = Arrays.asList(values_array).indexOf(max_duration_value); + if( index != -1 ) { // just in case! + String entry = entries_array[index]; + toast_string += "\n" + getResources().getString(R.string.max_duration) +": " + entry; + simple = false; + } + } + long max_filesize = applicationInterface.getVideoMaxFileSizeUserPref(); + if( max_filesize != 0 ) { + toast_string += "\n" + getResources().getString(R.string.max_filesize) +": "; + if( max_filesize >= 1024*1024*1024 ) { + long max_filesize_gb = max_filesize/(1024*1024*1024); + toast_string += max_filesize_gb + getResources().getString(R.string.gb_abbreviation); + } + else { + long max_filesize_mb = max_filesize/(1024*1024); + toast_string += max_filesize_mb + getResources().getString(R.string.mb_abbreviation); + } + simple = false; + } + if( applicationInterface.getVideoFlashPref() && preview.supportsFlash() ) { + toast_string += "\n" + getResources().getString(R.string.preference_video_flash); + simple = false; + } + } + else { + if( photo_mode == MyApplicationInterface.PhotoMode.Panorama ) { + // don't show resolution in panorama mode + toast_string = ""; + } + else { + toast_string = getResources().getString(R.string.photo); + CameraController.Size current_size = preview.getCurrentPictureSize(); + toast_string += " " + current_size.width + "x" + current_size.height; + } + + String photo_mode_string = getPhotoModeString(photo_mode, false); + if( photo_mode_string != null ) { + toast_string += (toast_string.isEmpty() ? 
"" : "\n") + getResources().getString(R.string.photo_mode) + ": " + photo_mode_string; + if( photo_mode != MyApplicationInterface.PhotoMode.DRO && photo_mode != MyApplicationInterface.PhotoMode.HDR && photo_mode != MyApplicationInterface.PhotoMode.NoiseReduction ) + simple = false; + } + + if( preview.supportsFocus() && preview.getSupportedFocusValues().size() > 1 && photo_mode != MyApplicationInterface.PhotoMode.FocusBracketing ) { + String focus_value = preview.getCurrentFocusValue(); + if( focus_value != null && !focus_value.equals("focus_mode_auto") && !focus_value.equals("focus_mode_continuous_picture") ) { + String focus_entry = preview.findFocusEntryForValue(focus_value); + if( focus_entry != null ) { + toast_string += "\n" + focus_entry; + } + } + } + + if( applicationInterface.getAutoStabilisePref() ) { + // important as users are sometimes confused at the behaviour if they don't realise the option is on + toast_string += (toast_string.isEmpty() ? "" : "\n") + getResources().getString(R.string.preference_auto_stabilise); + simple = false; + } + } + if( applicationInterface.getFaceDetectionPref() ) { + // important so that the user realises why touching for focus/metering areas won't work - easy to forget that face detection has been turned on! 
+ toast_string += "\n" + getResources().getString(R.string.preference_face_detection); + simple = false; + } + if( !video_high_speed ) { + //manual ISO only supported for high speed video + String iso_value = applicationInterface.getISOPref(); + if( !iso_value.equals(CameraController.ISO_DEFAULT) ) { + toast_string += "\nISO: " + iso_value; + if( preview.supportsExposureTime() ) { + long exposure_time_value = applicationInterface.getExposureTimePref(); + toast_string += " " + preview.getExposureTimeString(exposure_time_value); + } + simple = false; + } + int current_exposure = camera_controller.getExposureCompensation(); + if( current_exposure != 0 ) { + toast_string += "\n" + preview.getExposureCompensationString(current_exposure); + simple = false; + } + } + try { + String scene_mode = camera_controller.getSceneMode(); + String white_balance = camera_controller.getWhiteBalance(); + String color_effect = camera_controller.getColorEffect(); + if( scene_mode != null && !scene_mode.equals(CameraController.SCENE_MODE_DEFAULT) ) { + toast_string += "\n" + getResources().getString(R.string.scene_mode) + ": " + mainUI.getEntryForSceneMode(scene_mode); + simple = false; + } + if( white_balance != null && !white_balance.equals(CameraController.WHITE_BALANCE_DEFAULT) ) { + toast_string += "\n" + getResources().getString(R.string.white_balance) + ": " + mainUI.getEntryForWhiteBalance(white_balance); + if( white_balance.equals("manual") && preview.supportsWhiteBalanceTemperature() ) { + toast_string += " " + camera_controller.getWhiteBalanceTemperature(); + } + simple = false; + } + if( color_effect != null && !color_effect.equals(CameraController.COLOR_EFFECT_DEFAULT) ) { + toast_string += "\n" + getResources().getString(R.string.color_effect) + ": " + mainUI.getEntryForColorEffect(color_effect); + simple = false; + } + } + catch(RuntimeException e) { + // catch runtime error from camera_controller old API from camera.getParameters() + MyDebug.logStackTrace(TAG, "failed to 
get info from camera controller", e); + } + String lock_orientation = applicationInterface.getLockOrientationPref(); + if( !lock_orientation.equals("none") && photo_mode != MyApplicationInterface.PhotoMode.Panorama ) { + // panorama locks to portrait, but don't want to display that in the toast + String [] entries_array = getResources().getStringArray(R.array.preference_lock_orientation_entries); + String [] values_array = getResources().getStringArray(R.array.preference_lock_orientation_values); + int index = Arrays.asList(values_array).indexOf(lock_orientation); + if( index != -1 ) { // just in case! + String entry = entries_array[index]; + toast_string += "\n" + entry; + simple = false; + } + } + String timer = sharedPreferences.getString(PreferenceKeys.TimerPreferenceKey, "0"); + if( !timer.equals("0") && photo_mode != MyApplicationInterface.PhotoMode.Panorama ) { + String [] entries_array = getResources().getStringArray(R.array.preference_timer_entries); + String [] values_array = getResources().getStringArray(R.array.preference_timer_values); + int index = Arrays.asList(values_array).indexOf(timer); + if( index != -1 ) { // just in case! + String entry = entries_array[index]; + toast_string += "\n" + getResources().getString(R.string.preference_timer) + ": " + entry; + simple = false; + } + } + String repeat = applicationInterface.getRepeatPref(); + if( !repeat.equals("1") ) { + String [] entries_array = getResources().getStringArray(R.array.preference_burst_mode_entries); + String [] values_array = getResources().getStringArray(R.array.preference_burst_mode_values); + int index = Arrays.asList(values_array).indexOf(repeat); + if( index != -1 ) { // just in case! 
+ String entry = entries_array[index]; + toast_string += "\n" + getResources().getString(R.string.preference_burst_mode) + ": " + entry; + simple = false; + } + } + /*if( audio_listener != null ) { + toast_string += "\n" + getResources().getString(R.string.preference_audio_noise_control); + }*/ + + if( MyDebug.LOG ) { + Log.d(TAG, "toast_string: " + toast_string); + Log.d(TAG, "simple?: " + simple); + Log.d(TAG, "push_info_toast_text: " + push_info_toast_text); + } + final boolean use_fake_toast = true; + if( !simple || always_show ) { + if( push_info_toast_text != null ) { + toast_string = push_info_toast_text + "\n" + toast_string; + } + preview.showToast(switch_video_toast, toast_string, use_fake_toast); + } + else if( push_info_toast_text != null ) { + preview.showToast(switch_video_toast, push_info_toast_text, use_fake_toast); + } + push_info_toast_text = null; // reset + } + + private void freeAudioListener(boolean wait_until_done) { + if( MyDebug.LOG ) + Log.d(TAG, "freeAudioListener"); + if( audio_listener != null ) { + audio_listener.release(wait_until_done); + audio_listener = null; + } + mainUI.audioControlStopped(); + } + + private void startAudioListener() { + if( MyDebug.LOG ) + Log.d(TAG, "startAudioListener"); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + // we restrict the checks to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener() + if( MyDebug.LOG ) + Log.d(TAG, "check for record audio permission"); + if( ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "record audio permission not available"); + applicationInterface.requestRecordAudioPermission(); + return; + } + } + + MyAudioTriggerListenerCallback callback = new MyAudioTriggerListenerCallback(this); + audio_listener = new AudioListener(callback); + if( audio_listener.status() ) { + preview.showToast(audio_control_toast, 
R.string.audio_listener_started, true); + + audio_listener.start(); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String sensitivity_pref = sharedPreferences.getString(PreferenceKeys.AudioNoiseControlSensitivityPreferenceKey, "0"); + int audio_noise_sensitivity; + switch(sensitivity_pref) { + case "3": + audio_noise_sensitivity = 50; + break; + case "2": + audio_noise_sensitivity = 75; + break; + case "1": + audio_noise_sensitivity = 125; + break; + case "-1": + audio_noise_sensitivity = 150; + break; + case "-2": + audio_noise_sensitivity = 200; + break; + case "-3": + audio_noise_sensitivity = 400; + break; + default: + // default + audio_noise_sensitivity = 100; + break; + } + callback.setAudioNoiseSensitivity(audio_noise_sensitivity); + mainUI.audioControlStarted(); + } + else { + audio_listener.release(true); // shouldn't be needed, but just to be safe + audio_listener = null; + preview.showToast(null, R.string.audio_listener_failed); + } + } + + public boolean hasAudioControl() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); + String audio_control = sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none"); + /*if( audio_control.equals("voice") ) { + return speechControl.hasSpeechRecognition(); + } + else*/ if( audio_control.equals("noise") ) { + return true; + } + return false; + } + + /*void startAudioListeners() { + initAudioListener(); + // no need to restart speech recognizer, as we didn't free it in stopAudioListeners(), and it's controlled by a user button + }*/ + + public void stopAudioListeners() { + freeAudioListener(true); + /*if( speechControl.hasSpeechRecognition() ) { + // no need to free the speech recognizer, just stop it + speechControl.stopListening(); + }*/ + } + + public void initLocation() { + if( MyDebug.LOG ) + Log.d(TAG, "initLocation"); + if( app_is_paused ) { + if( MyDebug.LOG ) + Log.d(TAG, "initLocation: app is 
paused!"); + // we shouldn't need this (as we only call initLocation() when active), but just in case we end up here after onPause... + // in fact this happens when we need to grant permission for location - the call to initLocation() from + // MainActivity.onRequestPermissionsResult()->PermissionsHandler.onRequestPermissionsResult() will be when the application + // is still paused - so we won't do anything here, but instead initLocation() will be called after when resuming. + } + else if( camera_in_background ) { + if( MyDebug.LOG ) + Log.d(TAG, "initLocation: camera in background!"); + // we will end up here if app is pause/resumed when camera in background (settings, dialog, etc) + } + else if( !applicationInterface.getLocationSupplier().setupLocationListener() ) { + if( MyDebug.LOG ) + Log.d(TAG, "location permission not available, so request permission"); + permissionHandler.requestLocationPermission(); + } + } + + private void initGyroSensors() { + if( MyDebug.LOG ) + Log.d(TAG, "initGyroSensors"); + if( applicationInterface.getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama ) { + applicationInterface.getGyroSensor().enableSensors(); + } + else { + applicationInterface.getGyroSensor().disableSensors(); + } + } + + void speak(String text) { + if( textToSpeech != null && textToSpeechSuccess ) { + textToSpeech.speak(text, TextToSpeech.QUEUE_FLUSH, null, null); + } + } + + @Override + public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { + if( MyDebug.LOG ) + Log.d(TAG, "onRequestPermissionsResult: requestCode " + requestCode); + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + permissionHandler.onRequestPermissionsResult(requestCode, grantResults); + } + + public void restartOpenCamera() { + if( MyDebug.LOG ) + Log.d(TAG, "restartOpenCamera"); + this.waitUntilImageQueueEmpty(); + // see 
http://stackoverflow.com/questions/2470870/force-application-to-restart-on-first-activity + Intent intent = this.getBaseContext().getPackageManager().getLaunchIntentForPackage( this.getBaseContext().getPackageName() ); + intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); + this.startActivity(intent); + } + + public void takePhotoButtonLongClickCancelled() { + if( MyDebug.LOG ) + Log.d(TAG, "takePhotoButtonLongClickCancelled"); + if( preview.getCameraController() != null && preview.getCameraController().isContinuousBurstInProgress() ) { + preview.getCameraController().stopContinuousBurst(); + } + } + + // for testing: + public SaveLocationHistory getSaveLocationHistory() { + return this.save_location_history; + } + + public SaveLocationHistory getSaveLocationHistorySAF() { + return this.save_location_history_saf; + } + + public void usedFolderPicker() { + if( applicationInterface.getStorageUtils().isUsingSAF() ) { + save_location_history_saf.updateFolderHistory(getStorageUtils().getSaveLocationSAF(), true); + } + else { + save_location_history.updateFolderHistory(getStorageUtils().getSaveLocation(), true); + } + } + + public boolean hasThumbnailAnimation() { + return this.applicationInterface.hasThumbnailAnimation(); + } + + /*public boolean testHasNotification() { + return has_notification; + }*/ +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyApplicationInterface.java b/app/src/main/java/net/sourceforge/opencamera/MyApplicationInterface.java new file mode 100644 index 0000000..1e8c05c --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyApplicationInterface.java @@ -0,0 +1,3937 @@ +package net.sourceforge.opencamera; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import 
net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.RawImage; +import net.sourceforge.opencamera.preview.ApplicationInterface; +import net.sourceforge.opencamera.preview.BasicApplicationInterface; +import net.sourceforge.opencamera.preview.Preview; +import net.sourceforge.opencamera.preview.VideoProfile; +import net.sourceforge.opencamera.ui.DrawPreview; + +import android.app.Activity; +import android.content.ContentValues; +import android.content.Context; +import android.content.Intent; +import android.content.SharedPreferences; +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Insets; +import android.graphics.Paint; +import android.graphics.Point; +import android.graphics.Rect; +//import android.location.Address; // don't use until we have info for data privacy! +//import android.location.Geocoder; // don't use until we have info for data privacy! +import android.hardware.camera2.CameraExtensionCharacteristics; +import android.location.Location; +import android.media.MediaMetadataRetriever; +import android.media.MediaPlayer; +import android.media.MediaRecorder; +import android.net.Uri; +import android.os.Build; +import android.os.Bundle; +import android.os.Environment; +import android.os.Handler; +import android.os.ParcelFileDescriptor; +import android.preference.PreferenceManager; +import android.provider.DocumentsContract; +import android.provider.MediaStore; +import android.provider.Settings; +import android.util.Log; +import android.util.Pair; +import android.view.Display; +import android.view.MotionEvent; +import android.view.Surface; +import android.view.View; +import android.view.WindowInsets; +import android.view.WindowMetrics; +import android.widget.ImageButton; + +import androidx.annotation.RequiresApi; + +/** Our implementation of ApplicationInterface, see there for details. 
+ */ +public class MyApplicationInterface extends BasicApplicationInterface { + private static final String TAG = "MyApplicationInterface"; + + // note, okay to change the order of enums in future versions, as getPhotoMode() does not rely on the order for the saved photo mode + public enum PhotoMode { + Standard, + DRO, // single image "fake" HDR + HDR, // HDR created from multiple (expo bracketing) images + ExpoBracketing, // take multiple expo bracketed images, without combining to a single image + FocusBracketing, // take multiple focus bracketed images, without combining to a single image + FastBurst, + NoiseReduction, + Panorama, + // camera vendor extensions: + X_Auto, + X_HDR, + X_Night, + X_Bokeh, + X_Beauty + } + + private final MainActivity main_activity; + private final LocationSupplier locationSupplier; + private final GyroSensor gyroSensor; + private final StorageUtils storageUtils; + private final DrawPreview drawPreview; + private final ImageSaver imageSaver; + + private final static float panorama_pics_per_screen = 3.33333f; + private int n_capture_images = 0; // how many calls to onPictureTaken() since the last call to onCaptureStarted() + private int n_capture_images_raw = 0; // how many calls to onRawPictureTaken() since the last call to onCaptureStarted() + private int n_panorama_pics = 0; + public final static int max_panorama_pics_c = 10; // if we increase this, review against memory requirements under MainActivity.supportsPanorama() + private boolean panorama_pic_accepted; // whether the last panorama picture was accepted, or else needs to be retaken + private boolean panorama_dir_left_to_right = true; // direction of panorama (set after we've captured two images) + + private File last_video_file = null; + private Uri last_video_file_uri = null; + + private final Timer subtitleVideoTimer = new Timer(); + private TimerTask subtitleVideoTimerTask; + + private final Rect text_bounds = new Rect(); + private boolean used_front_screen_flash ; + + 
// store to avoid calling PreferenceManager.getDefaultSharedPreferences() repeatedly + private final SharedPreferences sharedPreferences; + + private enum LastImagesType { + FILE, + SAF, + MEDIASTORE + } + private LastImagesType last_images_type = LastImagesType.FILE; // whether the last images array are using File API, SAF or MediaStore + + /** This class keeps track of the images saved in this batch, for use with Pause Preview option, so we can share or trash images. + */ + private static class LastImage { + final boolean share; // one of the images in the list should have share set to true, to indicate which image to share + final String name; + Uri uri; + + LastImage(Uri uri, boolean share) { + this.name = null; + this.uri = uri; + this.share = share; + } + + LastImage(String filename, boolean share) { + this.name = filename; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + // previous to Android 7, we could just use a "file://" uri, but this is no longer supported on Android 7, and + // results in a android.os.FileUriExposedException when trying to share! + // see https://stackoverflow.com/questions/38200282/android-os-fileuriexposedexception-file-storage-emulated-0-test-txt-exposed + // so instead we leave null for now, and set it from MyApplicationInterface.scannedFile(). 
+ this.uri = null; + } + else { + this.uri = Uri.parse("file://" + this.name); + } + this.share = share; + } + } + private final List last_images = new ArrayList<>(); + + private final ToastBoxer photo_delete_toast = new ToastBoxer(); + + // camera properties which are saved in bundle, but not stored in preferences (so will be remembered if the app goes into background, but not after restart) + private final static int cameraId_default = 0; + private boolean has_set_cameraId; + private int cameraId = cameraId_default; + private String cameraIdSPhysical = null; // if non-null, this is the ID string for a physical camera, undlerying the logical cameraId + private final static String nr_mode_default = "preference_nr_mode_normal"; + private String nr_mode = nr_mode_default; + private final static float aperture_default = -1.0f; + private float aperture = aperture_default; + // camera properties that aren't saved even in the bundle; these should be initialised/reset in reset() + private int zoom_factor = -1; // don't save zoom, as doing so tends to confuse users; other camera applications don't seem to save zoom when pause/resuming + + // for testing: + public volatile int test_n_videos_scanned; + public volatile int test_max_mp; + + MyApplicationInterface(MainActivity main_activity, Bundle savedInstanceState) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "MyApplicationInterface"); + debug_time = System.currentTimeMillis(); + } + this.main_activity = main_activity; + this.sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + this.locationSupplier = new LocationSupplier(main_activity); + if( MyDebug.LOG ) + Log.d(TAG, "MyApplicationInterface: time after creating location supplier: " + (System.currentTimeMillis() - debug_time)); + this.gyroSensor = new GyroSensor(main_activity); + this.storageUtils = new StorageUtils(main_activity, this); + if( MyDebug.LOG ) + Log.d(TAG, "MyApplicationInterface: time after creating storage 
utils: " + (System.currentTimeMillis() - debug_time)); + this.drawPreview = new DrawPreview(main_activity, this); + + this.imageSaver = new ImageSaver(main_activity); + this.imageSaver.start(); + + this.reset(false); + if( savedInstanceState != null ) { + // load the things we saved in onSaveInstanceState(). + if( MyDebug.LOG ) + Log.d(TAG, "read from savedInstanceState"); + has_set_cameraId = true; + cameraId = savedInstanceState.getInt("cameraId", cameraId_default); + if( MyDebug.LOG ) + Log.d(TAG, "found cameraId: " + cameraId); + cameraIdSPhysical = savedInstanceState.getString("cameraIdSPhysical", null); + if( MyDebug.LOG ) + Log.d(TAG, "found cameraIdSPhysical: " + cameraIdSPhysical); + nr_mode = savedInstanceState.getString("nr_mode", nr_mode_default); + if( MyDebug.LOG ) + Log.d(TAG, "found nr_mode: " + nr_mode); + aperture = savedInstanceState.getFloat("aperture", aperture_default); + if( MyDebug.LOG ) + Log.d(TAG, "found aperture: " + aperture); + } + + if( MyDebug.LOG ) + Log.d(TAG, "MyApplicationInterface: total time to create MyApplicationInterface: " + (System.currentTimeMillis() - debug_time)); + } + + /** Here we save states which aren't saved in preferences (we don't want them to be saved if the + * application is restarted from scratch), but we do want to preserve if Android has to recreate + * the application (e.g., configuration change, or it's destroyed while in background). 
+ */ + void onSaveInstanceState(Bundle state) { + if( MyDebug.LOG ) + Log.d(TAG, "onSaveInstanceState"); + if( MyDebug.LOG ) + Log.d(TAG, "save cameraId: " + cameraId); + state.putInt("cameraId", cameraId); + if( MyDebug.LOG ) + Log.d(TAG, "save cameraIdSPhysical: " + cameraIdSPhysical); + state.putString("cameraIdSPhysical", cameraIdSPhysical); + if( MyDebug.LOG ) + Log.d(TAG, "save nr_mode: " + nr_mode); + state.putString("nr_mode", nr_mode); + if( MyDebug.LOG ) + Log.d(TAG, "save aperture: " + aperture); + state.putFloat("aperture", aperture); + } + + void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + if( drawPreview != null ) { + drawPreview.onDestroy(); + } + if( imageSaver != null ) { + imageSaver.onDestroy(); + } + } + + LocationSupplier getLocationSupplier() { + return locationSupplier; + } + + public GyroSensor getGyroSensor() { + return gyroSensor; + } + + StorageUtils getStorageUtils() { + return storageUtils; + } + + public ImageSaver getImageSaver() { + return imageSaver; + } + + public DrawPreview getDrawPreview() { + return drawPreview; + } + + @Override + public Context getContext() { + return main_activity; + } + + @Override + public boolean useCamera2() { + if( main_activity.supportsCamera2() ) { + String camera_api = sharedPreferences.getString(PreferenceKeys.CameraAPIPreferenceKey, PreferenceKeys.CameraAPIPreferenceDefault); + if( "preference_camera_api_camera2".equals(camera_api) ) { + return true; + } + } + return false; + } + + /** If adding extra calls to this, consider whether explicit user permission is required, and whether + * privacy policy or data privacy section needs updating. + * Returns null if location not available. + */ + @Override + public Location getLocation() { + return locationSupplier.getLocation(); + } + + /** If adding extra calls to this, consider whether explicit user permission is required, and whether + * privacy policy or data privacy section needs updating. 
+ * Returns null if location not available. + */ + public Location getLocation(LocationSupplier.LocationInfo locationInfo) { + return locationSupplier.getLocation(locationInfo); + } + + @Override + public VideoMethod createOutputVideoMethod() { + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + Bundle myExtras = main_activity.getIntent().getExtras(); + if (myExtras != null) { + Uri intent_uri = myExtras.getParcelable(MediaStore.EXTRA_OUTPUT); + if( intent_uri != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + intent_uri); + return VideoMethod.URI; + } + } + // if no EXTRA_OUTPUT, we should save to standard location, and will pass back the Uri of that location + if( MyDebug.LOG ) + Log.d(TAG, "intent uri not specified"); + if( MainActivity.useScopedStorage() ) { + // can't use file method with scoped storage + return VideoMethod.MEDIASTORE; + } + else { + // note that SAF URIs don't seem to work for calling applications (tested with Grabilla and "Photo Grabber Image From Video" (FreezeFrame)), so we use standard folder with non-SAF method + return VideoMethod.FILE; + } + } + else if( storageUtils.isUsingSAF() ) { + return VideoMethod.SAF; + } + else if( MainActivity.useScopedStorage() ) { + return VideoMethod.MEDIASTORE; + } + else { + return VideoMethod.FILE; + } + } + + @Override + public File createOutputVideoFile(String extension) throws IOException { + return createOutputVideoFile(false, extension, new Date()); + } + + @Override + public Uri createOutputVideoSAF(String extension) throws IOException { + return createOutputVideoSAF(false, extension, new Date()); + } + + @Override + public Uri createOutputVideoMediaStore(String extension) throws IOException { + return createOutputVideoMediaStore(false, extension, new Date()); + } + + public File createOutputVideoFile(boolean is_preshot, String extension, Date date) throws IOException { + last_video_file = storageUtils.createOutputMediaFile(is_preshot ? 
StorageUtils.MEDIA_TYPE_PRESHOT : StorageUtils.MEDIA_TYPE_VIDEO, "", extension, date); + return last_video_file; + } + + public Uri createOutputVideoSAF(boolean is_preshot, String extension, Date date) throws IOException { + last_video_file_uri = storageUtils.createOutputMediaFileSAF(is_preshot ? StorageUtils.MEDIA_TYPE_PRESHOT : StorageUtils.MEDIA_TYPE_VIDEO, "", extension, date); + return last_video_file_uri; + } + + public Uri createOutputVideoMediaStore(boolean is_preshot, String extension, Date date) throws IOException { + Uri folder = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ? + MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY) : + MediaStore.Video.Media.EXTERNAL_CONTENT_URI; + ContentValues contentValues = new ContentValues(); + String filename = storageUtils.createMediaFilename(is_preshot ? StorageUtils.MEDIA_TYPE_PRESHOT : StorageUtils.MEDIA_TYPE_VIDEO, "", 0, "." + extension, date); + if( MyDebug.LOG ) + Log.d(TAG, "filename: " + filename); + contentValues.put(MediaStore.Video.Media.DISPLAY_NAME, filename); + String mime_type = storageUtils.getVideoMimeType(extension); + if( MyDebug.LOG ) + Log.d(TAG, "mime_type: " + mime_type); + contentValues.put(MediaStore.Video.Media.MIME_TYPE, mime_type); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + String relative_path = storageUtils.getSaveRelativeFolder(); + if( MyDebug.LOG ) + Log.d(TAG, "relative_path: " + relative_path); + contentValues.put(MediaStore.Video.Media.RELATIVE_PATH, relative_path); + contentValues.put(MediaStore.Video.Media.IS_PENDING, 1); + } + + try { + last_video_file_uri = main_activity.getContentResolver().insert(folder, contentValues); + if( MyDebug.LOG ) + Log.d(TAG, "uri: " + last_video_file_uri); + } + catch(IllegalArgumentException e) { + // can happen for mediastore method if invalid ContentResolver.insert() call + MyDebug.logStackTrace(TAG, "IllegalArgumentException writing video file", e); + throw new IOException(); + } + 
catch(IllegalStateException e) { + // have received Google Play crashes from ContentResolver.insert() call for mediastore method + MyDebug.logStackTrace(TAG, "IllegalStateException writing video file", e); + throw new IOException(); + } + if( last_video_file_uri == null ) { + throw new IOException(); + } + + return last_video_file_uri; + } + + @Override + public Uri createOutputVideoUri() { + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + Bundle myExtras = main_activity.getIntent().getExtras(); + if (myExtras != null) { + Uri intent_uri = myExtras.getParcelable(MediaStore.EXTRA_OUTPUT); + if( intent_uri != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + intent_uri); + return intent_uri; + } + } + } + throw new RuntimeException(); // programming error if we arrived here + } + + @Override + public int getCameraIdPref() { + return cameraId; + } + + @Override + public String getCameraIdSPhysicalPref() { + return cameraIdSPhysical; + } + + @Override + public String getFlashPref() { + return sharedPreferences.getString(PreferenceKeys.getFlashPreferenceKey(cameraId), ""); + } + + @Override + public String getFocusPref(boolean is_video) { + if( getPhotoMode() == PhotoMode.FocusBracketing && !main_activity.getPreview().isVideo() ) { + if( isFocusBracketingSourceAutoPref() ) { + return "focus_mode_continuous_picture"; + } + else { + return "focus_mode_manual2"; + } + } + return sharedPreferences.getString(PreferenceKeys.getFocusPreferenceKey(cameraId, is_video), ""); + } + + int getFocusAssistPref() { + String focus_assist_value = sharedPreferences.getString(PreferenceKeys.FocusAssistPreferenceKey, "0"); + int focus_assist; + try { + focus_assist = Integer.parseInt(focus_assist_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse focus_assist_value: " + focus_assist_value, e); + focus_assist = 0; + } + if( focus_assist > 0 && main_activity.getPreview().isVideoRecording() ) { + // 
focus assist not currently supported while recording video - don't want to zoom the resultant video! + focus_assist = 0; + } + return focus_assist; + } + + @Override + public boolean isVideoPref() { + return sharedPreferences.getBoolean(PreferenceKeys.IsVideoPreferenceKey, false); + } + + @Override + public String getSceneModePref() { + return sharedPreferences.getString(PreferenceKeys.SceneModePreferenceKey, CameraController.SCENE_MODE_DEFAULT); + } + + @Override + public String getColorEffectPref() { + return sharedPreferences.getString(PreferenceKeys.ColorEffectPreferenceKey, CameraController.COLOR_EFFECT_DEFAULT); + } + + @Override + public String getWhiteBalancePref() { + return sharedPreferences.getString(PreferenceKeys.WhiteBalancePreferenceKey, CameraController.WHITE_BALANCE_DEFAULT); + } + + @Override + public int getWhiteBalanceTemperaturePref() { + return sharedPreferences.getInt(PreferenceKeys.WhiteBalanceTemperaturePreferenceKey, 5000); + } + + @Override + public String getAntiBandingPref() { + return sharedPreferences.getString(PreferenceKeys.AntiBandingPreferenceKey, CameraController.ANTIBANDING_DEFAULT); + } + + @Override + public String getEdgeModePref() { + return sharedPreferences.getString(PreferenceKeys.EdgeModePreferenceKey, CameraController.EDGE_MODE_DEFAULT); + } + + @Override + public String getCameraNoiseReductionModePref() { + return sharedPreferences.getString(PreferenceKeys.CameraNoiseReductionModePreferenceKey, CameraController.NOISE_REDUCTION_MODE_DEFAULT); + } + + @Override + public String getISOPref() { + return sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT); + } + + @Override + public int getExposureCompensationPref() { + String value = sharedPreferences.getString(PreferenceKeys.ExposurePreferenceKey, "0"); + if( MyDebug.LOG ) + Log.d(TAG, "saved exposure value: " + value); + int exposure = 0; + try { + exposure = Integer.parseInt(value); + if( MyDebug.LOG ) + Log.d(TAG, "exposure: " + 
exposure); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.d(TAG, "exposure invalid format, can't parse to int"); + } + return exposure; + } + + public static CameraController.Size choosePanoramaResolution(List sizes) { + // if we allow panorama with higher resolutions, review against memory requirements under MainActivity.supportsPanorama() + // also may need to update the downscaling in the testing code + final int max_width_c = 2080; + boolean found = false; + CameraController.Size best_size = null; + // find largest width <= max_width_c with aspect ratio 4:3 + for(CameraController.Size size : sizes) { + if( size.width <= max_width_c ) { + double aspect_ratio = ((double)size.width) / (double)size.height; + if( Math.abs(aspect_ratio - 4.0/3.0) < 1.0e-5 ) { + if( !found || size.width > best_size.width ) { + found = true; + best_size = size; + } + } + } + } + if( found ) { + return best_size; + } + // else find largest width <= max_width_c + for(CameraController.Size size : sizes) { + if( size.width <= max_width_c ) { + if( !found || size.width > best_size.width ) { + found = true; + best_size = size; + } + } + } + if( found ) { + return best_size; + } + // else find smallest width + for(CameraController.Size size : sizes) { + if( !found || size.width < best_size.width ) { + found = true; + best_size = size; + } + } + return best_size; + } + + @Override + public Pair getCameraResolutionPref(CameraResolutionConstraints constraints) { + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.Panorama ) { + CameraController.Size best_size = choosePanoramaResolution(main_activity.getPreview().getSupportedPictureSizes(false)); + return new Pair<>(best_size.width, best_size.height); + } + + String resolution_value = sharedPreferences.getString(PreferenceKeys.getResolutionPreferenceKey(cameraId, cameraIdSPhysical), ""); + if( MyDebug.LOG ) + Log.d(TAG, "resolution_value: " + resolution_value); + Pair result = null; + if( 
!resolution_value.isEmpty() ) { + // parse the saved size, and make sure it is still valid + int index = resolution_value.indexOf(' '); + if( index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "resolution_value invalid format, can't find space"); + } + else { + String resolution_w_s = resolution_value.substring(0, index); + String resolution_h_s = resolution_value.substring(index+1); + if( MyDebug.LOG ) { + Log.d(TAG, "resolution_w_s: " + resolution_w_s); + Log.d(TAG, "resolution_h_s: " + resolution_h_s); + } + try { + int resolution_w = Integer.parseInt(resolution_w_s); + if( MyDebug.LOG ) + Log.d(TAG, "resolution_w: " + resolution_w); + int resolution_h = Integer.parseInt(resolution_h_s); + if( MyDebug.LOG ) + Log.d(TAG, "resolution_h: " + resolution_h); + result = new Pair<>(resolution_w, resolution_h); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.d(TAG, "resolution_value invalid format, can't parse w or h to int"); + } + } + } + + if( photo_mode == PhotoMode.NoiseReduction || photo_mode == PhotoMode.HDR ) { + // set a maximum resolution for modes that require decompressing multiple images for processing, + // due to risk of running out of memory! + constraints.has_max_mp = true; + constraints.max_mp = 18000000; // max of 18MP + //constraints.max_mp = 7800000; // test! + if( main_activity.is_test && test_max_mp != 0 ) { + constraints.max_mp = test_max_mp; + } + } + + return result; + } + + /** getImageQualityPref() returns the image quality used for the Camera Controller for taking a + * photo - in some cases, we may set that to a higher value, then perform processing on the + * resultant JPEG before resaving. This method returns the image quality setting to be used for + * saving the final image (as specified by the user). 
+ */ + private int getSaveImageQualityPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getSaveImageQualityPref"); + String image_quality_s = sharedPreferences.getString(PreferenceKeys.QualityPreferenceKey, "90"); + int image_quality; + try { + image_quality = Integer.parseInt(image_quality_s); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.e(TAG, "image_quality_s invalid format: " + image_quality_s); + image_quality = 90; + } + if( isRawOnly() ) { + // if raw only mode, we can set a lower quality for the JPEG, as it isn't going to be saved - only used for + // the thumbnail and pause preview option + if( MyDebug.LOG ) + Log.d(TAG, "set lower quality for raw_only mode"); + image_quality = Math.min(image_quality, 70); + } + return image_quality; + } + + @Override + public int getImageQualityPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getImageQualityPref"); + // see documentation for getSaveImageQualityPref(): in DRO mode we want to take the photo + // at 100% quality for post-processing, the final image will then be saved at the user requested + // setting + PhotoMode photo_mode = getPhotoMode(); + if( main_activity.getPreview().isVideo() ) + ; // for video photo snapshot mode, the photo modes for 100% quality won't be enabled + else if( photo_mode == PhotoMode.DRO ) + return 100; + else if( photo_mode == PhotoMode.HDR ) + return 100; + else if( photo_mode == PhotoMode.NoiseReduction ) + return 100; + + if( getImageFormatPref() != ImageSaver.Request.ImageFormat.STD ) + return 100; + + return getSaveImageQualityPref(); + } + + @Override + public boolean getFaceDetectionPref() { + if( isCameraExtensionPref() ) { + // not supported for camera extensions + return false; + } + return sharedPreferences.getBoolean(PreferenceKeys.FaceDetectionPreferenceKey, false); + } + + /** Returns whether the current fps preference is one that requires a "high speed" video size/ + * frame rate. 
+ */ + public boolean fpsIsHighSpeed() { + return main_activity.getPreview().fpsIsHighSpeed(getVideoFPSPref()); + } + + @Override + public String getVideoQualityPref() { + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + if( main_activity.getIntent().hasExtra(MediaStore.EXTRA_VIDEO_QUALITY) ) { + int intent_quality = main_activity.getIntent().getIntExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0); + if( MyDebug.LOG ) + Log.d(TAG, "intent_quality: " + intent_quality); + if( intent_quality == 0 || intent_quality == 1 ) { + List video_quality = main_activity.getPreview().getVideoQualityHander().getSupportedVideoQuality(); + if( intent_quality == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "return lowest quality"); + // return lowest quality, video_quality is sorted high to low + return video_quality.get(video_quality.size()-1); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "return highest quality"); + // return highest quality, video_quality is sorted high to low + return video_quality.get(0); + } + } + } + } + + // Conceivably, we might get in a state where the fps isn't supported at all (e.g., an upgrade changes the available + // supported video resolutions/frame-rates). 
+ return sharedPreferences.getString(PreferenceKeys.getVideoQualityPreferenceKey(cameraId, cameraIdSPhysical, fpsIsHighSpeed()), ""); + } + + @Override + public boolean getVideoStabilizationPref() { + return sharedPreferences.getBoolean(PreferenceKeys.VideoStabilizationPreferenceKey, false); + } + + @Override + public boolean getForce4KPref() { + return cameraId == 0 && sharedPreferences.getBoolean(PreferenceKeys.ForceVideo4KPreferenceKey, false) && main_activity.supportsForceVideo4K(); + } + + @Override + public String getRecordVideoOutputFormatPref() { + return sharedPreferences.getString(PreferenceKeys.VideoFormatPreferenceKey, "preference_video_output_format_default"); + } + + @Override + public String getVideoBitratePref() { + return sharedPreferences.getString(PreferenceKeys.VideoBitratePreferenceKey, "default"); + } + + @Override + public String getVideoFPSPref() { + // if check for EXTRA_VIDEO_QUALITY, if set, best to fall back to default FPS - see corresponding code in getVideoQualityPref + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + if( main_activity.getIntent().hasExtra(MediaStore.EXTRA_VIDEO_QUALITY) ) { + int intent_quality = main_activity.getIntent().getIntExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0); + if (MyDebug.LOG) + Log.d(TAG, "intent_quality: " + intent_quality); + if (intent_quality == 0 || intent_quality == 1) { + return "default"; + } + } + } + + float capture_rate_factor = getVideoCaptureRateFactor(); + if( capture_rate_factor < 1.0f-1.0e-5f ) { + if( MyDebug.LOG ) + Log.d(TAG, "set fps for slow motion, capture rate: " + capture_rate_factor); + int preferred_fps = (int)(30.0/capture_rate_factor+0.5); + if( MyDebug.LOG ) + Log.d(TAG, "preferred_fps: " + preferred_fps); + if( main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRateHighSpeed(preferred_fps) || + main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRate(preferred_fps) ) + return 
String.valueOf(preferred_fps); + // just in case say we support 120fps but NOT 60fps, getSupportedSlowMotionRates() will have returned that 2x slow + // motion is supported, but we need to set 120fps instead of 60fps + while( preferred_fps < 240 ) { + preferred_fps *= 2; + if( MyDebug.LOG ) + Log.d(TAG, "preferred_fps not supported, try: " + preferred_fps); + if( main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRateHighSpeed(preferred_fps) || + main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRate(preferred_fps) ) + return String.valueOf(preferred_fps); + } + // shouln't happen based on getSupportedSlowMotionRates() + Log.e(TAG, "can't find valid fps for slow motion"); + return "default"; + } + return sharedPreferences.getString(PreferenceKeys.getVideoFPSPreferenceKey(cameraId, cameraIdSPhysical), "default"); + } + + @Override + public float getVideoCaptureRateFactor() { + float capture_rate_factor = sharedPreferences.getFloat(PreferenceKeys.getVideoCaptureRatePreferenceKey(main_activity.getPreview().getCameraId(), cameraIdSPhysical), 1.0f); + if( MyDebug.LOG ) + Log.d(TAG, "capture_rate_factor: " + capture_rate_factor); + if( Math.abs(capture_rate_factor - 1.0f) > 1.0e-5 ) { + // check stored capture rate is valid + if( MyDebug.LOG ) + Log.d(TAG, "check stored capture rate is valid"); + List supported_capture_rates = getSupportedVideoCaptureRates(); + if( MyDebug.LOG ) + Log.d(TAG, "supported_capture_rates: " + supported_capture_rates); + boolean found = false; + for(float this_capture_rate : supported_capture_rates) { + if( Math.abs(capture_rate_factor - this_capture_rate) < 1.0e-5 ) { + found = true; + break; + } + } + if( !found ) { + Log.e(TAG, "stored capture_rate_factor: " + capture_rate_factor + " not supported"); + capture_rate_factor = 1.0f; + } + } + return capture_rate_factor; + } + + /** This will always return 1, even if slow motion isn't supported (i.e., + * slow motion should only be considered as supported if 
at least 2 entries + * are returned. Entries are returned in increasing order. + */ + public List getSupportedVideoCaptureRates() { + List rates = new ArrayList<>(); + if( main_activity.getPreview().supportsVideoHighSpeed() ) { + // We consider a slow motion rate supported if we can get at least 30fps in slow motion. + // If this code is updated, see if we also need to update how slow motion fps is chosen + // in getVideoFPSPref(). + if( main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRateHighSpeed(240) || + main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRate(240) ) { + rates.add(1.0f/8.0f); + rates.add(1.0f/4.0f); + rates.add(1.0f/2.0f); + } + else if( main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRateHighSpeed(120) || + main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRate(120) ) { + rates.add(1.0f/4.0f); + rates.add(1.0f/2.0f); + } + else if( main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRateHighSpeed(60) || + main_activity.getPreview().getVideoQualityHander().videoSupportsFrameRate(60) ) { + rates.add(1.0f/2.0f); + } + } + rates.add(1.0f); + { + // add timelapse options + // in theory this should work on any Android version, though video fails to record in timelapse mode on Galaxy Nexus... 
+ rates.add(2.0f); + rates.add(3.0f); + rates.add(4.0f); + rates.add(5.0f); + rates.add(10.0f); + rates.add(20.0f); + rates.add(30.0f); + rates.add(60.0f); + rates.add(120.0f); + rates.add(240.0f); + } + return rates; + } + + @Override + public CameraController.TonemapProfile getVideoTonemapProfile() { + String video_log = sharedPreferences.getString(PreferenceKeys.VideoLogPreferenceKey, "off"); + // only return TONEMAPPROFILE_LOG for values recognised by getVideoLogProfileStrength() + switch( video_log ) { + case "off": + return CameraController.TonemapProfile.TONEMAPPROFILE_OFF; + case "rec709": + return CameraController.TonemapProfile.TONEMAPPROFILE_REC709; + case "srgb": + return CameraController.TonemapProfile.TONEMAPPROFILE_SRGB; + case "fine": + case "low": + case "medium": + case "strong": + case "extra_strong": + return CameraController.TonemapProfile.TONEMAPPROFILE_LOG; + case "gamma": + return CameraController.TonemapProfile.TONEMAPPROFILE_GAMMA; + case "jtvideo": + return CameraController.TonemapProfile.TONEMAPPROFILE_JTVIDEO; + case "jtlog": + return CameraController.TonemapProfile.TONEMAPPROFILE_JTLOG; + case "jtlog2": + return CameraController.TonemapProfile.TONEMAPPROFILE_JTLOG2; + } + return CameraController.TonemapProfile.TONEMAPPROFILE_OFF; + } + + @Override + public float getVideoLogProfileStrength() { + String video_log = sharedPreferences.getString(PreferenceKeys.VideoLogPreferenceKey, "off"); + // remember to update getVideoTonemapProfile() if adding/changing modes + switch( video_log ) { + case "off": + case "rec709": + case "srgb": + case "gamma": + case "jtvideo": + case "jtlog": + case "jtlog2": + return 0.0f; + /*case "fine": + return 1.0f; + case "low": + return 5.0f; + case "medium": + return 10.0f; + case "strong": + return 100.0f; + case "extra_strong": + return 500.0f;*/ + // need a range of values as behaviour can vary between devices - e.g., "fine" has more effect on Nexus 6 than + // other devices such as OnePlus 3T or Galaxy 
S10e + // recalibrated in v1.48 to correspond to improvements made in CameraController2 + case "fine": + return 10.0f; + case "low": + return 32.0f; + case "medium": + return 100.0f; + case "strong": + return 224.0f; + case "extra_strong": + return 500.0f; + } + return 0.0f; + } + + @Override + public float getVideoProfileGamma() { + String gamma_value = sharedPreferences.getString(PreferenceKeys.VideoProfileGammaPreferenceKey, "2.2"); + float gamma = 0.0f; + try { + gamma = Float.parseFloat(gamma_value); + if( MyDebug.LOG ) + Log.d(TAG, "gamma: " + gamma); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse gamma value: " + gamma_value, e); + } + return gamma; + } + + @Override + public long getVideoMaxDurationPref() { + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + if( main_activity.getIntent().hasExtra(MediaStore.EXTRA_DURATION_LIMIT) ) { + int intent_duration_limit = main_activity.getIntent().getIntExtra(MediaStore.EXTRA_DURATION_LIMIT, 0); + if( MyDebug.LOG ) + Log.d(TAG, "intent_duration_limit: " + intent_duration_limit); + return intent_duration_limit * 1000L; + } + } + + String video_max_duration_value = sharedPreferences.getString(PreferenceKeys.VideoMaxDurationPreferenceKey, "0"); + long video_max_duration; + try { + video_max_duration = (long)Integer.parseInt(video_max_duration_value) * 1000; + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse preference_video_max_duration value: " + video_max_duration_value, e); + video_max_duration = 0; + } + return video_max_duration; + } + + @Override + public int getVideoRestartTimesPref() { + String restart_value = sharedPreferences.getString(PreferenceKeys.VideoRestartPreferenceKey, "0"); + int remaining_restart_video; + try { + remaining_restart_video = Integer.parseInt(restart_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse preference_video_restart 
value: " + restart_value, e); + remaining_restart_video = 0; + } + return remaining_restart_video; + } + + long getVideoMaxFileSizeUserPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getVideoMaxFileSizeUserPref"); + + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + if( main_activity.getIntent().hasExtra(MediaStore.EXTRA_SIZE_LIMIT) ) { + long intent_size_limit = main_activity.getIntent().getLongExtra(MediaStore.EXTRA_SIZE_LIMIT, 0); + if( MyDebug.LOG ) + Log.d(TAG, "intent_size_limit: " + intent_size_limit); + return intent_size_limit; + } + } + + String video_max_filesize_value = sharedPreferences.getString(PreferenceKeys.VideoMaxFileSizePreferenceKey, "0"); + long video_max_filesize; + try { + video_max_filesize = Long.parseLong(video_max_filesize_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse preference_video_max_filesize value: " + video_max_filesize_value, e); + video_max_filesize = 0; + } + //video_max_filesize = 1024*1024; // test + if( MyDebug.LOG ) + Log.d(TAG, "video_max_filesize: " + video_max_filesize); + return video_max_filesize; + } + + private boolean getVideoRestartMaxFileSizeUserPref() { + if( isVideoCaptureIntent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "from video capture intent"); + if( main_activity.getIntent().hasExtra(MediaStore.EXTRA_SIZE_LIMIT) ) { + // if called from a video capture intent that set a max file size, this will be expecting a single file with that maximum size + return false; + } + } + + return sharedPreferences.getBoolean(PreferenceKeys.VideoRestartMaxFileSizePreferenceKey, true); + } + + @Override + public VideoMaxFileSize getVideoMaxFileSizePref() throws NoFreeStorageException { + if( MyDebug.LOG ) + Log.d(TAG, "getVideoMaxFileSizePref"); + VideoMaxFileSize video_max_filesize = new VideoMaxFileSize(); + video_max_filesize.max_filesize = getVideoMaxFileSizeUserPref(); + video_max_filesize.auto_restart = 
getVideoRestartMaxFileSizeUserPref(); + + /* Try to set the max filesize so we don't run out of space. + If using SD card without storage access framework, it's not reliable to get the free storage + (see https://sourceforge.net/p/opencamera/tickets/153/ ). + If using Storage Access Framework, getting the available space seems to be reliable for + internal storage or external SD card. + */ + boolean set_max_filesize; + if( storageUtils.isUsingSAF() ) { + set_max_filesize = true; + } + else { + String folder_name = storageUtils.getSaveLocation(); + if( MyDebug.LOG ) + Log.d(TAG, "saving to: " + folder_name); + boolean is_internal = false; + if( !StorageUtils.saveFolderIsFull(folder_name) ) { + is_internal = true; + } + else { + // If save folder path is a full path, see if it matches the "external" storage (which actually means "primary", which typically isn't an SD card these days). + File storage = Environment.getExternalStorageDirectory(); + if( MyDebug.LOG ) + Log.d(TAG, "compare to: " + storage.getAbsolutePath()); + if( folder_name.startsWith( storage.getAbsolutePath() ) ) + is_internal = true; + } + if( MyDebug.LOG ) + Log.d(TAG, "using internal storage?" 
+ is_internal); + set_max_filesize = is_internal; + } + if( set_max_filesize ) { + if( MyDebug.LOG ) + Log.d(TAG, "try setting max filesize"); + long free_memory = storageUtils.freeMemory(); + if( free_memory >= 0 ) { + free_memory = free_memory * 1024 * 1024; + + final long min_free_memory = 50000000; // how much free space to leave after video + // min_free_filesize is the minimum value to set for max file size: + // - no point trying to create a really short video + // - too short videos can end up being corrupted + // - also with auto-restart, if this is too small we'll end up repeatedly restarting and creating shorter and shorter videos + final long min_free_filesize = 20000000; + long available_memory = free_memory - min_free_memory; + if( test_set_available_memory ) { + available_memory = test_available_memory; + } + if( MyDebug.LOG ) { + Log.d(TAG, "free_memory: " + free_memory); + Log.d(TAG, "available_memory: " + available_memory); + } + if( available_memory > min_free_filesize ) { + if( video_max_filesize.max_filesize == 0 || video_max_filesize.max_filesize > available_memory ) { + video_max_filesize.max_filesize = available_memory; + // still leave auto_restart set to true - because even if we set a max filesize for running out of storage, the video may still hit a maximum limit beforehand, if there's a device max limit set (typically ~2GB) + if( MyDebug.LOG ) + Log.d(TAG, "set video_max_filesize to avoid running out of space: " + video_max_filesize); + } + } + else { + if( MyDebug.LOG ) + Log.e(TAG, "not enough free storage to record video"); + throw new NoFreeStorageException(); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "can't determine remaining free space"); + } + } + + return video_max_filesize; + } + + @Override + public boolean getVideoFlashPref() { + return sharedPreferences.getBoolean(PreferenceKeys.VideoFlashPreferenceKey, false); + } + + @Override + public boolean getVideoLowPowerCheckPref() { + return 
sharedPreferences.getBoolean(PreferenceKeys.VideoLowPowerCheckPreferenceKey, true); + } + + @Override + public String getPreviewSizePref() { + return sharedPreferences.getString(PreferenceKeys.PreviewSizePreferenceKey, "preference_preview_size_wysiwyg"); + } + + @Override + public String getLockOrientationPref() { + if( getPhotoMode() == PhotoMode.Panorama ) + return "portrait"; // for now panorama only supports portrait + return sharedPreferences.getString(PreferenceKeys.LockOrientationPreferenceKey, "none"); + } + + @Override + public boolean getTouchCapturePref() { + String value = sharedPreferences.getString(PreferenceKeys.TouchCapturePreferenceKey, "none"); + return value.equals("single"); + } + + @Override + public boolean getDoubleTapCapturePref() { + String value = sharedPreferences.getString(PreferenceKeys.TouchCapturePreferenceKey, "none"); + return value.equals("double"); + } + + @Override + public boolean getPausePreviewPref() { + if( main_activity.getPreview().isVideoRecording() ) { + // don't pause preview when taking photos while recording video! + return false; + } + else if( main_activity.lastContinuousFastBurst() ) { + // Don't use pause preview mode when doing a continuous fast burst + // Firstly due to not using background thread for pause preview mode, this will be + // sluggish anyway, but even when this is fixed, I'm not sure it makes sense to use + // pause preview in this mode. 
+ return false; + } + else if( getPhotoMode() == PhotoMode.Panorama ) { + // don't pause preview when taking photos for panorama mode + return false; + } + return sharedPreferences.getBoolean(PreferenceKeys.PausePreviewPreferenceKey, false); + } + + @Override + public boolean getShowToastsPref() { + return sharedPreferences.getBoolean(PreferenceKeys.ShowToastsPreferenceKey, true); + } + + public boolean getThumbnailAnimationPref() { + return sharedPreferences.getBoolean(PreferenceKeys.ThumbnailAnimationPreferenceKey, true); + } + + @Override + public boolean getShutterSoundPref() { + if( getPhotoMode() == PhotoMode.Panorama ) + return false; + return sharedPreferences.getBoolean(PreferenceKeys.ShutterSoundPreferenceKey, true); + } + + @Override + public boolean getStartupFocusPref() { + return sharedPreferences.getBoolean(PreferenceKeys.StartupFocusPreferenceKey, true); + } + + @Override + public long getTimerPref() { + if( getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama ) + return 0; // don't support timer with panorama + String timer_value = sharedPreferences.getString(PreferenceKeys.TimerPreferenceKey, "0"); + long timer_delay; + try { + timer_delay = (long)Integer.parseInt(timer_value) * 1000; + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse preference_timer value: " + timer_value, e); + timer_delay = 0; + } + return timer_delay; + } + + @Override + public String getRepeatPref() { + if( getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama ) + return "1"; // don't support repeat with panorama + return sharedPreferences.getString(PreferenceKeys.RepeatModePreferenceKey, "1"); + } + + @Override + public long getRepeatIntervalPref() { + String timer_value = sharedPreferences.getString(PreferenceKeys.RepeatIntervalPreferenceKey, "0"); + long timer_delay; + try { + float timer_delay_s = Float.parseFloat(timer_value); + if( MyDebug.LOG ) + Log.d(TAG, "timer_delay_s: " + timer_delay_s); + timer_delay = 
(long)(timer_delay_s * 1000); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse repeat interval value: " + timer_value, e); + timer_delay = 0; + } + return timer_delay; + } + + private ImageSaver.Request.RemoveDeviceExif getRemoveDeviceExifPref() { + switch( sharedPreferences.getString(PreferenceKeys.RemoveDeviceExifPreferenceKey, "preference_remove_device_exif_off") ) { + case "preference_remove_device_exif_on": + return ImageSaver.Request.RemoveDeviceExif.ON; + case "preference_remove_device_exif_keep_datetime": + return ImageSaver.Request.RemoveDeviceExif.KEEP_DATETIME; + default: + return ImageSaver.Request.RemoveDeviceExif.OFF; + } + } + + @Override + public boolean getGeotaggingPref() { + return sharedPreferences.getBoolean(PreferenceKeys.LocationPreferenceKey, false); + } + + @Override + public boolean getRequireLocationPref() { + return sharedPreferences.getBoolean(PreferenceKeys.RequireLocationPreferenceKey, false); + } + + boolean getGeodirectionPref() { + return sharedPreferences.getBoolean(PreferenceKeys.GPSDirectionPreferenceKey, false); + } + + @Override + public boolean getRecordAudioPref() { + return sharedPreferences.getBoolean(PreferenceKeys.RecordAudioPreferenceKey, true); + } + + @Override + public String getRecordAudioChannelsPref() { + return sharedPreferences.getString(PreferenceKeys.RecordAudioChannelsPreferenceKey, "audio_default"); + } + + @Override + public String getRecordAudioSourcePref() { + return sharedPreferences.getString(PreferenceKeys.RecordAudioSourcePreferenceKey, "audio_src_camcorder"); + } + + public boolean getFocusPeakingPref() { + String focus_peaking_pref = sharedPreferences.getString(PreferenceKeys.FocusPeakingPreferenceKey, "preference_focus_peaking_off"); + return !focus_peaking_pref.equals("preference_focus_peaking_off") && main_activity.supportsPreviewBitmaps(); + } + + public boolean getPreShotsPref(PhotoMode photo_mode) { + if( main_activity.getPreview().isVideo() || photo_mode == 
PhotoMode.ExpoBracketing || photo_mode == PhotoMode.FocusBracketing || photo_mode == PhotoMode.Panorama ) { + // pre-shots not supported for these modes + return false; + } + String pre_shots_pref = sharedPreferences.getString(PreferenceKeys.PreShotsPreferenceKey, "preference_save_preshots_off"); + return !pre_shots_pref.equals("preference_save_preshots_off") && main_activity.supportsPreShots(); + } + + public boolean getAutoStabilisePref() { + boolean auto_stabilise = sharedPreferences.getBoolean(PreferenceKeys.AutoStabilisePreferenceKey, false); + return auto_stabilise && main_activity.supportsAutoStabilise(); + } + + /** Returns the alpha value to use for ghost image, as a number from 0 to 255. + * Note that we store the preference as a percentage from 0 to 100, but scale this to 0 to 255. + */ + public int getGhostImageAlpha() { + String ghost_image_alpha_value = sharedPreferences.getString(PreferenceKeys.GhostImageAlphaPreferenceKey, "50"); + int ghost_image_alpha; + try { + ghost_image_alpha = Integer.parseInt(ghost_image_alpha_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse ghost_image_alpha_value: " + ghost_image_alpha_value, e); + ghost_image_alpha = 50; + } + ghost_image_alpha = (int)(ghost_image_alpha*2.55f+0.1f); + return ghost_image_alpha; + } + + public String getStampPref() { + return sharedPreferences.getString(PreferenceKeys.StampPreferenceKey, "preference_stamp_no"); + } + + private String getStampDateFormatPref() { + return sharedPreferences.getString(PreferenceKeys.StampDateFormatPreferenceKey, "preference_stamp_dateformat_default"); + } + + private String getStampTimeFormatPref() { + return sharedPreferences.getString(PreferenceKeys.StampTimeFormatPreferenceKey, "preference_stamp_timeformat_default"); + } + + private String getStampGPSFormatPref() { + return sharedPreferences.getString(PreferenceKeys.StampGPSFormatPreferenceKey, "preference_stamp_gpsformat_default"); + } + + /*private String 
getStampGeoAddressPref() { + return sharedPreferences.getString(PreferenceKeys.StampGeoAddressPreferenceKey, "preference_stamp_geo_address_no"); + }*/ + + private String getUnitsDistancePref() { + return sharedPreferences.getString(PreferenceKeys.UnitsDistancePreferenceKey, "preference_units_distance_m"); + } + + public String getTextStampPref() { + return sharedPreferences.getString(PreferenceKeys.TextStampPreferenceKey, ""); + } + + private int getTextStampFontSizePref() { + int font_size = 12; + String value = sharedPreferences.getString(PreferenceKeys.StampFontSizePreferenceKey, "12"); + if( MyDebug.LOG ) + Log.d(TAG, "saved font size: " + value); + try { + font_size = Integer.parseInt(value); + if( MyDebug.LOG ) + Log.d(TAG, "font_size: " + font_size); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.d(TAG, "font size invalid format, can't parse to int"); + } + return font_size; + } + + /** Whether the Mediastore API supports saving subtitle files. + */ + static boolean mediastoreSupportsVideoSubtitles() { + // Android 11+ no longer allows mediastore API to save types that Android doesn't support! 
+ return Build.VERSION.SDK_INT < Build.VERSION_CODES.R; + } + + private String getVideoSubtitlePref(VideoMethod video_method) { + if( video_method == VideoMethod.MEDIASTORE && !mediastoreSupportsVideoSubtitles() ) { + return "preference_video_subtitle_no"; + } + return sharedPreferences.getString(PreferenceKeys.VideoSubtitlePref, "preference_video_subtitle_no"); + } + + @Override + public int getZoomPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getZoomPref: " + zoom_factor); + return zoom_factor; + } + + @Override + public double getCalibratedLevelAngle() { + return sharedPreferences.getFloat(PreferenceKeys.CalibratedLevelAnglePreferenceKey, 0.0f); + } + + @Override + public boolean canTakeNewPhoto() { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto"); + + int n_raw, n_jpegs; + if( main_activity.getPreview().isVideo() ) { + // video snapshot mode + n_raw = 0; + n_jpegs = 1; + } + else { + n_jpegs = 1; // default + + if( main_activity.getPreview().supportsExpoBracketing() && this.isExpoBracketingPref() ) { + n_jpegs = this.getExpoBracketingNImagesPref(); + } + else if( main_activity.getPreview().supportsFocusBracketing() && this.isFocusBracketingPref() ) { + // focus bracketing mode always avoids blocking the image queue, no matter how many images are being taken + // so all that matters is that we can take at least 1 photo (for the first shot) + //n_jpegs = this.getFocusBracketingNImagesPref(); + n_jpegs = 1; + } + else if( main_activity.getPreview().supportsBurst() && this.isCameraBurstPref() ) { + if( this.getBurstForNoiseReduction() ) { + if( this.getNRModePref() == ApplicationInterface.NRModePref.NRMODE_LOW_LIGHT ) { + n_jpegs = CameraController.N_IMAGES_NR_DARK_LOW_LIGHT; + } + else { + n_jpegs = CameraController.N_IMAGES_NR_DARK; + } + } + else { + n_jpegs = this.getBurstNImages(); + } + } + + if( main_activity.getPreview().supportsRaw() && this.getRawPref() == RawPref.RAWPREF_JPEG_DNG ) { + // note, even in RAW only mode, the CameraController will still take 
JPEG+RAW (we still need to JPEG to + // generate a bitmap from for thumbnail and pause preview option), so this still generates a request in + // the ImageSaver + n_raw = n_jpegs; + } + else { + n_raw = 0; + } + } + + int photo_cost = imageSaver.computePhotoCost(n_raw, n_jpegs); + if( imageSaver.queueWouldBlock(photo_cost) ) { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as queue would block"); + return false; + } + + // even if the queue isn't full, we may apply additional limits + int n_images_to_save = imageSaver.getNImagesToSave(); + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.FastBurst || photo_mode == PhotoMode.Panorama ) { + // only allow one fast burst at a time, so require queue to be empty + if( n_images_to_save > 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as too many for fast burst"); + return false; + } + } + if( photo_mode == PhotoMode.NoiseReduction ) { + // allow a max of 2 photos in memory when at max of 8 images + if( n_images_to_save >= 2*photo_cost ) { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as too many for nr"); + return false; + } + } + if( n_jpegs > 1 ) { + // if in any other kind of burst mode (e.g., expo burst, HDR), allow a max of 3 photos in memory + if( n_images_to_save >= 3*photo_cost ) { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as too many for burst"); + return false; + } + } + if( n_raw > 0 ) { + // if RAW mode, allow a max of 3 photos + if( n_images_to_save >= 3*photo_cost ) { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as too many for raw"); + return false; + } + } + // otherwise, still have a max limit of 5 photos + if( n_images_to_save >= 5*photo_cost ) { + if( main_activity.supportsNoiseReduction() && n_images_to_save <= 8 ) { + // if we take a photo in NR mode, then switch to std mode, it doesn't make sense to suddenly block! 
+ // so need to at least allow a new photo, if the number of photos is less than 1 NR photo + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "canTakeNewPhoto: no, as too many for regular"); + return false; + } + } + + return true; + } + + @Override + public boolean imageQueueWouldBlock(int n_raw, int n_jpegs) { + if( MyDebug.LOG ) + Log.d(TAG, "imageQueueWouldBlock"); + return imageSaver.queueWouldBlock(n_raw, n_jpegs); + } + + /** Returns the ROTATION_* enum of the display relative to the natural device orientation, but + * also checks for the preview being rotated due to user preference + * RotatePreviewPreferenceKey. + * See ApplicationInterface.getDisplayRotation() for more details, including for prefer_later. + */ + @Override + public int getDisplayRotation(boolean prefer_later) { + // important to use cached rotation to reduce issues of incorrect focus square location when + // rotating device, due to strange Android behaviour where rotation changes shortly before + // the configuration actually changes + int rotation = main_activity.getDisplayRotation(prefer_later); + + String rotate_preview = sharedPreferences.getString(PreferenceKeys.RotatePreviewPreferenceKey, "0"); + if( MyDebug.LOG ) + Log.d(TAG, " rotate_preview = " + rotate_preview); + if( rotate_preview.equals("180") ) { + switch (rotation) { + case Surface.ROTATION_0: rotation = Surface.ROTATION_180; break; + case Surface.ROTATION_90: rotation = Surface.ROTATION_270; break; + case Surface.ROTATION_180: rotation = Surface.ROTATION_0; break; + case Surface.ROTATION_270: rotation = Surface.ROTATION_90; break; + default: + break; + } + } + + return rotation; + } + + @Override + public long getExposureTimePref() { + return sharedPreferences.getLong(PreferenceKeys.ExposureTimePreferenceKey, CameraController.EXPOSURE_TIME_DEFAULT); + } + + @Override + public float getFocusDistancePref(boolean is_target_distance) { + return sharedPreferences.getFloat(is_target_distance ? 
PreferenceKeys.FocusBracketingTargetDistancePreferenceKey : PreferenceKeys.FocusDistancePreferenceKey, 0.0f); + } + + @Override + public boolean isFocusBracketingSourceAutoPref() { + if( !main_activity.supportsFocusBracketingSourceAuto() ) + return false; // not supported + return sharedPreferences.getBoolean(PreferenceKeys.FocusBracketingAutoSourceDistancePreferenceKey, false); + } + + /** Sets whether in focus bracketing auto focusing mode for source focus distance. + * If enabled==false (i.e. returning to manual mode), the caller should call Preview.setFocusDistance() + * to set the new manual focus distance. + */ + public void setFocusBracketingSourceAutoPref(boolean enabled) { + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.FocusBracketingAutoSourceDistancePreferenceKey, enabled); + editor.apply(); + if( main_activity.getPreview().getCameraController() != null ) { + main_activity.getPreview().setFocusPref(true); + } + } + + @Override + public boolean isExpoBracketingPref() { + PhotoMode photo_mode = getPhotoMode(); + return photo_mode == PhotoMode.HDR || photo_mode == PhotoMode.ExpoBracketing; + } + + @Override + public boolean isFocusBracketingPref() { + PhotoMode photo_mode = getPhotoMode(); + return photo_mode == PhotoMode.FocusBracketing; + } + + @Override + public boolean isCameraBurstPref() { + PhotoMode photo_mode = getPhotoMode(); + return photo_mode == PhotoMode.FastBurst || photo_mode == PhotoMode.NoiseReduction; + } + + @Override + public int getBurstNImages() { + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.FastBurst ) { + String n_images_value = sharedPreferences.getString(PreferenceKeys.FastBurstNImagesPreferenceKey, "5"); + int n_images; + try { + n_images = Integer.parseInt(n_images_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse FastBurstNImagesPreferenceKey value: " + n_images_value, e); + n_images = 5; + } + return n_images; 
+ } + return 1; + } + + @Override + public boolean getBurstForNoiseReduction() { + PhotoMode photo_mode = getPhotoMode(); + return photo_mode == PhotoMode.NoiseReduction; + } + + public void setNRMode(String nr_mode) { + this.nr_mode = nr_mode; + } + + public String getNRMode() { + /*if( MyDebug.LOG ) + Log.d(TAG, "nr_mode: " + nr_mode);*/ + return nr_mode; + } + + @Override + public NRModePref getNRModePref() { + /*if( MyDebug.LOG ) + Log.d(TAG, "nr_mode: " + nr_mode);*/ + //noinspection SwitchStatementWithTooFewBranches + switch( nr_mode ) { + case "preference_nr_mode_low_light": + return NRModePref.NRMODE_LOW_LIGHT; + } + return NRModePref.NRMODE_NORMAL; + } + + @Override + public boolean isCameraExtensionPref() { + PhotoMode photo_mode = getPhotoMode(); + return photo_mode == PhotoMode.X_Auto || photo_mode == PhotoMode.X_HDR || photo_mode == PhotoMode.X_Night || photo_mode == PhotoMode.X_Bokeh || photo_mode == PhotoMode.X_Beauty; + } + + @Override + @RequiresApi(api = Build.VERSION_CODES.S) + public int getCameraExtensionPref() { + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.X_Auto ) { + return CameraExtensionCharacteristics.EXTENSION_AUTOMATIC; + } + else if( photo_mode == PhotoMode.X_HDR ) { + return CameraExtensionCharacteristics.EXTENSION_HDR; + } + else if( photo_mode == PhotoMode.X_Night ) { + return CameraExtensionCharacteristics.EXTENSION_NIGHT; + } + else if( photo_mode == PhotoMode.X_Bokeh ) { + return CameraExtensionCharacteristics.EXTENSION_BOKEH; + } + else if( photo_mode == PhotoMode.X_Beauty ) { + return CameraExtensionCharacteristics.EXTENSION_BEAUTY; + } + return 0; + } + + public void setAperture(float aperture) { + this.aperture = aperture; + } + + @Override + public float getAperturePref() { + return aperture; + } + + @Override + public int getExpoBracketingNImagesPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getExpoBracketingNImagesPref"); + int n_images; + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == 
PhotoMode.HDR ) { + // always set 3 images for HDR + n_images = 3; + } + else { + String n_images_s = sharedPreferences.getString(PreferenceKeys.ExpoBracketingNImagesPreferenceKey, "3"); + try { + n_images = Integer.parseInt(n_images_s); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.e(TAG, "n_images_s invalid format: " + n_images_s); + n_images = 3; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "n_images = " + n_images); + return n_images; + } + + @Override + public double getExpoBracketingStopsPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getExpoBracketingStopsPref"); + double n_stops; + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.HDR ) { + // always set 2 stops for HDR + n_stops = 2.0; + } + else { + String n_stops_s = sharedPreferences.getString(PreferenceKeys.ExpoBracketingStopsPreferenceKey, "2"); + try { + n_stops = Double.parseDouble(n_stops_s); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.e(TAG, "n_stops_s invalid format: " + n_stops_s); + n_stops = 2.0; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "n_stops = " + n_stops); + return n_stops; + } + + @Override + public int getFocusBracketingNImagesPref() { + if( MyDebug.LOG ) + Log.d(TAG, "getFocusBracketingNImagesPref"); + int n_images; + String n_images_s = sharedPreferences.getString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, "3"); + try { + n_images = Integer.parseInt(n_images_s); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.e(TAG, "n_images_s invalid format: " + n_images_s); + n_images = 3; + } + if( MyDebug.LOG ) + Log.d(TAG, "n_images = " + n_images); + return n_images; + } + + @Override + public boolean getFocusBracketingAddInfinityPref() { + return sharedPreferences.getBoolean(PreferenceKeys.FocusBracketingAddInfinityPreferenceKey, false); + } + + /** Returns the current photo mode. 
+ * Note, this always should return the true photo mode - if we're in video mode and taking a photo snapshot while + * video recording, the caller should override. We don't override here, as this preference may be used to affect how + * the CameraController is set up, and we don't always re-setup the camera when switching between photo and video modes. + */ + public PhotoMode getPhotoMode() { + String photo_mode_pref = sharedPreferences.getString(PreferenceKeys.PhotoModePreferenceKey, "preference_photo_mode_std"); + /*if( MyDebug.LOG ) + Log.d(TAG, "photo_mode_pref: " + photo_mode_pref);*/ + boolean dro = photo_mode_pref.equals("preference_photo_mode_dro"); + if( dro && main_activity.supportsDRO() ) + return PhotoMode.DRO; + boolean hdr = photo_mode_pref.equals("preference_photo_mode_hdr"); + if( hdr && main_activity.supportsHDR() ) + return PhotoMode.HDR; + boolean expo_bracketing = photo_mode_pref.equals("preference_photo_mode_expo_bracketing"); + if( expo_bracketing && main_activity.supportsExpoBracketing() ) + return PhotoMode.ExpoBracketing; + boolean focus_bracketing = photo_mode_pref.equals("preference_photo_mode_focus_bracketing"); + if( focus_bracketing && main_activity.supportsFocusBracketing() ) + return PhotoMode.FocusBracketing; + boolean fast_burst = photo_mode_pref.equals("preference_photo_mode_fast_burst"); + if( fast_burst && main_activity.supportsFastBurst() ) + return PhotoMode.FastBurst; + boolean noise_reduction = photo_mode_pref.equals("preference_photo_mode_noise_reduction"); + if( noise_reduction && main_activity.supportsNoiseReduction() ) + return PhotoMode.NoiseReduction; + boolean panorama = photo_mode_pref.equals("preference_photo_mode_panorama"); + if( panorama && !main_activity.getPreview().isVideo() && main_activity.supportsPanorama() ) + return PhotoMode.Panorama; + boolean x_auto = photo_mode_pref.equals("preference_photo_mode_x_auto"); + if( x_auto && !main_activity.getPreview().isVideo() && 
main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_AUTOMATIC) ) + return PhotoMode.X_Auto; + boolean x_hdr = photo_mode_pref.equals("preference_photo_mode_x_hdr"); + if( x_hdr && !main_activity.getPreview().isVideo() && main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_HDR) ) + return PhotoMode.X_HDR; + boolean x_night = photo_mode_pref.equals("preference_photo_mode_x_night"); + if( x_night && !main_activity.getPreview().isVideo() && main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_NIGHT) ) + return PhotoMode.X_Night; + boolean x_bokeh = photo_mode_pref.equals("preference_photo_mode_x_bokeh"); + if( x_bokeh && !main_activity.getPreview().isVideo() && main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BOKEH) ) + return PhotoMode.X_Bokeh; + boolean x_beauty = photo_mode_pref.equals("preference_photo_mode_x_beauty"); + if( x_beauty && !main_activity.getPreview().isVideo() && main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BEAUTY) ) + return PhotoMode.X_Beauty; + return PhotoMode.Standard; + } + + @Override + public boolean getJpegRPref() { + if( sharedPreferences.getString(PreferenceKeys.ImageFormatPreferenceKey, "preference_image_format_jpeg").equals("preference_image_format_jpeg_r") ) { + if( main_activity.getPreview().isVideo() ) { + // don't support JPEG R, either for video recording or video snapshot - problem that video recording fails + // if CameraController2 sets "config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);" for the preview + return false; + } + else { + PhotoMode photo_mode = getPhotoMode(); + if( photo_mode == PhotoMode.NoiseReduction || photo_mode == PhotoMode.HDR || photo_mode == PhotoMode.Panorama ) + return false; // not supported for these photo modes + // n.b., JPEG R won't be supported by x- extension modes either, although this is automatically handled by Preview + return true; + } + } + 
return false; + } + + private ImageSaver.Request.ImageFormat getImageFormatPref() { + switch( sharedPreferences.getString(PreferenceKeys.ImageFormatPreferenceKey, "preference_image_format_jpeg") ) { + case "preference_image_format_webp": + return ImageSaver.Request.ImageFormat.WEBP; + case "preference_image_format_png": + return ImageSaver.Request.ImageFormat.PNG; + default: + return ImageSaver.Request.ImageFormat.STD; + } + } + + /** Returns whether RAW is currently allowed, even if RAW is enabled in the preference (RAW + * isn't allowed for some photo modes, or in video mode, or when called from an intent). + * Note that this doesn't check whether RAW is supported by the camera. + */ + public boolean isRawAllowed(PhotoMode photo_mode) { + if( isImageCaptureIntent() ) + return false; + if( main_activity.getPreview().isVideo() ) + return false; // video snapshot mode + //return photo_mode == PhotoMode.Standard || photo_mode == PhotoMode.DRO; + if( photo_mode == PhotoMode.Standard || photo_mode == PhotoMode.DRO ) { + return true; + } + else if( photo_mode == PhotoMode.ExpoBracketing ) { + return sharedPreferences.getBoolean(PreferenceKeys.AllowRawForExpoBracketingPreferenceKey, true) && + main_activity.supportsBurstRaw(); + } + else if( photo_mode == PhotoMode.HDR ) { + // for HDR, RAW is only relevant if we're going to be saving the base expo images (otherwise there's nothing to save) + return sharedPreferences.getBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, false) && + sharedPreferences.getBoolean(PreferenceKeys.AllowRawForExpoBracketingPreferenceKey, true) && + main_activity.supportsBurstRaw(); + } + else if( photo_mode == PhotoMode.FocusBracketing ) { + return sharedPreferences.getBoolean(PreferenceKeys.AllowRawForFocusBracketingPreferenceKey, true) && + main_activity.supportsBurstRaw(); + } + // not supported for panorama mode + // not supported for camera vendor extensions + return false; + } + + /** Return whether to capture JPEG, or RAW+JPEG. 
+ * Note even if in RAW only mode, we still capture RAW+JPEG - the JPEG is needed for things like + * getting the bitmap for the thumbnail and pause preview option; we simply don't do any post- + * processing or saving on the JPEG. + */ + @Override + public RawPref getRawPref() { + PhotoMode photo_mode = getPhotoMode(); + if( isRawAllowed(photo_mode) ) { + switch( sharedPreferences.getString(PreferenceKeys.RawPreferenceKey, "preference_raw_no") ) { + case "preference_raw_yes": + case "preference_raw_only": + return RawPref.RAWPREF_JPEG_DNG; + } + } + return RawPref.RAWPREF_JPEG_ONLY; + } + + /** Whether RAW only mode is enabled. + */ + public boolean isRawOnly() { + PhotoMode photo_mode = getPhotoMode(); + return isRawOnly(photo_mode); + } + + /** Use this instead of isRawOnly() if the photo mode is already known - useful to call e.g. from MainActivity.supportsDRO() + * without causing an infinite loop! + */ + boolean isRawOnly(PhotoMode photo_mode) { + if( isRawAllowed(photo_mode) ) { + //noinspection SwitchStatementWithTooFewBranches + switch( sharedPreferences.getString(PreferenceKeys.RawPreferenceKey, "preference_raw_no") ) { + case "preference_raw_only": + return true; + } + } + return false; + } + + @Override + public int getMaxRawImages() { + return imageSaver.getMaxDNG(); + } + + @Override + public boolean useCamera2FakeFlash() { + return sharedPreferences.getBoolean(PreferenceKeys.Camera2FakeFlashPreferenceKey, false); + } + + @Override + public boolean useCamera2DummyCaptureHack() { + return sharedPreferences.getBoolean(PreferenceKeys.Camera2DummyCaptureHackPreferenceKey, false); + } + + @Override + public boolean useCamera2FastBurst() { + return sharedPreferences.getBoolean(PreferenceKeys.Camera2FastBurstPreferenceKey, true); + } + + @Override + public boolean usePhotoVideoRecording() { + // we only show the preference for Camera2 API (since there's no point disabling the feature for old API) + if( !useCamera2() ) + return true; + return 
sharedPreferences.getBoolean(PreferenceKeys.Camera2PhotoVideoRecordingPreferenceKey, true); + } + + @Override + public boolean isPreviewInBackground() { + return main_activity.isCameraInBackground(); + } + + @Override + public boolean allowZoom() { + if( getPhotoMode() == PhotoMode.Panorama ) { + // don't allow zooming in panorama mode, the algorithm isn't set up to support this! + return false; + } + else if( isCameraExtensionPref() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && !main_activity.getPreview().supportsZoomForCameraExtension(getCameraExtensionPref()) ) { + // zoom not supported for camera extension + return false; + } + return true; + } + + @Override + public boolean optimiseFocusForLatency() { + String pref = sharedPreferences.getString(PreferenceKeys.OptimiseFocusPreferenceKey, "preference_photo_optimise_focus_latency"); + return pref.equals("preference_photo_optimise_focus_latency") && main_activity.supportsOptimiseFocusLatency(); + } + + @Override + public void getDisplaySize(Point display_size, boolean exclude_insets) { + if( Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R ) { + WindowMetrics window_metrics = main_activity.getWindowManager().getCurrentWindowMetrics(); + final Rect bounds = window_metrics.getBounds(); + if( !main_activity.getEdgeToEdgeMode() || exclude_insets ) { + // use non-deprecated equivalent of Display.getSize() + final WindowInsets windowInsets = window_metrics.getWindowInsets(); + Insets insets = windowInsets.getInsetsIgnoringVisibility(WindowInsets.Type.navigationBars() | WindowInsets.Type.displayCutout()); + int insetsWidth = insets.right + insets.left; + int insetsHeight = insets.top + insets.bottom; + display_size.x = bounds.width() - insetsWidth; + display_size.y = bounds.height() - insetsHeight; + } + else { + display_size.x = bounds.width(); + display_size.y = bounds.height(); + } + } + else { + Display display = main_activity.getWindowManager().getDefaultDisplay(); + display.getSize(display_size); + 
} + } + + @Override + public boolean isTestAlwaysFocus() { + if( MyDebug.LOG ) { + Log.d(TAG, "isTestAlwaysFocus: " + main_activity.is_test); + } + return main_activity.is_test; + } + + @Override + public void cameraSetup() { + main_activity.cameraSetup(); + drawPreview.clearContinuousFocusMove(); + // Need to cause drawPreview.updateSettings(), otherwise icons like HDR won't show after force-restart, because we only + // know that HDR is supported after the camera is opened + // Also needed for settings which update when switching between photo and video mode. + drawPreview.updateSettings(); + } + + @Override + public void onContinuousFocusMove(boolean start) { + if( MyDebug.LOG ) + Log.d(TAG, "onContinuousFocusMove: " + start); + drawPreview.onContinuousFocusMove(start); + } + + void startPanorama() { + if( MyDebug.LOG ) + Log.d(TAG, "startPanorama"); + gyroSensor.startRecording(); + n_panorama_pics = 0; + panorama_pic_accepted = false; + panorama_dir_left_to_right = true; + + main_activity.getMainUI().setTakePhotoIcon(); + View cancelPanoramaButton = main_activity.findViewById(R.id.cancel_panorama); + cancelPanoramaButton.setVisibility(View.VISIBLE); + main_activity.getMainUI().closeExposureUI(); // close seekbars if open (popup is already closed when taking a photo) + // taking the photo will end up calling MainUI.showGUI(), which will hide the other on-screen icons + } + + /** Ends panorama and submits the panoramic images to be processed. + */ + void finishPanorama() { + if( MyDebug.LOG ) + Log.d(TAG, "finishPanorama"); + + imageSaver.getImageBatchRequest().panorama_dir_left_to_right = this.panorama_dir_left_to_right; + + stopPanorama(false); + + boolean image_capture_intent = isImageCaptureIntent(); + boolean do_in_background = saveInBackground(image_capture_intent); + imageSaver.finishImageBatch(do_in_background); + } + + /** Stop the panorama recording. Does nothing if panorama isn't currently recording. 
+ * @param is_cancelled Whether the panorama has been cancelled. + */ + void stopPanorama(boolean is_cancelled) { + if( MyDebug.LOG ) + Log.d(TAG, "stopPanorama"); + if( !gyroSensor.isRecording() ) { + if( MyDebug.LOG ) + Log.d(TAG, "...nothing to stop"); + return; + } + gyroSensor.stopRecording(); + clearPanoramaPoint(); + if( is_cancelled ) { + imageSaver.flushImageBatch(); + } + main_activity.getMainUI().setTakePhotoIcon(); + View cancelPanoramaButton = main_activity.findViewById(R.id.cancel_panorama); + cancelPanoramaButton.setVisibility(View.GONE); + main_activity.getMainUI().showGUI(); // refresh UI icons now that we've stopped panorama + } + + private void setNextPanoramaPoint(boolean repeat) { + if( MyDebug.LOG ) + Log.d(TAG, "setNextPanoramaPoint"); + float camera_angle_y = main_activity.getPreview().getViewAngleY(false); + if( !repeat ) + n_panorama_pics++; + if( MyDebug.LOG ) + Log.d(TAG, "n_panorama_pics is now: " + n_panorama_pics); + if( n_panorama_pics == max_panorama_pics_c ) { + if( MyDebug.LOG ) + Log.d(TAG, "reached max panorama limit"); + finishPanorama(); + return; + } + float angle = (float) Math.toRadians(camera_angle_y) * n_panorama_pics; + if( n_panorama_pics > 1 && !panorama_dir_left_to_right ) { + angle = - angle; // for right-to-left + } + float x = (float) Math.sin(angle / panorama_pics_per_screen); + float z = (float) -Math.cos(angle / panorama_pics_per_screen); + setNextPanoramaPoint(x, 0.0f, z); + + if( n_panorama_pics == 1 ) { + // also set target for right-to-left + angle = - angle; + x = (float) Math.sin(angle / panorama_pics_per_screen); + z = (float) -Math.cos(angle / panorama_pics_per_screen); + gyroSensor.addTarget(x, 0.0f, z); + drawPreview.addGyroDirectionMarker(x, 0.0f, z); + } + } + + private void setNextPanoramaPoint(float x, float y, float z) { + if( MyDebug.LOG ) + Log.d(TAG, "setNextPanoramaPoint : " + x + " , " + y + " , " + z); + + @SuppressWarnings("PointlessArithmeticExpression") + final float target_angle = 1.0f * 
0.01745329252f; + //final float target_angle = 0.5f * 0.01745329252f; + // good to not allow too small an angle for upright_angle_tol - as sometimes the device may + // get in a state where what we think is upright isn't quite right, and frustrating for users + // to be told they have to tilt to not be upright + final float upright_angle_tol = 3.0f * 0.017452406437f; + //final float upright_angle_tol = 2.0f * 0.017452406437f; + final float too_far_angle = 45.0f * 0.01745329252f; + gyroSensor.setTarget(x, y, z, target_angle, upright_angle_tol, too_far_angle, new GyroSensor.TargetCallback() { + @Override + public void onAchieved(int indx) { + if( MyDebug.LOG ) { + Log.d(TAG, "TargetCallback.onAchieved: " + indx); + Log.d(TAG, " n_panorama_pics: " + n_panorama_pics); + } + // Disable the target callback so we avoid risk of multiple callbacks - but note we don't call + // clearPanoramaPoint(), as we don't want to call drawPreview.clearGyroDirectionMarker() + // at this stage (looks better to keep showing the target market on-screen whilst photo + // is being taken, user more likely to keep the device still). + // Also we still keep the target active (and don't call clearTarget() so we can monitor if + // the target is still achieved or not (for panorama_pic_accepted). 
+ //gyroSensor.clearTarget(); + gyroSensor.disableTargetCallback(); + if( n_panorama_pics == 1 ) { + panorama_dir_left_to_right = indx == 0; + if( MyDebug.LOG ) + Log.d(TAG, "set panorama_dir_left_to_right to " + panorama_dir_left_to_right); + } + main_activity.takePicturePressed(false, false); + } + + @Override + public void onTooFar() { + if( MyDebug.LOG ) + Log.d(TAG, "TargetCallback.onTooFar"); + + // it's better not to cancel the panorama if the user moves the device too far in wrong direction + /*if( !main_activity.is_test ) { + main_activity.getPreview().showToast(null, R.string.panorama_cancelled, true); + MyApplicationInterface.this.stopPanorama(true); + }*/ + } + + }); + drawPreview.setGyroDirectionMarker(x, y, z); + } + + private void clearPanoramaPoint() { + if( MyDebug.LOG ) + Log.d(TAG, "clearPanoramaPoint"); + gyroSensor.clearTarget(); + drawPreview.clearGyroDirectionMarker(); + } + + static float getPanoramaPicsPerScreen() { + return panorama_pics_per_screen; + } + + @Override + public void touchEvent(MotionEvent event) { + main_activity.getMainUI().closeExposureUI(); + main_activity.getMainUI().closePopup(); + if( main_activity.usingKitKatImmersiveMode() ) { + main_activity.setImmersiveMode(false); + } + } + + @Override + public void startingVideo() { + if( sharedPreferences.getBoolean(PreferenceKeys.LockVideoPreferenceKey, false) ) { + main_activity.lockScreen(); + } + main_activity.stopAudioListeners(); // important otherwise MediaRecorder will fail to start() if we have an audiolistener! 
Also don't want to have the speech recognizer going off + ImageButton view = main_activity.findViewById(R.id.take_photo); + view.setImageResource(R.drawable.take_video_recording); + view.setContentDescription( getContext().getResources().getString(R.string.stop_video) ); + view.setTag(R.drawable.take_video_recording); // for testing + main_activity.getMainUI().destroyPopup(); // as the available popup options change while recording video + } + + private void startVideoSubtitlesTask(final VideoMethod video_method) { + final String preference_stamp_dateformat = this.getStampDateFormatPref(); + final String preference_stamp_timeformat = this.getStampTimeFormatPref(); + final String preference_stamp_gpsformat = this.getStampGPSFormatPref(); + final String preference_units_distance = this.getUnitsDistancePref(); + //final String preference_stamp_geo_address = this.getStampGeoAddressPref(); + final boolean store_location = getGeotaggingPref(); + final boolean store_geo_direction = getGeodirectionPref(); + class SubtitleVideoTimerTask extends TimerTask { + // need to keep a reference to pfd_saf for as long as writer, to avoid getting garbage collected - see https://sourceforge.net/p/opencamera/tickets/417/ + private ParcelFileDescriptor pfd_saf; + private OutputStreamWriter writer; + private Uri uri; + private int count = 1; + private long min_video_time_from = 0; + + private String getSubtitleFilename(String video_filename) { + if( MyDebug.LOG ) + Log.d(TAG, "getSubtitleFilename"); + int indx = video_filename.indexOf('.'); + if( indx != -1 ) { + video_filename = video_filename.substring(0, indx); + } + video_filename = video_filename + ".srt"; + if( MyDebug.LOG ) + Log.d(TAG, "return filename: " + video_filename); + return video_filename; + } + + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "SubtitleVideoTimerTask run"); + long video_time = main_activity.getPreview().getVideoTime(true); // n.b., in case of restarts due to max filesize, we only want the time for 
this video file! + if( !main_activity.getPreview().isVideoRecording() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no longer video recording"); + return; + } + if( main_activity.getPreview().isVideoRecordingPaused() ) { + if( MyDebug.LOG ) + Log.d(TAG, "video recording is paused"); + return; + } + Date current_date = new Date(); + Calendar current_calendar = Calendar.getInstance(); + int offset_ms = current_calendar.get(Calendar.MILLISECOND); + // We subtract an offset, because if the current time is say 00:00:03.425 and the video has been recording for + // 1s, we instead need to record the video time when it became 00:00:03.000. This does mean that the GPS + // location is going to be off by up to 1s, but that should be less noticeable than the clock being off. + if( MyDebug.LOG ) { + Log.d(TAG, "count: " + count); + Log.d(TAG, "offset_ms: " + offset_ms); + Log.d(TAG, "video_time: " + video_time); + } + String date_stamp = TextFormatter.getDateString(preference_stamp_dateformat, current_date); + String time_stamp = TextFormatter.getTimeString(preference_stamp_timeformat, current_date); + Location location = store_location ? getLocation() : null; + double geo_direction = store_geo_direction && main_activity.getPreview().hasGeoDirection() ? main_activity.getPreview().getGeoDirection() : 0.0; + String gps_stamp = main_activity.getTextFormatter().getGPSString(preference_stamp_gpsformat, preference_units_distance, store_location && location!=null, location, store_geo_direction && main_activity.getPreview().hasGeoDirection(), geo_direction); + if( MyDebug.LOG ) { + Log.d(TAG, "date_stamp: " + date_stamp); + Log.d(TAG, "time_stamp: " + time_stamp); + // don't log gps_stamp, in case of privacy! 
+ } + + String datetime_stamp = ""; + if( !date_stamp.isEmpty() ) + datetime_stamp += date_stamp; + if( !time_stamp.isEmpty() ) { + if( !datetime_stamp.isEmpty() ) + datetime_stamp += " "; + datetime_stamp += time_stamp; + } + + // build subtitles + StringBuilder subtitles = new StringBuilder(); + if( !datetime_stamp.isEmpty() ) + subtitles.append(datetime_stamp).append("\n"); + + if( !gps_stamp.isEmpty() ) { + /*Address address = null; + if( store_location && !preference_stamp_geo_address.equals("preference_stamp_geo_address_no") ) { + // try to find an address + if( main_activity.isAppPaused() ) { + // seems safer to not try to initiate potential network connections (via geocoder) if Open Camera + // is paused - this shouldn't happen, since we stop video when paused, but just to be safe + if( MyDebug.LOG ) + Log.d(TAG, "don't call geocoder for video subtitles as app is paused?!"); + } + else if( Geocoder.isPresent() ) { + if( MyDebug.LOG ) + Log.d(TAG, "geocoder is present"); + Geocoder geocoder = new Geocoder(main_activity, Locale.getDefault()); + try { + List
addresses = geocoder.getFromLocation(location.getLatitude(), location.getLongitude(), 1); + if( addresses != null && addresses.size() > 0 ) { + address = addresses.get(0); + // don't log address, in case of privacy! + if( MyDebug.LOG ) { + Log.d(TAG, "max line index: " + address.getMaxAddressLineIndex()); + } + } + } + catch(Exception e) { + MyDebug.logStackTrace(TAG, "failed to read from geocoder", e); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "geocoder not present"); + } + } + + if( address != null ) { + for(int i=0;i<=address.getMaxAddressLineIndex();i++) { + // write in forward order + String addressLine = address.getAddressLine(i); + subtitles.append(addressLine).append("\n"); + } + }*/ + + //if( address == null || preference_stamp_geo_address.equals("preference_stamp_geo_address_both") ) + { + if( MyDebug.LOG ) + Log.d(TAG, "display gps coords"); + subtitles.append(gps_stamp).append("\n"); + } + /*else if( store_geo_direction ) { + if( MyDebug.LOG ) + Log.d(TAG, "not displaying gps coords, but need to display geo direction"); + gps_stamp = main_activity.getTextFormatter().getGPSString(preference_stamp_gpsformat, preference_units_distance, false, null, store_geo_direction && main_activity.getPreview().hasGeoDirection(), geo_direction); + if( gps_stamp.length() > 0 ) { + // don't log gps_stamp, in case of privacy! 
+ subtitles.append(gps_stamp).append("\n"); + } + }*/ + } + + // Android warning that we should replace with isEmpty(), but that requires Android 15 for CharSequence.isEmpty() + if( subtitles.length() == 0 ) { + return; + } + long video_time_from = video_time - offset_ms; + long video_time_to = video_time_from + 999; + // don't want to start from before 0; also need to keep track of min_video_time_from to avoid bug reported at + // https://forum.xda-developers.com/showpost.php?p=74827802&postcount=345 for pause video where we ended up + // with overlapping times when resuming + if( video_time_from < min_video_time_from ) + video_time_from = min_video_time_from; + min_video_time_from = video_time_to + 1; + String subtitle_time_from = TextFormatter.formatTimeMS(video_time_from); + String subtitle_time_to = TextFormatter.formatTimeMS(video_time_to); + try { + synchronized( this ) { + if( writer == null ) { + if( video_method == VideoMethod.FILE ) { + String subtitle_filename = last_video_file.getAbsolutePath(); + subtitle_filename = getSubtitleFilename(subtitle_filename); + writer = new FileWriter(subtitle_filename); + } + else if( video_method == VideoMethod.SAF || video_method == VideoMethod.MEDIASTORE ) { + if( MyDebug.LOG ) + Log.d(TAG, "last_video_file_uri: " + last_video_file_uri); + String subtitle_filename = storageUtils.getFileName(last_video_file_uri); + subtitle_filename = getSubtitleFilename(subtitle_filename); + if( video_method == VideoMethod.SAF ) { + uri = storageUtils.createOutputFileSAF(subtitle_filename, ""); // don't set a mimetype, as we don't want it to append a new extension + } + else { + Uri folder = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ? 
+ MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY) : + MediaStore.Video.Media.EXTERNAL_CONTENT_URI; + ContentValues contentValues = new ContentValues(); + contentValues.put(MediaStore.Video.Media.DISPLAY_NAME, subtitle_filename); + // set mime type - it's unclear if .SRT files have an official mime type, but (a) we must set a mime type otherwise + // resultant files are named "*.srt.mp4", and (b) the mime type must be video/*, otherwise we get exception: + // "java.lang.IllegalArgumentException: MIME type text/plain cannot be inserted into content://media/external_primary/video/media; expected MIME type under video/*" + // and we need the file to be saved in the same folder (in DCIM/ ) as the video + contentValues.put(MediaStore.Images.Media.MIME_TYPE, "video/x-srt"); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + String relative_path = storageUtils.getSaveRelativeFolder(); + if( MyDebug.LOG ) + Log.d(TAG, "relative_path: " + relative_path); + contentValues.put(MediaStore.Video.Media.RELATIVE_PATH, relative_path); + contentValues.put(MediaStore.Video.Media.IS_PENDING, 1); + } + + // Note, we catch exceptions specific to insert() here and rethrow as IOException, + // rather than catching below, to avoid catching things too broadly. + // Catching too broadly could mean we miss genuine problems that should be fixed. 
+ try { + uri = main_activity.getContentResolver().insert(folder, contentValues); + } + catch(IllegalArgumentException e) { + // can happen for mediastore method if invalid ContentResolver.insert() call + MyDebug.logStackTrace(TAG, "IllegalArgumentException from SubtitleVideoTimerTask inserting to mediastore", e); + throw new IOException(); + } + catch(IllegalStateException e) { + MyDebug.logStackTrace(TAG, "IllegalStateException from SubtitleVideoTimerTask inserting to mediastore", e); + throw new IOException(); + } + if( uri == null ) { + throw new IOException(); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "uri: " + uri); + pfd_saf = getContext().getContentResolver().openFileDescriptor(uri, "w"); + writer = new FileWriter(pfd_saf.getFileDescriptor()); + } + } + if( writer != null ) { + writer.append(Integer.toString(count)); + writer.append('\n'); + writer.append(subtitle_time_from); + writer.append(" --> "); + writer.append(subtitle_time_to); + writer.append('\n'); + writer.append(subtitles.toString()); // subtitles should include the '\n' at the end + writer.append('\n'); // additional newline to indicate end of this subtitle + writer.flush(); + // n.b., we flush rather than closing/reopening the writer each time, as appending doesn't seem to work with storage access framework + } + } + count++; + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "SubtitleVideoTimerTask failed to create or write", e); + } + if( MyDebug.LOG ) + Log.d(TAG, "SubtitleVideoTimerTask exit"); + } + + public boolean cancel() { + if( MyDebug.LOG ) + Log.d(TAG, "SubtitleVideoTimerTask cancel"); + synchronized( this ) { + if( writer != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "close writer"); + try { + writer.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close writer", e); + } + writer = null; + } + if( pfd_saf != null ) { + try { + pfd_saf.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close pfd_saf", e); + } + pfd_saf = 
null; + } + if( video_method == VideoMethod.MEDIASTORE ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + ContentValues contentValues = new ContentValues(); + contentValues.put(MediaStore.Video.Media.IS_PENDING, 0); + main_activity.getContentResolver().update(uri, contentValues, null, null); + } + } + } + return super.cancel(); + } + } + subtitleVideoTimer.schedule(subtitleVideoTimerTask = new SubtitleVideoTimerTask(), 0, 1000); + } + + @Override + public void startedVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "startedVideo()"); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + if( !( main_activity.getMainUI().inImmersiveMode() && main_activity.usingKitKatImmersiveModeEverything() ) ) { + View pauseVideoButton = main_activity.findViewById(R.id.pause_video); + pauseVideoButton.setVisibility(View.VISIBLE); + } + main_activity.getMainUI().setPauseVideoContentDescription(); + } + if( main_activity.getPreview().supportsPhotoVideoRecording() && this.usePhotoVideoRecording() ) { + if( !( main_activity.getMainUI().inImmersiveMode() && main_activity.usingKitKatImmersiveModeEverything() ) ) { + View takePhotoVideoButton = main_activity.findViewById(R.id.take_photo_when_video_recording); + takePhotoVideoButton.setVisibility(View.VISIBLE); + } + } + if( main_activity.getMainUI().isExposureUIOpen() ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to update exposure UI for start video recording"); + // need to update the exposure UI when starting/stopping video recording, to remove/add + // ability to switch between auto and manual + main_activity.getMainUI().setupExposureUI(); + } + final VideoMethod video_method = this.createOutputVideoMethod(); + boolean dategeo_subtitles = getVideoSubtitlePref(video_method).equals("preference_video_subtitle_yes"); + if( dategeo_subtitles && video_method != ApplicationInterface.VideoMethod.URI ) { + startVideoSubtitlesTask(video_method); + } + } + + @Override + public void stoppingVideo() { + if( MyDebug.LOG ) + Log.d(TAG, 
                "stoppingVideo()");
        // NOTE(review): these first lines are the tail of stoppingVideo() - its start lies outside this chunk.
        main_activity.unlockScreen();
        // switch the shutter button back to its "start video" appearance
        ImageButton view = main_activity.findViewById(R.id.take_photo);
        view.setImageResource(R.drawable.take_video_selector);
        view.setContentDescription( getContext().getResources().getString(R.string.start_video) );
        view.setTag(R.drawable.take_video_selector); // for testing
    }

    /** Called when video recording has stopped. Hides the video-recording-only UI, finalises
     *  and broadcasts the new video, then either returns a result to a calling video capture
     *  intent, or generates a thumbnail for the gallery icon.
     * @param video_method How the video was stored (FILE, SAF or MEDIASTORE).
     * @param uri          Uri of the video (used for SAF/MEDIASTORE; presumably may be null for FILE).
     * @param filename     Path of the video (used for FILE; presumably may be null otherwise).
     */
    @Override
    public void stoppedVideo(final VideoMethod video_method, final Uri uri, final String filename) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "stoppedVideo");
            Log.d(TAG, "video_method " + video_method);
            Log.d(TAG, "uri " + uri);
            Log.d(TAG, "filename " + filename);
        }
        // hide the controls that only make sense while recording
        View pauseVideoButton = main_activity.findViewById(R.id.pause_video);
        pauseVideoButton.setVisibility(View.GONE);
        View takePhotoVideoButton = main_activity.findViewById(R.id.take_photo_when_video_recording);
        takePhotoVideoButton.setVisibility(View.GONE);
        main_activity.getMainUI().setPauseVideoContentDescription(); // just to be safe
        main_activity.getMainUI().destroyPopup(); // as the available popup options change while recording video
        if( main_activity.getMainUI().isExposureUIOpen() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "need to update exposure UI for stop video recording");
            // need to update the exposure UI when starting/stopping video recording, to remove/add
            // ability to switch between auto and manual
            main_activity.getMainUI().setupExposureUI();
        }
        if( subtitleVideoTimerTask != null ) {
            subtitleVideoTimerTask.cancel();
            subtitleVideoTimerTask = null;
        }

        completeVideo(video_method, uri);
        boolean done = broadcastVideo(video_method, uri, filename);
        if( MyDebug.LOG )
            Log.d(TAG, "done? " + done);

        if( isVideoCaptureIntent() ) {
            if( done && video_method == VideoMethod.FILE ) {
                // do nothing here - we end the activity from storageUtils.broadcastFile after the file has been scanned, as it seems caller apps seem to prefer the content:// Uri rather than one based on a File
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "from video capture intent");
                Intent output = null;
                if( done ) {
                    // may need to pass back the Uri we saved to, if the calling application didn't specify a Uri
                    // see note above for VideoMethod.FILE
                    // n.b., currently this code is not used, as we always switch to VideoMethod.FILE if the calling application didn't specify a Uri, but I've left this here for possible future behaviour
                    if( video_method == VideoMethod.SAF || video_method == VideoMethod.MEDIASTORE ) {
                        output = new Intent();
                        output.setData(uri);
                        if( MyDebug.LOG )
                            Log.d(TAG, "pass back output uri [saf]: " + output.getData());
                    }
                }
                main_activity.setResult(done ? Activity.RESULT_OK : Activity.RESULT_CANCELED, output);
                main_activity.finish();
            }
        }
        else if( done ) {
            // create thumbnail
            long debug_time = System.currentTimeMillis();
            Bitmap thumbnail = null;
            ParcelFileDescriptor pfd_saf = null; // keep a reference to this as long as retriever, to avoid risk of pfd_saf being garbage collected
            MediaMetadataRetriever retriever = new MediaMetadataRetriever();
            try {
                if( video_method == VideoMethod.FILE ) {
                    File file = new File(filename);
                    retriever.setDataSource(file.getPath());
                }
                else {
                    pfd_saf = getContext().getContentResolver().openFileDescriptor(uri, "r");
                    retriever.setDataSource(pfd_saf.getFileDescriptor());
                }
                thumbnail = retriever.getFrameAtTime(-1);
            }
            catch(FileNotFoundException | /*IllegalArgumentException |*/ RuntimeException e) {
                // video file wasn't saved or corrupt video file?
                MyDebug.logStackTrace(TAG, "failed to find thumbnail", e);
            }
            finally {
                // always release the retriever and close the file descriptor, even on failure
                try {
                    retriever.release();
                }
                catch(RuntimeException | IOException ex) {
                    // ignore
                }
                try {
                    if( pfd_saf != null ) {
                        pfd_saf.close();
                    }
                }
                catch(IOException e) {
                    MyDebug.logStackTrace(TAG, "failed to close pfd_saf", e);
                }
            }
            if( thumbnail != null ) {
                // scale the thumbnail down to the gallery button size before handing it to the UI
                ImageButton galleryButton = main_activity.findViewById(R.id.gallery);
                int width = thumbnail.getWidth();
                int height = thumbnail.getHeight();
                if( MyDebug.LOG )
                    Log.d(TAG, " video thumbnail size " + width + " x " + height);
                if( width > galleryButton.getWidth() ) {
                    float scale = (float) galleryButton.getWidth() / width;
                    int new_width = Math.round(scale * width);
                    int new_height = Math.round(scale * height);
                    if( MyDebug.LOG )
                        Log.d(TAG, " scale video thumbnail to " + new_width + " x " + new_height);
                    Bitmap scaled_thumbnail = Bitmap.createScaledBitmap(thumbnail, new_width, new_height, true);
                    // careful, as scaled_thumbnail is sometimes not a copy!
                    if( scaled_thumbnail != thumbnail ) {
                        thumbnail.recycle();
                        thumbnail = scaled_thumbnail;
                    }
                }
                final Bitmap thumbnail_f = thumbnail;
                // update the gallery icon on the UI thread
                main_activity.runOnUiThread(new Runnable() {
                    public void run() {
                        updateThumbnail(thumbnail_f, true);
                    }
                });
            }
            if( MyDebug.LOG )
                Log.d(TAG, " time to create thumbnail: " + (System.currentTimeMillis() - debug_time));
        }
    }

    /** Called when video recording has been restarted into a new output file (without the user
     *  stopping recording). Finalises and broadcasts the previous file, and restarts the
     *  subtitles file if one was being written.
     * @param video_method How the video was stored (FILE, SAF or MEDIASTORE).
     * @param uri          Uri of the completed video file, where applicable.
     * @param filename     Path of the completed video file, where applicable.
     */
    @Override
    public void restartedVideo(final VideoMethod video_method, final Uri uri, final String filename) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "restartedVideo");
            Log.d(TAG, "video_method " + video_method);
            Log.d(TAG, "uri " + uri);
            Log.d(TAG, "filename " + filename);
        }
        completeVideo(video_method, uri);
        broadcastVideo(video_method, uri, filename);

        // also need to restart subtitles file
        if( subtitleVideoTimerTask != null ) {
            subtitleVideoTimerTask.cancel();
            subtitleVideoTimerTask = null;

            // No need to check if option for subtitles is set, if we were already saving subtitles.
            // Assume that video_method is unchanged between old and new video file when restarting.
            startVideoSubtitlesTask(video_method);
        }
    }

    /** Called when we've finished recording to a video file, to do any necessary cleanup for the
     * file.
     */
    void completeVideo(final VideoMethod video_method, final Uri uri) {
        if( MyDebug.LOG )
            Log.d(TAG, "completeVideo");
        if( video_method == VideoMethod.MEDIASTORE ) {
            if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                // clear the IS_PENDING flag so the video becomes visible to other applications
                ContentValues contentValues = new ContentValues();
                contentValues.put(MediaStore.Video.Media.IS_PENDING, 0);
                main_activity.getContentResolver().update(uri, contentValues, null, null);
            }
        }
    }

    /** Notifies the system/media scanner about a newly saved video, and caches the last media
     *  uri where possible.
     * @return Whether the video could be broadcast (i.e., we had a non-null uri or filename
     *         appropriate for the supplied video_method).
     */
    boolean broadcastVideo(final VideoMethod video_method, final Uri uri, final String filename) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "broadcastVideo");
            Log.d(TAG, "video_method " + video_method);
            Log.d(TAG, "uri " + uri);
            Log.d(TAG, "filename " + filename);
        }
        boolean done = false;
        // clear just in case we're unable to update this - don't want an out of date cached uri
        storageUtils.clearLastMediaScanned();
        if( video_method == VideoMethod.MEDIASTORE ) {
            // no need to broadcast when using mediastore

            if( uri != null ) {
                // in theory this is pointless, as announceUri no longer does anything on Android 7+,
                // and mediastore method is only used on Android 10+, but keep this just in case
                // announceUri does something in future
                storageUtils.announceUri(uri, false, true);

                // we also want to save the uri - we can use the media uri directly, rather than having to scan it
                storageUtils.setLastMediaScanned(uri, false, false, null);

                done = true;
            }
        }
        else if( video_method == VideoMethod.FILE ) {
            if( filename != null ) {
                File file = new File(filename);
                storageUtils.broadcastFile(file, false, true, true, false, null);
                done = true;
            }
        }
        else {
            if( uri != null ) {
                // see note in onPictureTaken() for where we call broadcastFile for SAF photos
                storageUtils.broadcastUri(uri, false, true, true, false, false);
                done = true;
            }
        }
        if( done ) {
            test_n_videos_scanned++;
            if( MyDebug.LOG )
                Log.d(TAG, "test_n_videos_scanned is now: " + test_n_videos_scanned);
        }

        if( video_method ==
VideoMethod.MEDIASTORE && isVideoCaptureIntent() ) { + finishVideoIntent(uri); + } + return done; + } + + /** For use when called from a video capture intent. This returns the supplied uri to the + * caller, and finishes the activity. + */ + void finishVideoIntent(Uri uri) { + if( MyDebug.LOG ) + Log.d(TAG, "finishVideoIntent:" + uri); + Intent output = new Intent(); + output.setData(uri); + main_activity.setResult(Activity.RESULT_OK, output); + main_activity.finish(); + } + + @Override + public void deleteUnusedVideo(final VideoMethod video_method, final Uri uri, final String filename) { + if( MyDebug.LOG ) { + Log.d(TAG, "deleteUnusedVideo"); + Log.d(TAG, "video_method " + video_method); + Log.d(TAG, "uri " + uri); + Log.d(TAG, "filename " + filename); + } + if( video_method == VideoMethod.FILE ) { + trashImage(LastImagesType.FILE, uri, filename, false); + } + else if( video_method == VideoMethod.SAF ) { + trashImage(LastImagesType.SAF, uri, filename, false); + } + else if( video_method == VideoMethod.MEDIASTORE ) { + trashImage(LastImagesType.MEDIASTORE, uri, filename, false); + } + // else can't delete Uri + } + + @Override + public void onVideoInfo(int what, int extra) { + // we don't show a toast for MEDIA_RECORDER_INFO_MAX_DURATION_REACHED - conflicts with "n repeats to go" toast from Preview + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && what == MediaRecorder.MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "next output file started"); + int message_id = R.string.video_max_filesize; + main_activity.getPreview().showToast(null, message_id, true); + } + else if( what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED ) { + if( MyDebug.LOG ) + Log.d(TAG, "max filesize reached"); + int message_id = R.string.video_max_filesize; + main_activity.getPreview().showToast(null, message_id, true); + } + // in versions 1.24 and 1.24, there was a bug where we had "info_" for onVideoError and "error_" for onVideoInfo! 
        // fixed in 1.25; also was correct for 1.23 and earlier
        // record the most recent info code in the preferences, for debugging
        String debug_value = "info_" + what + "_" + extra;
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putString("last_video_error", debug_value);
        editor.apply();
    }

    /** Callback for when the camera preview failed to start. */
    @Override
    public void onFailedStartPreview() {
        main_activity.getPreview().showToast(null, R.string.failed_to_start_camera_preview);
        main_activity.enablePausePreviewOnBackPressedCallback(false); // reenable standard back button behaviour (in case preview was paused due to option to pause preview after taking a photo)
    }

    /** Callback for a general camera error. */
    @Override
    public void onCameraError() {
        main_activity.getPreview().showToast(null, R.string.camera_error);
    }

    /** Callback for when taking a photo failed. */
    @Override
    public void onPhotoError() {
        main_activity.getPreview().showToast(null, R.string.failed_to_take_picture);
    }

    /** Callback for MediaRecorder errors: shows a toast and records the error code in the
     *  preferences for debugging.
     * @param what  MediaRecorder error code.
     * @param extra Extra error code from MediaRecorder.
     */
    @Override
    public void onVideoError(int what, int extra) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onVideoError: " + what + " extra: " + extra);
        }
        int message_id = R.string.video_error_unknown;
        if( what == MediaRecorder.MEDIA_ERROR_SERVER_DIED ) {
            if( MyDebug.LOG )
                Log.d(TAG, "error: server died");
            message_id = R.string.video_error_server_died;
        }
        main_activity.getPreview().showToast(null, message_id);
        // in versions 1.24 and 1.24, there was a bug where we had "info_" for onVideoError and "error_" for onVideoInfo!
        // fixed in 1.25; also was correct for 1.23 and earlier
        String debug_value = "error_" + what + "_" + extra;
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putString("last_video_error", debug_value);
        editor.apply();
    }

    /** Callback for when video recording failed to start; shows an error message mentioning
     *  any unsupported features of the requested profile. */
    @Override
    public void onVideoRecordStartError(VideoProfile profile) {
        if( MyDebug.LOG )
            Log.d(TAG, "onVideoRecordStartError");
        String error_message;
        String features = main_activity.getPreview().getErrorFeatures(profile);
        if( !features.isEmpty() ) {
            error_message = getContext().getResources().getString(R.string.sorry) + ", " + features + " " + getContext().getResources().getString(R.string.not_supported);
        }
        else {
            error_message = getContext().getResources().getString(R.string.failed_to_record_video);
        }
        main_activity.getPreview().showToast(null, error_message);
    }

    /** Callback for when stopping video recording failed (the resulting file may be corrupted). */
    @Override
    public void onVideoRecordStopError(VideoProfile profile) {
        if( MyDebug.LOG )
            Log.d(TAG, "onVideoRecordStopError");
        //main_activity.getPreview().showToast(null, R.string.failed_to_record_video);
        String features = main_activity.getPreview().getErrorFeatures(profile);
        String error_message = getContext().getResources().getString(R.string.video_may_be_corrupted);
        if( !features.isEmpty() ) {
            error_message += ", " + features + " " + getContext().getResources().getString(R.string.not_supported);
        }
        main_activity.getPreview().showToast(null, error_message);
    }

    /** Callback for when we failed to reconnect to the camera after video recording. */
    @Override
    public void onFailedReconnectError() {
        main_activity.getPreview().showToast(null, R.string.failed_to_reconnect_camera);
    }

    /** Callback for when creating the output video file failed. */
    @Override
    public void onFailedCreateVideoFileError() {
        if( MyDebug.LOG )
            Log.d(TAG, "onFailedCreateVideoFileError");
        main_activity.getPreview().showToast(null, R.string.failed_to_save_video);
    }

    /** Called when the preview is paused or unpaused (e.g., after taking a photo with the
     *  pause-preview option); shows or hides the share/trash buttons accordingly. */
    @Override
    public void hasPausedPreview(boolean paused) {
        if( MyDebug.LOG )
            Log.d(TAG, "hasPausedPreview: " + paused);
        View shareButton = main_activity.findViewById(R.id.share);
        View trashButton =
        main_activity.findViewById(R.id.trash);
        if( paused ) {
            shareButton.setVisibility(View.VISIBLE);
            trashButton.setVisibility(View.VISIBLE);
            main_activity.enablePausePreviewOnBackPressedCallback(true); // so that pressing back button instead unpauses the preview
        }
        else {
            shareButton.setVisibility(View.GONE);
            trashButton.setVisibility(View.GONE);
            this.clearLastImages();
            main_activity.enablePausePreviewOnBackPressedCallback(false); // reenable standard back button behaviour
        }
    }

    /** Called when the camera starts or stops being in operation (taking a photo or recording
     *  video); updates the on-screen GUI accordingly. */
    @Override
    public void cameraInOperation(boolean in_operation, boolean is_video) {
        if( MyDebug.LOG )
            Log.d(TAG, "cameraInOperation: " + in_operation);
        if( !in_operation && used_front_screen_flash ) {
            main_activity.setBrightnessForCamera(false); // ensure screen brightness matches user preference, after using front screen flash
            used_front_screen_flash = false;
        }
        drawPreview.cameraInOperation(in_operation);
        main_activity.getMainUI().showGUI(!in_operation, is_video);
    }

    /** Turns on the "front screen flash" (maximising screen brightness to act as a flash for
     *  the front camera). */
    @Override
    public void turnFrontScreenFlashOn() {
        if( MyDebug.LOG )
            Log.d(TAG, "turnFrontScreenFlashOn");
        used_front_screen_flash = true;
        main_activity.setBrightnessForCamera(true); // ensure we have max screen brightness, even if user preference not set for max brightness
        drawPreview.turnFrontScreenFlashOn();
    }

    /** Called when a photo capture has started; resets the per-capture image counters. */
    @Override
    public void onCaptureStarted() {
        if( MyDebug.LOG )
            Log.d(TAG, "onCaptureStarted");
        n_capture_images = 0;
        n_capture_images_raw = 0;
        drawPreview.onCaptureStarted();

        if( getPhotoMode() == PhotoMode.X_Night ) {
            main_activity.getPreview().showToast(null, R.string.preference_nr_mode_low_light_message, true);
        }
    }

    /** Called when a photo capture has fully completed; performs per-photo-mode completion
     *  work (finishing a noise-reduction batch, advancing the panorama, or playing the focus
     *  bracketing completion sound). */
    @Override
    public void onPictureCompleted() {
        if( MyDebug.LOG )
            Log.d(TAG, "onPictureCompleted");

        // clear any toasts displayed during progress (e.g., preference_nr_mode_low_light_message, or onExtensionProgress())
        main_activity.getPreview().clearActiveFakeToast();

        PhotoMode photo_mode = getPhotoMode();
        if( main_activity.getPreview().isVideo() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "snapshot mode");
            // must be in photo snapshot while recording video mode, only support standard photo mode
            photo_mode = PhotoMode.Standard;
        }
        if( photo_mode == PhotoMode.NoiseReduction ) {
            boolean image_capture_intent = isImageCaptureIntent();
            boolean do_in_background = saveInBackground(image_capture_intent);
            imageSaver.finishImageBatch(do_in_background);
        }
        else if( photo_mode == MyApplicationInterface.PhotoMode.Panorama && gyroSensor.isRecording() ) {
            if( panorama_pic_accepted ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "set next panorama point");
                this.setNextPanoramaPoint(false);
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "panorama pic wasn't accepted");
                this.setNextPanoramaPoint(true);
            }
        }
        else if( photo_mode == PhotoMode.FocusBracketing ) {
            if( MyDebug.LOG )
                Log.d(TAG, "focus bracketing completed");
            if( getShutterSoundPref() ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "play completion sound");
                MediaPlayer player = MediaPlayer.create(getContext(), Settings.System.DEFAULT_NOTIFICATION_URI);
                if( player != null ) {
                    player.start();
                }
            }
        }

        // call this, so that if pause-preview-after-taking-photo option is set, we remove the "taking photo" border indicator straight away
        // also even for normal (not pausing) behaviour, good to remove the border asap
        drawPreview.cameraInOperation(false);
    }

    /** Called with progress (percent) of a camera-extension capture; shown as a toast. */
    @Override
    public void onExtensionProgress(int progress) {
        String message = "";
        if( getPhotoMode() == PhotoMode.X_Night ) {
            message = getContext().getResources().getString(R.string.preference_nr_mode_low_light_message) + "\n";
        }
        // call with dont_clear==true, as on some devices (e.g., Galaxy S24+ in X-Night mode), there can be a pause long enough that
        // the toast would clear, even when not at 100%
        main_activity.getPreview().showToast(null, message + progress + "%", true, true);
    }

    /** Called when the camera is closed; tears down UI state that depends on the open camera. */
    @Override
    public void cameraClosed() {
        if( MyDebug.LOG )
            Log.d(TAG, "cameraClosed");
        this.stopPanorama(true);
        main_activity.getMainUI().closeExposureUI();
        main_activity.getMainUI().destroyPopup(); // need to close popup - and when camera reopened, it may have different settings
        drawPreview.clearContinuousFocusMove();
    }

    /** Updates the gallery icon with the supplied thumbnail; if the pause-preview option is
     *  set, also shows the last image (photos only). */
    void updateThumbnail(Bitmap thumbnail, boolean is_video) {
        if( MyDebug.LOG )
            Log.d(TAG, "updateThumbnail");
        main_activity.updateGalleryIcon(thumbnail);
        drawPreview.updateThumbnail(thumbnail, is_video, true);
        if( !is_video && this.getPausePreviewPref() ) {
            drawPreview.showLastImage();
        }
    }

    /** Called on each tick of a countdown timer; plays a beep and/or speaks the remaining
     *  time, depending on the user's preferences.
     * @param remaining_time Remaining time in milliseconds.
     */
    @Override
    public void timerBeep(long remaining_time) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "timerBeep()");
            Log.d(TAG, "remaining_time: " + remaining_time);
        }
        if( sharedPreferences.getBoolean(PreferenceKeys.TimerBeepPreferenceKey, true) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "play beep!");
            boolean is_last = remaining_time <= 1000; // higher-pitched beep for the final second
            main_activity.getSoundPoolManager().playSound(is_last ? R.raw.mybeep_hi : R.raw.mybeep);
        }
        if( sharedPreferences.getBoolean(PreferenceKeys.TimerSpeakPreferenceKey, false) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "speak countdown!");
            int remaining_time_s = (int)(remaining_time/1000);
            if( remaining_time_s <= 60 )
                main_activity.speak(String.valueOf(remaining_time_s));
        }
    }

    /** Called for multitouch pinch-zoom gestures; keeps the zoom seekbar in sync. */
    @Override
    public void multitouchZoom(int new_zoom) {
        main_activity.getMainUI().setSeekbarZoom(new_zoom);
    }

    /** Request to take a photo. */
    @Override
    public void requestTakePhoto() {
        if( MyDebug.LOG )
            Log.d(TAG, "requestTakePhoto");
        main_activity.takePicture(false);
    }

    /** Switch to the first available camera that is front or back facing as desired.
     * @param front_facing Whether to switch to a front or back facing camera.
     */
    void switchToCamera(boolean front_facing) {
        if( MyDebug.LOG )
            Log.d(TAG, "switchToCamera: " + front_facing);
        int n_cameras = main_activity.getPreview().getCameraControllerManager().getNumberOfCameras();
        CameraController.Facing want_facing = front_facing ? CameraController.Facing.FACING_FRONT : CameraController.Facing.FACING_BACK;
        // NOTE(review): the source text is garbled below - the loop body, the end of
        // switchToCamera(), and the signature of saveImage() (presumably
        // "private boolean saveImage(boolean save_expo, List<byte[]> images, Date current_date)")
        // appear to have been lost in extraction (likely text between '<' characters was eaten);
        // kept verbatim rather than guessed at - TODO confirm against the original file.
        for(int i=0;i images, Date current_date) {
        if( MyDebug.LOG )
            Log.d(TAG, "saveImage");

        System.gc();

        // if called from an image capture intent, the caller may have supplied an output uri
        boolean image_capture_intent = isImageCaptureIntent();
        Uri image_capture_intent_uri = null;
        if( image_capture_intent ) {
            if( MyDebug.LOG )
                Log.d(TAG, "from image capture intent");
            Bundle myExtras = main_activity.getIntent().getExtras();
            if( myExtras != null ) {
                image_capture_intent_uri = myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
                if( MyDebug.LOG )
                    Log.d(TAG, "save to: " + image_capture_intent_uri);
            }
        }

        // gather user preferences and current camera state for the save request
        boolean using_camera2 = main_activity.getPreview().usingCamera2API();
        boolean using_camera_extensions = isCameraExtensionPref();
        ImageSaver.Request.ImageFormat image_format = getImageFormatPref();
        boolean store_ypr = sharedPreferences.getBoolean(PreferenceKeys.AddYPRToComments, false) &&
                main_activity.getPreview().hasLevelAngle() &&
                main_activity.getPreview().hasPitchAngle() &&
                main_activity.getPreview().hasGeoDirection();
        if( MyDebug.LOG ) {
            Log.d(TAG, "store_ypr: " + store_ypr);
            Log.d(TAG, "has level angle: " + main_activity.getPreview().hasLevelAngle());
            Log.d(TAG, "has pitch angle: " + main_activity.getPreview().hasPitchAngle());
            Log.d(TAG, "has geo direction: " + main_activity.getPreview().hasGeoDirection());
        }
        int image_quality = getSaveImageQualityPref();
        if( MyDebug.LOG )
            Log.d(TAG, "image_quality: " + image_quality);
        boolean do_auto_stabilise = getAutoStabilisePref() && main_activity.getPreview().hasLevelAngleStable();
        double level_angle = (main_activity.getPreview().hasLevelAngle()) ? main_activity.getPreview().getLevelAngle() : 0.0;
        double pitch_angle = (main_activity.getPreview().hasPitchAngle()) ? main_activity.getPreview().getPitchAngle() : 0.0;
        // test hooks: allow tests to override the level angle
        if( do_auto_stabilise && main_activity.test_have_angle )
            level_angle = main_activity.test_angle;
        if( do_auto_stabilise && main_activity.test_low_memory )
            level_angle = 45.0;
        // I have received crashes where camera_controller was null - could perhaps happen if this thread was running just as the camera is closing?
        boolean is_front_facing = main_activity.getPreview().getCameraController() != null && (main_activity.getPreview().getCameraController().getFacing() == CameraController.Facing.FACING_FRONT);
        boolean mirror = is_front_facing && sharedPreferences.getString(PreferenceKeys.FrontCameraMirrorKey, "preference_front_camera_mirror_no").equals("preference_front_camera_mirror_photo");
        String preference_stamp = this.getStampPref();
        String preference_textstamp = this.getTextStampPref();
        int font_size = getTextStampFontSizePref();
        int color = getStampFontColor();
        String pref_style = sharedPreferences.getString(PreferenceKeys.StampStyleKey, "preference_stamp_style_shadowed");
        String preference_stamp_dateformat = this.getStampDateFormatPref();
        String preference_stamp_timeformat = this.getStampTimeFormatPref();
        String preference_stamp_gpsformat = this.getStampGPSFormatPref();
        //String preference_stamp_geo_address = this.getStampGeoAddressPref();
        String preference_units_distance = this.getUnitsDistancePref();
        boolean panorama_crop = sharedPreferences.getString(PreferenceKeys.PanoramaCropPreferenceKey, "preference_panorama_crop_on").equals("preference_panorama_crop_on");
        ImageSaver.Request.RemoveDeviceExif remove_device_exif = getRemoveDeviceExifPref();
        boolean store_location = getGeotaggingPref() && getLocation() != null;
        Location location = store_location ? getLocation() : null;
        boolean store_geo_direction = main_activity.getPreview().hasGeoDirection() && getGeodirectionPref();
        double geo_direction = main_activity.getPreview().hasGeoDirection() ? main_activity.getPreview().getGeoDirection() : 0.0;
        String custom_tag_artist = sharedPreferences.getString(PreferenceKeys.ExifArtistPreferenceKey, "");
        String custom_tag_copyright = sharedPreferences.getString(PreferenceKeys.ExifCopyrightPreferenceKey, "");

        int iso = 800; // default value if we can't get ISO
        long exposure_time = 1000000000L/30; // default value if we can't get shutter speed
        float zoom_factor = 1.0f;
        if( main_activity.getPreview().getCameraController() != null ) {
            if( main_activity.getPreview().getCameraController().captureResultHasIso() ) {
                iso = main_activity.getPreview().getCameraController().captureResultIso();
                if( MyDebug.LOG )
                    Log.d(TAG, "iso: " + iso);
            }
            if( main_activity.getPreview().getCameraController().captureResultHasExposureTime() ) {
                exposure_time = main_activity.getPreview().getCameraController().captureResultExposureTime();
                if( MyDebug.LOG )
                    Log.d(TAG, "exposure_time: " + exposure_time);
            }

            zoom_factor = main_activity.getPreview().getZoomRatio();
        }

        boolean has_thumbnail_animation = getThumbnailAnimationPref();

        boolean do_in_background = saveInBackground(image_capture_intent);

        String ghost_image_pref = sharedPreferences.getString(PreferenceKeys.GhostImagePreferenceKey, "preference_ghost_image_off");

        int sample_factor = 1;
        if( !this.getPausePreviewPref() && !ghost_image_pref.equals("preference_ghost_image_last") ) {
            // if pausing the preview, we use the thumbnail also for the preview, so don't downsample
            // similarly for ghosting last image
            // otherwise, we can downsample by 4 to increase performance, without noticeable loss in visual quality (even for the thumbnail animation)
            sample_factor *= 4;
            if( !has_thumbnail_animation ) {
                // can use even lower resolution if we don't have the thumbnail animation
                sample_factor *= 4;
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, "sample_factor: " + sample_factor);

        boolean success;
        PhotoMode photo_mode = getPhotoMode();
        if( main_activity.getPreview().isVideo() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "snapshot mode");
            // must be in photo snapshot while recording video mode, only support standard photo mode
            photo_mode = PhotoMode.Standard;
        }

        // NOTE(review): generic type argument appears stripped in this chunk - presumably List<Bitmap>; TODO confirm.
        List preshot_bitmaps = null;
        if( !image_capture_intent && n_capture_images <= 1 && getPreShotsPref(photo_mode) ) {
            // n.b., n_capture_images == 0 if using onBurstPictureTaken(), e.g., for photo mode HDR
            Preview.RingBuffer ring_buffer = main_activity.getPreview().getPreShotsRingBuffer();

            if( ring_buffer.getNBitmaps() >= 3 ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "save pre-shots");

                preshot_bitmaps = new ArrayList<>();
                while( ring_buffer.hasBitmaps() ) {
                    Bitmap bitmap = ring_buffer.get();
                    preshot_bitmaps.add(bitmap);
                }
            }
        }

        if( !main_activity.is_test && photo_mode == PhotoMode.Panorama && gyroSensor.isRecording() && gyroSensor.hasTarget() && !gyroSensor.isTargetAchieved() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "ignore panorama image as target no longer achieved!");
            // n.b., gyroSensor.hasTarget() will be false if this is the first picture in the panorama series
            panorama_pic_accepted = false;
            success = true; // still treat as success
        }
        else if( photo_mode == PhotoMode.NoiseReduction || photo_mode == PhotoMode.Panorama ) {
            // batched modes: the first image of the batch starts a new image batch request
            boolean first_image;
            if( photo_mode == PhotoMode.Panorama ) {
                panorama_pic_accepted = true;
                first_image = n_panorama_pics == 0;
            }
            else
                first_image = n_capture_images == 1;
            if( first_image ) {
                ImageSaver.Request.SaveBase save_base = ImageSaver.Request.SaveBase.SAVEBASE_NONE;
                if( photo_mode == PhotoMode.NoiseReduction ) {
                    String save_base_preference = sharedPreferences.getString(PreferenceKeys.NRSaveExpoPreferenceKey, "preference_nr_save_no");
                    switch( save_base_preference ) {
                        case "preference_nr_save_single":
                            save_base = ImageSaver.Request.SaveBase.SAVEBASE_FIRST;
                            break;
                        case "preference_nr_save_all":
                            save_base = ImageSaver.Request.SaveBase.SAVEBASE_ALL;
                            break;
                    }
                }
                else if( photo_mode == PhotoMode.Panorama ) {
                    String save_base_preference = sharedPreferences.getString(PreferenceKeys.PanoramaSaveExpoPreferenceKey, "preference_panorama_save_no");
                    switch( save_base_preference ) {
                        case "preference_panorama_save_all":
                            save_base = ImageSaver.Request.SaveBase.SAVEBASE_ALL;
                            break;
                        case "preference_panorama_save_all_plus_debug":
                            save_base = ImageSaver.Request.SaveBase.SAVEBASE_ALL_PLUS_DEBUG;
                            break;
                    }
                }

                imageSaver.startImageBatch(true,
                        photo_mode == PhotoMode.NoiseReduction ? ImageSaver.Request.ProcessType.AVERAGE : ImageSaver.Request.ProcessType.PANORAMA,
                        preshot_bitmaps,
                        save_base,
                        image_capture_intent, image_capture_intent_uri,
                        using_camera2, using_camera_extensions,
                        image_format, image_quality,
                        do_auto_stabilise, level_angle, photo_mode == PhotoMode.Panorama,
                        is_front_facing,
                        mirror,
                        current_date,
                        iso,
                        exposure_time,
                        zoom_factor,
                        preference_stamp, preference_textstamp, font_size, color, pref_style, preference_stamp_dateformat, preference_stamp_timeformat, preference_stamp_gpsformat,
                        //preference_stamp_geo_address,
                        preference_units_distance,
                        panorama_crop,
                        remove_device_exif,
                        store_location, location, store_geo_direction, geo_direction,
                        pitch_angle, store_ypr,
                        custom_tag_artist, custom_tag_copyright,
                        sample_factor);

                if( photo_mode == PhotoMode.Panorama ) {
                    imageSaver.getImageBatchRequest().camera_view_angle_x = main_activity.getPreview().getViewAngleX(false);
                    imageSaver.getImageBatchRequest().camera_view_angle_y = main_activity.getPreview().getViewAngleY(false);
                }
            }

            float [] gyro_rotation_matrix = null;
            if( photo_mode == PhotoMode.Panorama ) {
                gyro_rotation_matrix = new float[9];
                this.gyroSensor.getRotationMatrix(gyro_rotation_matrix);
            }

            imageSaver.addImageBatch(images.get(0), gyro_rotation_matrix);
            success = true;
        }
        else {
            // single-request save (standard, DRO, HDR, X_Night, expo/focus bracketing)
            ImageSaver.Request.ProcessType processType;
            if( photo_mode == PhotoMode.DRO || photo_mode == PhotoMode.HDR )
                processType = ImageSaver.Request.ProcessType.HDR;
            else if( photo_mode == PhotoMode.X_Night )
                processType = ImageSaver.Request.ProcessType.X_NIGHT;
            else
                processType = ImageSaver.Request.ProcessType.NORMAL;
            boolean force_suffix = forceSuffix(photo_mode);

            // map the tonemapping preference to the HDRProcessor algorithm
            HDRProcessor.TonemappingAlgorithm preference_hdr_tonemapping_algorithm = HDRProcessor.default_tonemapping_algorithm_c;
            {
                String tonemapping_algorithm_pref = sharedPreferences.getString(PreferenceKeys.HDRTonemappingPreferenceKey, "preference_hdr_tonemapping_default");
                switch( tonemapping_algorithm_pref ) {
                    case "preference_hdr_tonemapping_clamp":
                        preference_hdr_tonemapping_algorithm = HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_CLAMP;
                        break;
                    case "preference_hdr_tonemapping_exponential":
                        preference_hdr_tonemapping_algorithm = HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_EXPONENTIAL;
                        break;
                    case "preference_hdr_tonemapping_default": // reinhard
                        preference_hdr_tonemapping_algorithm = HDRProcessor.default_tonemapping_algorithm_c;
                        break;
                    case "preference_hdr_tonemapping_aces":
                        preference_hdr_tonemapping_algorithm = HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_ACES;
                        break;
                    default:
                        Log.e(TAG, "unhandled case for tonemapping: " + tonemapping_algorithm_pref);
                        break;
                }
            }
            String preference_hdr_contrast_enhancement = sharedPreferences.getString(PreferenceKeys.HDRContrastEnhancementPreferenceKey, "preference_hdr_contrast_enhancement_smart");

            success = imageSaver.saveImageJpeg(do_in_background, processType,
                    force_suffix,
                    // N.B., n_capture_images will be 1 for first image, not 0, so subtract 1 so we start off from _0.
                    // (It wouldn't be a huge problem if we did start from _1, but it would be inconsistent with the naming
                    // of images where images.size() > 1 (e.g., expo bracketing mode) where we also start from _0.)
                    force_suffix ? (n_capture_images-1) : 0,
                    save_expo, images,
                    preshot_bitmaps,
                    image_capture_intent, image_capture_intent_uri,
                    using_camera2, using_camera_extensions,
                    image_format, image_quality,
                    do_auto_stabilise, level_angle,
                    is_front_facing,
                    mirror,
                    current_date,
                    preference_hdr_tonemapping_algorithm,
                    preference_hdr_contrast_enhancement,
                    iso,
                    exposure_time,
                    zoom_factor,
                    preference_stamp, preference_textstamp, font_size, color, pref_style, preference_stamp_dateformat, preference_stamp_timeformat, preference_stamp_gpsformat,
                    //preference_stamp_geo_address,
                    preference_units_distance,
                    false, // panorama doesn't use this codepath
                    remove_device_exif,
                    store_location, location, store_geo_direction, geo_direction,
                    pitch_angle, store_ypr,
                    custom_tag_artist, custom_tag_copyright,
                    sample_factor);
        }

        if( MyDebug.LOG )
            Log.d(TAG, "saveImage complete, success: " + success);

        return success;
    }

    /** Callback when a photo (JPEG data) has been taken; queues it for saving via saveImage().
     * @return Whether the image was successfully handed to the saver.
     */
    @Override
    public boolean onPictureTaken(byte [] data, Date current_date) {
        if( MyDebug.LOG )
            Log.d(TAG, "onPictureTaken");

        n_capture_images++;
        if( MyDebug.LOG )
            Log.d(TAG, "n_capture_images is now " + n_capture_images);

        // NOTE(review): generic type argument appears stripped in this chunk - presumably List<byte[]>; TODO confirm.
        List images = new ArrayList<>();
        images.add(data);

        boolean success = saveImage(false, images, current_date);

        if( MyDebug.LOG )
            Log.d(TAG, "onPictureTaken complete, success: " + success);

        return success;
    }

    /** Callback when a burst of photos has been taken (HDR, expo or focus bracketing). */
    @Override
    public boolean onBurstPictureTaken(List images, Date current_date) {
        if( MyDebug.LOG )
            Log.d(TAG, "onBurstPictureTaken: received " + images.size() + " images");

        boolean success;
        PhotoMode photo_mode = getPhotoMode();
        if( main_activity.getPreview().isVideo() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "snapshot mode");
            // must be in photo snapshot while
            // recording video mode, only support standard photo mode
            photo_mode = PhotoMode.Standard;
        }
        if( photo_mode == PhotoMode.HDR ) {
            if( MyDebug.LOG )
                Log.d(TAG, "HDR mode");
            boolean save_expo = sharedPreferences.getBoolean(PreferenceKeys.HDRSaveExpoPreferenceKey, false);
            if( MyDebug.LOG )
                Log.d(TAG, "save_expo: " + save_expo);

            success = saveImage(save_expo, images, current_date);
        }
        else {
            if( MyDebug.LOG ) {
                Log.d(TAG, "exposure/focus bracketing mode mode");
                if( photo_mode != PhotoMode.ExpoBracketing && photo_mode != PhotoMode.FocusBracketing )
                    Log.e(TAG, "onBurstPictureTaken called with unexpected photo mode?!: " + photo_mode);
            }

            success = saveImage(true, images, current_date);
        }
        return success;
    }

    /** Callback when a RAW (DNG) photo has been taken; queues it for saving via the ImageSaver.
     * @return Whether the image was successfully handed to the saver.
     */
    @Override
    public boolean onRawPictureTaken(RawImage raw_image, Date current_date) {
        if( MyDebug.LOG )
            Log.d(TAG, "onRawPictureTaken");
        System.gc();

        n_capture_images_raw++;
        if( MyDebug.LOG )
            Log.d(TAG, "n_capture_images_raw is now " + n_capture_images_raw);

        boolean do_in_background = saveInBackground(false);

        PhotoMode photo_mode = getPhotoMode();
        if( main_activity.getPreview().isVideo() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "snapshot mode");
            // must be in photo snapshot while recording video mode, only support standard photo mode
            // (RAW not supported anyway for video snapshot mode, but have this code just to be safe)
            photo_mode = PhotoMode.Standard;
        }
        boolean force_suffix = forceSuffix(photo_mode);
        // N.B., n_capture_images_raw will be 1 for first image, not 0, so subtract 1 so we start off from _0.
        // (It wouldn't be a huge problem if we did start from _1, but it would be inconsistent with the naming
        // of images where images.size() > 1 (e.g., expo bracketing mode) where we also start from _0.)
        int suffix_offset = force_suffix ? (n_capture_images_raw-1) : 0;
        boolean success = imageSaver.saveImageRaw(do_in_background, force_suffix, suffix_offset, raw_image, current_date);

        if( MyDebug.LOG )
            Log.d(TAG, "onRawPictureTaken complete");
        return success;
    }

    /** Callback when a burst of RAW (DNG) photos has been taken. */
    @Override
    public boolean onRawBurstPictureTaken(List raw_images, Date current_date) {
        if( MyDebug.LOG )
            Log.d(TAG, "onRawBurstPictureTaken");
        System.gc();

        boolean do_in_background = saveInBackground(false);

        // currently we don't ever do post processing with RAW burst images, so just save them all
        boolean success = true;
        // NOTE(review): the source text is garbled below - a large span (the remainder of this
        // method and the following code up to the middle of a deletion helper) appears to have
        // been lost in extraction; kept verbatim rather than guessed at - TODO confirm against
        // the original file.
        for(int i=0;i 0 ) {
                if( from_user )
                    preview.showToast(photo_delete_toast, R.string.photo_deleted, true);
            }
        }
        else if( image_name != null ) {
            // delete a plain file by path
            if( MyDebug.LOG )
                Log.d(TAG, "Delete: " + image_name);
            File file = new File(image_name);
            if( !file.delete() ) {
                if( MyDebug.LOG )
                    Log.e(TAG, "failed to delete " + image_name);
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "successfully deleted " + image_name);
                if( from_user )
                    preview.showToast(photo_delete_toast, R.string.photo_deleted, true);
                storageUtils.broadcastFile(file, false, false, false, false, null);
            }
        }
    }

    /** Deletes the last image(s) taken, for use when the preview is paused. */
    void trashLastImage() {
        if( MyDebug.LOG )
            Log.d(TAG, "trashLastImage");
        Preview preview = main_activity.getPreview();
        if( preview.isPreviewPaused() ) {
            // NOTE(review): the source text is garbled below - a large span (this loop body, the
            // end of this file, and the start of the audio-listener callback including a
            // commented-out "if( level > 150 )" block) appears to have been lost in extraction;
            // kept verbatim rather than guessed at - TODO confirm against the original file.
            for(int i=0;i 150 ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "loud noise!: " + level);
                audio_trigger = true;
            }*/

        // first sample: just record the level, nothing to compare against yet
        if( last_level == -1 ) {
            last_level = level;
            return;
        }
        int diff = level - last_level;

        if( MyDebug.LOG ) {
            Log.d(TAG, "noise_sensitivity: " + audio_noise_sensitivity);
            Log.d(TAG, "diff: " + diff);
        }

        if( diff > audio_noise_sensitivity ) {
            // transition from quiet to loud: remember when it happened
            if( MyDebug.LOG )
                Log.d(TAG, "got louder!: " + last_level + " to " + level + " , diff: " + diff);
            time_quiet_loud = System.currentTimeMillis();
            if( MyDebug.LOG )
                Log.d(TAG, " time: " + time_quiet_loud);
        }
        else if( diff < -audio_noise_sensitivity && time_quiet_loud !=
-1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "got quieter!: " + last_level + " to " + level + " , diff: " + diff); + long time_now = System.currentTimeMillis(); + long duration = time_now - time_quiet_loud; + if( MyDebug.LOG ) { + Log.d(TAG, "stopped being loud - was loud since: " + time_quiet_loud); + Log.d(TAG, " time_now: " + time_now); + Log.d(TAG, " duration: " + duration); + } + if( duration < 1500 ) { + if( MyDebug.LOG ) + Log.d(TAG, "audio_trigger set"); + audio_trigger = true; + } + time_quiet_loud = -1; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "audio level: " + last_level + " to " + level + " , diff: " + diff); + } + + last_level = level; + + if( audio_trigger ) { + if( MyDebug.LOG ) + Log.d(TAG, "audio trigger"); + // need to run on UI thread so that this function returns quickly (otherwise we'll have lag in processing the audio) + // but also need to check we're not currently taking a photo or on timer, so we don't repeatedly queue up takePicture() calls, or cancel a timer + long time_now = System.currentTimeMillis(); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + boolean want_audio_listener = sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("noise"); + if( time_last_audio_trigger_photo != -1 && time_now - time_last_audio_trigger_photo < 5000 ) { + // avoid risk of repeatedly being triggered - as well as problem of being triggered again by the camera's own "beep"! 
+ if( MyDebug.LOG ) + Log.d(TAG, "ignore loud noise due to too soon since last audio triggered photo: " + (time_now - time_last_audio_trigger_photo)); + } + else if( !want_audio_listener ) { + // just in case this is a callback from an AudioListener before it's been freed (e.g., if there's a loud noise when exiting settings after turning the option off + if( MyDebug.LOG ) + Log.d(TAG, "ignore loud noise due to audio listener option turned off"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "audio trigger from loud noise"); + time_last_audio_trigger_photo = time_now; + main_activity.audioTrigger(); + } + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyDebug.java b/app/src/main/java/net/sourceforge/opencamera/MyDebug.java new file mode 100644 index 0000000..b9eab66 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyDebug.java @@ -0,0 +1,21 @@ +package net.sourceforge.opencamera; + +import android.util.Log; + +/** Helper class for logging. + */ +public class MyDebug { + /** Global constant to control logging, should always be set to false in + * released versions. + */ + public static final boolean LOG = false; + + /** Wrapper to print exceptions, should use instead of e.printStackTrace(). 
+ */ + public static void logStackTrace(String tag, String msg, Throwable tr) { + if( LOG ) { + // don't log exceptions in releases + Log.e(tag, msg, tr); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyPreferenceFragment.java b/app/src/main/java/net/sourceforge/opencamera/MyPreferenceFragment.java new file mode 100644 index 0000000..5403022 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyPreferenceFragment.java @@ -0,0 +1,1099 @@ +package net.sourceforge.opencamera; + +import net.sourceforge.opencamera.ui.FolderChooserDialog; +import net.sourceforge.opencamera.ui.MyEditTextPreference; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.app.AlertDialog; +import android.app.DialogFragment; +import android.app.Fragment; +import android.app.FragmentManager; +import android.content.ClipData; +import android.content.ClipboardManager; +import android.content.DialogInterface; +//import android.content.Intent; +import android.content.SharedPreferences; +import android.content.SharedPreferences.OnSharedPreferenceChangeListener; +import android.content.pm.PackageInfo; +import android.content.pm.PackageManager.NameNotFoundException; +import android.content.res.TypedArray; +import android.graphics.Color; +import android.graphics.Insets; +import android.graphics.Point; +//import android.net.Uri; +import android.graphics.Rect; +import android.os.Build; +import android.os.Bundle; +import android.preference.EditTextPreference; +import android.preference.ListPreference; +import android.preference.Preference; +import android.preference.Preference.OnPreferenceChangeListener; +import android.preference.Preference.OnPreferenceClickListener; +import android.preference.PreferenceFragment; +import android.preference.PreferenceGroup; +import android.preference.PreferenceManager; +import android.preference.TwoStatePreference; +import android.text.Html; +import android.text.SpannableString; +import 
android.text.Spanned; +import android.text.method.LinkMovementMethod; +import android.util.Log; +import android.view.Display; +import android.view.LayoutInflater; +import android.view.View; +import android.view.WindowInsets; +import android.view.WindowMetrics; +import android.widget.ScrollView; +import android.widget.TextView; + +import androidx.core.view.ViewCompat; +import androidx.core.view.WindowInsetsCompat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; + +/** Fragment to handle the Settings UI. Note that originally this was a + * PreferenceActivity rather than a PreferenceFragment which required all + * communication to be via the bundle (since this replaced the MainActivity, + * meaning we couldn't access data from that class. This no longer applies due + * to now using a PreferenceFragment, but I've still kept with transferring + * information via the bundle (for the most part, at least). + * Also note that passing via a bundle may be necessary to avoid accessing the + * preview, which can be null - see note about video resolutions below. + * Also see https://stackoverflow.com/questions/14093438/after-the-rotate-oncreate-fragment-is-called-before-oncreate-fragmentactivi . + * If the application is destroyed when in background when the user is viewing + * the settings, then the application and its fragments will be recreated - + * so reading from the bundle means the state is restored, where as trying + * to read camera settings won't be possible as the camera won't yet be + * reopened. + */ +public class MyPreferenceFragment extends PreferenceFragment implements OnSharedPreferenceChangeListener { + private static final String TAG = "MyPreferenceFragment"; + + private boolean edge_to_edge_mode = false; + + private int cameraId; + + /* Any AlertDialogs we create should be added to dialogs, and removed when dismissed. Any dialogs still + * opened when onDestroy() is called are closed. 
+ * Normally this shouldn't be needed - the settings is usually only closed by the user pressing Back, + * which can only be done once any opened dialogs are also closed. But this is required if we want to + * programmatically close the settings - this is done in MainActivity.onNewIntent(), so that if Open Camera + * is launched from the homescreen again when the settings was opened, we close the settings. + * UPDATE: At the time of writing, we don't set android:launchMode="singleTask", so onNewIntent() is not called, + * so this code isn't necessary - but there shouldn't be harm to leave it here for future use. + */ + private final HashSet dialogs = new HashSet<>(); + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + addPreferencesFromResource(R.xml.preferences); + + final Bundle bundle = getArguments(); + this.edge_to_edge_mode = bundle.getBoolean("edge_to_edge_mode"); + this.cameraId = bundle.getInt("cameraId"); + if( MyDebug.LOG ) + Log.d(TAG, "cameraId: " + cameraId); + final int nCameras = bundle.getInt("nCameras"); + if( MyDebug.LOG ) + Log.d(TAG, "nCameras: " + nCameras); + + final boolean camera_open = bundle.getBoolean("camera_open"); + if( MyDebug.LOG ) + Log.d(TAG, "camera_open: " + camera_open); + + final String camera_api = bundle.getString("camera_api"); + + final String photo_mode_string = bundle.getString("photo_mode_string"); + + final boolean using_android_l = bundle.getBoolean("using_android_l"); + if( MyDebug.LOG ) + Log.d(TAG, "using_android_l: " + using_android_l); + + final int camera_orientation = bundle.getInt("camera_orientation"); + if( MyDebug.LOG ) + Log.d(TAG, "camera_orientation: " + camera_orientation); + + final float min_zoom_factor = bundle.getFloat("min_zoom_factor"); + final float max_zoom_factor = bundle.getFloat("max_zoom_factor"); + + final SharedPreferences sharedPreferences = 
PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + + final boolean supports_auto_stabilise = bundle.getBoolean("supports_auto_stabilise"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_auto_stabilise: " + supports_auto_stabilise); + + /*if( !supports_auto_stabilise ) { + Preference pref = findPreference("preference_auto_stabilise"); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_category_camera_effects"); + pg.removePreference(pref); + }*/ + + //readFromBundle(bundle, "color_effects", Preview.getColorEffectPreferenceKey(), Camera.Parameters.EFFECT_NONE, "preference_category_camera_effects"); + //readFromBundle(bundle, "scene_modes", Preview.getSceneModePreferenceKey(), Camera.Parameters.SCENE_MODE_AUTO, "preference_category_camera_effects"); + //readFromBundle(bundle, "white_balances", Preview.getWhiteBalancePreferenceKey(), Camera.Parameters.WHITE_BALANCE_AUTO, "preference_category_camera_effects"); + //readFromBundle(bundle, "isos", Preview.getISOPreferenceKey(), "auto", "preference_category_camera_effects"); + //readFromBundle(bundle, "exposures", "preference_exposure", "0", "preference_category_camera_effects"); + + final boolean supports_face_detection = bundle.getBoolean("supports_face_detection"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_face_detection: " + supports_face_detection); + + if( !supports_face_detection && ( camera_open || sharedPreferences.getBoolean(PreferenceKeys.FaceDetectionPreferenceKey, false) == false ) ) { + // if camera not open, we'll think this setting isn't supported - but should only remove + // this preference if it's set to the default (otherwise if user sets to a non-default + // value that causes camera to not open, user won't be able to put it back to the + // default!) 
+ Preference pref = findPreference(PreferenceKeys.FaceDetectionPreferenceKey); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_category_camera_controls"); + pg.removePreference(pref); + } + + final int preview_width = bundle.getInt("preview_width"); + final int preview_height = bundle.getInt("preview_height"); + final int [] preview_widths = bundle.getIntArray("preview_widths"); + final int [] preview_heights = bundle.getIntArray("preview_heights"); + final int [] video_widths = bundle.getIntArray("video_widths"); + final int [] video_heights = bundle.getIntArray("video_heights"); + + final int resolution_width = bundle.getInt("resolution_width"); + final int resolution_height = bundle.getInt("resolution_height"); + final int [] widths = bundle.getIntArray("resolution_widths"); + final int [] heights = bundle.getIntArray("resolution_heights"); + final boolean [] supports_burst = bundle.getBooleanArray("resolution_supports_burst"); + + final boolean supports_raw = bundle.getBoolean("supports_raw"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_raw: " + supports_raw); + + final boolean supports_hdr = bundle.getBoolean("supports_hdr"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_hdr: " + supports_hdr); + + final boolean supports_panorama = bundle.getBoolean("supports_panorama"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_panorama: " + supports_panorama); + + final boolean has_gyro_sensors = bundle.getBoolean("has_gyro_sensors"); + if( MyDebug.LOG ) + Log.d(TAG, "has_gyro_sensors: " + has_gyro_sensors); + + final boolean supports_expo_bracketing = bundle.getBoolean("supports_expo_bracketing"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_expo_bracketing: " + supports_expo_bracketing); + + final boolean supports_exposure_compensation = bundle.getBoolean("supports_exposure_compensation"); + final int exposure_compensation_min = bundle.getInt("exposure_compensation_min"); + final int exposure_compensation_max = bundle.getInt("exposure_compensation_max"); 
+ if( MyDebug.LOG ) { + Log.d(TAG, "supports_exposure_compensation: " + supports_exposure_compensation); + Log.d(TAG, "exposure_compensation_min: " + exposure_compensation_min); + Log.d(TAG, "exposure_compensation_max: " + exposure_compensation_max); + } + + final boolean supports_iso_range = bundle.getBoolean("supports_iso_range"); + final int iso_range_min = bundle.getInt("iso_range_min"); + final int iso_range_max = bundle.getInt("iso_range_max"); + if( MyDebug.LOG ) { + Log.d(TAG, "supports_iso_range: " + supports_iso_range); + Log.d(TAG, "iso_range_min: " + iso_range_min); + Log.d(TAG, "iso_range_max: " + iso_range_max); + } + + final boolean supports_exposure_time = bundle.getBoolean("supports_exposure_time"); + final long exposure_time_min = bundle.getLong("exposure_time_min"); + final long exposure_time_max = bundle.getLong("exposure_time_max"); + if( MyDebug.LOG ) { + Log.d(TAG, "supports_exposure_time: " + supports_exposure_time); + Log.d(TAG, "exposure_time_min: " + exposure_time_min); + Log.d(TAG, "exposure_time_max: " + exposure_time_max); + } + + final boolean supports_white_balance_temperature = bundle.getBoolean("supports_white_balance_temperature"); + final int white_balance_temperature_min = bundle.getInt("white_balance_temperature_min"); + final int white_balance_temperature_max = bundle.getInt("white_balance_temperature_max"); + if( MyDebug.LOG ) { + Log.d(TAG, "supports_white_balance_temperature: " + supports_white_balance_temperature); + Log.d(TAG, "white_balance_temperature_min: " + white_balance_temperature_min); + Log.d(TAG, "white_balance_temperature_max: " + white_balance_temperature_max); + } + + final boolean is_multi_cam = bundle.getBoolean("is_multi_cam"); + if( MyDebug.LOG ) + Log.d(TAG, "is_multi_cam: " + is_multi_cam); + + final String [] video_quality = bundle.getStringArray("video_quality"); + + final String current_video_quality = bundle.getString("current_video_quality"); + final int video_frame_width = 
bundle.getInt("video_frame_width"); + final int video_frame_height = bundle.getInt("video_frame_height"); + final int video_bit_rate = bundle.getInt("video_bit_rate"); + final int video_frame_rate = bundle.getInt("video_frame_rate"); + final double video_capture_rate = bundle.getDouble("video_capture_rate"); + final boolean video_high_speed = bundle.getBoolean("video_high_speed"); + final float video_capture_rate_factor = bundle.getFloat("video_capture_rate_factor"); + + final boolean supports_optical_stabilization = bundle.getBoolean("supports_optical_stabilization"); + final boolean optical_stabilization_enabled = bundle.getBoolean("optical_stabilization_enabled"); + + final boolean supports_video_stabilization = bundle.getBoolean("supports_video_stabilization"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_video_stabilization: " + supports_video_stabilization); + + final boolean video_stabilization_enabled = bundle.getBoolean("video_stabilization_enabled"); + + final boolean can_disable_shutter_sound = bundle.getBoolean("can_disable_shutter_sound"); + if( MyDebug.LOG ) + Log.d(TAG, "can_disable_shutter_sound: " + can_disable_shutter_sound); + + final int tonemap_max_curve_points = bundle.getInt("tonemap_max_curve_points"); + final boolean supports_tonemap_curve = bundle.getBoolean("supports_tonemap_curve"); + if( MyDebug.LOG ) { + Log.d(TAG, "tonemap_max_curve_points: " + tonemap_max_curve_points); + Log.d(TAG, "supports_tonemap_curve: " + supports_tonemap_curve); + } + + final float camera_view_angle_x = bundle.getFloat("camera_view_angle_x"); + final float camera_view_angle_y = bundle.getFloat("camera_view_angle_y"); + if( MyDebug.LOG ) { + Log.d(TAG, "camera_view_angle_x: " + camera_view_angle_x); + Log.d(TAG, "camera_view_angle_y: " + camera_view_angle_y); + } + + { + List camera_api_values = new ArrayList<>(); + List camera_api_entries = new ArrayList<>(); + + // all devices support old api + camera_api_values.add("preference_camera_api_old"); + 
camera_api_entries.add(getActivity().getResources().getString(R.string.preference_camera_api_old)); + + final boolean supports_camera2 = bundle.getBoolean("supports_camera2"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_camera2: " + supports_camera2); + if( supports_camera2 ) { + camera_api_values.add("preference_camera_api_camera2"); + camera_api_entries.add(getActivity().getResources().getString(R.string.preference_camera_api_camera2)); + } + + if( camera_api_values.size() == 1 ) { + // if only supports 1 API, no point showing the preference + camera_api_values.clear(); + camera_api_entries.clear(); + } + + readFromBundle(camera_api_values.toArray(new String[0]), camera_api_entries.toArray(new String[0]), "preference_camera_api", PreferenceKeys.CameraAPIPreferenceDefault, "preference_category_online"); + + if( camera_api_values.size() >= 2 ) { + final Preference pref = findPreference("preference_camera_api"); + pref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() { + @Override + public boolean onPreferenceChange(Preference arg0, Object newValue) { + if( pref.getKey().equals("preference_camera_api") ) { + ListPreference list_pref = (ListPreference)pref; + if( list_pref.getValue().equals(newValue) ) { + if( MyDebug.LOG ) + Log.d(TAG, "user selected same camera API"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "user changed camera API - need to restart"); + MainActivity main_activity = (MainActivity)MyPreferenceFragment.this.getActivity(); + main_activity.restartOpenCamera(); + } + } + return true; + } + }); + } + } + /*final boolean supports_camera2 = bundle.getBoolean("supports_camera2"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_camera2: " + supports_camera2); + if( supports_camera2 ) { + final Preference pref = findPreference("preference_use_camera2"); + pref.setOnPreferenceClickListener(new OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_use_camera2") ) { + 
if( MyDebug.LOG ) + Log.d(TAG, "user clicked camera2 API - need to restart"); + MainActivity main_activity = (MainActivity)MyPreferenceFragment.this.getActivity(); + main_activity.restartOpenCamera(); + return false; + } + return false; + } + }); + } + else { + Preference pref = findPreference("preference_use_camera2"); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_category_online"); + pg.removePreference(pref); + }*/ + + { + final Preference pref = findPreference("preference_online_help"); + pref.setOnPreferenceClickListener(new OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_online_help") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked online help"); + MainActivity main_activity = (MainActivity)MyPreferenceFragment.this.getActivity(); + main_activity.launchOnlineHelp(); + return false; + } + return false; + } + }); + } + + { + final Preference pref = findPreference("preference_privacy_policy"); + pref.setOnPreferenceClickListener(new OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_privacy_policy") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked privacy policy"); + + clickedPrivacyPolicy(); + } + return false; + } + }); + } + + { + final Preference pref = findPreference("preference_about"); + pref.setOnPreferenceClickListener(new OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_about") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked about"); + AlertDialog.Builder alertDialog = new AlertDialog.Builder(MyPreferenceFragment.this.getActivity()); + alertDialog.setTitle(R.string.preference_about); + final StringBuilder about_string = new StringBuilder(); + String version = "UNKNOWN_VERSION"; + int version_code = -1; + try { + PackageInfo pInfo = 
MyPreferenceFragment.this.getActivity().getPackageManager().getPackageInfo(MyPreferenceFragment.this.getActivity().getPackageName(), 0); + version = pInfo.versionName; + version_code = pInfo.versionCode; + } + catch(NameNotFoundException e) { + MyDebug.logStackTrace(TAG, "NameNotFoundException exception trying to get version number", e); + } + about_string.append("Open Camera v"); + about_string.append(version); + about_string.append("\nCode: "); + about_string.append(version_code); + about_string.append("\nPackage: "); + about_string.append(MyPreferenceFragment.this.getActivity().getPackageName()); + about_string.append("\nAndroid API version: "); + about_string.append(Build.VERSION.SDK_INT); + about_string.append("\nDevice manufacturer: "); + about_string.append(Build.MANUFACTURER); + about_string.append("\nDevice model: "); + about_string.append(Build.MODEL); + if( Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R ) { + // use non-deprecated equivalent of Display.getSize() + WindowMetrics window_metrics = MyPreferenceFragment.this.getActivity().getWindowManager().getCurrentWindowMetrics(); + final WindowInsets windowInsets = window_metrics.getWindowInsets(); + Insets insets = windowInsets.getInsetsIgnoringVisibility(WindowInsets.Type.navigationBars() | WindowInsets.Type.displayCutout()); + int insetsWidth = insets.right + insets.left; + int insetsHeight = insets.top + insets.bottom; + final Rect bounds = window_metrics.getBounds(); + int display_x = bounds.width() - insetsWidth; + int display_y = bounds.height() - insetsHeight; + about_string.append("\nDisplay size: "); + about_string.append(display_x); + about_string.append("x"); + about_string.append(display_y); + } + else { + Point display_size = new Point(); + Display display = MyPreferenceFragment.this.getActivity().getWindowManager().getDefaultDisplay(); + display.getSize(display_size); + about_string.append("\nDisplay size: "); + about_string.append(display_size.x); + about_string.append("x"); + 
about_string.append(display_size.y); + } + about_string.append("\nCurrent camera ID: "); + about_string.append(cameraId); + about_string.append("\nNo. of cameras: "); + about_string.append(nCameras); + about_string.append("\nMulti-camera?: "); + about_string.append(is_multi_cam); + about_string.append("\nCamera API: "); + about_string.append(camera_api); + about_string.append("\nCamera orientation: "); + about_string.append(camera_orientation); + about_string.append("\nPhoto mode: "); + about_string.append(photo_mode_string==null ? "UNKNOWN" : photo_mode_string); + { + String last_video_error = sharedPreferences.getString("last_video_error", ""); + if( !last_video_error.isEmpty() ) { + about_string.append("\nLast video error: "); + about_string.append(last_video_error); + } + } + about_string.append("\nMin zoom factor: "); + about_string.append(min_zoom_factor); + about_string.append("\nMax zoom factor: "); + about_string.append(max_zoom_factor); + if( preview_widths != null && preview_heights != null ) { + about_string.append("\nPreview resolutions: "); + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(preview_widths[i]); + about_string.append("x"); + about_string.append(preview_heights[i]); + } + } + about_string.append("\nPreview resolution: "); + about_string.append(preview_width); + about_string.append("x"); + about_string.append(preview_height); + if( widths != null && heights != null ) { + about_string.append("\nPhoto resolutions: "); + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(widths[i]); + about_string.append("x"); + about_string.append(heights[i]); + if( supports_burst != null && !supports_burst[i] ) { + about_string.append("[no burst]"); + } + } + } + about_string.append("\nPhoto resolution: "); + about_string.append(resolution_width); + about_string.append("x"); + about_string.append(resolution_height); + if( video_quality != null ) { + about_string.append("\nVideo qualities: "); + for(int 
i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(video_quality[i]); + } + } + if( video_widths != null && video_heights != null ) { + about_string.append("\nVideo resolutions: "); + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(video_widths[i]); + about_string.append("x"); + about_string.append(video_heights[i]); + } + } + about_string.append("\nVideo quality: "); + about_string.append(current_video_quality); + about_string.append("\nVideo frame width: "); + about_string.append(video_frame_width); + about_string.append("\nVideo frame height: "); + about_string.append(video_frame_height); + about_string.append("\nVideo bit rate: "); + about_string.append(video_bit_rate); + about_string.append("\nVideo frame rate: "); + about_string.append(video_frame_rate); + about_string.append("\nVideo capture rate: "); + about_string.append(video_capture_rate); + about_string.append("\nVideo high speed: "); + about_string.append(video_high_speed); + about_string.append("\nVideo capture rate factor: "); + about_string.append(video_capture_rate_factor); + about_string.append("\nAuto-level?: "); + about_string.append(getString(supports_auto_stabilise ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nAuto-level enabled?: "); + about_string.append(sharedPreferences.getBoolean(PreferenceKeys.AutoStabilisePreferenceKey, false)); + about_string.append("\nFace detection?: "); + about_string.append(getString(supports_face_detection ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nRAW?: "); + about_string.append(getString(supports_raw ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nHDR?: "); + about_string.append(getString(supports_hdr ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nPanorama?: "); + about_string.append(getString(supports_panorama ? 
R.string.about_available : R.string.about_not_available)); + about_string.append("\nGyro sensors?: "); + about_string.append(getString(has_gyro_sensors ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nExpo?: "); + about_string.append(getString(supports_expo_bracketing ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nExpo compensation?: "); + about_string.append(getString(supports_exposure_compensation ? R.string.about_available : R.string.about_not_available)); + if( supports_exposure_compensation ) { + about_string.append("\nExposure compensation range: "); + about_string.append(exposure_compensation_min); + about_string.append(" to "); + about_string.append(exposure_compensation_max); + } + about_string.append("\nManual ISO?: "); + about_string.append(getString(supports_iso_range ? R.string.about_available : R.string.about_not_available)); + if( supports_iso_range ) { + about_string.append("\nISO range: "); + about_string.append(iso_range_min); + about_string.append(" to "); + about_string.append(iso_range_max); + } + about_string.append("\nManual exposure?: "); + about_string.append(getString(supports_exposure_time ? R.string.about_available : R.string.about_not_available)); + if( supports_exposure_time ) { + about_string.append("\nExposure range: "); + about_string.append(exposure_time_min); + about_string.append(" to "); + about_string.append(exposure_time_max); + } + about_string.append("\nManual WB?: "); + about_string.append(getString(supports_white_balance_temperature ? R.string.about_available : R.string.about_not_available)); + if( supports_white_balance_temperature ) { + about_string.append("\nWB temperature: "); + about_string.append(white_balance_temperature_min); + about_string.append(" to "); + about_string.append(white_balance_temperature_max); + } + about_string.append("\nOptical stabilization?: "); + about_string.append(getString(supports_optical_stabilization ? 
R.string.about_available : R.string.about_not_available)); + about_string.append("\nOptical stabilization enabled?: "); + about_string.append(optical_stabilization_enabled); + about_string.append("\nVideo stabilization?: "); + about_string.append(getString(supports_video_stabilization ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nVideo stabilization enabled?: "); + about_string.append(video_stabilization_enabled); + about_string.append("\nTonemap curve?: "); + about_string.append(getString(supports_tonemap_curve ? R.string.about_available : R.string.about_not_available)); + about_string.append("\nTonemap max curve points: "); + about_string.append(tonemap_max_curve_points); + about_string.append("\nCan disable shutter sound?: "); + about_string.append(getString(can_disable_shutter_sound ? R.string.about_available : R.string.about_not_available)); + + about_string.append("\nCamera view angle: ").append(camera_view_angle_x).append(" , ").append(camera_view_angle_y); + + about_string.append("\nFlash modes: "); + String [] flash_values = bundle.getStringArray("flash_values"); + if( flash_values != null && flash_values.length > 0 ) { + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(flash_values[i]); + } + } + else { + about_string.append("None"); + } + about_string.append("\nFocus modes: "); + String [] focus_values = bundle.getStringArray("focus_values"); + if( focus_values != null && focus_values.length > 0 ) { + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(focus_values[i]); + } + } + else { + about_string.append("None"); + } + about_string.append("\nColor effects: "); + String [] color_effects_values = bundle.getStringArray("color_effects"); + if( color_effects_values != null && color_effects_values.length > 0 ) { + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(color_effects_values[i]); + } + } + else { + about_string.append("None"); + } + 
about_string.append("\nScene modes: "); + String [] scene_modes_values = bundle.getStringArray("scene_modes"); + if( scene_modes_values != null && scene_modes_values.length > 0 ) { + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(scene_modes_values[i]); + } + } + else { + about_string.append("None"); + } + about_string.append("\nWhite balances: "); + String [] white_balances_values = bundle.getStringArray("white_balances"); + if( white_balances_values != null && white_balances_values.length > 0 ) { + for(int i=0;i 0 ) { + about_string.append(", "); + } + about_string.append(white_balances_values[i]); + } + } + else { + about_string.append("None"); + } + if( !using_android_l ) { + about_string.append("\nISOs: "); + String[] isos = bundle.getStringArray("isos"); + if (isos != null && isos.length > 0) { + for (int i = 0; i < isos.length; i++) { + if (i > 0) { + about_string.append(", "); + } + about_string.append(isos[i]); + } + } else { + about_string.append("None"); + } + String iso_key = bundle.getString("iso_key"); + if (iso_key != null) { + about_string.append("\nISO key: "); + about_string.append(iso_key); + } + } + + int magnetic_accuracy = bundle.getInt("magnetic_accuracy"); + about_string.append("\nMagnetic accuracy?: "); + about_string.append(magnetic_accuracy); + + about_string.append("\nUsing SAF?: "); + about_string.append(sharedPreferences.getBoolean(PreferenceKeys.UsingSAFPreferenceKey, false)); + String save_location = sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera"); + about_string.append("\nSave Location: "); + about_string.append(save_location); + String save_location_saf = sharedPreferences.getString(PreferenceKeys.SaveLocationSAFPreferenceKey, ""); + about_string.append("\nSave Location SAF: "); + about_string.append(save_location_saf); + + about_string.append("\nParameters: "); + String parameters_string = bundle.getString("parameters_string"); + if( parameters_string != null ) { + 
about_string.append(parameters_string); + } + else { + about_string.append("None"); + } + + SpannableString span = new SpannableString(about_string); + + // clickable text is only supported if we call setMovementMethod on the TextView - which means we need to create + // our own for the AlertDialog! + @SuppressLint("InflateParams") // we add the view to the alert dialog in addTextViewForAlertDialog() + final View dialog_view = LayoutInflater.from(getActivity()).inflate(R.layout.alertdialog_textview, null); + final TextView textView = dialog_view.findViewById(R.id.text_view); + + textView.setText(span); + textView.setMovementMethod(LinkMovementMethod.getInstance()); + textView.setTextAppearance(getActivity(), android.R.style.TextAppearance_Medium); + addTextViewForAlertDialog(alertDialog, textView); + //alertDialog.setMessage(about_string); + + alertDialog.setPositiveButton(android.R.string.ok, null); + alertDialog.setNegativeButton(R.string.about_copy_to_clipboard, new DialogInterface.OnClickListener() { + public void onClick(DialogInterface dialog, int id) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked copy to clipboard"); + ClipboardManager clipboard = (ClipboardManager) getActivity().getSystemService(Activity.CLIPBOARD_SERVICE); + ClipData clip = ClipData.newPlainText("OpenCamera About", about_string); + clipboard.setPrimaryClip(clip); + } + }); + final AlertDialog alert = alertDialog.create(); + // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "about dialog dismissed"); + dialogs.remove(alert); + } + }); + alert.show(); + dialogs.add(alert); + return false; + } + return false; + } + }); + } + + setupDependencies(); + } + + @Override + public void onViewCreated(View view, Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + + 
if( edge_to_edge_mode ) { + handleEdgeToEdge(view); + } + } + + static void handleEdgeToEdge(View view) { + ViewCompat.setOnApplyWindowInsetsListener(view, (v, windowInsets) -> { + //androidx.core.graphics.Insets insets = windowInsets.getInsets(WindowInsetsCompat.Type.systemBars() | WindowInsetsCompat.Type.displayCutout()); + // don't need to avoid WindowInsetsCompat.Type.displayCutout(), as we already do this for the entire activity (see MainActivity's setOnApplyWindowInsetsListener) + androidx.core.graphics.Insets insets = windowInsets.getInsets(WindowInsetsCompat.Type.systemBars()); + v.setPadding(insets.left, insets.top, insets.right, insets.bottom); + return WindowInsetsCompat.CONSUMED; + }); + view.requestApplyInsets(); + } + + /** Adds a TextView to an AlertDialog builder, placing it inside a scrollview and adding appropriate padding. + */ + private void addTextViewForAlertDialog(AlertDialog.Builder alertDialog, TextView textView) { + final float scale = getActivity().getResources().getDisplayMetrics().density; + ScrollView scrollView = new ScrollView(getActivity()); + scrollView.addView(textView); + // padding values from /sdk/platforms/android-18/data/res/layout/alert_dialog.xml + textView.setPadding((int)(5*scale+0.5f), (int)(5*scale+0.5f), (int)(5*scale+0.5f), (int)(5*scale+0.5f)); + scrollView.setPadding((int)(14*scale+0.5f), (int)(2*scale+0.5f), (int)(10*scale+0.5f), (int)(12*scale+0.5f)); + alertDialog.setView(scrollView); + } + + /** Programmatically set up dependencies for preference types (e.g., ListPreference) that don't + * support this in xml (such as SwitchPreference and CheckBoxPreference), or where this depends + * on the device (e.g., Android version). + */ + private void setupDependencies() { + } + + /* The user clicked the privacy policy preference. 
+ */ + public void clickedPrivacyPolicy() { + if( MyDebug.LOG ) + Log.d(TAG, "clickedPrivacyPolicy()"); + /*MainActivity main_activity = (MainActivity)MyPreferenceFragment.this.getActivity(); + main_activity.launchOnlinePrivacyPolicy();*/ + + AlertDialog.Builder alertDialog = new AlertDialog.Builder(MyPreferenceFragment.this.getActivity()); + alertDialog.setTitle(R.string.preference_privacy_policy); + + //SpannableString span = new SpannableString(getActivity().getResources().getString(R.string.preference_privacy_policy_text)); + //Linkify.addLinks(span, Linkify.WEB_URLS | Linkify.EMAIL_ADDRESSES); + String privacy_policy_text = getActivity().getResources().getString(R.string.preference_privacy_policy_text); + Spanned span = Html.fromHtml(privacy_policy_text); + // clickable text is only supported if we call setMovementMethod on the TextView - which means we need to create + // our own for the AlertDialog! + @SuppressLint("InflateParams") // we add the view to the alert dialog in addTextViewForAlertDialog() + final View dialog_view = LayoutInflater.from(getActivity()).inflate(R.layout.alertdialog_textview, null); + final TextView textView = dialog_view.findViewById(R.id.text_view); + textView.setText(span); + textView.setMovementMethod(LinkMovementMethod.getInstance()); + textView.setTextAppearance(getActivity(), android.R.style.TextAppearance_Medium); + addTextViewForAlertDialog(alertDialog, textView); + //alertDialog.setMessage(R.string.preference_privacy_policy_text); + + alertDialog.setPositiveButton(android.R.string.ok, null); + alertDialog.setNegativeButton(R.string.preference_privacy_policy_online, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "online privacy policy"); + MainActivity main_activity = (MainActivity)MyPreferenceFragment.this.getActivity(); + main_activity.launchOnlinePrivacyPolicy(); + } + }); + final AlertDialog alert = alertDialog.create(); + // 
AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "reset dialog dismissed"); + dialogs.remove(alert); + } + }); + alert.show(); + dialogs.add(alert); + } + + /** Removes an entry and value pair from a ListPreference, if it exists. + * @param pref The ListPreference to remove the supplied entry/value. + * @param filter_value The value to remove from the list. + */ + static void filterArrayEntry(ListPreference pref, String filter_value) { + { + CharSequence [] orig_entries = pref.getEntries(); + CharSequence [] orig_values = pref.getEntryValues(); + List new_entries = new ArrayList<>(); + List new_values = new ArrayList<>(); + for(int i=0;i 0 ) { + if( MyDebug.LOG ) { + Log.d(TAG, "values:"); + for(String value : values) { + Log.d(TAG, value); + } + } + ListPreference lp = (ListPreference)preference_fragment.findPreference(preference_key); + lp.setEntries(entries); + lp.setEntryValues(values); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(preference_fragment.getActivity()); + String value = sharedPreferences.getString(preference_key, default_value); + if( MyDebug.LOG ) + Log.d(TAG, " value: " + Arrays.toString(values)); + lp.setValue(value); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "remove preference " + preference_key + " from category " + preference_category_key); + Preference pref = preference_fragment.findPreference(preference_key); + PreferenceGroup pg = (PreferenceGroup)preference_fragment.findPreference(preference_category_key); + pg.removePreference(pref); + } + } + + static void setBackground(Fragment fragment) { + // prevent fragment being transparent + // note, setting color here only seems to affect the "main" preference fragment screen, and not sub-screens + // note, on Galaxy Nexus Android 4.3 this sets to 
black rather than the dark grey that the background theme should be (and what the sub-screens use); works okay on Nexus 7 Android 5 + // we used to use a light theme for the PreferenceFragment, but mixing themes in same activity seems to cause problems (e.g., for EditTextPreference colors) + TypedArray array = fragment.getActivity().getTheme().obtainStyledAttributes(new int[] { + android.R.attr.colorBackground + }); + int backgroundColor = array.getColor(0, Color.BLACK); + /*if( MyDebug.LOG ) { + int r = (backgroundColor >> 16) & 0xFF; + int g = (backgroundColor >> 8) & 0xFF; + int b = (backgroundColor >> 0) & 0xFF; + Log.d(TAG, "backgroundColor: " + r + " , " + g + " , " + b); + }*/ + fragment.getView().setBackgroundColor(backgroundColor); + array.recycle(); + } + + @Override + public void onResume() { + super.onResume(); + + setBackground(this); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + sharedPreferences.registerOnSharedPreferenceChangeListener(this); + } + + @Override + public void onPause() { + super.onPause(); + } + + @Override + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + super.onDestroy(); + + if( MyDebug.LOG ) + Log.d(TAG, "isRemoving?: " + isRemoving()); + + if( isRemoving() ) { + // if isRemoving()==true, then it means the fragment is being removed and we are returning to the activity + // if isRemoving()==false, then it may be that the activity is being destroyed + ((MainActivity)getActivity()).settingsClosing(); + } + + dismissDialogs(getFragmentManager(), dialogs); + } + + static void dismissDialogs(FragmentManager fragment_manager, HashSet dialogs) { + // dismiss open dialogs - see comment for dialogs for why we do this + for(AlertDialog dialog : dialogs) { + if( MyDebug.LOG ) + Log.d(TAG, "dismiss dialog: " + dialog); + dialog.dismiss(); + } + // similarly dimiss any dialog fragments still opened + Fragment folder_fragment = 
fragment_manager.findFragmentByTag("FOLDER_FRAGMENT"); + if( folder_fragment != null ) { + DialogFragment dialogFragment = (DialogFragment)folder_fragment; + if( MyDebug.LOG ) + Log.d(TAG, "dismiss dialogFragment: " + dialogFragment); + dialogFragment.dismissAllowingStateLoss(); + } + } + + /* So that manual changes to the checkbox/switch preferences, while the preferences are showing, show up; + * in particular, needed for preference_using_saf, when the user cancels the SAF dialog (see + * MainActivity.onActivityResult). + * Also programmatically sets summary (see setSummary). + */ + public void onSharedPreferenceChanged(SharedPreferences prefs, String key) { + if( MyDebug.LOG ) + Log.d(TAG, "onSharedPreferenceChanged: " + key); + + if( key == null ) { + // On Android 11+, when targetting Android 11+, this method is called with key==null + // if preferences are cleared. Unclear if this happens here in practice, but return + // just in case. + return; + } + + Preference pref = findPreference(key); + handleOnSharedPreferenceChanged(prefs, key, pref); + } + + static void handleOnSharedPreferenceChanged(SharedPreferences prefs, String key, Preference pref) { + if( MyDebug.LOG ) + Log.d(TAG, "handleOnSharedPreferenceChanged: " + key); + + if( pref == null ) { + // this can happen if the shared preference that changed is for a sub-screen i.e. a different fragment + if( MyDebug.LOG ) + Log.d(TAG, "handleOnSharedPreferenceChanged: preference doesn't belong to this fragment"); + return; + } + + if( pref instanceof TwoStatePreference ) { + TwoStatePreference twoStatePref = (TwoStatePreference)pref; + twoStatePref.setChecked(prefs.getBoolean(key, true)); + } + else if( pref instanceof ListPreference ) { + ListPreference listPref = (ListPreference)pref; + listPref.setValue(prefs.getString(key, "")); + } + setSummary(pref); + } + + /** Programmatically sets summaries as required. 
+ * Remember to call setSummary() from the constructor for any keys we set, to initialise the + * summary. + */ + static void setSummary(Preference pref) { + if( pref instanceof EditTextPreference ) { + /* We have a runtime check for using EditTextPreference - we don't want these due to importance of + * supporting the Google Play emoji policy (see comment in MyEditTextPreference.java) - and this + * helps guard against the risk of accidentally adding more EditTextPreferences in future. + * Once we've switched to using Android X Preference library, and hence safe to use EditTextPreference + * again, this code can be removed. + */ + throw new RuntimeException("detected an EditTextPreference: " + pref.getKey() + " pref: " + pref); + } + + if( pref.getKey().equals("preference_save_location") ) { + // can't use %s (as only supported for ListPreference), so handle this directly + MainActivity main_activity = (MainActivity)pref.getContext(); + String folder_name; + if( main_activity.getStorageUtils().isUsingSAF() ) { + folder_name = main_activity.getStorageUtils().getSaveLocationSAF(); + } + else { + folder_name = main_activity.getStorageUtils().getSaveLocation(); + } + folder_name = main_activity.getHumanReadableSaveFolder(folder_name); + String summary = main_activity.getResources().getString(R.string.preference_save_location_summary); + if( !folder_name.isEmpty() ) { + summary += "\n" + folder_name; + } + pref.setSummary(summary); + } + else if( pref instanceof EditTextPreference || pref instanceof MyEditTextPreference ) { + // %s only supported for ListPreference + // we also display the usual summary if no preference value is set + if( pref.getKey().equals("preference_exif_artist") || + pref.getKey().equals("preference_exif_copyright") || + pref.getKey().equals("preference_save_photo_prefix") || + pref.getKey().equals("preference_save_video_prefix") || + pref.getKey().equals("preference_textstamp") + ) { + String default_value = ""; + if( 
pref.getKey().equals("preference_save_photo_prefix") ) + default_value = "IMG_"; + else if( pref.getKey().equals("preference_save_video_prefix") ) + default_value = "VID_"; + + String current_value; + if( pref instanceof EditTextPreference ) { + EditTextPreference editTextPref = (EditTextPreference)pref; + current_value = editTextPref.getText(); + } + else { + MyEditTextPreference editTextPref = (MyEditTextPreference)pref; + current_value = editTextPref.getText(); + } + + if( current_value.equals(default_value) ) { + switch (pref.getKey()) { + case "preference_exif_artist": + pref.setSummary(R.string.preference_exif_artist_summary); + break; + case "preference_exif_copyright": + pref.setSummary(R.string.preference_exif_copyright_summary); + break; + case "preference_save_photo_prefix": + pref.setSummary(R.string.preference_save_photo_prefix_summary); + break; + case "preference_save_video_prefix": + pref.setSummary(R.string.preference_save_video_prefix_summary); + break; + case "preference_textstamp": + pref.setSummary(R.string.preference_textstamp_summary); + break; + } + } + else { + // non-default value, so display the current value + pref.setSummary(current_value); + } + } + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyTileService.java b/app/src/main/java/net/sourceforge/opencamera/MyTileService.java new file mode 100644 index 0000000..4954cb3 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyTileService.java @@ -0,0 +1,63 @@ +package net.sourceforge.opencamera; + +import android.app.PendingIntent; +import android.content.Intent; +import android.os.Build; +import android.service.quicksettings.TileService; +import androidx.annotation.RequiresApi; +import android.util.Log; + +/** Provides service for quick settings tile. 
+ */ +@RequiresApi(api = Build.VERSION_CODES.N) +public class MyTileService extends TileService { + private static final String TAG = "MyTileService"; + public static final String TILE_ID = "net.sourceforge.opencamera.TILE_CAMERA"; + + @Override + public void onDestroy() { + super.onDestroy(); + } + + @Override + public void onTileAdded() { + super.onTileAdded(); + } + + @Override + public void onTileRemoved() { + super.onTileRemoved(); + } + + @Override + public void onStartListening() { + super.onStartListening(); + } + + @Override + public void onStopListening() { + super.onStopListening(); + } + + @Override + public void onClick() { + if( MyDebug.LOG ) + Log.d(TAG, "onClick"); + super.onClick(); + Intent intent = new Intent(this, MainActivity.class); + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + intent.setAction(TILE_ID); + // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+ + // FLAG_IMMUTABLE needed for PendingIntents on Android 12+ + PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE); + startActivityAndCollapse(pending_intent); + } + else { + // still get warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+ + // and only seems possible to disable the warning for the function, not this statement + startActivityAndCollapse(intent); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyTileServiceFrontCamera.java b/app/src/main/java/net/sourceforge/opencamera/MyTileServiceFrontCamera.java new file mode 100644 index 0000000..3bcb684 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyTileServiceFrontCamera.java @@ -0,0 +1,63 @@ +package 
net.sourceforge.opencamera; + +import android.app.PendingIntent; +import android.content.Intent; +import android.os.Build; +import android.service.quicksettings.TileService; +import androidx.annotation.RequiresApi; +import android.util.Log; + +/** Provides service for quick settings tile. + */ +@RequiresApi(api = Build.VERSION_CODES.N) +public class MyTileServiceFrontCamera extends TileService { + private static final String TAG = "MyTileServiceFrontCam"; + public static final String TILE_ID = "net.sourceforge.opencamera.TILE_FRONT_CAMERA"; + + @Override + public void onDestroy() { + super.onDestroy(); + } + + @Override + public void onTileAdded() { + super.onTileAdded(); + } + + @Override + public void onTileRemoved() { + super.onTileRemoved(); + } + + @Override + public void onStartListening() { + super.onStartListening(); + } + + @Override + public void onStopListening() { + super.onStopListening(); + } + + @Override + public void onClick() { + if( MyDebug.LOG ) + Log.d(TAG, "onClick"); + super.onClick(); + Intent intent = new Intent(this, MainActivity.class); + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + intent.setAction(TILE_ID); + // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+ + // FLAG_IMMUTABLE needed for PendingIntents on Android 12+ + PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE); + startActivityAndCollapse(pending_intent); + } + else { + // still get warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+ + // and only seems possible to disable the warning for the function, not this statement + startActivityAndCollapse(intent); + } + } +} diff --git 
a/app/src/main/java/net/sourceforge/opencamera/MyTileServiceVideo.java b/app/src/main/java/net/sourceforge/opencamera/MyTileServiceVideo.java new file mode 100644 index 0000000..c3da602 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyTileServiceVideo.java @@ -0,0 +1,63 @@ +package net.sourceforge.opencamera; + +import android.app.PendingIntent; +import android.content.Intent; +import android.os.Build; +import android.service.quicksettings.TileService; +import androidx.annotation.RequiresApi; +import android.util.Log; + +/** Provides service for quick settings tile. + */ +@RequiresApi(api = Build.VERSION_CODES.N) +public class MyTileServiceVideo extends TileService { + private static final String TAG = "MyTileServiceVideo"; + public static final String TILE_ID = "net.sourceforge.opencamera.TILE_VIDEO"; + + @Override + public void onDestroy() { + super.onDestroy(); + } + + @Override + public void onTileAdded() { + super.onTileAdded(); + } + + @Override + public void onTileRemoved() { + super.onTileRemoved(); + } + + @Override + public void onStartListening() { + super.onStartListening(); + } + + @Override + public void onStopListening() { + super.onStopListening(); + } + + @Override + public void onClick() { + if( MyDebug.LOG ) + Log.d(TAG, "onClick"); + super.onClick(); + Intent intent = new Intent(this, MainActivity.class); + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + intent.setAction(TILE_ID); + // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+ + // FLAG_IMMUTABLE needed for PendingIntents on Android 12+ + PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE); + startActivityAndCollapse(pending_intent); + } + else { + // still get 
warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+ + // and only seems possible to disable the warning for the function, not this statement + startActivityAndCollapse(intent); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/MyWidgetProviderTakePhoto.java b/app/src/main/java/net/sourceforge/opencamera/MyWidgetProviderTakePhoto.java new file mode 100644 index 0000000..e58b0ef --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/MyWidgetProviderTakePhoto.java @@ -0,0 +1,59 @@ +package net.sourceforge.opencamera; + +import android.app.PendingIntent; +import android.appwidget.AppWidgetManager; +import android.appwidget.AppWidgetProvider; +import android.content.Context; +import android.content.Intent; +import android.os.Build; +import android.util.Log; +import android.widget.RemoteViews; + +/** Handles the Open Camera "take photo" widget. This widget launches Open + * Camera, and immediately takes a photo. 
+ */ +public class MyWidgetProviderTakePhoto extends AppWidgetProvider { + private static final String TAG = "MyWidgetProviderTakePho"; + + // see http://developer.android.com/guide/topics/appwidgets/index.html + public void onUpdate(Context context, AppWidgetManager appWidgetManager, int [] appWidgetIds) { + if( MyDebug.LOG ) + Log.d(TAG, "onUpdate"); + if( MyDebug.LOG ) + Log.d(TAG, "length = " + appWidgetIds.length); + + for(int appWidgetId : appWidgetIds) { + if( MyDebug.LOG ) + Log.d(TAG, "appWidgetId: " + appWidgetId); + + Intent intent = new Intent(context, TakePhoto.class); + + int flags = PendingIntent.FLAG_UPDATE_CURRENT; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) + flags = flags | PendingIntent.FLAG_IMMUTABLE; // needed for targetting Android 12+, but fine to set it all versions from Android 6 onwards + PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, intent, flags); + + RemoteViews remote_views = new RemoteViews(context.getPackageName(), R.layout.widget_layout_take_photo); + remote_views.setOnClickPendingIntent(R.id.widget_take_photo, pendingIntent); + + appWidgetManager.updateAppWidget(appWidgetId, remote_views); + } + } + + /*@Override + public void onReceive(Context context, Intent intent) { + if( MyDebug.LOG ) { + Log.d(TAG, "onReceive " + intent); + } + if (intent.getAction().equals("net.sourceforge.opencamera.LAUNCH_OPEN_CAMERA")) { + if( MyDebug.LOG ) + Log.d(TAG, "Launching MainActivity"); + final Intent activity = new Intent(context, MainActivity.class); + activity.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + context.startActivity(activity); + if( MyDebug.LOG ) + Log.d(TAG, "done"); + } + super.onReceive(context, intent); + }*/ +} diff --git a/app/src/main/java/net/sourceforge/opencamera/OpenCameraApplication.java b/app/src/main/java/net/sourceforge/opencamera/OpenCameraApplication.java new file mode 100644 index 0000000..8163bc7 --- /dev/null +++ 
b/app/src/main/java/net/sourceforge/opencamera/OpenCameraApplication.java @@ -0,0 +1,30 @@ +package net.sourceforge.opencamera; + +import android.app.Application; +import android.os.Process; +import android.util.Log; + +/** We override the Application class to implement the workaround at + * https://issuetracker.google.com/issues/36972466#comment14 for Google bug crash. It seems ugly, + * but Google consider this a low priority despite calling these "bad behaviours" in applications! + */ +public class OpenCameraApplication extends Application { + private static final String TAG = "OpenCameraApplication"; + + @Override + public void onCreate() { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(); + checkAppReplacingState(); + } + + private void checkAppReplacingState() { + if( MyDebug.LOG ) + Log.d(TAG, "checkAppReplacingState"); + if( getResources() == null ) { + Log.e(TAG, "app is replacing, kill"); + Process.killProcess(Process.myPid()); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessor.java b/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessor.java new file mode 100644 index 0000000..ee316d3 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessor.java @@ -0,0 +1,3028 @@ +package net.sourceforge.opencamera; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.IntBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.Point; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.Rect; +import android.os.Environment; +import android.util.Log; + +public class 
PanoramaProcessor { + private static final String TAG = "PanoramaProcessor"; + + private final Context context; + private final HDRProcessor hdrProcessor; + + public PanoramaProcessor(Context context, HDRProcessor hdrProcessor) { + this.context = context; + this.hdrProcessor = hdrProcessor; + } + + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + } + + private Bitmap reduceBitmap(Bitmap bitmap) { + if( MyDebug.LOG ) + Log.d(TAG, "reduceBitmap"); + long time_s = 0; + if( MyDebug.LOG ) + time_s = System.currentTimeMillis(); + + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + + Bitmap reduced_bitmap = Bitmap.createBitmap(width/2, height/2, Bitmap.Config.ARGB_8888); + + //final boolean use_reduce_2d = true; + final boolean use_reduce_2d = false; // faster to do reduce as two 1D passes (note this gives minor differences in resultant images due to numerical wobble) + if( use_reduce_2d ) { + JavaImageFunctions.ReduceBitmapFunction function = new JavaImageFunctions.ReduceBitmapFunction(bitmap); + JavaImageProcessing.applyFunction(function, null, reduced_bitmap, 0, 0, reduced_bitmap.getWidth(), reduced_bitmap.getHeight()); + } + else { + + /* + // work on bitmap directly: + + Bitmap reduced_bitmap_x = Bitmap.createBitmap(width/2, height, Bitmap.Config.ARGB_8888); + JavaImageFunctions.ReduceBitmapXFunction function_x = new JavaImageFunctions.ReduceBitmapXFunction(bitmap); + JavaImageProcessing.applyFunction(function_x, null, reduced_bitmap_x, 0, 0, reduced_bitmap_x.getWidth(), reduced_bitmap_x.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### time for reduceBitmapX: " + (System.currentTimeMillis() - time_s)); + + JavaImageFunctions.ReduceBitmapYFunction function_y = new JavaImageFunctions.ReduceBitmapYFunction(reduced_bitmap_x); + JavaImageProcessing.applyFunction(function_y, null, reduced_bitmap, 0, 0, reduced_bitmap.getWidth(), reduced_bitmap.getHeight()); + + reduced_bitmap_x.recycle(); + */ + + // work with temp arrays 
instead of bitmaps + byte [] bitmap_argb; + { + int [] pixels = new int[width*height]; + bitmap.getPixels(pixels, 0, width, 0, 0, width, height); + if( MyDebug.LOG ) + Log.d(TAG, "### reduceBitmap: time after getPixels: " + (System.currentTimeMillis() - time_s)); + // convert int[] array to byte[] array + ByteBuffer byte_buffer = ByteBuffer.allocate(4*width*height); + IntBuffer int_buffer = byte_buffer.asIntBuffer(); + int_buffer.put(pixels); + bitmap_argb = byte_buffer.array(); + if( MyDebug.LOG ) + Log.d(TAG, "### reduceBitmap: time after converting int array to byte array: " + (System.currentTimeMillis() - time_s)); + } + + byte [] reduced_bitmap_x_argb = new byte[4*(width/2)*(height)]; + JavaImageFunctions.ReduceBitmapXFullFunction function_x = new JavaImageFunctions.ReduceBitmapXFullFunction(bitmap_argb, reduced_bitmap_x_argb, width/2); + JavaImageProcessing.applyFunction(function_x, null, null, 0, 0, width/2, height); + if( MyDebug.LOG ) + Log.d(TAG, "### time for reduceBitmapX: " + (System.currentTimeMillis() - time_s)); + + // noinspection UnusedAssignment + bitmap_argb = null; // help garbage collection + + byte [] reduced_bitmap_argb = new byte[4*(width/2)*(height/2)]; + JavaImageFunctions.ReduceBitmapYFullFunction function_y = new JavaImageFunctions.ReduceBitmapYFullFunction(reduced_bitmap_x_argb, reduced_bitmap_argb, width/2, height/2); + JavaImageProcessing.applyFunction(function_y, null, null, 0, 0, width/2, height/2); + if( MyDebug.LOG ) + Log.d(TAG, "### time for reduceBitmapY: " + (System.currentTimeMillis() - time_s)); + + // noinspection UnusedAssignment + reduced_bitmap_x_argb = null; // help garbage collection + + { + int [] pixels = new int[(width/2)*(height/2)]; + IntBuffer int_buffer = ByteBuffer.wrap(reduced_bitmap_argb).asIntBuffer(); + int_buffer.get(pixels); + if( MyDebug.LOG ) + Log.d(TAG, "### reduceBitmap: time after converting byte array to int array: " + (System.currentTimeMillis() - time_s)); + reduced_bitmap.setPixels(pixels,0, 
width/2, 0, 0, width/2, height/2); + if( MyDebug.LOG ) + Log.d(TAG, "### reduceBitmap: time after setPixels: " + (System.currentTimeMillis() - time_s)); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "### time for reduceBitmap: " + (System.currentTimeMillis() - time_s)); + + return reduced_bitmap; + } + + private Bitmap expandBitmap(Bitmap bitmap) { + if( MyDebug.LOG ) + Log.d(TAG, "expandBitmap"); + long time_s = 0; + if( MyDebug.LOG ) + time_s = System.currentTimeMillis(); + + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + + /* + // work on bitmap directly: + + Bitmap expanded_bitmap = Bitmap.createBitmap(2*width, 2*height, Bitmap.Config.ARGB_8888); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after create expanded_bitmap: " + (System.currentTimeMillis() - time_s)); + JavaImageFunctions.ExpandBitmapFunction function = new JavaImageFunctions.ExpandBitmapFunction(bitmap); + JavaImageProcessing.applyFunction(function, null, expanded_bitmap, 0, 0, expanded_bitmap.getWidth(), expanded_bitmap.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after expand: " + (System.currentTimeMillis() - time_s)); + + Bitmap temp_bitmap = Bitmap.createBitmap(2*width, 2*height, Bitmap.Config.ARGB_8888); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after create temp_bitmap: " + (System.currentTimeMillis() - time_s)); + JavaImageFunctions.Blur1dXFunction function_blur1dX = new JavaImageFunctions.Blur1dXFunction(expanded_bitmap); + JavaImageProcessing.applyFunction(function_blur1dX, null, temp_bitmap, 0, 0, temp_bitmap.getWidth(), temp_bitmap.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after blur1dX: " + (System.currentTimeMillis() - time_s)); + + // now re-use expanded_bitmap for the result_bitmap + @SuppressWarnings("UnnecessaryLocalVariable") + Bitmap result_bitmap = expanded_bitmap; + JavaImageFunctions.Blur1dYFunction function_blur1dY = new JavaImageFunctions.Blur1dYFunction(temp_bitmap); + 
JavaImageProcessing.applyFunction(function_blur1dY, null, result_bitmap, 0, 0, result_bitmap.getWidth(), result_bitmap.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after blur1dY: " + (System.currentTimeMillis() - time_s)); + + temp_bitmap.recycle(); + */ + + // work with temp arrays instead of bitmaps + + byte [] bitmap_argb; + { + int [] pixels = new int[width*height]; + bitmap.getPixels(pixels, 0, width, 0, 0, width, height); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after getPixels: " + (System.currentTimeMillis() - time_s)); + /*bitmap_argb = new byte[4*width*height]; + for(int byte_i=0,int_i=0;int_i> 24) & 0xFF); + bitmap_argb[byte_i+1] = (byte)((color >> 16) & 0xFF); + bitmap_argb[byte_i+2] = (byte)((color >> 8) & 0xFF); + bitmap_argb[byte_i+3] = (byte)(color & 0xFF); + }*/ + // convert int[] array to byte[] array + ByteBuffer byte_buffer = ByteBuffer.allocate(4*width*height); + IntBuffer int_buffer = byte_buffer.asIntBuffer(); + int_buffer.put(pixels); + bitmap_argb = byte_buffer.array(); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after converting int array to byte array: " + (System.currentTimeMillis() - time_s)); + } + + byte [] expanded_bitmap_argb = new byte[4*(2*width)*(2*height)]; + JavaImageFunctions.ExpandBitmapFullFunction function = new JavaImageFunctions.ExpandBitmapFullFunction(bitmap_argb, expanded_bitmap_argb, 2*width, 2*height); + JavaImageProcessing.applyFunction(function, null, null, 0, 0, 2*width, 2*height); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after expand: " + (System.currentTimeMillis() - time_s)); + + // noinspection UnusedAssignment + bitmap_argb = null; // help garbage collection + + /*Bitmap expanded_bitmap = Bitmap.createBitmap(2*width, 2*height, Bitmap.Config.ARGB_8888); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after create expanded_bitmap: " + (System.currentTimeMillis() - time_s)); + JavaImageFunctions.ExpandBitmapFunction function = new 
JavaImageFunctions.ExpandBitmapFunction(bitmap); + JavaImageProcessing.applyFunction(function, null, expanded_bitmap, 0, 0, expanded_bitmap.getWidth(), expanded_bitmap.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after expand: " + (System.currentTimeMillis() - time_s)); + + byte [] expanded_bitmap_argb = new byte[4*(2*width)*(2*height)]; + { + int [] pixels = new int[(2*width)*(2*height)]; + expanded_bitmap.getPixels(pixels, 0, 2*width, 0, 0, 2*width, 2*height); + for(int byte_i=0,int_i=0;int_i<(2*width)*(2*height);byte_i+=4,int_i++) { + int color = pixels[int_i]; + expanded_bitmap_argb[byte_i] = (byte)((color >> 24) & 0xFF); + expanded_bitmap_argb[byte_i+1] = (byte)((color >> 16) & 0xFF); + expanded_bitmap_argb[byte_i+2] = (byte)((color >> 8) & 0xFF); + expanded_bitmap_argb[byte_i+3] = (byte)(color & 0xFF); + } + expanded_bitmap.recycle(); + }*/ + + byte [] temp_bitmap_argb = new byte[4*(2*width)*(2*height)]; + JavaImageFunctions.Blur1dXFullFunction function_blur1dX = new JavaImageFunctions.Blur1dXFullFunction(expanded_bitmap_argb, temp_bitmap_argb, 2*width, 2*height); + JavaImageProcessing.applyFunction(function_blur1dX, null, null, 0, 0, 2*width, 2*height); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after blur1dX: " + (System.currentTimeMillis() - time_s)); + + /*Bitmap temp_bitmap = Bitmap.createBitmap(2*width, 2*height, Bitmap.Config.ARGB_8888); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after create temp_bitmap: " + (System.currentTimeMillis() - time_s)); + JavaImageFunctions.Blur1dXFunction function_blur1dX = new JavaImageFunctions.Blur1dXFunction(expanded_bitmap); + JavaImageProcessing.applyFunction(function_blur1dX, null, temp_bitmap, 0, 0, temp_bitmap.getWidth(), temp_bitmap.getHeight()); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after blur1dX: " + (System.currentTimeMillis() - time_s)); + expanded_bitmap.recycle(); + + byte [] temp_bitmap_argb = new byte[4*(2*width)*(2*height)]; + { + 
int [] pixels = new int[(2*width)*(2*height)]; + temp_bitmap.getPixels(pixels, 0, 2*width, 0, 0, 2*width, 2*height); + for(int byte_i=0,int_i=0;int_i<(2*width)*(2*height);byte_i+=4,int_i++) { + int color = pixels[int_i]; + temp_bitmap_argb[byte_i] = (byte)((color >> 24) & 0xFF); + temp_bitmap_argb[byte_i+1] = (byte)((color >> 16) & 0xFF); + temp_bitmap_argb[byte_i+2] = (byte)((color >> 8) & 0xFF); + temp_bitmap_argb[byte_i+3] = (byte)(color & 0xFF); + } + temp_bitmap.recycle(); + }*/ + + //byte [] result_bitmap_argb = new byte[4*(2*width)*(2*height)]; + // now re-use expanded_bitmap for the result_bitmap + @SuppressWarnings("UnnecessaryLocalVariable") + byte [] result_bitmap_argb = expanded_bitmap_argb; + + JavaImageFunctions.Blur1dYFullFunction function_blur1dY = new JavaImageFunctions.Blur1dYFullFunction(temp_bitmap_argb, result_bitmap_argb, 2*width, 2*height); + JavaImageProcessing.applyFunction(function_blur1dY, null, null, 0, 0, 2*width, 2*height); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after blur1dY: " + (System.currentTimeMillis() - time_s)); + + // noinspection UnusedAssignment + temp_bitmap_argb = null; // help garbage collection + + Bitmap result_bitmap = Bitmap.createBitmap(2*width, 2*height, Bitmap.Config.ARGB_8888); + { + int [] pixels = new int[(2*width)*(2*height)]; + /*for(int byte_i=0,int_i=0;int_i<(2*width)*(2*height);byte_i+=4,int_i++) { + int a = result_bitmap_argb[byte_i] & 0xFF; + int r = result_bitmap_argb[byte_i+1] & 0xFF; + int g = result_bitmap_argb[byte_i+2] & 0xFF; + int b = result_bitmap_argb[byte_i+3] & 0xFF; + pixels[int_i] = (a << 24) | (r << 16) | (g << 8) | b; + }*/ + IntBuffer int_buffer = ByteBuffer.wrap(result_bitmap_argb).asIntBuffer(); + int_buffer.get(pixels); + if( MyDebug.LOG ) + Log.d(TAG, "### expandBitmap: time after converting byte array to int array: " + (System.currentTimeMillis() - time_s)); + result_bitmap.setPixels(pixels,0, 2*width, 0, 0, 2*width, 2*height); + if( MyDebug.LOG ) + Log.d(TAG, "### 
expandBitmap: time after setPixels: " + (System.currentTimeMillis() - time_s)); + } + + return result_bitmap; + } + + /** Creates a floating point array representing a bitmap where each pixel equals the pixel from + * bitmap0 minus the corresponding pixel from bitmap1. + */ + private float [] subtractBitmap(Bitmap bitmap0, Bitmap bitmap1) { + if( MyDebug.LOG ) + Log.d(TAG, "subtractBitmap"); + int width = bitmap0.getWidth(); + int height = bitmap0.getHeight(); + if( bitmap1.getWidth() != width || bitmap1.getHeight() != height ) { + Log.e(TAG, "bitmaps of different dimensions"); + throw new RuntimeException(); + } + float [] result_rgbf = new float[3*width*height]; + + JavaImageFunctions.SubtractBitmapFunction function = new JavaImageFunctions.SubtractBitmapFunction(result_rgbf, bitmap1); + JavaImageProcessing.applyFunction(function, bitmap0, null, 0, 0, bitmap0.getWidth(), bitmap0.getHeight()); + + return result_rgbf; + } + + /** Updates bitmap0 such that each pixel equals the pixel from bitmap0 plus the + * corresponding pixel from bitmap1. + * bitmap0 should be of type RGBA_8888, bitmap1 should be of type RGBf. 
+ */ + private void addBitmap(Bitmap bitmap0, float [] bitmap1) { + if( MyDebug.LOG ) + Log.d(TAG, "addBitmap"); + int width = bitmap0.getWidth(); + int height = bitmap0.getHeight(); + if( bitmap1.length != 3*width*height ) { + Log.e(TAG, "bitmaps of different dimensions"); + throw new RuntimeException(); + } + JavaImageFunctions.AddBitmapFunction function = new JavaImageFunctions.AddBitmapFunction(bitmap1, width); + JavaImageProcessing.applyFunction(function, bitmap0, bitmap0, 0, 0, bitmap0.getWidth(), bitmap0.getHeight()); + } + + private List createGaussianPyramid(Bitmap bitmap, int n_levels) { + if( MyDebug.LOG ) + Log.d(TAG, "createGaussianPyramid"); + List pyramid = new ArrayList<>(); + + pyramid.add(bitmap); + for(int i=0;i diffs; // floating point diffs, i-th entry equal to [G(i) - G'(i+1)], where G(i) is the i-th level of the gaussian pyramid + private final List widths; // width of each floating point bitmap in diffs + private final List heights; // width of each floating point bitmap in diffs + private Bitmap top_level; + + LaplacianPyramid() { + this.diffs = new ArrayList<>(); + this.widths = new ArrayList<>(); + this.heights = new ArrayList<>(); + } + + void addDiff(float [] diff, int width, int height) { + this.diffs.add(diff); + this.widths.add(width); + this.heights.add(height); + } + + void setTopLevel(Bitmap top_level) { + this.top_level = top_level; + } + } + + /** Creates a laplacian pyramid of the supplied bitmap, ordered from bottom to top. The i-th + * entry of the diffs array is equal to [G(i) - G'(i+1)], where G(i) is the i-th level of the gaussian pyramid, + * and G' is created by expanding a level of the gaussian pyramid. The last + * (i.e., top) level of the gaussian pyramid is stored as top_level. + * The diffs are of type floating point (RGB); the top_level is of type + * RGBA_8888. 
+ */ + private LaplacianPyramid createLaplacianPyramid(Bitmap bitmap, int n_levels, String name) { + if( MyDebug.LOG ) + Log.d(TAG, "createLaplacianPyramid"); + long time_s = 0; + if( MyDebug.LOG ) + time_s = System.currentTimeMillis(); + + List gaussianPyramid = createGaussianPyramid(bitmap, n_levels); + if( MyDebug.LOG ) + Log.d(TAG, "### createLaplacianPyramid: time after createGaussianPyramid: " + (System.currentTimeMillis() - time_s)); + /*if( MyDebug.LOG ) + { + // debug + savePyramid("gaussian", gaussianPyramid); + }*/ + + LaplacianPyramid pyramid = new LaplacianPyramid(); + + for(int i=0;i=0;i--) { + Bitmap expanded_bitmap = expandBitmap(bitmap); + bitmap.recycle(); + addBitmap(expanded_bitmap, pyramid.diffs.get(i)); + bitmap = expanded_bitmap; + } + + return bitmap; + } + + private void computeInterpolatedBestPath(int [] interpolated_best_path, int width, int height, int blend_width, int [] best_path, int best_path_n_x) { + float best_path_y_scale = best_path.length/(float)height; + for(int y=0;y= best_path.length-1+0.5f ) { + best_path_value = best_path[best_path.length-1]; + } + else { + best_path_y_index -= 0.5f; + int best_path_y_index_i = (int)best_path_y_index; + float linear_alpha = best_path_y_index - best_path_y_index_i; + //float alpha = linear_alpha; + //final float edge_length = 0.25f; + final float edge_length = 0.1f; + float alpha; + if( linear_alpha < edge_length ) + alpha = 0.0f; + else if( linear_alpha > 1.0f-edge_length ) + alpha = 1.0f; + else + alpha = (linear_alpha - edge_length) / (1.0f - 2.0f*edge_length); + int prev_best_path = best_path[best_path_y_index_i]; + int next_best_path = best_path[best_path_y_index_i+1]; + best_path_value = (1.0f-alpha) * prev_best_path + alpha * next_best_path; + /*if( MyDebug.LOG ) { + Log.d(TAG, " alpha: " + alpha); + Log.d(TAG, " prev_best_path: " + prev_best_path); + Log.d(TAG, " next_best_path: " + next_best_path); + }*/ + } + //interpolated_best_path[y] = (int)((best_path_value+1) * 
best_path_x_width + 0.5f); + float alpha = best_path_value / (best_path_n_x-1.0f); + float frac = (1.0f - alpha) * 0.25f + alpha * 0.75f; + interpolated_best_path[y] = (int)(frac*width + 0.5f); + /*if( MyDebug.LOG ) { + Log.d(TAG, " interpolated_best_path[" + y + "]: " + interpolated_best_path[y] + " (best_path_value " + best_path_value + ")"); + }*/ + } + if( interpolated_best_path[y] - blend_width/2 < 0 ) { + Log.e(TAG, " interpolated_best_path[" + y + "]: " + interpolated_best_path[y]); + Log.e(TAG, " blend_width: " + blend_width); + Log.e(TAG, " width: " + width); + throw new RuntimeException("blend window runs off left hand size"); + } + else if( interpolated_best_path[y] + blend_width/2 > width ) { + Log.e(TAG, " interpolated_best_path[" + y + "]: " + interpolated_best_path[y]); + Log.e(TAG, " blend_width: " + blend_width); + Log.e(TAG, " width: " + width); + throw new RuntimeException("blend window runs off right hand size"); + } + } + } + + /** Updates every entry in pyramid0 to be a blend from the left hand of pyramid0 to the + * right hand of pyramid1. + * Note that the width of the blend region will be half of the width of each image. + * @param best_path If non-null, the blend region will follow the supplied best path. 
+ */ + private void mergePyramids(LaplacianPyramid pyramid0, LaplacianPyramid pyramid1, int [] best_path, int best_path_n_x) { + if( MyDebug.LOG ) + Log.d(TAG, "mergePyramids"); + + if( best_path == null ) { + best_path = new int[1]; + best_path_n_x = 3; + best_path[0] = 1; + //best_path[0] = 2; // test + } + if( MyDebug.LOG ) { + for(int i=0;i { + private final int index0, index1; + private float distance; // from 0 to 1, higher means poorer match + + private FeatureMatch(int index0, int index1) { + this.index0 = index0; + this.index1 = index1; + } + + @Override + public int compareTo(FeatureMatch that) { + //return (int)(this.distance - that.distance); + /*if( this.distance > that.distance ) + return 1; + else if( this.distance < that.distance ) + return -1; + else + return 0;*/ + return Float.compare(this.distance, that.distance); + } + + @Override + public boolean equals(Object that) { + return (that instanceof FeatureMatch) && compareTo((FeatureMatch)that) == 0; + } + } + + private static void computeDistancesBetweenMatches(List matches, int st_indx, int nd_indx, int feature_descriptor_radius, List bitmaps, int [] pixels0, int [] pixels1) { + final int wid = 2*feature_descriptor_radius+1; + final int wid2 = wid*wid; + for(int indx=st_indx;indx matches; + private final int st_indx; + private final int nd_indx; + private final int feature_descriptor_radius; + private final List bitmaps; + private final int [] pixels0; + private final int [] pixels1; + + ComputeDistancesBetweenMatchesThread(List matches, int st_indx, int nd_indx, int feature_descriptor_radius, List bitmaps, int [] pixels0, int [] pixels1) { + super("ComputeDistancesBetweenMatchesThread"); + this.matches = matches; + this.st_indx = st_indx; + this.nd_indx = nd_indx; + this.feature_descriptor_radius = feature_descriptor_radius; + this.bitmaps = bitmaps; + this.pixels0 = pixels0; + this.pixels1 = pixels1; + } + + public void run() { + computeDistancesBetweenMatches(matches, st_indx, nd_indx, 
feature_descriptor_radius, bitmaps, pixels0, pixels1); + } + } + + static class AutoAlignmentByFeatureResult { + final int offset_x; + final int offset_y; + final float rotation; + final float y_scale; + + AutoAlignmentByFeatureResult(int offset_x, int offset_y, float rotation, float y_scale) { + this.offset_x = offset_x; + this.offset_y = offset_y; + this.rotation = rotation; + this.y_scale = y_scale; + } + } + + private AutoAlignmentByFeatureResult autoAlignmentByFeature(int width, int height, List bitmaps, int debug_index) throws PanoramaProcessorException { + if( MyDebug.LOG ) { + Log.d(TAG, "autoAlignmentByFeature"); + Log.d(TAG, "width: " + width); + Log.d(TAG, "height: " + height); + } + long time_s = 0; + if( MyDebug.LOG ) + time_s = System.currentTimeMillis(); + if( bitmaps.size() != 2 ) { + Log.e(TAG, "must have 2 bitmaps"); + throw new PanoramaProcessorException(PanoramaProcessorException.INVALID_N_IMAGES); + } + + /*if( MyDebug.LOG ) + Log.d(TAG, "convert to greyscale"); + Bitmap [] gs_bitmaps = new Bitmap[bitmaps.size()]; + Paint paint = new Paint(); + ColorMatrix color_matrix = new ColorMatrix(); + color_matrix.set(new float[]{ + 0.3f, 0.59f, 0.11f, 0.0f, 0.0f, + 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, + 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, + 0.0f, 0.0f, 0.0f, 0.0f, 0.0f + }); + color_matrix.setSaturation(0); + ColorMatrixColorFilter color_matrix_filter = new ColorMatrixColorFilter(color_matrix); + paint.setColorFilter(color_matrix_filter); + for(int i=0;i all_points = new ArrayList<>(); + for(int cy=0;cy>> find corners, chunk " + cy + " / " + n_y_chunks); + float threshold = 5000000.0f; + // setting a min_threshold fixes testPanorama11, also helps testPanorama1 + // note that this needs to be at least 1250000.0f - at 625000.0f, testPanorama1 + // still has problems and in fact ends up being worse than having no min threshold + final float min_threshold = 1250000.0f; + //final float min_threshold = 625000.0f; + float low_threshold = 0.0f; + float high_threshold = 
-1.0f; + int start_y = (cy*height)/n_y_chunks; + int stop_y = ((cy+1)*height)/n_y_chunks; + if( MyDebug.LOG ) { + Log.d(TAG, " start_y: " + start_y); + Log.d(TAG, " stop_y: " + stop_y); + } + final int max_iter = 10; + for(int count=0;;count++) { + if( MyDebug.LOG ) + Log.d(TAG, "### attempt " + count + " try threshold: " + threshold + " [ " + low_threshold + " : " + high_threshold + " ]"); + + JavaImageFunctions.LocalMaximumFunction function = new JavaImageFunctions.LocalMaximumFunction(strength_rgbf, bytes, width, height, threshold); + JavaImageProcessing.applyFunction(function, null, null, 0, 0, width, height); + + // find points + List points = new ArrayList<>(); + for(int y=Math.max(start_y, feature_descriptor_radius);y= min_corners && points.size() <= max_corners ) { + all_points.addAll(points); + break; + } + else if( points.size() < min_corners ) { + if( threshold <= min_threshold ) { + if( MyDebug.LOG ) + Log.d(TAG, " hit minimum threshold: " + threshold); + all_points.addAll(points); + break; + } + else if( count+1 == max_iter ) { + if( MyDebug.LOG ) + Log.d(TAG, " too few points but hit max iterations: " + points.size()); + all_points.addAll(points); + //if( true ) + // throw new RuntimeException("too few points: " + points.size()); // test + break; + } + else { + high_threshold = threshold; + threshold = 0.5f * ( low_threshold + threshold ); + if( MyDebug.LOG ) + Log.d(TAG, " reduced threshold to: " + threshold); + /*if( low_threshold == 0.0f ) { + throw new RuntimeException(); + }*/ + /*if( count == 0 ) { + throw new RuntimeException(); + }*/ + } + } + else if( count+1 == max_iter ) { + if( MyDebug.LOG ) + Log.d(TAG, " too many points but hit max iterations: " + points.size()); + // arbitrarily take a subset + points.subList(max_corners,points.size()).clear(); + all_points.addAll(points); + //if( true ) + // throw new RuntimeException("too many points: " + points.size()); // test + break; + } + else { + low_threshold = threshold; + if( high_threshold < 
0.0f ) { + threshold *= 10.0f; + } + else + threshold = 0.5f * ( threshold + high_threshold ); + if( MyDebug.LOG ) + Log.d(TAG, " increased threshold to: " + threshold); + } + } + } + points_arrays[i] = all_points.toArray(new Point[0]); + + if( MyDebug.LOG ) + Log.d(TAG, "### image: " + i + " has " + points_arrays[i].length + " points"); + } + if( MyDebug.LOG ) + Log.d(TAG, "### autoAlignmentByFeature: time after feature detection: " + (System.currentTimeMillis() - time_s)); + + // if we have too few good corners, risk of getting a poor match + final int min_required_corners = 10; + if( points_arrays[0].length < min_required_corners || points_arrays[1].length < min_required_corners ) { + if( MyDebug.LOG ) + Log.d(TAG, "too few points!"); + /*if( true ) + throw new RuntimeException();*/ + + return new AutoAlignmentByFeatureResult(0, 0, 0.0f, 1.0f); + } + + // generate candidate matches + //noinspection UnnecessaryLocalVariable + final int max_match_dist_x = width; + final int max_match_dist_y = height/16; + final int max_match_dist2 = max_match_dist_x*max_match_dist_x + max_match_dist_y*max_match_dist_y; + if( MyDebug.LOG ) { + Log.d(TAG, "max_match_dist_x: " + max_match_dist_x); + Log.d(TAG, "max_match_dist_y: " + max_match_dist_y); + Log.d(TAG, "max_match_dist2: " + max_match_dist2); + } + List matches = new ArrayList<>(); + for(int i=0;i actual_matches = new ArrayList<>(); + //final int n_matches = (int)(matches.size()*0.25f)+1; + //for(FeatureMatch match : matches) { + for(int i=0;i 0.8f ) { + if( MyDebug.LOG ) { + Log.d(TAG, " reject due to Lowe's test, ratio: " + ratio); + } + reject = true; + } + } + } + if( reject ) { + has_matched0[match.index0] = true; + rejected0[match.index0] = true; + continue; + } + + actual_matches.add(match); + has_matched0[match.index0] = true; + has_matched1[match.index1] = true; + /*if( actual_matches.size() == n_matches ) { + // only use best matches + break; + }*/ + } + if( MyDebug.LOG ) + Log.d(TAG, "### autoAlignmentByFeature: 
time after initial matching: " + (System.currentTimeMillis() - time_s)); + if( MyDebug.LOG ) + Log.d(TAG, "### found: " + actual_matches.size() + " matches"); + Log.d(TAG, "### autoAlignmentByFeature: time after finding possible matches: " + (System.currentTimeMillis() - time_s)); + + // but now choose only best actual matches + // using 0.4 rather than 0.7 helps testPanorama15 images _5 to _6, to get rid of incorrect grass matches (together with Lowe's test) + //int n_matches = (int)(actual_matches.size()*0.1)+1; + int n_matches = (int)(actual_matches.size()*0.4)+1; + //int n_matches = (int)(actual_matches.size()*0.7)+1; + // but don't want too few matches - need at least 4 to get a good transform for testPanorama18, images _2 to _3; + // and at least 5 to get good transform for testPanorama30_galaxys10e, images _0 to _1 + //final int n_minimum_matches_c = 4; + final int n_minimum_matches_c = 5; + /*if( n_matches < n_minimum_matches_c ) { + throw new RuntimeException("n_matches: " + n_matches); + }*/ + n_matches = Math.max(n_minimum_matches_c, n_matches); + if( n_matches < actual_matches.size() ) + actual_matches.subList(n_matches,actual_matches.size()).clear(); + if( MyDebug.LOG ) + Log.d(TAG, "### resized to: " + actual_matches.size() + " actual matches"); + // need to reset has_matched arrays + has_matched0 = new boolean[points_arrays[0].length]; + has_matched1 = new boolean[points_arrays[1].length]; + for(FeatureMatch match : actual_matches) { + has_matched0[match.index0] = true; + has_matched1[match.index1] = true; + if( MyDebug.LOG ) + Log.d(TAG, " actual match between " + match.index0 + " and " + match.index1 + " distance: " + match.distance); + } + if( MyDebug.LOG ) + Log.d(TAG, "### autoAlignmentByFeature: time after choosing best matches: " + (System.currentTimeMillis() - time_s)); + + if( actual_matches.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no matches!"); + /*if( true ) + throw new RuntimeException();*/ + + return new 
AutoAlignmentByFeatureResult(0, 0, 0.0f, 1.0f); + } + + final boolean use_ransac = true; + //final boolean use_ransac = false; + //final boolean estimate_rotation = false; + final boolean estimate_rotation = true; + final boolean estimate_y_scale = false; + //final boolean estimate_y_scale = true; + //final boolean estimate_rotation = debug_index < 3; + boolean use_rotation = false; + boolean use_y_scale = false; + final float max_y_scale = 1.05f + 1.0e-5f; + + // needed a larger min_rotation_dist of Math.max(width, height)/4.0f to help testPanorama15 images _5 to _6, otherwise + // we risk choose matches that are too close, and getting an incorrect rotation + //final float min_rotation_dist = Math.max(5.0f, Math.max(width, height)/32.0f); + final float min_rotation_dist = Math.max(5.0f, Math.max(width, height)/4.0f); + if( MyDebug.LOG ) + Log.d(TAG, "min_rotation_dist: " + min_rotation_dist); + //final float min_rotation_dist2 = 1.0e-5f; + final float min_rotation_dist2 = min_rotation_dist*min_rotation_dist; + + List ransac_matches = new ArrayList<>(); // used for debugging: the matches that were used to define the transform + if( use_ransac ) { + // RANSAC + List best_inliers = new ArrayList<>(); + List inliers = new ArrayList<>(); + //final float max_inlier_dist = 2.01f; + //final float max_inlier_dist = 5.01f; + //final float max_inlier_dist = 10.01f; + //final float max_inlier_dist = 20.01f; + //final float max_inlier_dist = Math.max(10.01f, Math.max(width, height)/100.0f); + final float max_inlier_dist = Math.max(5.01f, Math.max(width, height)/100.0f); + //final float max_inlier_dist = Math.max(2.51f, Math.max(width, height)/200.0f); + //final float max_inlier_dist = Math.max(1.26f, Math.max(width, height)/400.0f); + if( MyDebug.LOG ) + Log.d(TAG, "max_inlier_dist: " + max_inlier_dist); + final float max_inlier_dist2 = max_inlier_dist*max_inlier_dist; + for(int i=0;i best_inliers.size() ) { + // found an improved model! 
+ if( MyDebug.LOG ) + Log.d(TAG, "match " + i + " gives better translation model: " + inliers.size() + " inliers vs " + best_inliers.size()); + ransac_matches.clear(); + ransac_matches.add(match); + best_inliers.clear(); + best_inliers.addAll(inliers); + use_rotation = false; + use_y_scale = false; + if( best_inliers.size() == actual_matches.size() ) { + if( MyDebug.LOG ) + Log.d(TAG, "all matches are inliers"); + // no point trying any further + break; + } + } + } + + if( estimate_rotation ) { + // compute exact rotation and translation + // we need two points, so compare to every other point + for(int j=0;j max_height || + points_arrays[1][match.index1].y < min_height || points_arrays[1][match.index1].y > max_height || + points_arrays[0][match2.index0].y < min_height || points_arrays[0][match2.index0].y > max_height || + points_arrays[1][match2.index1].y < min_height || points_arrays[1][match2.index1].y > max_height + ) { + // for testPanorama28 - can get poor rotations if using matches too low or high, as photos more likely to be distorted + // also helps testPanorama31, testPanorama34, testPanorama35 + continue; + } + + /*float y_scale = 1.0f; + boolean found_y_scale = false; + if( estimate_y_scale && Math.abs(dy0) > min_rotation_dist && Math.abs(dy1) > min_rotation_dist ) { + y_scale = dy1 / dy0; + if( y_scale <= max_y_scale && y_scale >= 1.0f/max_y_scale ) { + found_y_scale = true; + } + else { + y_scale = 1.0f; + } + dy0 *= y_scale; + }*/ + + float angle = (float)(Math.atan2(dy1, dx1) - Math.atan2(dy0, dx0)); + if( angle < -Math.PI ) + angle += (float) (2.0f*Math.PI); + else if( angle > Math.PI ) + angle -= (float) (2.0f*Math.PI); + if( Math.abs(angle) > 30.0f*Math.PI/180.0f ) { + // reject too large angles + continue; + } + /*if( MyDebug.LOG ) { + Log.d(TAG, "ransac: " + i + " , " + j + ": "); + Log.d(TAG, " match 0: " + points_arrays[0][match.index0].x + " , " + points_arrays[0][match.index0].y); + Log.d(TAG, " match 1: " + points_arrays[1][match.index1].x 
+ " , " + points_arrays[1][match.index1].y); + Log.d(TAG, " match2 0: " + points_arrays[0][match2.index0].x + " , " + points_arrays[0][match2.index0].y); + Log.d(TAG, " match2 1: " + points_arrays[1][match2.index1].x + " , " + points_arrays[1][match2.index1].y); + Log.d(TAG, " y_scale: " + y_scale); + Log.d(TAG, " angle: " + angle); + Log.d(TAG, " mag0: " + Math.sqrt(mag_sq0)); + Log.d(TAG, " mag1: " + Math.sqrt(mag_sq1)); + }*/ + + float y_scale = 1.0f; + boolean found_y_scale = false; + if( estimate_y_scale ) { + //int transformed_dx0 = (int)(dx0 * Math.cos(angle) - dy0 * Math.sin(angle)); + int transformed_dy0 = (int)(dx0 * Math.sin(angle) + dy0 * Math.cos(angle)); + if( Math.abs(transformed_dy0) > min_rotation_dist && Math.abs(dy1) > min_rotation_dist ) { + y_scale = dy1 / transformed_dy0; + if( y_scale <= max_y_scale && y_scale >= 1.0f/max_y_scale ) { + found_y_scale = true; + } + else { + y_scale = 1.0f; + } + } + } + + // find the inliers from this + inliers.clear(); + for(FeatureMatch other_match : actual_matches) { + int x0 = points_arrays[0][other_match.index0].x; + int y0 = points_arrays[0][other_match.index0].y; + int x1 = points_arrays[1][other_match.index1].x; + int y1 = points_arrays[1][other_match.index1].y; + x0 -= c0_x; + y0 -= c0_y; + //y0 *= y_scale; + int transformed_x0 = (int)(x0 * Math.cos(angle) - y0 * Math.sin(angle)); + int transformed_y0 = (int)(x0 * Math.sin(angle) + y0 * Math.cos(angle)); + // warning "Possibly lossy implicit cast in compound assignment" suppressed: + // it's intentional that we multiply int by float, and implicitly cast back to int + // (the suggested solution is to first cast the float to int before multiplying, which + // we don't want) + //noinspection lossy-conversions + transformed_y0 *= y_scale; + transformed_x0 += c1_x; + transformed_y0 += c1_y; + + float dx = transformed_x0 - x1; + float dy = transformed_y0 - y1; + /*if( MyDebug.LOG ) { + if( other_match == match ) + Log.d(TAG, " ransac on match: " + i + " , " 
+ j + " : " + dx + " , " + dy); + else if( other_match == match2 ) + Log.d(TAG, " ransac on match2: " + i + " , " + j + " : " + dx + " , " + dy); + }*/ + float error2 = dx*dx + dy*dy; + if( error2 + 1.0e-5 <= max_inlier_dist2 ) { + inliers.add(other_match); + } + } + + if( inliers.size() > best_inliers.size() && inliers.size() >= 5 ) { + // found an improved model! + if( MyDebug.LOG ) { + Log.d(TAG, "match " + i + " gives better rotation model: " + inliers.size() + " inliers vs " + best_inliers.size()); + Log.d(TAG, " c0_x: " + c0_x + " , c0_y: " + c0_y); + Log.d(TAG, " c1_x: " + c1_x + " , c1_y: " + c1_y); + Log.d(TAG, " dx0: " + dx0 + " , dy0: " + dy0); + Log.d(TAG, " dx1: " + dx1 + " , dy1: " + dy1); + Log.d(TAG, " rotate by " + angle + " about: " + c0_x + " , " + c0_y); + Log.d(TAG, " y scale by " + y_scale); + Log.d(TAG, " translate by: " + (c1_x-c0_x) + " , " + (c1_y-c0_y)); + } + ransac_matches.clear(); + ransac_matches.add(match); + ransac_matches.add(match2); + best_inliers.clear(); + best_inliers.addAll(inliers); + use_rotation = true; + use_y_scale = found_y_scale; + if( best_inliers.size() == actual_matches.size() ) { + if( MyDebug.LOG ) + Log.d(TAG, "all matches are inliers"); + // no point trying any further + break; + } + } + } + + if( best_inliers.size() == actual_matches.size() ) { + if( MyDebug.LOG ) + Log.d(TAG, "all matches are inliers"); + // no point trying any further + break; + } + } + } + actual_matches = best_inliers; + if( MyDebug.LOG ) + Log.d(TAG, "### autoAlignmentByFeature: time after RANSAC: " + (System.currentTimeMillis() - time_s)); + if( MyDebug.LOG ) { + for(FeatureMatch match : actual_matches) { + Log.d(TAG, " after ransac: actual match between " + match.index0 + " and " + match.index1 + " distance: " + match.distance); + } + } + } + + Point [] centres = new Point[2]; + for(int i=0;i<2;i++) { + centres[i] = new Point(); + } + for(FeatureMatch match : actual_matches) { + centres[0].x += points_arrays[0][match.index0].x; + 
centres[0].y += points_arrays[0][match.index0].y; + centres[1].x += points_arrays[1][match.index1].x; + centres[1].y += points_arrays[1][match.index1].y; + } + for(int i=0;i<2;i++) { + centres[i].x /= actual_matches.size(); + centres[i].y /= actual_matches.size(); + } + if( MyDebug.LOG ) { + Log.d(TAG, "centres[0]: " + centres[0].x + " , " + centres[0].y); + Log.d(TAG, "centres[1]: " + centres[1].x + " , " + centres[1].y); + } + + int offset_x = centres[1].x - centres[0].x; + int offset_y = centres[1].y - centres[0].y; + float rotation = 0.0f; + float y_scale = 1.0f; + + if( estimate_rotation && use_rotation ) { + /*if( true ) + throw new RuntimeException(); // test*/ + + // first compute an ideal y_scale + /*if( estimate_y_scale && use_y_scale ) { + float y_scale_sum = 0.0f; + int n_y_scale = 0; + for(FeatureMatch match : actual_matches) { + float d0_y = points_arrays[0][match.index0].y - centres[0].y; + float d1_y = points_arrays[1][match.index1].y - centres[1].y; + if( Math.abs(d0_y) > min_rotation_dist && Math.abs(d1_y) > min_rotation_dist ) { + float this_y_scale = d1_y / d0_y; + y_scale_sum += this_y_scale; + n_y_scale++; + if( MyDebug.LOG ) + Log.d(TAG, " match has scale: " + this_y_scale); + } + } + if( n_y_scale > 0 ) { + y_scale = y_scale_sum / n_y_scale; + } + }*/ + + // compute an ideal rotation for a transformation where we rotate about centres[0], and then translate + float angle_sum = 0.0f; + int n_angles = 0; + for(FeatureMatch match : actual_matches) { + float dx0 = points_arrays[0][match.index0].x - centres[0].x; + float dy0 = points_arrays[0][match.index0].y - centres[0].y; + float dx1 = points_arrays[1][match.index1].x - centres[1].x; + float dy1 = points_arrays[1][match.index1].y - centres[1].y; + float mag_sq0 = dx0*dx0 + dy0*dy0; + float mag_sq1 = dx1*dx1 + dy1*dy1; + if( mag_sq0 < 1.0e-5 || mag_sq1 < 1.0e-5 ) { + continue; + } + + //dy0 *= y_scale; + + float angle = (float)(Math.atan2(dy1, dx1) - Math.atan2(dy0, dx0)); + if( angle < -Math.PI 
) + angle += (float) (2.0f*Math.PI); + else if( angle > Math.PI ) + angle -= (float) (2.0f*Math.PI); + if( MyDebug.LOG ) + Log.d(TAG, " match has angle: " + angle); + angle_sum += angle; + n_angles++; + } + if( n_angles > 0 ) { + rotation = angle_sum / n_angles; + } + //rotation = 0.0f; // test + //rotation = (float)(0.125*Math.PI); // test + //centres[1].x = centres[0].x; // test + //centres[1].y = centres[0].y; // test + //offset_x = 0; // test + //offset_y = 0; // test + + if( estimate_y_scale && use_y_scale ) { + float y_scale_sum = 0.0f; + int n_y_scale = 0; + for(FeatureMatch match : actual_matches) { + float dx0 = (points_arrays[0][match.index0].x - centres[0].x); + float dy0 = (points_arrays[0][match.index0].y - centres[0].y); + //float dx1 = (points_arrays[1][match.index1].x - centres[1].x); + float dy1 = (points_arrays[1][match.index1].y - centres[1].y); + int transformed_dy0 = (int)(dx0 * Math.sin(rotation) + dy0 * Math.cos(rotation)); + if( Math.abs(transformed_dy0) > min_rotation_dist && Math.abs(dy1) > min_rotation_dist ) { + float this_y_scale = dy1 / transformed_dy0; + y_scale_sum += this_y_scale; + n_y_scale++; + if( MyDebug.LOG ) + Log.d(TAG, " match has scale: " + this_y_scale); + } + } + if( n_y_scale > 0 ) { + y_scale = y_scale_sum / n_y_scale; + } + } + + // but instead we want to (scale and) rotate about the origin and then translate: + // R[x-c] + c + d = R[x] + (d + c - R[c]) + // Or with scale: + // // RS[x-c] + c + d = RS[x] + (d + c - RS[c]) + //float rotated_centre_x = (float)(centres[0].x * Math.cos(rotation) - y_scale * centres[0].y * Math.sin(rotation)); + //float rotated_centre_y = (float)(centres[0].x * Math.sin(rotation) + y_scale * centres[0].y * Math.cos(rotation)); + // SR[x-c] + c + d = SR[x] + (d + c - SR[c]) + float rotated_centre_x = (float)(centres[0].x * Math.cos(rotation) - centres[0].y * Math.sin(rotation)); + float rotated_centre_y = (float)(centres[0].x * Math.sin(rotation) + centres[0].y * Math.cos(rotation)); + 
rotated_centre_y *= y_scale; + if( MyDebug.LOG ) { + Log.d(TAG, "offset_x before rotation: " + offset_x); + Log.d(TAG, "offset_y before rotation: " + offset_y); + Log.d(TAG, "rotated_centre: " + rotated_centre_x + " , " + rotated_centre_y); + } + //noinspection lossy-conversions + offset_x += centres[0].x - rotated_centre_x; + //noinspection lossy-conversions + offset_y += centres[0].y - rotated_centre_y; + + } + if( MyDebug.LOG ) + Log.d(TAG, "### autoAlignmentByFeature: time after computing transformation: " + (System.currentTimeMillis() - time_s)); + if( MyDebug.LOG ) { + Log.d(TAG, "offset_x: " + offset_x); + Log.d(TAG, "offset_y: " + offset_y); + Log.d(TAG, "rotation: " + rotation); + Log.d(TAG, "y_scale: " + y_scale); + + Log.d(TAG, "ransac matches are:"); + for(FeatureMatch match : ransac_matches) { + int x0 = points_arrays[0][match.index0].x; + int y0 = points_arrays[0][match.index0].y; + int x1 = points_arrays[1][match.index1].x; + int y1 = points_arrays[1][match.index1].y; + Log.d(TAG, " index : " + match.index0 + " to " + match.index1); + Log.d(TAG, " coords " + x0 + " , " + y0 + " to " + x1 + " , " + y1); + Log.d(TAG, " distance: " + match.distance); + } + } + /*if( Math.abs(rotation) > 30.0f*Math.PI/180.0f ) { + // test + throw new RuntimeException(); + }*/ + + if( false && MyDebug.LOG ) { + // debug: + Bitmap bitmap = Bitmap.createBitmap(2*width, height, Bitmap.Config.ARGB_8888); + Paint p = new Paint(); + p.setStyle(Paint.Style.STROKE); + Canvas canvas = new Canvas(bitmap); + + // draw bitmaps + canvas.drawBitmap(bitmaps.get(0), 0, 0, p); + canvas.drawBitmap(bitmaps.get(1), width, 0, p); + + // draw feature points + for(int i=0;i<2;i++) { + for(int j=0;j power ) + power *= 2; + return power; + }*/ + + private static int nextMultiple(int value, int multiple) { + int remainder = value % multiple; + if( remainder > 0 ) { + value += multiple - remainder; + } + return value; + } + + private Bitmap createProjectedBitmap(final Rect src_rect_workspace, final 
Rect dst_rect_workspace, final Bitmap bitmap, final Paint p, final int bitmap_width, final int bitmap_height, final double camera_angle, final int centre_shift_x) { + Bitmap projected_bitmap = Bitmap.createBitmap(bitmap_width, bitmap_height, Bitmap.Config.ARGB_8888); + { + // project + Canvas projected_canvas = new Canvas(projected_bitmap); + int prev_x = 0; + int prev_y0 = -1, prev_y1 = -1; + for(int x=0;x y_tol || Math.abs(dst_y1 - prev_y1) > y_tol ) { + src_rect_workspace.set(prev_x, 0, x, bitmap_height); + dst_rect_workspace.set(prev_x, dst_y0, x, dst_y1); + projected_canvas.drawBitmap(bitmap, src_rect_workspace, dst_rect_workspace, p); + prev_x = x; + prev_y0 = dst_y0; + prev_y1 = dst_y1; + } + + if( x == bitmap_width-1 ) { + // draw last + src_rect_workspace.set(prev_x, 0, x+1, bitmap_height); + dst_rect_workspace.set(prev_x, dst_y0, x+1, dst_y1); + projected_canvas.drawBitmap(bitmap, src_rect_workspace, dst_rect_workspace, p); + } + + /*src_rect.set(x, 0, x+1, bitmap_height); + dst_rect.set(x, dst_y0, x+1, dst_y1); + + projected_canvas.drawBitmap(bitmap, src_rect, dst_rect, p);*/ + } + } + return projected_bitmap; + } + + private void renderPanoramaImage(final int i, final int n_bitmaps, final Rect src_rect_workspace, final Rect dst_rect_workspace, + final Bitmap bitmap, final Paint p, final int bitmap_width, final int bitmap_height, + final int blend_hwidth, final int slice_width, final int offset_x, + final Bitmap panorama, final Canvas canvas, final int crop_x0, final int crop_y0, + final int align_x, final int align_y, final int dst_offset_x, final int shift_stop_x, final int centre_shift_x, + final double camera_angle, long time_s) { + //float alpha = (float)((camera_angle * i)/panorama_pics_per_screen); + if( MyDebug.LOG ) { + //Log.d(TAG, " alpha: " + alpha + " ( " + Math.toDegrees(alpha) + " degrees )"); + Log.d(TAG, " align_x: " + align_x); + Log.d(TAG, " align_y: " + align_y); + Log.d(TAG, " dst_offset_x: " + dst_offset_x); + Log.d(TAG, " 
shift_stop_x: " + shift_stop_x); + } + + if( MyDebug.LOG ) + Log.d(TAG, "### time before projection for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + Bitmap projected_bitmap = createProjectedBitmap(src_rect_workspace, dst_rect_workspace, bitmap, p, bitmap_width, bitmap_height, camera_angle, centre_shift_x); + if( MyDebug.LOG ) + Log.d(TAG, "### time after projection for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + + if( i > 0 && blend_hwidth > 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "### time before blending for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + // first blend right hand side of previous image with left hand side of new image + final int blend_dimension = getBlendDimension(); + + // ensure we blend images that are a multiple of blend_dimension + int blend_width = nextMultiple(2*blend_hwidth, blend_dimension); + int blend_height = nextMultiple(bitmap_height, blend_dimension); + if( MyDebug.LOG ) { + Log.d(TAG, "blend_dimension: " + blend_dimension); + Log.d(TAG, "blend_hwidth: " + blend_hwidth); + Log.d(TAG, "bitmap_height: " + bitmap_height); + Log.d(TAG, "blend_width: " + blend_width); + Log.d(TAG, "blend_height: " + blend_height); + } + + // Note that we don't handle the crop_x0 and crop_y0 in the same way: for the x crop, it's + // important to shift the x coordinate of the blend window to match what we'll blend if not + // cropping. Otherwise we have problems in testPanorama6 and especially testPanorama28 + // (note, due to instability at the time of writing, testPanorama28 issue was reproduced on + // Nokia 8, but not Samsung Galaxy S10e). + // For the y crop, there isn't any advantage to shifting. 
+ + //Bitmap lhs = Bitmap.createBitmap(panorama, offset_x + dst_offset_x - blend_hwidth, 0, 2*blend_hwidth, bitmap_height); + Bitmap lhs = Bitmap.createBitmap(blend_width, blend_height, Bitmap.Config.ARGB_8888); + { + Canvas lhs_canvas = new Canvas(lhs); + src_rect_workspace.set(offset_x + dst_offset_x - blend_hwidth, 0, offset_x + dst_offset_x + blend_hwidth, bitmap_height); + // n.b., shouldn't shift by align_x, align_y + src_rect_workspace.offset(-crop_x0, 0); + dst_rect_workspace.set(0, 0, blend_width, blend_height); + lhs_canvas.drawBitmap(panorama, src_rect_workspace, dst_rect_workspace, p); + } + + //Bitmap rhs = Bitmap.createBitmap(projected_bitmap, offset_x - blend_hwidth, 0, 2*blend_hwidth, bitmap_height); + Bitmap rhs = Bitmap.createBitmap(blend_width, blend_height, Bitmap.Config.ARGB_8888); + { + Canvas rhs_canvas = new Canvas(rhs); + src_rect_workspace.set(offset_x - blend_hwidth, 0, offset_x + blend_hwidth, bitmap_height); + src_rect_workspace.offset(align_x, align_y); + dst_rect_workspace.set(0, -crop_y0, blend_width, blend_height-crop_y0); + rhs_canvas.drawBitmap(projected_bitmap, src_rect_workspace, dst_rect_workspace, p); + } + if( MyDebug.LOG ) { + Log.d(TAG, "lhs dimensions: " + lhs.getWidth() + " x " + lhs.getHeight()); + Log.d(TAG, "rhs dimensions: " + rhs.getWidth() + " x " + rhs.getHeight()); + } + //Bitmap blended_bitmap = blend_panorama_alpha(lhs, rhs); + Bitmap blended_bitmap = blendPyramids(lhs, rhs); + /*Bitmap blended_bitmap = Bitmap.createBitmap(2*blend_hwidth, bitmap_height, Bitmap.Config.ARGB_8888); + Canvas blended_canvas = new Canvas(blended_bitmap); + p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.ADD)); + for(int x=0;x<2*blend_hwidth;x++) { + src_rect_workspace.set(x, 0, x+1, bitmap_height); + + // left hand blend + // if x=0: frac=1 + // if x=2*blend_width-1: frac=0 + float frac = (2.0f*blend_hwidth-1.0f-x)/(2.0f*blend_hwidth-1.0f); + p.setAlpha((int)(255.0f*frac)); + blended_canvas.drawBitmap(lhs, src_rect_workspace, 
src_rect_workspace, p); + + // right hand blend + // if x=0: frac=0 + // if x=2*blend_width-1: frac=1 + frac = ((float)x)/(2.0f*blend_hwidth-1.0f); + p.setAlpha((int)(255.0f*frac)); + blended_canvas.drawBitmap(rhs, src_rect_workspace, src_rect_workspace, p); + } + p.setAlpha(255); // reset + p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_OVER)); // reset + */ + + // now draw the blended region + // note it's intentional that we don't shift for crop_y0, see comment above + canvas.drawBitmap(blended_bitmap, offset_x + dst_offset_x - blend_hwidth - crop_x0, 0, p); + + lhs.recycle(); + rhs.recycle(); + blended_bitmap.recycle(); + if( MyDebug.LOG ) + Log.d(TAG, "### time after blending for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + } + + int start_x = blend_hwidth; + int stop_x = slice_width+blend_hwidth; + if( i == 0 ) + start_x = -offset_x; + if( i == n_bitmaps-1 ) { + stop_x = slice_width + offset_x; + stop_x -= align_x; // to undo the shift of src_rect_workspace by align_x below + } + stop_x -= shift_stop_x; + if( MyDebug.LOG ) { + Log.d(TAG, " offset_x: " + offset_x); + Log.d(TAG, " dst_offset_x: " + dst_offset_x); + Log.d(TAG, " start_x: " + start_x); + Log.d(TAG, " stop_x: " + stop_x); + } + + // draw rest of this image + if( MyDebug.LOG ) + Log.d(TAG, "### time before drawing non-blended region for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + src_rect_workspace.set(offset_x + start_x, 0, offset_x + stop_x, bitmap_height); + src_rect_workspace.offset(align_x, align_y); + dst_rect_workspace.set(offset_x + dst_offset_x + start_x - crop_x0, -crop_y0, offset_x + dst_offset_x + stop_x - crop_x0, bitmap_height-crop_y0); + if( MyDebug.LOG ) { + Log.d(TAG, " src_rect_workspace: " + src_rect_workspace); + Log.d(TAG, " dst_rect_workspace: " + dst_rect_workspace); + } + canvas.drawBitmap(projected_bitmap, src_rect_workspace, dst_rect_workspace, p); + if( MyDebug.LOG ) + Log.d(TAG, "### time after drawing non-blended 
region for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + + /* + int start_x = -blend_hwidth; + int stop_x = slice_width+blend_hwidth; + if( i == 0 ) + start_x = -offset_x; + if( i == bitmaps.size()-1 ) + stop_x = slice_width+offset_x; + stop_x -= align_x; + if( MyDebug.LOG ) { + Log.d(TAG, " start_x: " + start_x); + Log.d(TAG, " stop_x: " + stop_x); + } + + p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.ADD)); + for(int x=start_x;x 0 && x < blend_hwidth ) { + // left hand blend + //blend_alpha = 127; + // if x=-blend_hwidth: frac=0 + // if x=blend_hwidth-1: frac=1 + float frac = ((float)x+blend_hwidth)/(2*blend_hwidth-1.0f); + blend_alpha = (int)(255.0f*frac); + //if( MyDebug.LOG ) + //Log.d(TAG, " left hand blend_alpha: " + blend_alpha); + } + else if( i < bitmaps.size()-1 && x > stop_x-2*blend_hwidth-1 ) { + // right hand blend + //blend_alpha = 127; + // if x=stop_x-2*blend_hwidth: frac=1 + // if x=stop_x-1: frac=0 + float frac = ((float)stop_x-1-x)/(2*blend_hwidth-1.0f); + blend_alpha = (int)(255.0f*frac); + //if( MyDebug.LOG ) + //Log.d(TAG, " right hand blend_alpha: " + blend_alpha); + } + p.setAlpha(blend_alpha); + + //canvas.drawBitmap(bitmap, src_rect_workspace, dst_rect_workspace, p); + canvas.drawBitmap(projected_bitmap, src_rect_workspace, dst_rect_workspace, p); + } + p.setAlpha(255); // reset + p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_OVER)); // reset + */ + + projected_bitmap.recycle(); + /*if( rotated_bitmap != null ) { + rotated_bitmap.recycle(); + }*/ + + /*float x0 = -slice_width/2; + float new_height0 = bitmap_height * (float)(h / (h * Math.cos(alpha) - x0 * Math.sin(alpha))); + if( MyDebug.LOG ) + Log.d(TAG, " new_height0: " + new_height0); + + float x1 = slice_width/2; + float new_height1 = bitmap_height * (float)(h / (h * Math.cos(alpha) - x1 * Math.sin(alpha))); + if( MyDebug.LOG ) + Log.d(TAG, " new_height1: " + new_height1); + + float src_x0 = 0, src_y0 = 0.0f; + float src_x1 = 0, src_y1 = 
bitmap_height; + float src_x2 = slice_width, src_y2 = 0.0f; + float src_x3 = slice_width, src_y3 = bitmap_height; + + float dst_x0 = src_x0, dst_y0 = (bitmap_height - new_height0)/2.0f; + float dst_x1 = src_x1, dst_y1 = (bitmap_height + new_height0)/2.0f; + float dst_x2 = src_x2, dst_y2 = (bitmap_height - new_height1)/2.0f; + float dst_x3 = src_x3, dst_y3 = (bitmap_height + new_height1)/2.0f; + + float [] src_points = new float[]{src_x0, src_y0, src_x1, src_y1, src_x2, src_y2, src_x3, src_y3}; + float [] dst_points = new float[]{dst_x0, dst_y0, dst_x1, dst_y1, dst_x2, dst_y2, dst_x3, dst_y3}; + if( MyDebug.LOG ) { + Log.d(TAG, " src top-left: " + src_x0 + " , " + src_y0); + Log.d(TAG, " src bottom-left: " + src_x1 + " , " + src_y1); + Log.d(TAG, " src top-right: " + src_x2 + " , " + src_y2); + Log.d(TAG, " src bottom-right: " + src_x3 + " , " + src_y3); + Log.d(TAG, " dst top-left: " + dst_x0 + " , " + dst_y0); + Log.d(TAG, " dst bottom-left: " + dst_x1 + " , " + dst_y1); + Log.d(TAG, " dst top-right: " + dst_x2 + " , " + dst_y2); + Log.d(TAG, " dst bottom-right: " + dst_x3 + " , " + dst_y3); + } + + Matrix matrix = new Matrix(); + if( !matrix.setPolyToPoly(src_points, 0, dst_points, 0, 4) ) { + Log.e(TAG, "failed to create matrix"); + throw new RuntimeException(); + } + if( MyDebug.LOG ) + Log.d(TAG, "matrix: " + matrix); + + matrix.postTranslate(i*slice_width, 0.0f); + + Bitmap bitmap_slice = Bitmap.createBitmap(bitmap, (bitmap_width - slice_width)/2, 0, slice_width, bitmap_height); + canvas.drawBitmap(bitmap_slice, matrix, null); + bitmap_slice.recycle(); + */ + } + + /** + * @return Returns the ratio between maximum and minimum computed brightnesses. 
+ */ + private float adjustExposuresLocal(List bitmaps, int bitmap_width, int bitmap_height, int slice_width, long time_s) { + final int exposure_hwidth = bitmap_width/10; + final int offset_x = (bitmap_width - slice_width)/2; + + List relative_brightness = new ArrayList<>(); + float current_relative_brightness = 1.0f; + relative_brightness.add(current_relative_brightness); + float min_relative_brightness = current_relative_brightness; + float max_relative_brightness = current_relative_brightness; + + if( MyDebug.LOG ) + Log.d(TAG, "### time before computing brightnesses: " + (System.currentTimeMillis() - time_s)); + + for(int i=0;i histogramInfos = new ArrayList<>(); + float mean_median_brightness = 0.0f; // mean of the global median brightnesse + float mean_equalised_brightness = 0.0f; // mean of the brightnesses if all adjusted to match exposure of the first image + for(int i=0;i 2.0f ) { + throw new RuntimeException(""); + }*/ + + return ratio_brightnesses; + } + + /*private void adjustExposures(List bitmaps, long time_s) { + List histogramInfos = new ArrayList<>(); + + float mean_median_brightness = 0.0f; + List median_brightnesses = new ArrayList<>(); + for(int i=0;i 0 ) { + // local_mean_brightness += median_brightnesses.get(i-1); + // count++; + //} + //if( i < bitmaps.size()-1 ) { + // local_mean_brightness += median_brightnesses.get(i+1); + // count++; + //} + //local_mean_brightness /= count; + //if( MyDebug.LOG ) + // Log.d(TAG, " local_mean_brightness: " + local_mean_brightness); + //final int brightness_target = (int)(local_mean_brightness + 0.1f); + + min_preferred_scale = Math.min(min_preferred_scale, brightness_target/(float)histogramInfo.median_brightness); + max_preferred_scale = Math.max(max_preferred_scale, brightness_target/(float)histogramInfo.median_brightness); + int min_brightness = (int)(histogramInfo.median_brightness*2.0f/3.0f+0.5f); + //int min_brightness = (int)(histogramInfo.median_brightness*1.0f+0.5f); + int max_brightness = 
(int)(histogramInfo.median_brightness*1.5f+0.5f); + int this_brightness_target = brightness_target; + this_brightness_target = Math.max(this_brightness_target, min_brightness); + this_brightness_target = Math.min(this_brightness_target, max_brightness); + if( MyDebug.LOG ) { + Log.d(TAG, " brightness_target: " + brightness_target); + Log.d(TAG, " preferred brightness scale: " + brightness_target / (float) histogramInfo.median_brightness); + Log.d(TAG, " this_brightness_target: " + this_brightness_target); + Log.d(TAG, " actual brightness scale: " + this_brightness_target / (float) histogramInfo.median_brightness); + } + + hdrProcessor.brightenImage(bitmap, histogramInfo.median_brightness, histogramInfo.max_brightness, this_brightness_target); + } + if( MyDebug.LOG ) { + Log.d(TAG, "min_preferred_scale: " + min_preferred_scale); + Log.d(TAG, "max_preferred_scale: " + max_preferred_scale); + Log.d(TAG, "### time after adjusting brightnesses: " + (System.currentTimeMillis() - time_s)); + } + }*/ + + private void computePanoramaTransforms(List cumulative_transforms, List align_x_values, List dst_offset_x_values, + List bitmaps, final int bitmap_width, final int bitmap_height, + final int offset_x, final int slice_width, final int align_hwidth, + long time_s) throws PanoramaProcessorException { + Matrix cumulative_transform = new Matrix(); + int align_x = 0, align_y = 0; + int dst_offset_x = 0; + //List align_y_values = new ArrayList<>(); + + final boolean use_auto_align = true; + //final boolean use_auto_align = false; + + for(int i=0;i 0 ) { + // autoalignment + List alignment_bitmaps = new ArrayList<>(); + //alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i-1), offset_x+slice_width-align_hwidth, 0, 2*align_hwidth, bitmap_height) ); + //alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i), offset_x-align_hwidth, 0, 2*align_hwidth, bitmap_height) ); + // tall: + if( MyDebug.LOG ) { + Log.d(TAG, " align_x: " + align_x); + Log.d(TAG, " offset_x: " + 
offset_x); + Log.d(TAG, " slice_width: " + slice_width); + Log.d(TAG, " align_x+offset_x+slice_width-align_hwidth: " + (align_x + offset_x + slice_width - align_hwidth)); + Log.d(TAG, " bitmap(i-1) width: " + bitmaps.get(i - 1).getWidth()); + } + + //final boolean use_align_by_feature = false; + final boolean use_align_by_feature = true; + float align_downsample = 1.0f; + if( use_align_by_feature ) { + // scale height to 520 + // although in theory the alignment algorithm should work on any size, it is best to standardise, as most testing + // was done where input images had height 2080 or 2048, and the alignment images were downscaled by a factor of 4 + align_downsample = bitmap_height/520.0f; + if( MyDebug.LOG ) { + Log.d(TAG, "downscale by: " + align_downsample); + Log.d(TAG, "### time before downscaling creating alignment bitmaps for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + } + // snap to power of 2 + for(int k=0,power=1;k<=4;k++,power*=2) { + double ratio = power/align_downsample; + if( ratio >= 0.95f && ratio <= 1.05f ) { + align_downsample = power; + if( MyDebug.LOG ) + Log.d(TAG, "snapped downscale to: " + align_downsample); + break; + } + } + } + + int align_bitmap_height = (3*bitmap_height)/4; + if( MyDebug.LOG ) + Log.d(TAG, "### time before creating alignment bitmaps for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + // n.b., we add in reverse order, so we find the transformation to map the next image (i) onto the previous image (i-1) + //alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i), align_x+offset_x-align_hwidth, (bitmap_height-align_bitmap_height)/2, 2*align_hwidth, align_bitmap_height) ); + //alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i-1), align_x+offset_x+slice_width-align_hwidth, (bitmap_height-align_bitmap_height)/2, 2*align_hwidth, align_bitmap_height) ); + Matrix align_scale_matrix = new Matrix(); + align_scale_matrix.postScale(1.0f/align_downsample, 1.0f/align_downsample); + 
alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i), align_x+offset_x-align_hwidth, (bitmap_height-align_bitmap_height)/2, 2*align_hwidth, align_bitmap_height, align_scale_matrix, true) ); + alignment_bitmaps.add( Bitmap.createBitmap(bitmaps.get(i-1), align_x+offset_x+slice_width-align_hwidth, (bitmap_height-align_bitmap_height)/2, 2*align_hwidth, align_bitmap_height, align_scale_matrix, true) ); + if( MyDebug.LOG ) + Log.d(TAG, "### time after creating alignment bitmaps for " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + + /*if( use_align_by_feature ) { + Matrix align_scale_matrix = new Matrix(); + align_scale_matrix.postScale(1.0f/align_downsample, 1.0f/align_downsample); + for(int j=0;j bitmaps, List cumulative_transforms, + int panorama_width, int slice_width, int bitmap_width, int bitmap_height) { + float [] values = new float[9]; + + float min_rotation = 1000, max_rotation = - 1000; + float sum_rotation = 0.0f; + for(int i=0;i bitmaps, int bitmap_width, int bitmap_height, + List cumulative_transforms, List align_x_values, List dst_offset_x_values, + final int blend_hwidth, final int slice_width, final int offset_x, + final Bitmap panorama, final int crop_x0, final int crop_y0, + final double camera_angle, long time_s) { + + Rect src_rect = new Rect(); + Rect dst_rect = new Rect(); + //Paint p = new Paint(); + Paint p = new Paint(Paint.FILTER_BITMAP_FLAG); + Canvas canvas = new Canvas(panorama); + + for(int i=0;i 0 ) { + bake_trans_x = - trans_x; + //if( true ) + // throw new RuntimeException(); // test + } + + cumulative_transforms.get(i).postTranslate(bake_trans_x, 0.0f); + //if( MyDebug.LOG ) + //Log.d(TAG, "centre_shift_x: " + centre_shift_x); + //if( MyDebug.LOG ) + //Log.d(TAG, " align_x: " + align_x); + centre_shift_x += bake_trans_x; + //if( MyDebug.LOG ) + //Log.d(TAG, "new centre_shift_x: " + centre_shift_x); + align_x += bake_trans_x; + } + + { + Bitmap rotated_bitmap = Bitmap.createBitmap(bitmap_width, bitmap_height, 
Bitmap.Config.ARGB_8888); + Canvas rotated_canvas = new Canvas(rotated_bitmap); + rotated_canvas.save(); + + rotated_canvas.setMatrix(cumulative_transforms.get(i)); + + rotated_canvas.drawBitmap(bitmap, 0, 0, p); + rotated_canvas.restore(); + + bitmap = rotated_bitmap; + /*if( MyDebug.LOG ) { + saveBitmap(bitmap, "transformed_bitmap_" + i + ".jpg"); + }*/ + free_bitmap = true; + } + } + + renderPanoramaImage(i, bitmaps.size(), src_rect, dst_rect, + bitmap, p, bitmap_width, bitmap_height, + blend_hwidth, slice_width, offset_x, + panorama, canvas, crop_x0, crop_y0, + align_x, align_y, dst_offset_x, shift_stop_x, centre_shift_x, + camera_angle, time_s); + + if( free_bitmap ) { + bitmap.recycle(); + } + + if( MyDebug.LOG ) + Log.d(TAG, "### time after rendering " + i + "th bitmap: " + (System.currentTimeMillis() - time_s)); + } + } + + public Bitmap panorama(List bitmaps, float panorama_pics_per_screen, float camera_angle_y, final boolean crop) throws PanoramaProcessorException { + if( MyDebug.LOG ) { + Log.d(TAG, "panorama"); + Log.d(TAG, "camera_angle_y: " + camera_angle_y); + } + + long time_s = 0; + if( MyDebug.LOG ) + time_s = System.currentTimeMillis(); + + int bitmap_width = bitmaps.get(0).getWidth(); + int bitmap_height = bitmaps.get(0).getHeight(); + if( MyDebug.LOG ) { + Log.d(TAG, "bitmap_width: " + bitmap_width); + Log.d(TAG, "bitmap_height: " + bitmap_height); + } + + for(int i=1;i cumulative_transforms = new ArrayList<>(); // i-th entry is the transform to apply to the i-th bitmap so that it's aligned to the same space as the 1st bitmap + + List align_x_values = new ArrayList<>(); + List dst_offset_x_values = new ArrayList<>(); + + computePanoramaTransforms(cumulative_transforms, align_x_values, dst_offset_x_values, bitmaps, + bitmap_width, bitmap_height, offset_x, slice_width, align_hwidth, time_s); + + // note that we crop the panorama_width later on, but for now we still need an estimate, before finalising + // the transforms + int panorama_width = 
(bitmaps.size()*slice_width+2*offset_x); + if( MyDebug.LOG ) { + Log.d(TAG, "original panorama_width: " + panorama_width); + } + + adjustPanoramaTransforms(bitmaps, cumulative_transforms, panorama_width, slice_width, bitmap_width, bitmap_height); + if( MyDebug.LOG ) + Log.d(TAG, "### time after adjusting transforms: " + (System.currentTimeMillis() - time_s)); + + //adjustExposures(bitmaps, time_s); + float ratio_brightnesses = adjustExposuresLocal(bitmaps, bitmap_width, bitmap_height, slice_width, time_s); + + int panorama_height = bitmap_height; + int crop_x0 = 0; + int crop_y0 = 0; + + if( crop ) { + // compute crop regions + int crop_x1 = bitmap_width-1; + int crop_y1 = bitmap_height-1; + for(int i=0;i 0 ) { + // need to shift transforms over + for(int i=0;i= 3.0f ) { + if( MyDebug.LOG ) + Log.d(TAG, "apply contrast enhancement, ratio_brightnesses: " + ratio_brightnesses); + + /*if( true ) + throw new RuntimeException("ratio_brightnesses: " + ratio_brightnesses);*/ + + hdrProcessor.adjustHistogram(panorama, panorama, panorama.getWidth(), panorama.getHeight(), 0.25f, 1, true, time_s); + if( MyDebug.LOG ) + Log.d(TAG, "### time after adjustHistogram: " + (System.currentTimeMillis() - time_s)); + } + + if( MyDebug.LOG ) + Log.d(TAG, "panorama complete!"); + + if( MyDebug.LOG ) + Log.d(TAG, "### time taken for panorama: " + (System.currentTimeMillis() - time_s)); + + return panorama; + } + +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessorException.java b/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessorException.java new file mode 100644 index 0000000..b078769 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PanoramaProcessorException.java @@ -0,0 +1,20 @@ +package net.sourceforge.opencamera; + +/** Exception for PanoramaProcessor class. 
+ */ +@SuppressWarnings("WeakerAccess") +public class PanoramaProcessorException extends Exception { + final static public int INVALID_N_IMAGES = 0; // the supplied number of images is not supported + final static public int UNEQUAL_SIZES = 1; // images not of the same resolution + final static public int FAILED_TO_CROP = 2; // failed to crop (must be distinct from UNEQUAL_SIZES so callers can distinguish codes) + + final private int code; + + PanoramaProcessorException(int code) { + this.code = code; + } + + public int getCode() { + return code; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PermissionHandler.java b/app/src/main/java/net/sourceforge/opencamera/PermissionHandler.java new file mode 100644 index 0000000..1632f36 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PermissionHandler.java @@ -0,0 +1,354 @@ +package net.sourceforge.opencamera; + +import android.Manifest; +import android.app.AlertDialog; +import android.content.DialogInterface; +import android.content.SharedPreferences; +import android.content.pm.PackageManager; +import android.os.Build; +import android.preference.PreferenceManager; +import androidx.annotation.NonNull; +import androidx.core.app.ActivityCompat; +import android.util.Log; + +/** Android 6+ permission handling: + */ +public class PermissionHandler { + private static final String TAG = "PermissionHandler"; + + private final MainActivity main_activity; + + final private static int MY_PERMISSIONS_REQUEST_CAMERA = 0; + final private static int MY_PERMISSIONS_REQUEST_STORAGE = 1; + final private static int MY_PERMISSIONS_REQUEST_RECORD_AUDIO = 2; + final private static int MY_PERMISSIONS_REQUEST_LOCATION = 3; + + private boolean camera_denied; // whether the user requested to deny a camera permission + private long camera_denied_time_ms; // if denied, the time when this occurred + private boolean storage_denied; // whether the user requested to deny a storage permission + private long storage_denied_time_ms; // if denied, the time when this occurred + private boolean
audio_denied; // whether the user requested to deny a record-audio permission + private long audio_denied_time_ms; // if denied, the time when this occurred + private boolean location_denied; // whether the user requested to deny a location permission + private long location_denied_time_ms; // if denied, the time when this occurred + // In some cases there can be a problem if the user denies a permission, we then get an onResume() + // (since application goes into background when showing system UI to request permission) at which + // point we try to request permission again! This would happen for camera and storage permissions. + // Whilst that isn't necessarily wrong, there would also be a problem if the user says + // "Don't ask again", we get stuck in a loop repeatedly asking the OS for permission (and it + // repeatedly being automatically denied) causing the UI to become sluggish. + // So instead we only try asking again if not within deny_delay_ms of the user denying that + // permission. + // Time shouldn't be too long, as the user might restart and then not be asked again for camera + // or storage permission. + final private static long deny_delay_ms = 1000; + + PermissionHandler(MainActivity main_activity) { + this.main_activity = main_activity; + } + + /** Show a "rationale" to the user for needing a particular permission, then request that permission again + * once they close the dialog.
+ */ + private void showRequestPermissionRationale(final int permission_code) { + if( MyDebug.LOG ) + Log.d(TAG, "showRequestPermissionRational: " + permission_code); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + + boolean ok = true; + String [] permissions = null; + int message_id = 0; + switch (permission_code) { + case MY_PERMISSIONS_REQUEST_CAMERA: + if (MyDebug.LOG) + Log.d(TAG, "display rationale for camera permission"); + permissions = new String[]{Manifest.permission.CAMERA}; + message_id = R.string.permission_rationale_camera; + break; + case MY_PERMISSIONS_REQUEST_STORAGE: + if (MyDebug.LOG) + Log.d(TAG, "display rationale for storage permission"); + permissions = new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}; + message_id = R.string.permission_rationale_storage; + break; + case MY_PERMISSIONS_REQUEST_RECORD_AUDIO: + if (MyDebug.LOG) + Log.d(TAG, "display rationale for record audio permission"); + permissions = new String[]{Manifest.permission.RECORD_AUDIO}; + message_id = R.string.permission_rationale_record_audio; + break; + case MY_PERMISSIONS_REQUEST_LOCATION: + if (MyDebug.LOG) + Log.d(TAG, "display rationale for location permission"); + permissions = new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION}; + message_id = R.string.permission_rationale_location; + break; + default: + if (MyDebug.LOG) + Log.e(TAG, "showRequestPermissionRational unknown permission_code: " + permission_code); + ok = false; + break; + } + + if( ok ) { + final String [] permissions_f = permissions; + new AlertDialog.Builder(main_activity) + .setTitle(R.string.permission_rationale_title) + .setMessage(message_id) + .setIcon(android.R.drawable.ic_dialog_alert) + .setPositiveButton(android.R.string.ok, null) + .setOnDismissListener(new DialogInterface.OnDismissListener() { + public void 
onDismiss(DialogInterface dialog) { + if( MyDebug.LOG ) + Log.d(TAG, "requesting permission..."); + ActivityCompat.requestPermissions(main_activity, permissions_f, permission_code); + } + }).show(); + } + } + + void requestCameraPermission() { + if( MyDebug.LOG ) + Log.d(TAG, "requestCameraPermission"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + else if( camera_denied && System.currentTimeMillis() < camera_denied_time_ms + deny_delay_ms ) { + if( MyDebug.LOG ) + Log.d(TAG, "too soon since user last denied permission"); + return; + } + + if( ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.CAMERA) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. + showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_CAMERA); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting camera permission..."); + ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA); + } + } + + void requestStoragePermission() { + if( MyDebug.LOG ) + Log.d(TAG, "requestStoragePermission"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + else if( MainActivity.useScopedStorage() ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for scoped storage!"); + return; + } + else if( storage_denied && System.currentTimeMillis() < storage_denied_time_ms + deny_delay_ms ) { + if( MyDebug.LOG ) + Log.d(TAG, "too soon since user last denied permission"); + return; + } + + if( ActivityCompat.shouldShowRequestPermissionRationale(main_activity, 
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. + showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_STORAGE); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting storage permission..."); + ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, MY_PERMISSIONS_REQUEST_STORAGE); + } + } + + void requestRecordAudioPermission() { + if( MyDebug.LOG ) + Log.d(TAG, "requestRecordAudioPermission"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + else if( audio_denied && System.currentTimeMillis() < audio_denied_time_ms + deny_delay_ms ) { + if( MyDebug.LOG ) + Log.d(TAG, "too soon since user last denied permission"); + return; + } + + if( ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.RECORD_AUDIO) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. 
+ showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_RECORD_AUDIO); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting record audio permission..."); + ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.RECORD_AUDIO}, MY_PERMISSIONS_REQUEST_RECORD_AUDIO); + } + } + + void requestLocationPermission() { + if( MyDebug.LOG ) + Log.d(TAG, "requestLocationPermission"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + else if( location_denied && System.currentTimeMillis() < location_denied_time_ms + deny_delay_ms ) { + if( MyDebug.LOG ) + Log.d(TAG, "too soon since user last denied permission"); + return; + } + + if( ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.ACCESS_FINE_LOCATION) || + ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.ACCESS_COARSE_LOCATION) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. 
+ showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_LOCATION); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting location permissions..."); + ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION}, MY_PERMISSIONS_REQUEST_LOCATION); + } + } + + public void onRequestPermissionsResult(int requestCode, @NonNull int[] grantResults) { + if( MyDebug.LOG ) + Log.d(TAG, "onRequestPermissionsResult: requestCode " + requestCode); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + + switch( requestCode ) { + case MY_PERMISSIONS_REQUEST_CAMERA: + { + // If request is cancelled, the result arrays are empty. + if( grantResults.length > 0 + && grantResults[0] == PackageManager.PERMISSION_GRANTED ) { + // permission was granted, yay! Do the + // contacts-related task you need to do. + if( MyDebug.LOG ) + Log.d(TAG, "camera permission granted"); + main_activity.getPreview().retryOpenCamera(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "camera permission denied"); + camera_denied = true; + camera_denied_time_ms = System.currentTimeMillis(); + // permission denied, boo! Disable the + // functionality that depends on this permission. + // Open Camera doesn't need to do anything: the camera will remain closed + } + return; + } + case MY_PERMISSIONS_REQUEST_STORAGE: + { + // If request is cancelled, the result arrays are empty. + if( grantResults.length > 0 + && grantResults[0] == PackageManager.PERMISSION_GRANTED ) { + // permission was granted, yay! Do the + // contacts-related task you need to do. 
+ if( MyDebug.LOG ) + Log.d(TAG, "storage permission granted"); + main_activity.getPreview().retryOpenCamera(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "storage permission denied"); + storage_denied = true; + storage_denied_time_ms = System.currentTimeMillis(); + // permission denied, boo! Disable the + // functionality that depends on this permission. + // Open Camera doesn't need to do anything: the camera will remain closed + } + return; + } + case MY_PERMISSIONS_REQUEST_RECORD_AUDIO: + { + // If request is cancelled, the result arrays are empty. + if( grantResults.length > 0 + && grantResults[0] == PackageManager.PERMISSION_GRANTED ) { + // permission was granted, yay! Do the + // contacts-related task you need to do. + if( MyDebug.LOG ) + Log.d(TAG, "record audio permission granted"); + // no need to do anything + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "record audio permission denied"); + audio_denied = true; + audio_denied_time_ms = System.currentTimeMillis(); + // permission denied, boo! Disable the + // functionality that depends on this permission. + // no need to do anything + // note that we don't turn off record audio option, as user may then record video not realising audio won't be recorded - best to be explicit each time + } + return; + } + case MY_PERMISSIONS_REQUEST_LOCATION: + { + // If request is cancelled, the result arrays are empty. + if( grantResults.length == 2 && (grantResults[0] == PackageManager.PERMISSION_GRANTED || grantResults[1] == PackageManager.PERMISSION_GRANTED) ) { + // On Android 12 users can choose to only grant approximation location. This means + // one of the permissions will be denied, but as long as one location permission + // is granted, we can still go ahead and use location. + // Otherwise we have a problem that if user selects approximate location, we end + // up turning the location option back off. 
+ if( MyDebug.LOG ) + Log.d(TAG, "location permission granted [1]"); + main_activity.initLocation(); + } + else if( grantResults.length > 0 + && grantResults[0] == PackageManager.PERMISSION_GRANTED ) { + // in theory this code path is now redundant, but keep here just in case + if( MyDebug.LOG ) + Log.d(TAG, "location permission granted [2]"); + main_activity.initLocation(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "location permission denied"); + location_denied = true; + location_denied_time_ms = System.currentTimeMillis(); + // permission denied, boo! Disable the + // functionality that depends on this permission. + // for location, seems best to turn the option back off + if( MyDebug.LOG ) + Log.d(TAG, "location permission not available, so switch location off"); + main_activity.getPreview().showToast(null, R.string.permission_location_not_available); + SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = settings.edit(); + editor.putBoolean(PreferenceKeys.LocationPreferenceKey, false); + editor.apply(); + } + return; + } + default: + { + if( MyDebug.LOG ) + Log.e(TAG, "unknown requestCode " + requestCode); + } + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceKeys.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceKeys.java new file mode 100644 index 0000000..c734b9c --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceKeys.java @@ -0,0 +1,386 @@ +package net.sourceforge.opencamera; + +/** Stores all of the string keys used for SharedPreferences. + */ +public class PreferenceKeys { + // must be static, to safely call from other Activities + + /** If this preference is set, no longer show the intro dialog. + */ + public static final String FirstTimePreferenceKey = "done_first_time"; + + /** This preference stores the version number seen by the user - used to show "What's New" dialog. 
 */
    public static final String LatestVersionPreferenceKey = "latest_version";

    /** This preference stores whether to allow showing the "What's New" dialog.
     */
    public static final String ShowWhatsNewPreferenceKey = "preference_show_whats_new";

    /** If this preference is set, no longer show the auto-stabilise info dialog.
     */
    public static final String AutoStabiliseInfoPreferenceKey = "done_auto_stabilise_info";

    /** If this preference is set, no longer show the HDR info dialog.
     */
    public static final String HDRInfoPreferenceKey = "done_hdr_info";

    /** If this preference is set, no longer show the Panorama info dialog.
     */
    public static final String PanoramaInfoPreferenceKey = "done_panorama_info";

    /** If this preference is set, no longer show the raw info dialog.
     */
    public static final String RawInfoPreferenceKey = "done_raw_info";

    /** If this preference is set, no longer show the dialog for poor magnetic accuracy
     */
    public static final String MagneticAccuracyPreferenceKey = "done_magnetic_accuracy";

    // key (and its default value) selecting which camera API implementation to use
    public static final String CameraAPIPreferenceDefault = "preference_camera_api_old";
    public static final String CameraAPIPreferenceKey = "preference_camera_api";

    /** Builds the per-camera suffix used in per-camera preference keys: "cameraId" when there is
     *  no physical camera ID, else "cameraId_cameraIdSPhysical".
     */
    private static String getCameraIDKey(int cameraId, String cameraIdSPhysical) {
        if( cameraIdSPhysical != null )
            return cameraId + "_" + cameraIdSPhysical;
        else
            return String.valueOf(cameraId);
    }

    // don't set to be specific for physical cameras, as too confusing to have lots of different flash preferences
    // also in Preview, we don't save the flash back if not supported
    public static String getFlashPreferenceKey(int cameraId) {
        return "flash_value_" + cameraId;
    }

    public static String getFocusPreferenceKey(int cameraId, boolean is_video) {
        return "focus_value_" + cameraId + "_" + is_video;
    }

    public static final String FocusAssistPreferenceKey = "preference_focus_assist";

    public static String getResolutionPreferenceKey(int cameraId, String cameraIdSPhysical) {
        return "camera_resolution_" + getCameraIDKey(cameraId, cameraIdSPhysical);
    }

    public static String getVideoQualityPreferenceKey(int cameraId, String cameraIdSPhysical, boolean high_speed) {
        return "video_quality_" + getCameraIDKey(cameraId, cameraIdSPhysical) + (high_speed ? "_highspeed" : "");
    }

    // ---- photo/camera capture settings ----

    public static final String OptimiseFocusPreferenceKey = "preference_photo_optimise_focus";

    public static final String ImageFormatPreferenceKey = "preference_image_format";

    public static final String IsVideoPreferenceKey = "is_video";

    public static final String ExposurePreferenceKey = "preference_exposure";

    public static final String ColorEffectPreferenceKey = "preference_color_effect";

    public static final String SceneModePreferenceKey = "preference_scene_mode";

    public static final String WhiteBalancePreferenceKey = "preference_white_balance";

    public static final String WhiteBalanceTemperaturePreferenceKey = "preference_white_balance_temperature";

    public static final String AntiBandingPreferenceKey = "preference_antibanding";

    public static final String EdgeModePreferenceKey = "preference_edge_mode";

    public static final String CameraNoiseReductionModePreferenceKey = "preference_noise_reduction_mode"; // n.b., this is for the Camera driver noise reduction mode, not Open Camera's NR photo mode

    public static final String ISOPreferenceKey = "preference_iso";

    public static final String ExposureTimePreferenceKey = "preference_exposure_time";

    public static final String RawPreferenceKey = "preference_raw";

    public static final String AllowRawForExpoBracketingPreferenceKey = "preference_raw_expo_bracketing";

    public static final String AllowRawForFocusBracketingPreferenceKey = "preference_raw_focus_bracketing";

    public static final String PanoramaCropPreferenceKey = "preference_panorama_crop";

    public static final String PanoramaSaveExpoPreferenceKey = "preference_panorama_save";

    public static final String ExpoBracketingNImagesPreferenceKey = "preference_expo_bracketing_n_images";

    public static final String ExpoBracketingStopsPreferenceKey = "preference_expo_bracketing_stops";

    public static final String FocusDistancePreferenceKey = "preference_focus_distance";

    public static final String FocusBracketingTargetDistancePreferenceKey = "preference_focus_bracketing_target_distance";

    public static final String FocusBracketingAutoSourceDistancePreferenceKey = "preference_focus_bracketing_auto_source_distance";

    public static final String FocusBracketingNImagesPreferenceKey = "preference_focus_bracketing_n_images";

    public static final String FocusBracketingAddInfinityPreferenceKey = "preference_focus_bracketing_add_infinity";

    public static final String VolumeKeysPreferenceKey = "preference_volume_keys";

    public static final String AudioControlPreferenceKey = "preference_audio_control";

    public static final String AudioNoiseControlSensitivityPreferenceKey = "preference_audio_noise_control_sensitivity";

    public static final String QualityPreferenceKey = "preference_quality";

    public static final String AutoStabilisePreferenceKey = "preference_auto_stabilise";

    public static final String PhotoModePreferenceKey = "preference_photo_mode";

    public static final String HDRSaveExpoPreferenceKey = "preference_hdr_save_expo";

    public static final String HDRTonemappingPreferenceKey = "preference_hdr_tonemapping";

    public static final String HDRContrastEnhancementPreferenceKey = "preference_hdr_contrast_enhancement";

    public static final String NRSaveExpoPreferenceKey = "preference_nr_save";

    public static final String FastBurstNImagesPreferenceKey = "preference_fast_burst_n_images";

    // ---- location / EXIF / stamp settings ----

    public static final String LocationPreferenceKey = "preference_location";

    public static final String RemoveDeviceExifPreferenceKey = "preference_remove_device_exif";

    public static final String GPSDirectionPreferenceKey = "preference_gps_direction";

    public static final String RequireLocationPreferenceKey = "preference_require_location";

    public static final String ExifArtistPreferenceKey = "preference_exif_artist";

    public static final String ExifCopyrightPreferenceKey = "preference_exif_copyright";

    public static final String StampPreferenceKey = "preference_stamp";

    public static final String StampDateFormatPreferenceKey = "preference_stamp_dateformat";

    public static final String StampTimeFormatPreferenceKey = "preference_stamp_timeformat";

    public static final String StampGPSFormatPreferenceKey = "preference_stamp_gpsformat";

    //public static final String StampGeoAddressPreferenceKey = "preference_stamp_geo_address";

    public static final String UnitsDistancePreferenceKey = "preference_units_distance";

    public static final String TextStampPreferenceKey = "preference_textstamp";

    public static final String StampFontSizePreferenceKey = "preference_stamp_fontsize";

    public static final String StampFontColorPreferenceKey = "preference_stamp_font_color";

    public static final String StampStyleKey = "preference_stamp_style";

    public static final String VideoSubtitlePref = "preference_video_subtitle";

    public static final String FrontCameraMirrorKey = "preference_front_camera_mirror";

    // ---- remote control settings ----

    public static final String EnableRemote = "preference_enable_remote";

    public static final String RemoteName = "preference_remote_device_name";

    public static final String RemoteType = "preference_remote_type";

    public static final String WaterType = "preference_water_type";

    //public static final String BackgroundPhotoSavingPreferenceKey = "preference_background_photo_saving";

    // ---- Camera2 API workarounds/options ----

    public static final String Camera2FakeFlashPreferenceKey = "preference_camera2_fake_flash";

    public static final String Camera2DummyCaptureHackPreferenceKey = "preference_camera2_dummy_capture_hack";

    public static final String Camera2FastBurstPreferenceKey = "preference_camera2_fast_burst";

    public static final String Camera2PhotoVideoRecordingPreferenceKey = "preference_camera2_photo_video_recording";

    // ---- GUI / on-screen display settings ----

    public static final String UIPlacementPreferenceKey = "preference_ui_placement";

    public static final String TouchCapturePreferenceKey = "preference_touch_capture";

    public static final String PausePreviewPreferenceKey = "preference_pause_preview";

    public static final String ShowToastsPreferenceKey = "preference_show_toasts";

    public static final String ThumbnailAnimationPreferenceKey = "preference_thumbnail_animation";

    public static final String TakePhotoBorderPreferenceKey = "preference_take_photo_border";

    public static final String DimWhenDisconnectedPreferenceKey = "preference_remote_disconnect_screen_dim";

    public static final String AllowHapticFeedbackPreferenceKey = "preference_allow_haptic_feedback";

    public static final String ShowWhenLockedPreferenceKey = "preference_show_when_locked";

    public static final String AllowLongPressPreferenceKey = "preference_allow_long_press";

    public static final String StartupFocusPreferenceKey = "preference_startup_focus";

    public static final String MultiCamButtonPreferenceKey = "preference_multi_cam_button";

    public static final String KeepDisplayOnPreferenceKey = "preference_keep_display_on";

    public static final String MaxBrightnessPreferenceKey = "preference_max_brightness";

    // ---- save location settings ----

    public static final String UsingSAFPreferenceKey = "preference_using_saf";

    public static final String SaveLocationPreferenceKey = "preference_save_location";

    public static final String SaveLocationSAFPreferenceKey = "preference_save_location_saf";

    public static final String SaveLocationHistoryBasePreferenceKey = "save_location_history";

    public static final String SaveLocationHistorySAFBasePreferenceKey = "save_location_history_saf";

    public static final String SavePhotoPrefixPreferenceKey = "preference_save_photo_prefix";

    public static final String SaveVideoPrefixPreferenceKey = "preference_save_video_prefix";

    public static final String SaveZuluTimePreferenceKey = "preference_save_zulu_time";

    public static final String SaveIncludeMillisecondsPreferenceKey = "preference_save_include_milliseconds";

    // ---- on-screen icon/overlay visibility settings ----

    public static final String ShowZoomSliderControlsPreferenceKey = "preference_show_zoom_slider_controls";

    public static final String ShowTakePhotoPreferenceKey = "preference_show_take_photo";

    public static final String ShowFaceDetectionPreferenceKey = "preference_show_face_detection";

    public static final String ShowCycleFlashPreferenceKey = "preference_show_cycle_flash";

    public static final String ShowFocusPeakingPreferenceKey = "preference_show_focus_peaking";

    public static final String ShowAutoLevelPreferenceKey = "preference_show_auto_level";

    public static final String ShowStampPreferenceKey = "preference_show_stamp";

    public static final String ShowTextStampPreferenceKey = "preference_show_textstamp";

    public static final String ShowStoreLocationPreferenceKey = "preference_show_store_location";

    public static final String ShowCycleRawPreferenceKey = "preference_show_cycle_raw";

    public static final String ShowWhiteBalanceLockPreferenceKey = "preference_show_white_balance_lock";

    public static final String ShowExposureLockPreferenceKey = "preference_show_exposure_lock";

    public static final String ShowZoomPreferenceKey = "preference_show_zoom";

    public static final String ShowISOPreferenceKey = "preference_show_iso";

    public static final String HistogramPreferenceKey = "preference_histogram";

    public static final String ZebraStripesPreferenceKey = "preference_zebra_stripes";

    public static final String ZebraStripesForegroundColorPreferenceKey = "preference_zebra_stripes_foreground_color";

    public static final String ZebraStripesBackgroundColorPreferenceKey = "preference_zebra_stripes_background_color";

    public static final String FocusPeakingPreferenceKey = "preference_focus_peaking";

    public static final String FocusPeakingColorPreferenceKey = "preference_focus_peaking_color";

    public static final String PreShotsPreferenceKey = "preference_save_preshots";

    public static final String ShowVideoMaxAmpPreferenceKey = "preference_show_video_max_amp";

    public static final String ShowAnglePreferenceKey = "preference_show_angle";

    public static final String ShowAngleLinePreferenceKey = "preference_show_angle_line";

    public static final String ShowPitchLinesPreferenceKey = "preference_show_pitch_lines";

    public static final String ShowGeoDirectionLinesPreferenceKey = "preference_show_geo_direction_lines";

    public static final String ShowAngleHighlightColorPreferenceKey = "preference_angle_highlight_color";

    public static final String CalibratedLevelAnglePreferenceKey = "preference_calibrate_level_angle";

    public static final String ShowGeoDirectionPreferenceKey = "preference_show_geo_direction";

    public static final String ShowFreeMemoryPreferenceKey = "preference_free_memory";

    public static final String ShowTimePreferenceKey = "preference_show_time";

    public static final String ShowCameraIDPreferenceKey = "preference_show_camera_id";

    public static final String ShowBatteryPreferenceKey = "preference_show_battery";

    public static final String ShowGridPreferenceKey = "preference_grid";

    public static final String ShowCropGuidePreferenceKey = "preference_crop_guide";

    public static final String FaceDetectionPreferenceKey = "preference_face_detection";

    public static final String GhostImagePreferenceKey = "preference_ghost_image";

    public static final String GhostSelectedImageSAFPreferenceKey = "preference_ghost_selected_image_saf";

    public static final String GhostImageAlphaPreferenceKey = "ghost_image_alpha";

    // ---- video settings ----

    public static final String VideoStabilizationPreferenceKey = "preference_video_stabilization";

    public static final String ForceVideo4KPreferenceKey = "preference_force_video_4k";

    public static final String VideoFormatPreferenceKey = "preference_video_output_format";

    public static final String VideoBitratePreferenceKey = "preference_video_bitrate";

    public static String getVideoFPSPreferenceKey(int cameraId, String cameraIdSPhysical) {
        // for cameraId==0 and cameraIdSPhysical==null, we return preference_video_fps instead of preference_video_fps_0, for
        // backwards compatibility for people upgrading
        return "preference_video_fps" + ((cameraId==0 && cameraIdSPhysical==null) ? "" : ("_"+getCameraIDKey(cameraId, cameraIdSPhysical)));
    }

    public static String getVideoCaptureRatePreferenceKey(int cameraId, String cameraIdSPhysical) {
        return "preference_capture_rate_" + getCameraIDKey(cameraId, cameraIdSPhysical);
    }

    public static final String VideoLogPreferenceKey = "preference_video_log";

    public static final String VideoProfileGammaPreferenceKey = "preference_video_profile_gamma";

    public static final String VideoMaxDurationPreferenceKey = "preference_video_max_duration";

    public static final String VideoRestartPreferenceKey = "preference_video_restart";

    public static final String VideoMaxFileSizePreferenceKey = "preference_video_max_filesize";

    public static final String VideoRestartMaxFileSizePreferenceKey = "preference_video_restart_max_filesize";

    public static final String VideoFlashPreferenceKey = "preference_video_flash";

    public static final String VideoLowPowerCheckPreferenceKey = "preference_video_low_power_check";

    public static final String LockVideoPreferenceKey = "preference_lock_video";

    public static final String RecordAudioPreferenceKey = "preference_record_audio";

    public static final String RecordAudioChannelsPreferenceKey = "preference_record_audio_channels";

    public static final String
RecordAudioSourcePreferenceKey = "preference_record_audio_src"; + + public static final String PreviewSizePreferenceKey = "preference_preview_size"; + + public static final String RotatePreviewPreferenceKey = "preference_rotate_preview"; + + public static final String LockOrientationPreferenceKey = "preference_lock_orientation"; + + public static final String TimerPreferenceKey = "preference_timer"; + + public static final String TimerBeepPreferenceKey = "preference_timer_beep"; + + public static final String TimerSpeakPreferenceKey = "preference_timer_speak"; + + // note for historical reasons the preference refers to burst; the feature was renamed to + // "repeat" in v1.43, but we still need to use the old string to avoid changing user settings + // when people upgrade + public static final String RepeatModePreferenceKey = "preference_burst_mode"; + + // see note about "repeat" vs "burst" under RepeatModePreferenceKey + public static final String RepeatIntervalPreferenceKey = "preference_burst_interval"; + + public static final String ShutterSoundPreferenceKey = "preference_shutter_sound"; + + public static final String ImmersiveModePreferenceKey = "preference_immersive_mode"; + public static final String AddYPRToComments="preference_comment_ypr"; +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceSubCameraControlsMore.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubCameraControlsMore.java new file mode 100644 index 0000000..607074e --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubCameraControlsMore.java @@ -0,0 +1,244 @@ +package net.sourceforge.opencamera; + +import android.app.AlertDialog; +import android.content.DialogInterface; +import android.content.SharedPreferences; +import android.os.Bundle; +import android.preference.ListPreference; +import android.preference.Preference; +import android.preference.PreferenceGroup; +import android.preference.PreferenceManager; +import android.util.Log; 
+import android.widget.Toast; + +import net.sourceforge.opencamera.ui.FolderChooserDialog; + +import java.io.File; + +public class PreferenceSubCameraControlsMore extends PreferenceSubScreen { + private static final String TAG = "PfSubCameraControlsMore"; + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + addPreferencesFromResource(R.xml.preferences_sub_camera_controls_more); + + final Bundle bundle = getArguments(); + /*final int cameraId = bundle.getInt("cameraId"); + if( MyDebug.LOG ) + Log.d(TAG, "cameraId: " + cameraId); + final int nCameras = bundle.getInt("nCameras"); + if( MyDebug.LOG ) + Log.d(TAG, "nCameras: " + nCameras);*/ + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + + final boolean can_disable_shutter_sound = bundle.getBoolean("can_disable_shutter_sound"); + if( MyDebug.LOG ) + Log.d(TAG, "can_disable_shutter_sound: " + can_disable_shutter_sound); + if( !can_disable_shutter_sound ) { + // Camera.enableShutterSound requires JELLY_BEAN_MR1 or greater + Preference pref = findPreference("preference_shutter_sound"); + //PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_screen_camera_controls_more"); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root"); + pg.removePreference(pref); + } + + /*{ + EditTextPreference edit = (EditTextPreference)findPreference("preference_save_location"); + InputFilter filter = new InputFilter() { + // whilst Android seems to allow any characters on internal memory, SD cards are typically formatted with FAT32 + String disallowed = "|\\?*<\":>"; + public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) { + for(int i=start;i 0 ) { + String [] antibanding_entries = bundle.getStringArray("antibanding_entries"); + if( antibanding_entries != null && 
antibanding_entries.length == antibanding_values.length ) { // should always be true here, but just in case + MyPreferenceFragment.readFromBundle(this, antibanding_values, antibanding_entries, PreferenceKeys.AntiBandingPreferenceKey, CameraController.ANTIBANDING_DEFAULT, "preferences_root"); + has_antibanding = true; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "has_antibanding?: " + has_antibanding); + if( !has_antibanding && ( camera_open || sharedPreferences.getString(PreferenceKeys.AntiBandingPreferenceKey, CameraController.ANTIBANDING_DEFAULT).equals(CameraController.ANTIBANDING_DEFAULT) ) ) { + // if camera not open, we'll think this setting isn't supported - but should only remove + // this preference if it's set to the default (otherwise if user sets to a non-default + // value that causes camera to not open, user won't be able to put it back to the + // default!) + Preference pref = findPreference(PreferenceKeys.AntiBandingPreferenceKey); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root"); + pg.removePreference(pref); + } + + boolean has_edge_mode = false; + String [] edge_mode_values = bundle.getStringArray("edge_modes"); + if( edge_mode_values != null && edge_mode_values.length > 0 ) { + String [] edge_mode_entries = bundle.getStringArray("edge_modes_entries"); + if( edge_mode_entries != null && edge_mode_entries.length == edge_mode_values.length ) { // should always be true here, but just in case + MyPreferenceFragment.readFromBundle(this, edge_mode_values, edge_mode_entries, PreferenceKeys.EdgeModePreferenceKey, CameraController.EDGE_MODE_DEFAULT, "preferences_root"); + has_edge_mode = true; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "has_edge_mode?: " + has_edge_mode); + if( !has_edge_mode && ( camera_open || sharedPreferences.getString(PreferenceKeys.EdgeModePreferenceKey, CameraController.EDGE_MODE_DEFAULT).equals(CameraController.EDGE_MODE_DEFAULT) ) ) { + // if camera not open, we'll think this setting isn't supported - but 
should only remove + // this preference if it's set to the default (otherwise if user sets to a non-default + // value that causes camera to not open, user won't be able to put it back to the + // default!) + Preference pref = findPreference(PreferenceKeys.EdgeModePreferenceKey); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root"); + pg.removePreference(pref); + } + + boolean has_noise_reduction_mode = false; + String [] noise_reduction_mode_values = bundle.getStringArray("noise_reduction_modes"); + if( noise_reduction_mode_values != null && noise_reduction_mode_values.length > 0 ) { + String [] noise_reduction_mode_entries = bundle.getStringArray("noise_reduction_modes_entries"); + if( noise_reduction_mode_entries != null && noise_reduction_mode_entries.length == noise_reduction_mode_values.length ) { // should always be true here, but just in case + MyPreferenceFragment.readFromBundle(this, noise_reduction_mode_values, noise_reduction_mode_entries, PreferenceKeys.CameraNoiseReductionModePreferenceKey, CameraController.NOISE_REDUCTION_MODE_DEFAULT, "preferences_root"); + has_noise_reduction_mode = true; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "has_noise_reduction_mode?: " + has_noise_reduction_mode); + if( !has_noise_reduction_mode && ( camera_open || sharedPreferences.getString(PreferenceKeys.CameraNoiseReductionModePreferenceKey, CameraController.NOISE_REDUCTION_MODE_DEFAULT).equals(CameraController.NOISE_REDUCTION_MODE_DEFAULT) ) ) { + // if camera not open, we'll think this setting isn't supported - but should only remove + // this preference if it's set to the default (otherwise if user sets to a non-default + // value that causes camera to not open, user won't be able to put it back to the + // default!) 
+ Preference pref = findPreference(PreferenceKeys.CameraNoiseReductionModePreferenceKey); + PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root"); + pg.removePreference(pref); + } + + if( MyDebug.LOG ) + Log.d(TAG, "onCreate done"); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceSubRemoteCtrl.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubRemoteCtrl.java new file mode 100644 index 0000000..7aa257f --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubRemoteCtrl.java @@ -0,0 +1,18 @@ +package net.sourceforge.opencamera; + +import android.os.Bundle; +import android.util.Log; + +public class PreferenceSubRemoteCtrl extends PreferenceSubScreen { + private static final String TAG = "PreferenceSubRemoteCtrl"; + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + addPreferencesFromResource(R.xml.preferences_sub_remote_ctrl); + if( MyDebug.LOG ) + Log.d(TAG, "onCreate done"); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceSubScreen.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubScreen.java new file mode 100644 index 0000000..3c63a70 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubScreen.java @@ -0,0 +1,80 @@ +package net.sourceforge.opencamera; + +import android.app.AlertDialog; +import android.content.SharedPreferences; +import android.os.Bundle; +import android.preference.Preference; +import android.preference.PreferenceFragment; +import android.preference.PreferenceManager; +import android.util.Log; +import android.view.View; + +import java.util.HashSet; + +/** Must be used as the parent class for all sub-screens. 
+ */ +public class PreferenceSubScreen extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { + private static final String TAG = "PreferenceSubScreen"; + + private boolean edge_to_edge_mode = false; + + // see note for dialogs in MyPreferenceFragment + protected final HashSet dialogs = new HashSet<>(); + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + + final Bundle bundle = getArguments(); + this.edge_to_edge_mode = bundle.getBoolean("edge_to_edge_mode"); + + if( MyDebug.LOG ) + Log.d(TAG, "onCreate done"); + } + + @Override + public void onViewCreated(View view, Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + + if( edge_to_edge_mode ) { + MyPreferenceFragment.handleEdgeToEdge(view); + } + } + + @Override + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + super.onDestroy(); + + MyPreferenceFragment.dismissDialogs(getFragmentManager(), dialogs); + } + + public void onResume() { + super.onResume(); + + MyPreferenceFragment.setBackground(this); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + sharedPreferences.registerOnSharedPreferenceChangeListener(this); + } + + /* See comment for MyPreferenceFragment.onSharedPreferenceChanged(). + */ + public void onSharedPreferenceChanged(SharedPreferences prefs, String key) { + if( MyDebug.LOG ) + Log.d(TAG, "onSharedPreferenceChanged: " + key); + + if( key == null ) { + // On Android 11+, when targetting Android 11+, this method is called with key==null + // if preferences are cleared. Unclear if this happens here in practice, but return + // just in case. 
+ return; + } + + Preference pref = findPreference(key); + MyPreferenceFragment.handleOnSharedPreferenceChanged(prefs, key, pref); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceSubSettingsManager.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubSettingsManager.java new file mode 100644 index 0000000..d35ec8a --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubSettingsManager.java @@ -0,0 +1,246 @@ +package net.sourceforge.opencamera; + +import android.app.AlertDialog; +import android.content.DialogInterface; +import android.content.SharedPreferences; +import android.content.pm.PackageInfo; +import android.content.pm.PackageManager; +import android.os.Bundle; +import android.preference.Preference; +import android.preference.PreferenceManager; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.View; +import android.widget.EditText; + +import net.sourceforge.opencamera.ui.FolderChooserDialog; + +import java.io.IOException; +import java.util.Date; + +public class PreferenceSubSettingsManager extends PreferenceSubScreen { + private static final String TAG = "PrefSubSettingsManager"; + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + + addPreferencesFromResource(R.xml.preferences_sub_settings_manager); + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + + { + final Preference pref = findPreference("preference_save_settings"); + pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_save_settings") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked save settings"); + + AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity()); + 
alertDialog.setTitle(R.string.preference_save_settings_filename); + + final View dialog_view = LayoutInflater.from(getActivity()).inflate(R.layout.alertdialog_edittext, null); + final EditText editText = dialog_view.findViewById(R.id.edit_text); + + editText.setSingleLine(); + // set hint instead of content description for EditText, see https://support.google.com/accessibility/android/answer/6378120 + editText.setHint(getResources().getString(R.string.preference_save_settings_filename)); + + alertDialog.setView(dialog_view); + + final MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity(); + try { + // find a default name - although we're only interested in the name rather than full path, this still + // requires checking the folder, so that we don't reuse an existing filename + String mediaFilename = main_activity.getStorageUtils().createOutputMediaFile( + main_activity.getStorageUtils().getSettingsFolder(), + StorageUtils.MEDIA_TYPE_PREFS, "", "xml", new Date() + ).getName(); + if( MyDebug.LOG ) + Log.d(TAG, "mediaFilename: " + mediaFilename); + int index = mediaFilename.lastIndexOf('.'); + if( index != -1 ) { + // remove extension + mediaFilename = mediaFilename.substring(0, index); + } + editText.setText(mediaFilename); + editText.setSelection(mediaFilename.length()); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to obtain a filename", e); + } + + alertDialog.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialogInterface, int i) { + if( MyDebug.LOG ) + Log.d(TAG, "save settings clicked okay"); + + String filename = editText.getText().toString() + ".xml"; + main_activity.getSettingsManager().saveSettings(filename); + } + }); + alertDialog.setNegativeButton(android.R.string.cancel, null); + final AlertDialog alert = alertDialog.create(); + // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way 
instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "save settings dialog dismissed"); + dialogs.remove(alert); + } + }); + alert.show(); + dialogs.add(alert); + //MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity(); + //main_activity.getSettingsManager().saveSettings(); + } + return false; + } + }); + } + { + final Preference pref = findPreference("preference_restore_settings"); + pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_restore_settings") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked restore settings"); + + loadSettings(); + } + return false; + } + }); + } + { + final Preference pref = findPreference("preference_reset"); + pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() { + @Override + public boolean onPreferenceClick(Preference arg0) { + if( pref.getKey().equals("preference_reset") ) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked reset settings"); + AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity()); + alertDialog.setIcon(android.R.drawable.ic_dialog_alert); + alertDialog.setTitle(R.string.preference_reset); + alertDialog.setMessage(R.string.preference_reset_question); + alertDialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "user confirmed reset"); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.clear(); + editor.putBoolean(PreferenceKeys.FirstTimePreferenceKey, true); + try { + PackageInfo pInfo = 
PreferenceSubSettingsManager.this.getActivity().getPackageManager().getPackageInfo(PreferenceSubSettingsManager.this.getActivity().getPackageName(), 0); + int version_code = pInfo.versionCode; + editor.putInt(PreferenceKeys.LatestVersionPreferenceKey, version_code); + } + catch(PackageManager.NameNotFoundException e) { + MyDebug.logStackTrace(TAG, "NameNotFoundException trying to get version number", e); + } + editor.apply(); + MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity(); + main_activity.setDeviceDefaults(); + if( MyDebug.LOG ) + Log.d(TAG, "user clicked reset - need to restart"); + main_activity.restartOpenCamera(); + } + }); + alertDialog.setNegativeButton(android.R.string.no, null); + final AlertDialog alert = alertDialog.create(); + // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "reset dialog dismissed"); + dialogs.remove(alert); + } + }); + alert.show(); + dialogs.add(alert); + } + return false; + } + }); + } + + if( MyDebug.LOG ) + Log.d(TAG, "onCreate done"); + } + + private void loadSettings() { + if( MyDebug.LOG ) + Log.d(TAG, "loadSettings"); + AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity()); + alertDialog.setIcon(android.R.drawable.ic_dialog_alert); + alertDialog.setTitle(R.string.preference_restore_settings); + alertDialog.setMessage(R.string.preference_restore_settings_question); + alertDialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "user confirmed to restore settings"); + MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity(); + /*if( 
main_activity.getStorageUtils().isUsingSAF() ) { + main_activity.openLoadSettingsChooserDialogSAF(true); + } + else*/ { + FolderChooserDialog fragment = new PreferenceSubSettingsManager.LoadSettingsFileChooserDialog(); + fragment.setShowDCIMShortcut(false); + fragment.setShowNewFolderButton(false); + fragment.setModeFolder(false); + fragment.setExtension(".xml"); + fragment.setStartFolder(main_activity.getStorageUtils().getSettingsFolder()); + if( MainActivity.useScopedStorage() ) { + // since we use File API to load, don't allow going outside of the application's folder, as we won't be able to read those files! + fragment.setMaxParent(main_activity.getExternalFilesDir(null)); + } + fragment.show(getFragmentManager(), "FOLDER_FRAGMENT"); + } + } + }); + alertDialog.setNegativeButton(android.R.string.no, null); + final AlertDialog alert = alertDialog.create(); + // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "reset dialog dismissed"); + dialogs.remove(alert); + } + }); + alert.show(); + dialogs.add(alert); + } + + public static class LoadSettingsFileChooserDialog extends FolderChooserDialog { + @Override + public void onDismiss(DialogInterface dialog) { + if( MyDebug.LOG ) + Log.d(TAG, "FolderChooserDialog dismissed"); + // n.b., fragments have to be static (as they might be inserted into a new Activity - see http://stackoverflow.com/questions/15571010/fragment-inner-class-should-be-static), + // so we access the MainActivity via the fragment's getActivity(). 
+ MainActivity main_activity = (MainActivity)this.getActivity(); + if( main_activity != null ) { // main_activity may be null if this is being closed via MainActivity.onNewIntent() + String settings_file = this.getChosenFile(); + if( MyDebug.LOG ) + Log.d(TAG, "settings_file: " + settings_file); + if( settings_file != null ) { + main_activity.getSettingsManager().loadSettings(settings_file); + } + } + super.onDismiss(dialog); + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/PreferenceSubVideo.java b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubVideo.java new file mode 100644 index 0000000..db9976a --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/PreferenceSubVideo.java @@ -0,0 +1,245 @@ +package net.sourceforge.opencamera; + +import android.content.SharedPreferences; +import android.os.Build; +import android.os.Bundle; +import android.preference.ListPreference; +import android.preference.Preference; +import android.preference.PreferenceGroup; +import android.preference.PreferenceManager; +import android.util.Log; + +public class PreferenceSubVideo extends PreferenceSubScreen { + private static final String TAG = "PreferenceSubVideo"; + + @Override + public void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + addPreferencesFromResource(R.xml.preferences_sub_video); + + final Bundle bundle = getArguments(); + + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity()); + + final int cameraId = bundle.getInt("cameraId"); + if( MyDebug.LOG ) + Log.d(TAG, "cameraId: " + cameraId); + final String cameraIdSPhysical = bundle.getString("cameraIdSPhysical"); + if( MyDebug.LOG ) + Log.d(TAG, "cameraIdSPhysical: " + cameraIdSPhysical); + + final boolean camera_open = bundle.getBoolean("camera_open"); + if( MyDebug.LOG ) + Log.d(TAG, "camera_open: " + camera_open); + + final String [] video_quality = 
bundle.getStringArray("video_quality"); + final String [] video_quality_string = bundle.getStringArray("video_quality_string"); + + final int [] video_fps = bundle.getIntArray("video_fps"); + final boolean [] video_fps_high_speed = bundle.getBooleanArray("video_fps_high_speed"); + + String fps_preference_key = PreferenceKeys.getVideoFPSPreferenceKey(cameraId, cameraIdSPhysical); + if( MyDebug.LOG ) + Log.d(TAG, "fps_preference_key: " + fps_preference_key); + String fps_value = sharedPreferences.getString(fps_preference_key, "default"); + if( MyDebug.LOG ) + Log.d(TAG, "fps_value: " + fps_value); + + final boolean supports_tonemap_curve = bundle.getBoolean("supports_tonemap_curve"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_tonemap_curve: " + supports_tonemap_curve); + + final boolean supports_video_stabilization = bundle.getBoolean("supports_video_stabilization"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_video_stabilization: " + supports_video_stabilization); + + final boolean supports_force_video_4k = bundle.getBoolean("supports_force_video_4k"); + if( MyDebug.LOG ) + Log.d(TAG, "supports_force_video_4k: " + supports_force_video_4k); + + /* Set up video resolutions. + Note that this will be the resolutions for either standard or high speed frame rate (where + the latter may also include being in slow motion mode), depending on the current setting when + this settings fragment is launched. A limitation is that if the user changes the fps value + within the settings, this list won't update until the user exits and re-enters the settings. + This could be fixed by setting a setOnPreferenceChangeListener for the preference_video_fps + ListPreference and updating, but we must not assume that the preview will be non-null (since + if the application is being recreated, MyPreferenceFragment.onCreate() is called via + MainActivity.onCreate()->super.onCreate() before the preview is created! 
So we still need to + read the info via a bundle, and only update when fps changes if the preview is non-null. + */ + if( video_quality != null && video_quality_string != null ) { + CharSequence [] entries = new CharSequence[video_quality.length]; + CharSequence [] values = new CharSequence[video_quality.length]; + for(int i=0;i save_location_history = new ArrayList<>(); + + /** Creates a new SaveLocationHistory class. This manages a history of save folder locations. + * @param main_activity MainActivity. + * @param pref_base String to use for shared preferences. + * @param folder_name The current save folder. + */ + SaveLocationHistory(MainActivity main_activity, String pref_base, String folder_name) { + if( MyDebug.LOG ) + Log.d(TAG, "pref_base: " + pref_base); + this.main_activity = main_activity; + this.pref_base = pref_base; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + + // read save locations + save_location_history.clear(); + int save_location_history_size = sharedPreferences.getInt(pref_base + "_size", 0); + if( MyDebug.LOG ) + Log.d(TAG, "save_location_history_size: " + save_location_history_size); + for(int i=0;i 6 ) { + save_location_history.remove(0); + } + writeSaveLocations(); + if( MyDebug.LOG ) { + Log.d(TAG, "updateFolderHistory exit:"); + Log.d(TAG, "save_location_history size: " + save_location_history.size()); + for(int i=0;i map = sharedPreferences.getAll(); + for( Map.Entry entry : map.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + if( key != null ) { + String tag_type = null; + if( value instanceof Boolean ) { + tag_type = boolean_tag; + } + else if( value instanceof Float ) { + tag_type = float_tag; + } + else if( value instanceof Integer ) { + tag_type = int_tag; + } + else if( value instanceof Long ) { + tag_type = long_tag; + } + else if( value instanceof String ) { + tag_type = string_tag; + } + else { + Log.e(TAG, "unknown value type: " + 
value); + } + + if( tag_type != null ) { + xmlSerializer.startTag(null, tag_type); + xmlSerializer.attribute(null, "key", key); + xmlSerializer.attribute(null, "value", value.toString()); + xmlSerializer.endTag(null, tag_type); + } + } + } + xmlSerializer.endTag(null, doc_tag); + xmlSerializer.endDocument(); + xmlSerializer.flush(); + String dataWrite = writer.toString(); + /*if( true ) + throw new IOException(); // test*/ + outputStream.write(dataWrite.getBytes(Charset.forName("UTF-8"))); + + main_activity.getPreview().showToast(null, R.string.saved_settings); + /*if( uri != null ) { + storageUtils.broadcastUri(uri, false, false, false); + } + else*/ { + storageUtils.broadcastFile(file, false, false, false, false, null); + } + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to save settings", e); + main_activity.getPreview().showToast(null, R.string.save_settings_failed); + } + finally { + if( outputStream != null ) { + try { + outputStream.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close outputStream", e); + } + } + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/SoundPoolManager.java b/app/src/main/java/net/sourceforge/opencamera/SoundPoolManager.java new file mode 100644 index 0000000..c343d86 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/SoundPoolManager.java @@ -0,0 +1,81 @@ +package net.sourceforge.opencamera; + +import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioManager; +import android.media.SoundPool; +import android.util.Log; +import android.util.SparseIntArray; + +/** Manages loading and playing sounds, via SoundPool. 
+ */ +class SoundPoolManager { + private static final String TAG = "SoundPoolManager"; + + private final Context context; + + private SoundPool sound_pool; + private SparseIntArray sound_ids; + + SoundPoolManager(Context context) { + this.context = context; + } + + void initSound() { + if( sound_pool == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "create new sound_pool"); + { + AudioAttributes audio_attributes = new AudioAttributes.Builder() + .setLegacyStreamType(AudioManager.STREAM_SYSTEM) + .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION) + .build(); + sound_pool = new SoundPool.Builder() + .setMaxStreams(1) + .setAudioAttributes(audio_attributes) + .build(); + } + sound_ids = new SparseIntArray(); + } + } + + void releaseSound() { + if( sound_pool != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "release sound_pool"); + sound_pool.release(); + sound_pool = null; + sound_ids = null; + } + } + + /* Must be called before playSound (allowing enough time to load the sound). + */ + void loadSound(int resource_id) { + if( sound_pool != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "loading sound resource: " + resource_id); + int sound_id = sound_pool.load(context, resource_id, 1); + if( MyDebug.LOG ) + Log.d(TAG, " loaded sound: " + sound_id); + sound_ids.put(resource_id, sound_id); + } + } + + /* Must call loadSound first (allowing enough time to load the sound). 
+ */ + void playSound(int resource_id) { + if( sound_pool != null ) { + if( sound_ids.indexOfKey(resource_id) < 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "resource not loaded: " + resource_id); + } + else { + int sound_id = sound_ids.get(resource_id); + if( MyDebug.LOG ) + Log.d(TAG, "play sound: " + sound_id); + sound_pool.play(sound_id, 1.0f, 1.0f, 0, 0, 1); + } + } + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/StorageUtils.java b/app/src/main/java/net/sourceforge/opencamera/StorageUtils.java new file mode 100644 index 0000000..524305e --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/StorageUtils.java @@ -0,0 +1,1689 @@ +package net.sourceforge.opencamera; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; +import java.util.TimeZone; + +import android.Manifest; +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.ContentUris; +//import android.content.ContentValues; +import android.content.Context; +import android.content.Intent; +import android.content.SharedPreferences; +import android.content.pm.PackageManager; +import android.database.Cursor; +//import android.location.Location; +import android.media.MediaScannerConnection; +import android.net.Uri; +import android.os.Build; +import android.os.Environment; +import android.os.ParcelFileDescriptor; +import android.os.StatFs; +import android.preference.PreferenceManager; +import android.provider.DocumentsContract; +import android.provider.MediaStore; +import android.provider.MediaStore.Images; +import android.provider.MediaStore.Video; +import android.provider.MediaStore.Images.ImageColumns; +import android.provider.MediaStore.Video.VideoColumns; +import android.provider.OpenableColumns; +import androidx.core.content.ContextCompat; + +import android.system.Os; +import android.system.StructStatVfs; +import 
android.util.Log; + +/** Provides access to the filesystem. Supports both standard and Storage + * Access Framework. + */ +public class StorageUtils { + private static final String TAG = "StorageUtils"; + + static final int MEDIA_TYPE_IMAGE = 1; + static final int MEDIA_TYPE_VIDEO = 2; + static final int MEDIA_TYPE_PREFS = 3; + static final int MEDIA_TYPE_GYRO_INFO = 4; + static final int MEDIA_TYPE_PRESHOT = 5; // filetype is a video, but we have separate enum to support a different prefix + + private final Context context; + private final MyApplicationInterface applicationInterface; + private Uri last_media_scanned; // mediastore uri + private boolean last_media_scanned_is_raw; + private boolean last_media_scanned_hasnoexifdatetime; + private Uri last_media_scanned_check_uri; + // If last_media_scanned_hasnoexifdatetime==true, it means that the last media saved had the + // option to strip exif tags. Therefore we should do more to remember the last media scanned, + // as we otherwise won't be able to find it again. + // last_media_scanned_check_uri is only non-null if last_media_scanned_hasnoexifdatetime==true. + // It stores a uri that can be used to test if the media still exists. In practice this will be + // the last_media_scanned uri, except for SAF images, when it'll be a SAF uri. 
+ + private final static String RELATIVE_FOLDER_BASE = Environment.DIRECTORY_DCIM; + + // for testing: + public volatile boolean failed_to_scan; + + StorageUtils(Context context, MyApplicationInterface applicationInterface) { + this.context = context; + this.applicationInterface = applicationInterface; + } + + Uri getLastMediaScanned() { + return last_media_scanned; + } + + boolean getLastMediaScannedIsRaw() { + return last_media_scanned_is_raw; + } + + boolean getLastMediaScannedHasNoExifDateTime() { + return last_media_scanned_hasnoexifdatetime; + } + + Uri getLastMediaScannedCheckUri() { + return last_media_scanned_check_uri; + } + + void clearLastMediaScanned() { + if( MyDebug.LOG ) + Log.d(TAG, "clearLastMediaScanned"); + last_media_scanned = null; + last_media_scanned_is_raw = false; + last_media_scanned_hasnoexifdatetime = false; + last_media_scanned_check_uri = null; + } + + void setLastMediaScanned(Uri uri, boolean is_raw, boolean hasnoexifdatetime, Uri check_uri) { + last_media_scanned = uri; + last_media_scanned_is_raw = is_raw; + last_media_scanned_hasnoexifdatetime = hasnoexifdatetime; + if( hasnoexifdatetime ) + last_media_scanned_check_uri = check_uri; + else + last_media_scanned_check_uri = null; + if( MyDebug.LOG ) { + Log.d(TAG, "set last_media_scanned to " + last_media_scanned); + Log.d(TAG, " last_media_scanned_is_raw: " + last_media_scanned_is_raw); + Log.d(TAG, " last_media_scanned_hasnoexifdatetime: " + last_media_scanned_hasnoexifdatetime); + Log.d(TAG, " last_media_scanned_check_uri: " + check_uri); + } + } + + /** Sends the intents to announce the new file to other Android applications. E.g., cloud storage applications like + * OwnCloud use this to listen for new photos/videos to automatically upload. 
     * Note that on Android 7 onwards, these broadcasts are deprecated and won't have any effect - see:
     * https://developer.android.com/reference/android/hardware/Camera.html#ACTION_NEW_PICTURE
     * Listeners like OwnCloud should instead be using
     * https://developer.android.com/reference/android/app/job/JobInfo.Builder.html#addTriggerContentUri(android.app.job.JobInfo.TriggerContentUri)
     * See https://github.com/owncloud/android/issues/1675 for OwnCloud's discussion on this.
     */
    void announceUri(Uri uri, boolean is_new_picture, boolean is_new_video) {
        if( MyDebug.LOG )
            Log.d(TAG, "announceUri: " + uri);
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) {
            if( MyDebug.LOG )
                Log.d(TAG, "broadcasts deprecated on Android 7 onwards, so don't send them");
            // see note above; the intents won't be delivered, so might as well save the trouble of trying to send them
        }
        else if( is_new_picture ) {
            // note, we reference the string directly rather than via Camera.ACTION_NEW_PICTURE, as the latter class is now deprecated - but we still need to broadcast the string for other apps
            context.sendBroadcast(new Intent( "android.hardware.action.NEW_PICTURE" , uri));
            // for compatibility with some apps - apparently this is what used to be broadcast on Android?
            context.sendBroadcast(new Intent("com.android.camera.NEW_PICTURE", uri));

            if( MyDebug.LOG ) // this code only used for debugging/logging
            {
                // query the mediastore entry for the new picture and dump its metadata to the log
                @SuppressLint("InlinedApi") // complains this constant only available on API 29 (even though it was available on older versions, but looks like it was moved?)
                String[] CONTENT_PROJECTION = { Images.Media.DATA, Images.Media.DISPLAY_NAME, Images.Media.MIME_TYPE, Images.Media.SIZE, Images.Media.DATE_TAKEN, Images.Media.DATE_ADDED };
                Cursor c = context.getContentResolver().query(uri, CONTENT_PROJECTION, null, null, null);
                if( c == null ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "Couldn't resolve given uri [1]: " + uri);
                }
                else if( !c.moveToFirst() ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "Couldn't resolve given uri [2]: " + uri);
                }
                else {
                    String file_path = c.getString(c.getColumnIndexOrThrow(Images.Media.DATA));
                    String file_name = c.getString(c.getColumnIndexOrThrow(Images.Media.DISPLAY_NAME));
                    String mime_type = c.getString(c.getColumnIndexOrThrow(Images.Media.MIME_TYPE));
                    @SuppressLint("InlinedApi") // complains this constant only available on API 29 (even though it was available on older versions, but looks like it was moved?)
                    long date_taken = c.getLong(c.getColumnIndexOrThrow(Images.Media.DATE_TAKEN));
                    long date_added = c.getLong(c.getColumnIndexOrThrow(Images.Media.DATE_ADDED));
                    Log.d(TAG, "file_path: " + file_path);
                    Log.d(TAG, "file_name: " + file_name);
                    Log.d(TAG, "mime_type: " + mime_type);
                    Log.d(TAG, "date_taken: " + date_taken);
                    Log.d(TAG, "date_added: " + date_added);
                    c.close();
                }
            }
            // Dead code below kept for reference: a workaround that rewrote DATE_TAKEN from DATE_ADDED.
            /*{
                // hack: problem on Camera2 API (at least on Nexus 6) that if geotagging is enabled, then the resultant image has incorrect Exif TAG_GPS_DATESTAMP (GPSDateStamp) set (tends to be around 2038 - possibly a driver bug of casting long to int?)
                // whilst we don't yet correct for that bug, the more immediate problem is that it also messes up the DATE_TAKEN field in the media store, which messes up Gallery apps
                // so for now, we correct it based on the DATE_ADDED value.
                String[] CONTENT_PROJECTION = { Images.Media.DATE_ADDED };
                Cursor c = context.getContentResolver().query(uri, CONTENT_PROJECTION, null, null, null);
                if( c == null ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "Couldn't resolve given uri [1]: " + uri);
                }
                else if( !c.moveToFirst() ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "Couldn't resolve given uri [2]: " + uri);
                }
                else {
                    long date_added = c.getLong(c.getColumnIndex(Images.Media.DATE_ADDED));
                    if( MyDebug.LOG )
                        Log.e(TAG, "replace date_taken with date_added: " + date_added);
                    ContentValues values = new ContentValues();
                    values.put(Images.Media.DATE_TAKEN, date_added*1000);
                    context.getContentResolver().update(uri, values, null, null);
                    c.close();
                }
            }*/
        }
        else if( is_new_video ) {
            // no legacy "com.android.camera" equivalent is sent for videos, only the hardware action
            context.sendBroadcast(new Intent("android.hardware.action.NEW_VIDEO", uri));

            /*String[] CONTENT_PROJECTION = { Video.Media.DURATION };
            Cursor c = context.getContentResolver().query(uri, CONTENT_PROJECTION, null, null, null);
            if( c == null ) {
                if( MyDebug.LOG )
                    Log.e(TAG, "Couldn't resolve given uri [1]: " + uri);
            }
            else if( !c.moveToFirst() ) {
                if( MyDebug.LOG )
                    Log.e(TAG, "Couldn't resolve given uri [2]: " + uri);
            }
            else {
                long duration = c.getLong(c.getColumnIndex(Video.Media.DURATION));
                if( MyDebug.LOG )
                    Log.e(TAG, "replace duration: " + duration);
                ContentValues values = new ContentValues();
                values.put(Video.Media.DURATION, 1000);
                context.getContentResolver().update(uri, values, null, null);
                c.close();
            }*/
        }
    }

    // Dead code kept for reference: an alternative approach that inserted the RAW file directly into the mediastore.
    /*public Uri broadcastFileRaw(File file, Date current_date, Location location) {
        if( MyDebug.LOG )
            Log.d(TAG, "broadcastFileRaw: " + file.getAbsolutePath());
        ContentValues values = new ContentValues();
        values.put(ImageColumns.TITLE, file.getName().substring(0, file.getName().lastIndexOf(".")));
        values.put(ImageColumns.DISPLAY_NAME, file.getName());
        values.put(ImageColumns.DATE_TAKEN, current_date.getTime());
        values.put(ImageColumns.MIME_TYPE, "image/dng");
        //values.put(ImageColumns.MIME_TYPE, "image/jpeg");
        if( location != null ) {
            values.put(ImageColumns.LATITUDE, location.getLatitude());
            values.put(ImageColumns.LONGITUDE, location.getLongitude());
        }
        // leave ORIENTATION for now - this doesn't seem to get inserted for JPEGs anyway (via MediaScannerConnection.scanFile())
        values.put(ImageColumns.DATA, file.getAbsolutePath());
        //values.put(ImageColumns.DATA, "/storage/emulated/0/DCIM/OpenCamera/blah.dng");
        Uri uri = null;
        try {
            uri = context.getContentResolver().insert(Images.Media.EXTERNAL_CONTENT_URI, values);
            if( MyDebug.LOG )
                Log.d(TAG, "inserted media uri: " + uri);
            context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, uri));
        }
        catch (Throwable th) {
            // This can happen when the external volume is already mounted, but
            // MediaScanner has not notify MediaProvider to add that volume.
            // The picture is still safe and MediaScanner will find it and
            // insert it into MediaProvider. The only problem is that the user
            // cannot click the thumbnail to review the picture.
            Log.e(TAG, "Failed to write MediaStore" + th);
        }
        return uri;
    }*/

    /** Sends a "broadcast" for the new file. This is necessary so that Android recognises the new file without needing a reboot:
     * - So that they show up when connected to a PC using MTP.
     * - For JPEGs, so that they show up in gallery applications.
     * - This also calls announceUri() on the resultant Uri for the new file.
     * - Note this should also be called after deleting a file.
     * - Note that for DNG files, MediaScannerConnection.scanFile() doesn't result in the files being shown in gallery applications.
     *   This may well be intentional, since most gallery applications won't read DNG files anyway. But it's still important to
     *   call this function for DNGs, so that they show up on MTP.
     */
    public void broadcastFile(final File file, final boolean is_new_picture, final boolean is_new_video, final boolean set_last_scanned, final boolean hasnoexifdatetime, final Uri saf_uri) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "broadcastFile: " + file.getAbsolutePath());
            Log.d(TAG, "saf_uri: " + saf_uri);
        }
        // note that the new method means that the new folder shows up as a file when connected to a PC via MTP (at least tested on Windows 8)
        if( file.isDirectory() ) {
            //this.sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED, Uri.fromFile(file)));
            // ACTION_MEDIA_MOUNTED no longer allowed on Android 4.4! Gives: SecurityException: Permission Denial: not allowed to send broadcast android.intent.action.MEDIA_MOUNTED
            // note that we don't actually need to broadcast anything, the folder and contents appear straight away (both in Gallery on device, and on a PC when connecting via MTP)
            // also note that we definitely don't want to broadcast ACTION_MEDIA_SCANNER_SCAN_FILE or use scanFile() for folders, as this means the folder shows up as a file on a PC via MTP (and isn't fixed by rebooting!)
        }
        else {
            // both of these work fine, but using MediaScannerConnection.scanFile() seems to be preferred over sending an intent
            //context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.fromFile(file)));
            failed_to_scan = true; // set to true until scanned okay
            if( MyDebug.LOG )
                Log.d(TAG, "failed_to_scan set to true");
            // scan asynchronously; all follow-up work (announceUri, thumbnail bookkeeping, video intent result) happens in the callback
            MediaScannerConnection.scanFile(context, new String[] { file.getAbsolutePath() }, null,
                new MediaScannerConnection.OnScanCompletedListener() {
                    public void onScanCompleted(String path, Uri uri) {
                        failed_to_scan = false;
                        if( MyDebug.LOG ) {
                            Log.d(TAG, "Scanned " + path + ":");
                            Log.d(TAG, "-> uri=" + uri);
                        }
                        if( saf_uri != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                            // Prefer using MediaStore.getMediaUri() to get the mediastore URI from a SAF URI.
                            // Fixes bug on Pixel 6 Pro with SAF where the URI received by onScanCompleted() is
                            // of the form content://media/external_primary/images/media/123456, when this is not
                            // recognised by gallery apps (causes strange bug where clicking on gallery icon opens
                            // contacts!) The correct URI is returned by MediaStore.getMediaUri(), and (for
                            // Pixel 6 Pro at least) is of the form content://media/external_primary/file/123456.
                            try {
                                Uri media_uri_from_saf_uri = MediaStore.getMediaUri(context, saf_uri);
                                if( media_uri_from_saf_uri != null ) {
                                    uri = media_uri_from_saf_uri;
                                    if( MyDebug.LOG ) {
                                        Log.d(TAG, "prefer getMediaUri from SAF: " + uri);
                                    }
                                }
                            }
                            catch(Exception e) {
                                // best-effort: fall back to the uri supplied by the scanner
                                MyDebug.logStackTrace(TAG, "exception from getMediaUri", e);
                            }
                        }
                        if( set_last_scanned ) {
                            boolean is_raw = filenameIsRaw(file.getName());
                            setLastMediaScanned(uri, is_raw, hasnoexifdatetime, saf_uri != null ? saf_uri : uri);
                        }
                        announceUri(uri, is_new_picture, is_new_video);
                        applicationInterface.scannedFile(file, uri);

                        // If called from video intent, if not using scoped-storage, we'll have saved using File API (even if user preference is SAF), see
                        // MyApplicationInterface.createOutputVideoMethod().
                        // It seems caller apps seem to prefer the content:// Uri rather than one based on a File
                        // update for Android 7: seems that passing file uris is now restricted anyway, see https://code.google.com/p/android/issues/detail?id=203555
                        // So we pass the uri back to the caller here.
                        // NOTE(review): this cast assumes context is always an Activity - confirm all construction sites.
                        Activity activity = (Activity)context;
                        String action = activity.getIntent().getAction();
                        if( !MainActivity.useScopedStorage() && MediaStore.ACTION_VIDEO_CAPTURE.equals(action) ) {
                            applicationInterface.finishVideoIntent(uri);
                        }
                    }
                }
            );
        }
    }

    /** Wrapper for broadcastFile, when we only have a Uri (e.g., for SAF)
     */
    public void broadcastUri(final Uri uri, final boolean is_new_picture, final boolean is_new_video, final boolean set_last_scanned, final boolean hasnoexifdatetime, final boolean image_capture_intent) {
        if( MyDebug.LOG )
            Log.d(TAG, "broadcastUri: " + uri);
        /* We still need to broadcastFile for SAF for various reasons:
            1. To call storageUtils.announceUri() to broadcast NEW_PICTURE etc.
               Whilst in theory we could do this directly, it seems external apps that use such broadcasts typically
               won't know what to do with a SAF based Uri (e.g., Owncloud crashes!) so better to broadcast the Uri
               corresponding to the real file, if it exists.
            2. Whilst the new file seems to be known by external apps such as Gallery without having to call media
               scanner, I've had reports this doesn't happen when saving to external SD cards. So better to explicitly
               scan.
            3. If set_last_scanned==true, it means we get the media uri which can be used to set the thumbnail uri
               (see setLastMediaScanned()). This is particularly important when using SAF with scoped storage, as
               getting the latest media via SAF APIs is (if not cached) very slow! N.B., most gallery apps need a
               mediastore uri, not the SAF uri.
        */
        File real_file = getFileFromDocumentUriSAF(uri, false);
        if( MyDebug.LOG )
            Log.d(TAG, "real_file: " + real_file);
        if( real_file != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "broadcast file");
            //Uri media_uri = broadcastFileRaw(real_file, current_date, location);
            //announceUri(media_uri, is_new_picture, is_new_video);
            broadcastFile(real_file, is_new_picture, is_new_video, set_last_scanned, hasnoexifdatetime, uri);
        }
        else if( !image_capture_intent ) {
            if( MyDebug.LOG )
                Log.d(TAG, "announce SAF uri");
            // shouldn't do this for an image capture intent - e.g., causes crash when calling from Google Keep
            announceUri(uri, is_new_picture, is_new_video);
        }
    }

    /** Whether the user preference is to save via the Storage Access Framework. */
    public boolean isUsingSAF() {
        // NOTE(review): the inner block scope looks vestigial - behaviour is just the preference lookup
        {
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
            if( sharedPreferences.getBoolean(PreferenceKeys.UsingSAFPreferenceKey, false) ) {
                return true;
            }
        }
        return false;
    }

    // only valid if !isUsingSAF()
    // Returns the user's save-folder preference (defaults to "OpenCamera").
    String getSaveLocation() {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
        return sharedPreferences.getString(PreferenceKeys.SaveLocationPreferenceKey, "OpenCamera");
    }

    // only valid if isUsingSAF()
    // Returns the SAF tree uri preference as a string (empty string if unset).
    String getSaveLocationSAF() {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
        return sharedPreferences.getString(PreferenceKeys.SaveLocationSAFPreferenceKey, "");
    }

    // only valid if isUsingSAF()
    public Uri getTreeUriSAF() {
        String folder_name = getSaveLocationSAF();
        return Uri.parse(folder_name);
    }

    /** Folder used for settings backups, in app-specific external storage. */
    File getSettingsFolder() {
        return new File(context.getExternalFilesDir(null), "backups");
    }

    /** Valid whether or not isUsingSAF().
     * Returns the absolute path (in File format) of the image save folder.
     * Only use this for needing e.g. human-readable strings for UI.
     * This should not be used to create a File - instead, use getImageFolder().
     * Note that if isUsingSAF(), this may return null - it can't be assumed that there is a
     * File corresponding to the SAF Uri.
     */
    public String getImageFolderPath() {
        File file = getImageFolder();
        return file == null ? null : file.getAbsolutePath();
    }

    /** Valid whether or not isUsingSAF().
     * But note that if isUsingSAF(), this may return null - it can't be assumed that there is a
     * File corresponding to the SAF Uri.
     */
    File getImageFolder() {
        File file;
        if( isUsingSAF() ) {
            Uri uri = getTreeUriSAF();
            /*if( MyDebug.LOG )
                Log.d(TAG, "uri: " + uri);*/
            file = getFileFromDocumentUriSAF(uri, true);
        }
        else {
            String folder_name = getSaveLocation();
            file = getImageFolder(folder_name);
        }
        return file;
    }

    // only valid if !isUsingSAF()
    // returns a form for use with RELATIVE_PATH (scoped storage)
    String getSaveRelativeFolder() {
        String folder_name = getSaveLocation();
        return getSaveRelativeFolder(folder_name);
    }

    // only valid if !isUsingSAF()
    // returns a form for use with RELATIVE_PATH (scoped storage)
    private static String getSaveRelativeFolder(String folder_name) {
        if( !folder_name.isEmpty() && folder_name.lastIndexOf('/') == folder_name.length()-1 ) {
            // ignore final '/' character
            folder_name = folder_name.substring(0, folder_name.length()-1);
        }
        return RELATIVE_FOLDER_BASE + File.separator + folder_name;
    }

    /** The base DCIM-style folder that relative save folders are resolved against. */
    public static File getBaseFolder() {
        final File base_folder = Environment.getExternalStoragePublicDirectory(RELATIVE_FOLDER_BASE);
        return base_folder;
    }

    /** Whether the save photo/video location is in a form that represents a full path, or a
     * sub-folder in DCIM/.
+ */ + static boolean saveFolderIsFull(String folder_name) { + return folder_name.startsWith("/"); + } + + // only valid if !isUsingSAF() + private static File getImageFolder(String folder_name) { + File file; + if( !folder_name.isEmpty() && folder_name.lastIndexOf('/') == folder_name.length()-1 ) { + // ignore final '/' character + folder_name = folder_name.substring(0, folder_name.length()-1); + } + if( saveFolderIsFull(folder_name) ) { + file = new File(folder_name); + } + else { + file = new File(getBaseFolder(), folder_name); + } + return file; + } + + /** Only valid if isUsingSAF() + * Returns the absolute path (in File format) of the SAF folder. + * Only use this for needing e.g. human-readable strings for UI. + * This should not be used to create a File - instead, use getFileFromDocumentUriSAF(). + */ + public String getFilePathFromDocumentUriSAF(Uri uri, boolean is_folder) { + File file = getFileFromDocumentUriSAF(uri, is_folder); + return file == null ? null : file.getAbsolutePath(); + } + + /** Only valid if isUsingSAF() + * This function should only be used as a last resort - we shouldn't generally assume that a Uri represents an actual File, or that + * the File can be obtained anyway. + * However this is needed for a workaround to the fact that deleting a document file doesn't remove it from MediaStore. + * See: + http://stackoverflow.com/questions/21605493/storage-access-framework-does-not-update-mediascanner-mtp + http://stackoverflow.com/questions/20067508/get-real-path-from-uri-android-kitkat-new-storage-access-framework/ + Note that when using Android Q's scoped storage, the returned File will be inaccessible. However we still sometimes call this, + e.g., to scan with mediascanner or get a human readable string for the path. + Also note that this will return null for media store Uris with Android Q's scoped storage: https://developer.android.com/preview/privacy/scoped-storage + "The DATA column is redacted for each file in the media store." 
+ */ + public File getFileFromDocumentUriSAF(Uri uri, boolean is_folder) { + if( MyDebug.LOG ) { + Log.d(TAG, "getFileFromDocumentUriSAF: " + uri); + Log.d(TAG, "is_folder?: " + is_folder); + } + String authority = uri.getAuthority(); + if( MyDebug.LOG ) { + Log.d(TAG, "authority: " + authority); + Log.d(TAG, "scheme: " + uri.getScheme()); + Log.d(TAG, "fragment: " + uri.getFragment()); + Log.d(TAG, "path: " + uri.getPath()); + Log.d(TAG, "last path segment: " + uri.getLastPathSegment()); + } + File file = null; + if( "com.android.externalstorage.documents".equals(authority) ) { + final String id = is_folder ? DocumentsContract.getTreeDocumentId(uri) : DocumentsContract.getDocumentId(uri); + if( MyDebug.LOG ) + Log.d(TAG, "id: " + id); + String [] split = id.split(":"); + if( split.length >= 1 ) { + String type = split[0]; + String path = split.length >= 2 ? split[1] : ""; + /*if( MyDebug.LOG ) { + Log.d(TAG, "type: " + type); + Log.d(TAG, "path: " + path); + }*/ + File [] storagePoints = new File("/storage").listFiles(); + + if( "primary".equalsIgnoreCase(type) ) { + final File externalStorage = Environment.getExternalStorageDirectory(); + file = new File(externalStorage, path); + } + for(int i=0;storagePoints != null && i 0 ) { + index = "_" + count; // try to find a unique filename + } + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + boolean useZuluTime = sharedPreferences.getString(PreferenceKeys.SaveZuluTimePreferenceKey, "local").equals("zulu"); + boolean includeMilliseconds = sharedPreferences.getBoolean(PreferenceKeys.SaveIncludeMillisecondsPreferenceKey, false); + String dateFormatPattern = "yyyyMMdd_HHmmss"; + if(includeMilliseconds) { + dateFormatPattern += ".SSS"; + } + String timeStamp; + if( useZuluTime ) { + SimpleDateFormat fmt = new SimpleDateFormat(dateFormatPattern+"'Z'", Locale.US); + fmt.setTimeZone(TimeZone.getTimeZone("UTC")); + timeStamp = fmt.format(current_date); + } + else { + timeStamp = 
                new SimpleDateFormat(dateFormatPattern, Locale.US).format(current_date);
        }
        String mediaFilename;
        switch (type) {
            case MEDIA_TYPE_GYRO_INFO: // gyro info files have same name as the photo (but different extension)
            case MEDIA_TYPE_PRESHOT: // preshot videos still use same prefix as photos
            case MEDIA_TYPE_IMAGE: {
                String prefix = sharedPreferences.getString(PreferenceKeys.SavePhotoPrefixPreferenceKey, "IMG_");
                mediaFilename = prefix + timeStamp + suffix + index + extension;
                break;
            }
            case MEDIA_TYPE_VIDEO: {
                String prefix = sharedPreferences.getString(PreferenceKeys.SaveVideoPrefixPreferenceKey, "VID_");
                mediaFilename = prefix + timeStamp + suffix + index + extension;
                break;
            }
            case MEDIA_TYPE_PREFS: {
                // good to use a prefix that sorts before IMG_ and VID_: annoyingly when using SAF, it doesn't seem possible to
                // only show the xml files, and it always defaults to sorting alphabetically...
                String prefix = "BACKUP_OC_";
                mediaFilename = prefix + timeStamp + suffix + index + extension;
                break;
            }
            default:
                // throw exception as this is a programming error
                if (MyDebug.LOG)
                    Log.e(TAG, "unknown type: " + type);
                throw new RuntimeException();
        }
        return mediaFilename;
    }

    // only valid if !isUsingSAF()
    // Convenience overload that targets the configured image save folder.
    File createOutputMediaFile(int type, String suffix, String extension, Date current_date) throws IOException {
        File mediaStorageDir = getImageFolder();
        return createOutputMediaFile(mediaStorageDir, type, suffix, extension, current_date);
    }

    /** Create the folder if it does not exist.
     * @throws IOException if the folder (or an intermediate folder) could not be created.
     */
    void createFolderIfRequired(File folder) throws IOException {
        if( !folder.exists() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "create directory: " + folder);
            if( !folder.mkdirs() ) {
                Log.e(TAG, "failed to create directory");
                throw new IOException();
            }
            // newly created folders must also be broadcast, see broadcastFile() notes
            broadcastFile(folder, false, false, false, false, null);
        }
    }

    // only valid if !isUsingSAF()
    @SuppressLint("SimpleDateFormat")
    File createOutputMediaFile(File mediaStorageDir, int type, String suffix, String extension, Date current_date) throws IOException {
        createFolderIfRequired(mediaStorageDir);

        // Create a media file name
        // try up to 100 candidate filenames (varying the count-based index) to find one that doesn't already exist
        File mediaFile = null;
        for(int count=0;count<100;count++) {
            /*final boolean use_burst_folder = true;
            if( use_burst_folder ) {
                String burstFolderName = createMediaFilename(type, "", count, "", current_date);
                File burstFolder = new File(mediaStorageDir.getPath() + File.separator + burstFolderName);
                if( !burstFolder.exists() ) {
                    if( !burstFolder.mkdirs() ) {
                        if( MyDebug.LOG )
                            Log.e(TAG, "failed to create burst sub-directory");
                        throw new IOException();
                    }
                    broadcastFile(burstFolder, false, false, false);
                }

                SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
                String prefix = sharedPreferences.getString(PreferenceKeys.getSavePhotoPrefixPreferenceKey(), "IMG_");
                //String mediaFilename = prefix + suffix + "." + extension;
                String suffix_alt = suffix.substring(1);
                String mediaFilename = suffix_alt + prefix + suffix_alt + "BURST" + "." + extension;
                mediaFile = new File(burstFolder.getPath() + File.separator + mediaFilename);
            }
            else*/ {
                String mediaFilename = createMediaFilename(type, suffix, count, "."
                    + extension, current_date);
                mediaFile = new File(mediaStorageDir.getPath() + File.separator + mediaFilename);
            }
            if( !mediaFile.exists() ) {
                break;
            }
        }

        if( MyDebug.LOG ) {
            Log.d(TAG, "getOutputMediaFile returns: " + mediaFile);
        }
        if( mediaFile == null )
            throw new IOException();
        return mediaFile;
    }

    // only valid if isUsingSAF()
    // Creates a new document with the supplied name and mime type under the user's SAF tree.
    // All failure modes are normalised to IOException so callers have a single error path.
    Uri createOutputFileSAF(String filename, String mimeType) throws IOException {
        try {
            Uri treeUri = getTreeUriSAF();
            if( MyDebug.LOG )
                Log.d(TAG, "treeUri: " + treeUri);
            Uri docUri = DocumentsContract.buildDocumentUriUsingTree(treeUri, DocumentsContract.getTreeDocumentId(treeUri));
            if( MyDebug.LOG )
                Log.d(TAG, "docUri: " + docUri);
            // note that DocumentsContract.createDocument will automatically append to the filename if it already exists
            Uri fileUri = DocumentsContract.createDocument(context.getContentResolver(), docUri, mimeType, filename);
            if( MyDebug.LOG )
                Log.d(TAG, "returned fileUri: " + fileUri);
            /*if( true )
                throw new SecurityException(); // test*/
            if( fileUri == null )
                throw new IOException();
            return fileUri;
        }
        catch(IllegalArgumentException e) {
            // DocumentsContract.getTreeDocumentId throws this if URI is invalid
            MyDebug.logStackTrace(TAG, "createOutputMediaFileSAF failed with IllegalArgumentException", e);
            throw new IOException();
        }
        catch(IllegalStateException e) {
            // Have reports of this from Google Play for DocumentsContract.createDocument - better to fail gracefully and tell user rather than crash!
            MyDebug.logStackTrace(TAG, "createOutputMediaFileSAF failed with IllegalStateException", e);
            throw new IOException();
        }
        catch(NullPointerException e) {
            // Have reports of this from Google Play for DocumentsContract.createDocument - better to fail gracefully and tell user rather than crash!
            MyDebug.logStackTrace(TAG, "createOutputMediaFileSAF failed with NullPointerException", e);
            throw new IOException();
        }
        catch(SecurityException e) {
            // Have reports of this from Google Play - better to fail gracefully and tell user rather than crash!
            MyDebug.logStackTrace(TAG, "createOutputMediaFileSAF failed with SecurityException", e);
            throw new IOException();
        }
    }

    /** Return the mime type corresponding to the supplied extension. Supports images only, not video.
     */
    public String getImageMimeType(String extension) {
        String mimeType;
        switch (extension) {
            case "dng":
                // NOTE(review): "image/dng" is non-standard; the standard type is the commented alternative below - kept deliberately, confirm before changing
                mimeType = "image/dng";
                //mimeType = "image/x-adobe-dng";
                break;
            case "webp":
                mimeType = "image/webp";
                break;
            case "png":
                mimeType = "image/png";
                break;
            default:
                // anything unrecognised is treated as JPEG
                mimeType = "image/jpeg";
                break;
        }
        return mimeType;
    }

    /** Return the mime type corresponding to the supplied extension. Supports video only, not images.
     */
    String getVideoMimeType(String extension) {
        String mimeType;
        switch( extension ) {
            case "3gp":
                mimeType = "video/3gpp";
                break;
            case "webm":
                mimeType = "video/webm";
                break;
            default:
                // anything unrecognised is treated as MP4
                mimeType = "video/mp4";
                break;
        }
        return mimeType;
    }

    // only valid if isUsingSAF()
    // SAF counterpart of createOutputMediaFile(): maps the media type to a mime type, then creates the document.
    Uri createOutputMediaFileSAF(int type, String suffix, String extension, Date current_date) throws IOException {
        String mimeType;
        switch (type) {
            case MEDIA_TYPE_IMAGE:
                mimeType = getImageMimeType(extension);
                break;
            case MEDIA_TYPE_PRESHOT:
            case MEDIA_TYPE_VIDEO:
                mimeType = getVideoMimeType(extension);
                break;
            case MEDIA_TYPE_PREFS:
            case MEDIA_TYPE_GYRO_INFO:
                mimeType = "text/xml";
                break;
            default:
                // throw exception as this is a programming error
                if (MyDebug.LOG)
                    Log.e(TAG, "unknown type: " + type);
                throw new RuntimeException();
        }
        // note that DocumentsContract.createDocument will automatically append to the filename if it already exists
        String mediaFilename = createMediaFilename(type, suffix,
                0, "." + extension, current_date);
        return createOutputFileSAF(mediaFilename, mimeType);
    }

    /** Value type describing a media item, identified either by a mediastore uri or (with
     *  mediastore==false) some other uri such as a SAF document uri.
     */
    static class Media {
        final boolean mediastore; // whether uri is from mediastore
        final long id; // for mediastore==true only
        final boolean video;
        final Uri uri;
        final long date;
        //final int orientation; // for mediastore==true, video==false only
        final String filename; // this should correspond to DISPLAY_NAME (so available with scoped storage) - so this includes file extension, but not full path

        Media(boolean mediastore, long id, boolean video, Uri uri, long date/*, int orientation*/, String filename) {
            this.mediastore = mediastore;
            this.id = id;
            this.video = video;
            this.uri = uri;
            this.date = date;
            //this.orientation = orientation;
            this.filename = filename;
        }

        /** Returns a mediastore uri. If this Media object was not created by a mediastore uri, then
         * this will try to convert using MediaStore.getMediaUri(), but if this fails the function
         * will return null.
         */
        Uri getMediaStoreUri(Context context) {
            if( this.mediastore )
                return this.uri;
            else {
                try {
                    // should only have allowed mediastore==false when using scoped storage
                    if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) {
                        return MediaStore.getMediaUri(context, this.uri);
                    }
                }
                catch(Exception e) {
                    MyDebug.logStackTrace(TAG, "exception from getMediaUri", e);
                }
                return null;
            }
        }
    }

    /** Whether the filename has a RAW (DNG) extension, case-insensitively. */
    static boolean filenameIsRaw(String filename) {
        return filename.toLowerCase(Locale.US).endsWith(".dng");
    }

    /** Returns the filename lowercased and with any extension after the last '.' removed
     *  (a leading '.' as in hidden files is not treated as an extension separator).
     */
    private static String filenameWithoutExtension(String filename) {
        String filename_without_ext = filename.toLowerCase(Locale.US);
        if( filename_without_ext.indexOf(".") > 0 )
            filename_without_ext = filename_without_ext.substring(0, filename_without_ext.lastIndexOf("."));
        return filename_without_ext;
    }

    /** If the filename is for a "special" type HDR, NR or PANO, then return the filename without the
     * part of the filename e.g. "_HDR" onwards; else return null.
     * Received filename should not include an extension.
     */
    private static String filenameIsSpecial(String filename_without_ext) {
        if( filename_without_ext.endsWith(ImageSaver.hdr_suffix) ) {
            return filename_without_ext.substring(0, filename_without_ext.length()-ImageSaver.hdr_suffix.length());
        }
        if( filename_without_ext.endsWith(ImageSaver.nr_suffix) ) {
            return filename_without_ext.substring(0, filename_without_ext.length()-ImageSaver.nr_suffix.length());
        }
        if( filename_without_ext.endsWith(ImageSaver.pano_suffix) ) {
            return filename_without_ext.substring(0, filename_without_ext.length()-ImageSaver.pano_suffix.length());
        }
        return null;
    }

    // Which mediastore table a query targets.
    private enum UriType {
        MEDIASTORE_IMAGES,
        MEDIASTORE_VIDEOS
    }

    /** Finds the most recent media item of the given type (optionally restricted to a bucket),
     *  preferring non-RAW and "special" (HDR/NR/PANO) variants where equivalents exist.
     */
    private Media getLatestMediaCore(Uri baseUri, String bucket_id, UriType uri_type) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "getLatestMediaCore");
            Log.d(TAG, "baseUri: " + baseUri);
            Log.d(TAG, "bucket_id: " + bucket_id);
            Log.d(TAG, "uri_type: " + uri_type);
        }
        Media media = null;

        // column indices into the projection arrays below
        final int column_id_c = 0;
        final int column_date_taken_c = 1;
        /*final int column_data_c = 2; // full path and filename, including extension
        final int column_name_c = 3; // filename (without path), including extension
        final int column_orientation_c = 4; // for images only*/
        final int column_name_c = 2; // filename (without path), including extension
        //final int column_orientation_c = 3; // for mediastore images only
        String [] projection;
        switch( uri_type ) {
            case MEDIASTORE_IMAGES:
                //projection = new String[] {ImageColumns._ID, ImageColumns.DATE_TAKEN, ImageColumns.DISPLAY_NAME, ImageColumns.ORIENTATION};
                projection = new String[] {ImageColumns._ID, ImageColumns.DATE_TAKEN, ImageColumns.DISPLAY_NAME};
                break;
            case MEDIASTORE_VIDEOS:
                projection = new String[] {VideoColumns._ID, VideoColumns.DATE_TAKEN, VideoColumns.DISPLAY_NAME};
                break;
            default:
                throw new RuntimeException("unknown
uri_type: " + uri_type); + } + // for images, we need to search for JPEG/etc and RAW, to support RAW only mode (even if we're not currently in that mode, it may be that previously the user did take photos in RAW only mode) + // if updating this code for supported mime types, remember to also update getLatestMediaSAF() + /*String selection = video ? "" : ImageColumns.MIME_TYPE + "='image/jpeg' OR " + + ImageColumns.MIME_TYPE + "='image/webp' OR " + + ImageColumns.MIME_TYPE + "='image/png' OR " + + ImageColumns.MIME_TYPE + "='image/x-adobe-dng'";*/ + String selection = ""; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + { + if( bucket_id != null ) + selection = ImageColumns.BUCKET_ID + " = " + bucket_id; + boolean and = !selection.isEmpty(); + if( and ) + selection += " AND ( "; + selection += ImageColumns.MIME_TYPE + "='image/jpeg' OR " + + ImageColumns.MIME_TYPE + "='image/webp' OR " + + ImageColumns.MIME_TYPE + "='image/png' OR " + + ImageColumns.MIME_TYPE + "='image/x-adobe-dng'"; + if( and ) + selection += " )"; + break; + } + case MEDIASTORE_VIDEOS: + if( bucket_id != null ) + selection = VideoColumns.BUCKET_ID + " = " + bucket_id; + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + if( MyDebug.LOG ) + Log.d(TAG, "selection: " + selection); + String order; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + order = ImageColumns.DATE_TAKEN + " DESC," + ImageColumns._ID + " DESC"; + break; + case MEDIASTORE_VIDEOS: + //noinspection DuplicateBranchesInSwitch + order = VideoColumns.DATE_TAKEN + " DESC," + VideoColumns._ID + " DESC"; + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + Cursor cursor = null; + + // we know we only want the most recent image - however we may need to scan forward if we find a RAW, to see if there's + // an equivalent non-RAW image + // request 3, just in case + Uri queryUri = baseUri.buildUpon().appendQueryParameter("limit", "3").build(); + if( MyDebug.LOG ) + 
Log.d(TAG, "queryUri: " + queryUri); + + try { + cursor = context.getContentResolver().query(queryUri, projection, selection, null, order); + if( cursor != null && cursor.moveToFirst() ) { + if( MyDebug.LOG ) + Log.d(TAG, "found: " + cursor.getCount()); + + // now sorted in order of date - so just pick the most recent one + + /* + // now sorted in order of date - scan to most recent one in the Open Camera save folder + boolean found = false; + //File save_folder = getImageFolder(); // may be null if using SAF + String save_folder_string = save_folder == null ? null : save_folder.getAbsolutePath() + File.separator; + if( MyDebug.LOG ) + Log.d(TAG, "save_folder_string: " + save_folder_string); + do { + String path = cursor.getString(column_data_c); + if( MyDebug.LOG ) + Log.d(TAG, "path: " + path); + // path may be null on Android 4.4!: http://stackoverflow.com/questions/3401579/get-filename-and-path-from-uri-from-mediastore + if( save_folder_string == null || (path != null && path.contains(save_folder_string) ) ) { + if( MyDebug.LOG ) + Log.d(TAG, "found most recent in Open Camera folder"); + // we filter files with dates in future, in case there exists an image in the folder with incorrect datestamp set to the future + // we allow up to 2 days in future, to avoid risk of issues to do with timezone etc + long date = cursor.getLong(column_date_taken_c); + long current_time = System.currentTimeMillis(); + if( date > current_time + 172800000 ) { + if( MyDebug.LOG ) + Log.d(TAG, "skip date in the future!"); + } + else { + found = true; + break; + } + } + } + while( cursor.moveToNext() ); + + if( !found ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find suitable in Open Camera folder, so just go with most recent"); + cursor.moveToFirst(); + } + */ + + { + // make sure we prefer JPEG/etc (non RAW) if there's a JPEG/etc version of this image + // this is because we want to support RAW only and JPEG+RAW modes + String filename = cursor.getString(column_name_c); + if( 
MyDebug.LOG ) { + Log.d(TAG, "filename: " + filename); + } + // in theory now that we use DISPLAY_NAME instead of DATA (for path), this should always be non-null, but check just in case + if( filename != null && filenameIsRaw(filename) ) { + if( MyDebug.LOG ) + Log.d(TAG, "try to find a non-RAW version of the DNG"); + int dng_pos = cursor.getPosition(); + boolean found_non_raw = false; + String filename_without_ext = filenameWithoutExtension(filename); + if( MyDebug.LOG ) + Log.d(TAG, "filename_without_ext: " + filename_without_ext); + while( cursor.moveToNext() ) { + String next_filename = cursor.getString(column_name_c); + if( MyDebug.LOG ) + Log.d(TAG, "next_filename: " + next_filename); + if( next_filename == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "done scanning, couldn't find filename"); + break; + } + String next_filename_without_ext = filenameWithoutExtension(next_filename); + if( MyDebug.LOG ) + Log.d(TAG, "next_filename_without_ext: " + next_filename_without_ext); + if( !filename_without_ext.equals(next_filename_without_ext) ) { + // no point scanning any further as sorted by date - and we don't want to read through the entire set! + if( MyDebug.LOG ) + Log.d(TAG, "done scanning"); + break; + } + // so we've found another file with matching filename - is it a JPEG/etc? + // we've already restricted the query to the image types we're interested in, so + // only need to check that it isn't another DNG (which would be strange, as it + // would mean a duplicate filename, but check just in case!) 
+ if( filenameIsRaw(next_filename) ) { + if( MyDebug.LOG ) + Log.d(TAG, "found another dng!"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "found equivalent non-dng"); + found_non_raw = true; + break; + } + } + if( !found_non_raw ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find equivalent jpeg/etc"); + cursor.moveToPosition(dng_pos); + } + } + else if( filename != null ) { + // in cases where a HDR/NR/PANO photo was saved with base images, we should prefer the HDR image + String filename_without_ext = filenameWithoutExtension(filename).toUpperCase(Locale.US); + if( MyDebug.LOG ) + Log.d(TAG, "filename_without_ext: " + filename_without_ext); + String filename_special_base = filenameIsSpecial(filename_without_ext); + if( MyDebug.LOG ) + Log.d(TAG, "filename_special_base: " + filename_special_base); + if( filename_special_base == null ) { + String filename_base = null; + // assume that base saved images are at most _XX + if( filename_without_ext.length() >= 3 && filename_without_ext.charAt(filename_without_ext.length()-2) == '_' ) { + filename_base = filename_without_ext.substring(0, filename_without_ext.length()-2); + } + else if( filename_without_ext.length() >= 4 && filename_without_ext.charAt(filename_without_ext.length()-3) == '_' ) { + filename_base = filename_without_ext.substring(0, filename_without_ext.length()-3); + } + if( MyDebug.LOG ) + Log.d(TAG, "filename_base: " + filename_base); + if( filename_base != null ) { + int last_pos = cursor.getPosition(); + boolean found_special = false; + int scan_count = 0; + while( cursor.moveToNext() ) { + String next_filename = cursor.getString(column_name_c); + if( MyDebug.LOG ) + Log.d(TAG, "next_filename: " + next_filename); + if( next_filename == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "done scanning, couldn't find filename"); + break; + } + String next_filename_without_ext = filenameWithoutExtension(next_filename).toUpperCase(Locale.US); + if( MyDebug.LOG ) + Log.d(TAG, "next_filename_without_ext: " + 
next_filename_without_ext); + String next_filename_special_base = filenameIsSpecial(next_filename_without_ext); + if( MyDebug.LOG ) + Log.d(TAG, "next_filename_special_base: " + next_filename_special_base); + if( next_filename_special_base != null ) { + // found a special filename - is it the same base? + if( filename_base.equals(next_filename_special_base) ) { + // found a match + if( MyDebug.LOG ) + Log.d(TAG, "found equivalent special"); + found_special = true; + break; + } + else { + // found special, but doesn't match, so no point scanning further + if( MyDebug.LOG ) + Log.d(TAG, "found another special"); + break; + } + } + else if( !next_filename_without_ext.startsWith(filename_base) ) { + if( MyDebug.LOG ) + Log.d(TAG, "no longer matches filename_base"); + break; + } + else if( scan_count++ > 10 ) { + if( MyDebug.LOG ) + Log.d(TAG, "give up scanning"); + break; + } + } + if( !found_special ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find equivalent non-special"); + cursor.moveToPosition(last_pos); + } + } + } + } + } + + long id = cursor.getLong(column_id_c); + long date = cursor.getLong(column_date_taken_c); + //int orientation = (uri_type == UriType.MEDIASTORE_IMAGES) ? 
cursor.getInt(column_orientation_c) : 0; + Uri uri = ContentUris.withAppendedId(baseUri, id); + String filename = cursor.getString(column_name_c); + if( MyDebug.LOG ) + Log.d(TAG, "found most recent uri for " + uri_type + ": " + uri); + + boolean video; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + video = false; + break; + case MEDIASTORE_VIDEOS: + video = true; + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + if( MyDebug.LOG ) + Log.d(TAG, "video: " + video); + + media = new Media(true, id, video, uri, date/*, orientation*/, filename); + + if( MyDebug.LOG ) { + // debug + if( cursor.moveToFirst() ) { + do { + long this_id = cursor.getLong(column_id_c); + long this_date = cursor.getLong(column_date_taken_c); + Uri this_uri = ContentUris.withAppendedId(baseUri, this_id); + String this_filename = cursor.getString(column_name_c); + Log.d(TAG, "Date: " + this_date + " ID: " + this_id + " Name: " + this_filename + " Uri: " + this_uri); + } + while( cursor.moveToNext() ); + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "mediastore returned no media"); + } + } + catch(Exception e) { + // have had exceptions such as SQLiteException, NullPointerException reported on Google Play from within getContentResolver().query() call + MyDebug.logStackTrace(TAG, "exception trying to find latest media", e); + } + finally { + if( cursor != null ) { + cursor.close(); + } + } + + if( MyDebug.LOG ) + Log.d(TAG, "return latest media: " + media); + return media; + } + + /** Used when using Storage Access Framework AND scoped storage. + * This is because with scoped storage, we don't request READ_EXTERNAL_STORAGE (as + * recommended). 
It's meant to be the case that applications should still be able to see files + * that they own - but whilst this is true when images are saved using mediastore API, this is + * NOT true when saving with Storage Access Framework - they don't show up in mediastore + * queries (even though they've definitely been added to the mediastore). So instead we read + * using the SAF uri, and if we need the media uri (e.g., to pass to Gallery application), use + * Media.getMediaStoreUri(). What a mess! + */ + private Media getLatestMediaSAF(Uri treeUri) { + if (MyDebug.LOG) + Log.d(TAG, "getLatestMediaSAF: " + treeUri); + + Media media = null; + + Uri baseUri; + try { + String parentDocUri = DocumentsContract.getTreeDocumentId(treeUri); + baseUri = DocumentsContract.buildChildDocumentsUriUsingTree(treeUri, parentDocUri); + } + catch(Exception e) { + // DocumentsContract.getTreeDocumentId throws IllegalArgumentException if the uri is + // invalid. Unclear if this can happen in practice - this happens in test + // testSaveFolderHistorySAF() but only because we test a dummy invalid SAF uri. But + // seems no harm catching it in case this can happen (e.g., especially if restoring + // backed up preferences from a different device?) Better to just show nothing in the + // thumbnail, rather than crashing! + // N.B., we catch Exception is otherwise compiler complains IllegalArgumentException + // isn't ever thrown - even though it is!? 
+ Log.e(TAG, "Exception using treeUri: " + treeUri); + return media; + } + if( MyDebug.LOG ) + Log.d(TAG, "baseUri: " + baseUri); + + final int column_id_c = 0; + final int column_date_c = 1; + final int column_name_c = 2; // filename (without path), including extension + final int column_mime_c = 3; + String [] projection = new String[] {DocumentsContract.Document.COLUMN_DOCUMENT_ID, DocumentsContract.Document.COLUMN_LAST_MODIFIED, DocumentsContract.Document.COLUMN_DISPLAY_NAME, DocumentsContract.Document.COLUMN_MIME_TYPE}; + + // Note, it appears that when querying DocumentsContract, basic query functionality like selection, ordering, are ignored(!). + // See: https://stackoverflow.com/questions/52770188/how-to-filter-the-results-of-a-query-with-buildchilddocumentsuriusingtree + // https://stackoverflow.com/questions/56263620/contentresolver-query-on-documentcontract-lists-all-files-disregarding-selection + // So, we have to do it ourselves. + + Cursor cursor = null; + try { + cursor = context.getContentResolver().query(baseUri, projection, null, null, null); + if( cursor != null && cursor.moveToFirst() ) { + if( MyDebug.LOG ) + Log.d(TAG, "found: " + cursor.getCount()); + + Uri latest_uri = null; + long latest_date = 0; + String latest_filename = null; + boolean latest_is_video = false; + + // as well as scanning for the most recent image, we also keep track of the most recent non-RAW image, + // in case we want to prefer that when the most recent + Uri nonraw_latest_uri = null; + long nonraw_latest_date = 0; + String nonraw_latest_filename = null; + + do { + long this_date = cursor.getLong(column_date_c); + + String doc_id = cursor.getString(column_id_c); + Uri this_uri = DocumentsContract.buildDocumentUriUsingTree(treeUri, doc_id); + String this_mime_type = cursor.getString(column_mime_c); + + // if updating this code for allowed mime types, also update corresponding code in getLatestMediaCore() + boolean is_allowed; + boolean this_is_video; + switch( 
this_mime_type ) { + case "image/jpeg": + case "image/webp": + case "image/png": + case "image/x-adobe-dng": + is_allowed = true; + this_is_video = false; + break; + case "video/3gpp": + case "video/webm": + case "video/mp4": + // n.b., perhaps we should just allow video/*, but we should still disallow .SRT files! + is_allowed = true; + this_is_video = true; + break; + default: + // skip unwanted file format + is_allowed = false; + this_is_video = false; + break; + } + if( !is_allowed ) { + continue; + } + + String this_filename = cursor.getString(column_name_c); + if (this_filename != null && !this_filename.isEmpty() && this_filename.charAt(0) == '.') { + // skip hidden file + continue; + } + /*if( MyDebug.LOG ) { + Log.d(TAG, "Date: " + this_date + " doc_id: " + doc_id + " Name: " + this_filename + " Uri: " + this_uri); + }*/ + + if( latest_uri == null || this_date > latest_date ) { + latest_uri = this_uri; + latest_date = this_date; + latest_filename = this_filename; + latest_is_video = this_is_video; + } + if( !this_is_video && !filenameIsRaw(this_filename) ) { + if( nonraw_latest_uri == null || this_date > nonraw_latest_date ) { + nonraw_latest_uri = this_uri; + nonraw_latest_date = this_date; + nonraw_latest_filename = this_filename; + } + } + } + while( cursor.moveToNext() ); + + if( latest_uri == null ) { + if( MyDebug.LOG ) + Log.e(TAG, "couldn't find latest uri"); + } + else { + if( MyDebug.LOG ) { + Log.d(TAG, "latest_uri: " + latest_uri); + Log.d(TAG, "nonraw_latest_uri: " + nonraw_latest_uri); + } + + if( !latest_is_video && filenameIsRaw(latest_filename) && nonraw_latest_uri != null ) { + // prefer non-RAW to RAW? 
check filenames without extensions match + String filename_without_ext = filenameWithoutExtension(latest_filename); + String next_filename_without_ext = filenameWithoutExtension(nonraw_latest_filename); + if( MyDebug.LOG ) { + Log.d(TAG, "filename_without_ext: " + filename_without_ext); + Log.d(TAG, "next_filename_without_ext: " + next_filename_without_ext); + } + if( filename_without_ext.equals(next_filename_without_ext) ) { + if( MyDebug.LOG ) + Log.d(TAG, "prefer non-RAW to RAW"); + latest_uri = nonraw_latest_uri; + latest_date = nonraw_latest_date; + latest_filename = nonraw_latest_filename; + // video is unchanged + } + } + + media = new Media(false,0, latest_is_video, latest_uri, latest_date/*, 0*/, latest_filename); + } + + /*if( MyDebug.LOG ) { + // debug + if( cursor.moveToFirst() ) { + do { + long this_id = cursor.getLong(column_id_c); + long this_date = cursor.getLong(column_date_taken_c); + Uri this_uri = ContentUris.withAppendedId(baseUri, this_id); + String this_filename = cursor.getString(column_name_c); + Log.d(TAG, "Date: " + this_date + " ID: " + this_id + " Name: " + this_filename + " Uri: " + this_uri); + } + while( cursor.moveToNext() ); + } + }*/ + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "mediastore returned no media"); + } + } + catch(Exception e) { + MyDebug.logStackTrace(TAG, "exception trying to find latest media", e); + } + finally { + if( cursor != null ) { + cursor.close(); + } + } + + if( MyDebug.LOG ) + Log.d(TAG, "return latest media: " + media); + return media; + } + + private Media getLatestMedia(UriType uri_type) { + if( MyDebug.LOG ) + Log.d(TAG, "getLatestMedia: " + uri_type); + if( !MainActivity.useScopedStorage() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && ContextCompat.checkSelfPermission(context, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ) { + // needed for Android 6, in case users deny storage permission, otherwise we get java.lang.SecurityException from 
ContentResolver.query() + // see https://developer.android.com/training/permissions/requesting.html + // we now request storage permission before opening the camera, but keep this here just in case + // we restrict check to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener() + // update for scoped storage: here we should no longer need READ_EXTERNAL_STORAGE (which we won't have), instead we'll only be able to see + // media created by Open Camera, which is fine + if( MyDebug.LOG ) + Log.e(TAG, "don't have READ_EXTERNAL_STORAGE permission"); + return null; + } + + String save_folder = getImageFolderPath(); // may be null if using SAF + if( MyDebug.LOG ) + Log.d(TAG, "save_folder: " + save_folder); + String bucket_id = null; + if( save_folder != null ) { + bucket_id = String.valueOf(save_folder.toLowerCase().hashCode()); + } + if( MyDebug.LOG ) + Log.d(TAG, "bucket_id: " + bucket_id); + + Uri baseUri; + switch( uri_type ) { + case MEDIASTORE_IMAGES: + baseUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; + break; + case MEDIASTORE_VIDEOS: + baseUri = Video.Media.EXTERNAL_CONTENT_URI; + break; + default: + throw new RuntimeException("unknown uri_type: " + uri_type); + } + + if( MyDebug.LOG ) + Log.d(TAG, "baseUri: " + baseUri); + Media media = getLatestMediaCore(baseUri, bucket_id, uri_type); + if( media == null && bucket_id != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "fall back to checking any folder"); + media = getLatestMediaCore(baseUri, null, uri_type); + } + + return media; + } + + Media getLatestMedia() { + if( MainActivity.useScopedStorage() && this.isUsingSAF() ) { + Uri treeUri = this.getTreeUriSAF(); + return getLatestMediaSAF(treeUri); + } + + Media image_media = getLatestMedia(UriType.MEDIASTORE_IMAGES); + Media video_media = getLatestMedia(UriType.MEDIASTORE_VIDEOS); + Media media = null; + if( image_media != null && video_media == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "only found images"); + media = image_media; 
+ } + else if( image_media == null && video_media != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "only found videos"); + media = video_media; + } + else if( image_media != null && video_media != null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "found images and videos"); + Log.d(TAG, "latest image date: " + image_media.date + " : " + new Date(image_media.date)); + Log.d(TAG, "latest video date: " + video_media.date + " : " + new Date(video_media.date)); + } + if( image_media.date >= video_media.date ) { + if( MyDebug.LOG ) + Log.d(TAG, "latest image is newer"); + media = image_media; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "latest video is newer"); + media = video_media; + + // but in cases of using preview shots, sometimes the video ends up with a new date (even if only by 1s), so + // to be sure check filenames, and prefer image if so + String image_filename_without_ext = filenameWithoutExtension(image_media.filename).toUpperCase(Locale.US); + String video_filename_without_ext = filenameWithoutExtension(video_media.filename).toUpperCase(Locale.US); + // exclude _HDR extension etc, as these are only used for the image, not the preview video + { + String filename_special_base = filenameIsSpecial(image_filename_without_ext); + if( filename_special_base != null ) + image_filename_without_ext = filename_special_base; + } + { + String filename_special_base = filenameIsSpecial(video_filename_without_ext); + if( filename_special_base != null ) + video_filename_without_ext = filename_special_base; + } + if( MyDebug.LOG ) { + Log.d(TAG, "image_filename_without_ext: " + image_filename_without_ext); + Log.d(TAG, "video_filename_without_ext: " + video_filename_without_ext); + } + if( image_filename_without_ext.equals(video_filename_without_ext) ) { + if( MyDebug.LOG ) + Log.d(TAG, "but prefer image due to identical filenames"); + media = image_media; + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "return latest media: " + media); + return media; + } + + // only valid if 
isUsingSAF() + private long freeMemorySAF() { + Uri treeUri = applicationInterface.getStorageUtils().getTreeUriSAF(); + ParcelFileDescriptor pfd = null; + if( MyDebug.LOG ) + Log.d(TAG, "treeUri: " + treeUri); + try { + Uri docUri = DocumentsContract.buildDocumentUriUsingTree(treeUri, DocumentsContract.getTreeDocumentId(treeUri)); + if( MyDebug.LOG ) + Log.d(TAG, "docUri: " + docUri); + pfd = context.getContentResolver().openFileDescriptor(docUri, "r"); + if( pfd == null ) { // just in case + Log.e(TAG, "pfd is null!"); + throw new FileNotFoundException(); + } + if( MyDebug.LOG ) + Log.d(TAG, "read direct from SAF uri"); + StructStatVfs statFs = Os.fstatvfs(pfd.getFileDescriptor()); + long blocks = statFs.f_bavail; + long size = statFs.f_bsize; + return (blocks*size) / 1048576; + } + catch(IllegalArgumentException e) { + // IllegalArgumentException can be thrown by DocumentsContract.getTreeDocumentId or getContentResolver().openFileDescriptor + MyDebug.logStackTrace(TAG, "failed to get free memory for SAF", e); + } + catch(FileNotFoundException e) { + MyDebug.logStackTrace(TAG, "failed to get free memory for SAF", e); + } + catch(Exception e) { + // We actually just want to catch ErrnoException here, but that isn't available pre-Android 5, and trying to catch ErrnoException + // means we crash on pre-Android 5 with java.lang.VerifyError when trying to create the StorageUtils class! + // One solution might be to move this method to a separate class that's only created on Android 5+, but this is a quick fix for + // now. + MyDebug.logStackTrace(TAG, "failed to get free memory for SAF", e); + } + finally { + try { + if( pfd != null ) + pfd.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close pfd", e); + } + } + return -1; + } + + /** Return free memory in MB, or -1 if this was unable to be found. + * Be careful of calling this on main UI thread, as this can be slow when SAF is enabled. 
+ */ + public long freeMemory() { // return free memory in MB + if( MyDebug.LOG ) + Log.d(TAG, "freeMemory"); + if( applicationInterface.getStorageUtils().isUsingSAF() ) { + // if we fail for SAF, don't fall back to the methods below, as this may be incorrect (especially for external SD card) + return freeMemorySAF(); + } + // n.b., StatFs still seems to work with Android 10's scoped storage... (and there doesn't seem to be an official non-File based equivalent) + try { + File folder = getImageFolder(); + if( folder == null ) { + throw new IllegalArgumentException(); // so that we fall onto the backup + } + StatFs statFs = new StatFs(folder.getAbsolutePath()); + long blocks = statFs.getAvailableBlocksLong(); + long size = statFs.getBlockSizeLong(); + return (blocks*size) / 1048576; + } + catch(IllegalArgumentException e) { + // this can happen if folder doesn't exist, or don't have read access + // if the save folder is a subfolder of DCIM, we can just use that instead + try { + if( !isUsingSAF() ) { + // getSaveLocation() only valid if !isUsingSAF() + String folder_name = getSaveLocation(); + if( !saveFolderIsFull(folder_name) ) { + File folder = getBaseFolder(); + StatFs statFs = new StatFs(folder.getAbsolutePath()); + long blocks = statFs.getAvailableBlocksLong(); + long size = statFs.getBlockSizeLong(); + return (blocks*size) / 1048576; + } + } + } + catch(IllegalArgumentException e2) { + // just in case + } + } + return -1; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/TakePhoto.java b/app/src/main/java/net/sourceforge/opencamera/TakePhoto.java new file mode 100644 index 0000000..982977e --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/TakePhoto.java @@ -0,0 +1,42 @@ +package net.sourceforge.opencamera; + +import android.content.Intent; +import android.os.Bundle; +import android.util.Log; + +import androidx.appcompat.app.AppCompatActivity; + +/** Entry Activity for the "take photo" widget (see MyWidgetProviderTakePhoto). 
+ * This redirects to MainActivity, but uses an intent extra/bundle to pass the + * "take photo" request. + */ +public class TakePhoto extends AppCompatActivity { + private static final String TAG = "TakePhoto"; + + // Usually passing data via intent is preferred to using statics - however here a static is better for security, + // as we don't want other applications calling Open Camera's MainActivity with a take photo intent! + //public static final String TAKE_PHOTO = "net.sourceforge.opencamera.TAKE_PHOTO"; + public static boolean TAKE_PHOTO; + + @Override + protected void onCreate(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreate"); + super.onCreate(savedInstanceState); + + Intent intent = new Intent(this, MainActivity.class); + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + //intent.putExtra(TAKE_PHOTO, true); + TakePhoto.TAKE_PHOTO = true; + this.startActivity(intent); + if( MyDebug.LOG ) + Log.d(TAG, "finish"); + this.finish(); + } + + protected void onResume() { + if( MyDebug.LOG ) + Log.d(TAG, "onResume"); + super.onResume(); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/TextFormatter.java b/app/src/main/java/net/sourceforge/opencamera/TextFormatter.java new file mode 100644 index 0000000..bce7a37 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/TextFormatter.java @@ -0,0 +1,123 @@ +package net.sourceforge.opencamera; + +import android.content.Context; +import android.location.Location; +import android.util.Log; + +import java.text.DateFormat; +import java.text.DecimalFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +/** Handles various text formatting options, used for photo stamp and video subtitles. 
+ */ +public class TextFormatter { + private static final String TAG = "TextFormatter"; + + private final Context context; + private final DecimalFormat decimalFormat = new DecimalFormat("#0.0"); + + TextFormatter(Context context) { + this.context = context; + } + + /** Formats the date according to the user preference preference_stamp_dateformat. + * Returns "" if preference_stamp_dateformat is "preference_stamp_dateformat_none". + */ + public static String getDateString(String preference_stamp_dateformat, Date date) { + String date_stamp = ""; + if( !preference_stamp_dateformat.equals("preference_stamp_dateformat_none") ) { + switch(preference_stamp_dateformat) { + case "preference_stamp_dateformat_yyyymmdd": + // use dashes instead of slashes - this should follow https://en.wikipedia.org/wiki/ISO_8601 + date_stamp = new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()).format(date); + break; + case "preference_stamp_dateformat_ddmmyyyy": + date_stamp = new SimpleDateFormat("dd/MM/yyyy", Locale.getDefault()).format(date); + break; + case "preference_stamp_dateformat_mmddyyyy": + date_stamp = new SimpleDateFormat("MM/dd/yyyy", Locale.getDefault()).format(date); + break; + default: + date_stamp = DateFormat.getDateInstance().format(date); + break; + } + } + return date_stamp; + } + + /** Formats the time according to the user preference preference_stamp_timeformat. + * Returns "" if preference_stamp_timeformat is "preference_stamp_timeformat_none". 
+ */ + public static String getTimeString(String preference_stamp_timeformat, Date date) { + String time_stamp = ""; + if( !preference_stamp_timeformat.equals("preference_stamp_timeformat_none") ) { + switch(preference_stamp_timeformat) { + case "preference_stamp_timeformat_12hour": + time_stamp = new SimpleDateFormat("hh:mm:ss a", Locale.getDefault()).format(date); + break; + case "preference_stamp_timeformat_24hour": + time_stamp = new SimpleDateFormat("HH:mm:ss", Locale.getDefault()).format(date); + break; + default: + time_stamp = DateFormat.getTimeInstance().format(date); + break; + } + } + return time_stamp; + } + + private String getDistanceString(double distance, String preference_units_distance) { + double converted_distance = distance; + String units = context.getResources().getString(R.string.metres_abbreviation); + if( preference_units_distance.equals("preference_units_distance_ft") ) { + converted_distance = 3.28084 * distance; + units = context.getResources().getString(R.string.feet_abbreviation); + } + return decimalFormat.format(converted_distance) + units; + } + + /** Formats the GPS information according to the user preference_stamp_gpsformat preference_stamp_timeformat. + * Returns "" if preference_stamp_gpsformat is "preference_stamp_gpsformat_none", or both store_location and + * store_geo_direction are false. 
+ */ + public String getGPSString(String preference_stamp_gpsformat, String preference_units_distance, boolean store_location, Location location, boolean store_geo_direction, double geo_direction) { + String gps_stamp = ""; + if( !preference_stamp_gpsformat.equals("preference_stamp_gpsformat_none") ) { + if( store_location ) { + if( MyDebug.LOG ) + Log.d(TAG, "location: " + location); + if( preference_stamp_gpsformat.equals("preference_stamp_gpsformat_dms") ) + gps_stamp += LocationSupplier.locationToDMS(location.getLatitude()) + ", " + LocationSupplier.locationToDMS(location.getLongitude()); + else + gps_stamp += Location.convert(location.getLatitude(), Location.FORMAT_DEGREES) + ", " + Location.convert(location.getLongitude(), Location.FORMAT_DEGREES); + if( location.hasAltitude() ) { + gps_stamp += ", " + getDistanceString(location.getAltitude(), preference_units_distance); + } + } + if( store_geo_direction ) { + float geo_angle = (float)Math.toDegrees(geo_direction); + if( geo_angle < 0.0f ) { + geo_angle += 360.0f; + } + if( MyDebug.LOG ) + Log.d(TAG, "geo_angle: " + geo_angle); + if( !gps_stamp.isEmpty() ) + gps_stamp += ", "; + gps_stamp += String.valueOf(Math.round(geo_angle)) + (char)0x00B0; + } + } + // don't log gps_stamp, in case of privacy! 
+ return gps_stamp; + } + + public static String formatTimeMS(long time_ms) { + int ms = (int) (time_ms) % 1000 ; + int seconds = (int) (time_ms / 1000) % 60 ; + int minutes = (int) ((time_ms / (1000*60)) % 60); + int hours = (int) ((time_ms / (1000*60*60))); + return String.format(Locale.getDefault(), "%02d:%02d:%02d,%03d", hours, minutes, seconds, ms); + } + +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ToastBoxer.java b/app/src/main/java/net/sourceforge/opencamera/ToastBoxer.java new file mode 100644 index 0000000..9bb3253 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ToastBoxer.java @@ -0,0 +1,12 @@ +package net.sourceforge.opencamera; + +import android.widget.Toast; + +/** Allows methods to update a Toast with a new Toast. + */ +public class ToastBoxer { + public Toast toast; + + public ToastBoxer() { + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController.java b/app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController.java new file mode 100644 index 0000000..2326d9f --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/cameracontroller/CameraController.java @@ -0,0 +1,815 @@ +package net.sourceforge.opencamera.cameracontroller; + +import net.sourceforge.opencamera.MyDebug; + +import java.io.Serial; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import android.graphics.Rect; +import android.location.Location; +import android.media.MediaRecorder; +import android.util.Log; +import android.view.SurfaceHolder; +import android.view.TextureView; + +import androidx.annotation.NonNull; + +/** CameraController is an abstract class that wraps up the access/control to + * the Android camera, so that the rest of the application doesn't have to + * deal directly with the Android camera API. 
It also allows us to support + * more than one camera API through the same API (this is used to support both + * the original camera API, and Android 5's Camera2 API). + * The class is fairly low level wrapper about the APIs - there is some + * additional logical/workarounds where such things are API-specific, but + * otherwise the calling application still controls the behaviour of the + * camera. + */ +public abstract class CameraController { + private static final String TAG = "CameraController"; + private final int cameraId; + + public static final String SCENE_MODE_DEFAULT = "auto"; // chosen to match Camera.Parameters.SCENE_MODE_AUTO, but we also use compatible values for Camera2 API + public static final String COLOR_EFFECT_DEFAULT = "none"; // chosen to match Camera.Parameters.EFFECT_NONE, but we also use compatible values for Camera2 API + public static final String WHITE_BALANCE_DEFAULT = "auto"; // chosen to match Camera.Parameters.WHITE_BALANCE_AUTO, but we also use compatible values for Camera2 API + public static final String ANTIBANDING_DEFAULT = "auto"; // chosen to match Camera.Parameters.ANTIBANDING_AUTO, but we also use compatible values for Camera2 API + public static final String EDGE_MODE_DEFAULT = "default"; + public static final String NOISE_REDUCTION_MODE_DEFAULT = "default"; + public static final String ISO_DEFAULT = "auto"; + public static final long EXPOSURE_TIME_DEFAULT = 1000000000L/30; // note, responsibility of callers to check that this is within the valid min/max range + + public static final int N_IMAGES_NR_DARK = 8; + public static final int N_IMAGES_NR_DARK_LOW_LIGHT = 15; + + // for testing: + public volatile int count_camera_parameters_exception; + public volatile int count_precapture_timeout; + public volatile boolean test_wait_capture_result; // whether to test delayed capture result in Camera2 API + public volatile boolean test_release_during_photo; // for Camera2 API, will force takePictureAfterPrecapture() to call 
release() on UI thread + public volatile int test_capture_results; // for Camera2 API, how many capture requests completed with RequestTagType.CAPTURE + public volatile int test_fake_flash_focus; // for Camera2 API, records torch turning on for fake flash during autofocus + public volatile int test_fake_flash_precapture; // for Camera2 API, records torch turning on for fake flash during precapture + public volatile int test_fake_flash_photo; // for Camera2 API, records torch turning on for fake flash for photo capture + public volatile int test_af_state_null_focus; // for Camera2 API, records af_state being null even when we've requested autofocus + public volatile boolean test_used_tonemap_curve; + public volatile int test_texture_view_buffer_w; // for TextureView, keep track of buffer size + public volatile int test_texture_view_buffer_h; + public volatile boolean test_force_run_post_capture; // for Camera2 API, test using adjustPreview() / RequestTagType.RUN_POST_CAPTURE + public static volatile boolean test_force_slow_preview_start; // for Camera2 API, test waiting for 6s when starting preview + + /** Class for caching a subset of CameraFeatures, that are slow to read. + * For now only used for vendor extensions which are slow to read. 
+ */ + public static class CameraFeaturesCache { + public List supported_extensions; + public List supported_extensions_zoom; + + final Map> extension_picture_sizes_map; // key is extension + final Map> extension_preview_sizes_map; // key is extension + + CameraFeaturesCache(CameraFeatures camera_features, Map> extension_picture_sizes_map, Map> extension_preview_sizes_map) { + if( camera_features.supported_extensions != null ) + this.supported_extensions = new ArrayList<>(camera_features.supported_extensions); + if( camera_features.supported_extensions_zoom != null ) + this.supported_extensions_zoom = new ArrayList<>(camera_features.supported_extensions_zoom); + this.extension_picture_sizes_map = extension_picture_sizes_map; + this.extension_preview_sizes_map = extension_preview_sizes_map; + } + } + + public static class CameraFeatures { + public Set physical_camera_ids; // if non-null, this camera is part of a logical camera that exposes these physical camera IDs + public boolean is_zoom_supported; + public int max_zoom; + public List zoom_ratios; // list of supported zoom ratios; each value is the zoom multiplied by 100 + public boolean supports_face_detection; + public List picture_sizes; + public List video_sizes; + public List video_sizes_high_speed; // may be null if high speed not supported + public List preview_sizes; + public List supported_extensions; // if non-null, list of supported camera vendor extensions, see https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics + public List supported_extensions_zoom; // if non-null, list of camera vendor extensions that support zoom + public List supported_flash_values; + public List supported_focus_values; + public float [] apertures; // may be null if not supported, else will have at least 2 values + public int max_num_focus_areas; + public float minimum_focus_distance; + public boolean is_exposure_lock_supported; + public boolean is_white_balance_lock_supported; + public 
boolean is_optical_stabilization_supported; + public boolean is_video_stabilization_supported; + public boolean is_photo_video_recording_supported; + public boolean supports_white_balance_temperature; + public int min_temperature; + public int max_temperature; + public boolean supports_iso_range; + public int min_iso; + public int max_iso; + public boolean supports_exposure_time; + public long min_exposure_time; + public long max_exposure_time; + public int min_exposure; + public int max_exposure; + public float exposure_step; + public boolean can_disable_shutter_sound; + public int tonemap_max_curve_points; + public boolean supports_tonemap_curve; + public boolean supports_expo_bracketing; // whether setBurstTye(BURSTTYPE_EXPO) can be used + public int max_expo_bracketing_n_images; + public boolean supports_focus_bracketing; // whether setBurstTye(BURSTTYPE_FOCUS) can be used + public boolean supports_burst; // whether setBurstTye(BURSTTYPE_NORMAL) can be used + public boolean supports_jpeg_r; // whether supports JPEG_R (Ultra HDR) + public boolean supports_raw; + public float view_angle_x; // horizontal angle of view in degrees (when unzoomed) + public float view_angle_y; // vertical angle of view in degrees (when unzoomed) + + /** Returns whether any of the supplied sizes support the requested fps. + */ + public static boolean supportsFrameRate(List sizes, int fps) { + if( MyDebug.LOG ) + Log.d(TAG, "supportsFrameRate: " + fps); + if( sizes == null ) + return false; + for(Size size : sizes) { + if( size.supportsFrameRate(fps) ) { + if( MyDebug.LOG ) + Log.d(TAG, "fps is supported"); + return true; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "fps is NOT supported"); + return false; + } + + /** + * @param return_closest If true, return a match for the width/height, even if the fps doesn't + * match. 
+ */ + public static Size findSize(List sizes, Size size, double fps, boolean return_closest) { + Size last_s = null; + for(Size s : sizes) { + if (size.equals(s)) { + last_s = s; + if (fps > 0) { + if (s.supportsFrameRate(fps)) { + return s; + } + } else { + return s; + } + } + } + return return_closest ? last_s : null; + } + } + + // Android docs and FindBugs recommend that Comparators also be Serializable + static class RangeSorter implements Comparator, Serializable { + @Serial + private static final long serialVersionUID = 5802214721073728212L; + @Override + public int compare(int[] o1, int[] o2) { + if (o1[0] == o2[0]) return o1[1] - o2[1]; + return o1[0] - o2[0]; + } + } + + /* Sorts resolutions from highest to lowest, by area. + * Android docs and FindBugs recommend that Comparators also be Serializable + */ + static class SizeSorter implements Comparator, Serializable { + @Serial + private static final long serialVersionUID = 5802214721073718212L; + + @Override + public int compare(final CameraController.Size a, final CameraController.Size b) { + return b.width * b.height - a.width * a.height; + } + } + + public static class Size { + public final int width; + public final int height; + public boolean supports_burst; // for photo + public List supported_extensions; // for photo and preview: if non-null, list of supported camera vendor extensions + final List fps_ranges; // for video + public final boolean high_speed; // for video + + Size(int width, int height, List fps_ranges, boolean high_speed) { + this.width = width; + this.height = height; + this.supports_burst = true; + this.fps_ranges = fps_ranges; + this.high_speed = high_speed; + Collections.sort(this.fps_ranges, new RangeSorter()); + } + + public Size(int width, int height) { + this(width, height, new ArrayList<>(), false); + } + + /** Whether this size supports the requested burst and/or extension + */ + public boolean supportsRequirements(boolean want_burst, boolean want_extension, int 
extension) { + return (!want_burst || this.supports_burst) && (!want_extension || this.supportsExtension(extension)); + } + + public boolean supportsExtension(int extension) { + return supported_extensions != null && supported_extensions.contains(extension); + } + + public boolean supportsFrameRate(double fps) { + for (int[] f : this.fps_ranges) { + if (f[0] <= fps && fps <= f[1]) + return true; + } + return false; + } + + public int closestFrameRate(double fps) { + int closest_fps = -1; + int closest_dist = -1; + for (int[] f : this.fps_ranges) { + if (f[0] <= fps && fps <= f[1]) + return (int)fps; + int this_fps; + if( fps < f[0] ) + this_fps = f[0]; + else + this_fps = f[1]; + int dist = Math.abs(this_fps - (int)fps); + if( closest_dist == -1 || dist < closest_dist ) { + closest_fps = this_fps; + closest_dist = dist; + } + } + return closest_fps; + } + + @Override + public boolean equals(Object o) { + if( !(o instanceof Size) ) + return false; + Size that = (Size)o; + return this.width == that.width && this.height == that.height; + } + + @Override + public int hashCode() { + // must override this, as we override equals() + // can't use: + //return Objects.hash(width, height); + // as this requires API level 19 + // so use this from http://stackoverflow.com/questions/11742593/what-is-the-hashcode-for-a-custom-class-having-just-two-int-properties + return width*41 + height; + } + + @NonNull + public String toString() { + StringBuilder s = new StringBuilder(); + for (int[] f : this.fps_ranges) { + s.append(" [").append(f[0]).append("-").append(f[1]).append("]"); + } + return this.width + "x" + this.height + " " + s + (this.high_speed ? "-hs" : ""); + } + } + + /** An area has values from [-1000,-1000] (for top-left) to [1000,1000] (for bottom-right) for whatever is + * the current field of view (i.e., taking zoom into account). 
+ */ + public static class Area { + final Rect rect; + final int weight; + + public Area(Rect rect, int weight) { + this.rect = rect; + this.weight = weight; + } + } + + public interface FaceDetectionListener { + void onFaceDetection(Face[] faces); + } + + /** Interface to define callbacks related to taking photos. These callbacks are all called on the UI thread. + */ + public interface PictureCallback { + void onStarted(); // called immediately before we start capturing the picture + + void onCompleted(); // called after all relevant on*PictureTaken() callbacks have been called and returned + + void onPictureTaken(byte[] data); + + /** Only called if RAW is requested. + * Caller should call raw_image.close() when done with the image. + */ + void onRawPictureTaken(RawImage raw_image); + + /** Only called if burst is requested. + */ + void onBurstPictureTaken(List images); + + /** Only called if burst is requested. + */ + void onRawBurstPictureTaken(List raw_images); + + /** Reports percentage progress for vendor camera extensions. Note that not all devices support this being called. + */ + void onExtensionProgress(int progress); + + /* This is called for when burst mode is BURSTTYPE_FOCUS or BURSTTYPE_CONTINUOUS, to ask whether it's safe to take + * n_raw extra RAW images and n_jpegs extra JPEG images, or whether to wait. + */ + boolean imageQueueWouldBlock(int n_raw, int n_jpegs); + + /* This is called for flash_frontscreen_auto or flash_frontscreen_on mode to indicate the caller should light up the screen + * (for flash_frontscreen_auto it will only be called if the scene is considered dark enough to require the screen flash). + * The screen flash can be removed when or after onCompleted() is called. + */ + void onFrontScreenTurnOn(); + } + + /** Interface to define callback for autofocus completing. This callback may be called on the UI thread (CameraController1) + * or a background thread (CameraController2). 
+ */ + public interface AutoFocusCallback { + void onAutoFocus(boolean success); + } + + /** Interface to define callback for continuous focus starting/stopping. This callback may be called on the + * UI thread (CameraController1) or a background thread (CameraController2). + */ + public interface ContinuousFocusMoveCallback { + void onContinuousFocusMove(boolean start); + } + + public interface ErrorCallback { + void onError(); + } + + public static class Face { + public final int score; + /* The rect has values from [-1000,-1000] (for top-left) to [1000,1000] (for bottom-right) for whatever is + * the current field of view (i.e., taking zoom into account). + */ + public final Rect rect; + /** The temp rect is temporary storage that can be used by callers. + */ + public final Rect temp = new Rect(); + + Face(int score, Rect rect) { + this.score = score; + this.rect = rect; + } + } + + public static class SupportedValues { + public final List values; + public final String selected_value; + SupportedValues(List values, String selected_value) { + this.values = values; + this.selected_value = selected_value; + } + } + + public abstract void release(); + public abstract void onError(); // triggers error mechanism - should only be called externally for testing purposes + + CameraController(int cameraId) { + this.cameraId = cameraId; + } + public abstract String getAPI(); + public abstract CameraFeatures getCameraFeatures() throws CameraControllerException; + public int getCameraId() { + return cameraId; + } + + /** For CameraController2 only. Applications should cover the preview textureview if since last resuming, camera_controller + * has never been non-null or this method has never returned false. + * Otherwise there is a risk when opening the camera that the textureview still shows an image from when + * the camera was previously opened (e.g., from pausing and resuming the application). 
This returns false (for CameraController2) + * when the camera has received its first frame. + * Update: on more recent Android versions this didn't work very well, possibly due to a screenshot being used for "recent apps" + * view; on Android 13+, the activity can make use of shouldCoverPreview(false) for this. + */ + public boolean shouldCoverPreview() { + return false; + } + /** For CameraController2 only. After calling this, shouldCoverPreview() will return true, until a new + * frame from the camera has been received. + */ + public void resetCoverPreview() { + } + public abstract SupportedValues setSceneMode(String value); + /** + * @return The current scene mode. Will be null if scene mode not supported. + */ + public abstract String getSceneMode(); + /** + * @return Returns true iff changing the scene mode can affect the available camera functionality + * (e.g., changing to Night scene mode might mean flash modes are no longer available). + */ + public abstract boolean sceneModeAffectsFunctionality(); + public abstract SupportedValues setColorEffect(String value); + public abstract String getColorEffect(); + public abstract SupportedValues setWhiteBalance(String value); + public abstract String getWhiteBalance(); + public abstract boolean setWhiteBalanceTemperature(int temperature); + public abstract int getWhiteBalanceTemperature(); + public abstract SupportedValues setAntiBanding(String value); + public abstract String getAntiBanding(); + public abstract SupportedValues setEdgeMode(String value); + public abstract String getEdgeMode(); + public abstract SupportedValues setNoiseReductionMode(String value); + public abstract String getNoiseReductionMode(); + /** Set an ISO value. Only supported if supports_iso_range is false. + */ + public abstract SupportedValues setISO(String value); + /** Switch between auto and manual ISO mode. Only supported if supports_iso_range is true. + * @param manual_iso Whether to switch to manual mode or back to auto. 
+ * @param iso If manual_iso is true, this specifies the desired ISO value. If this is outside + * the min_iso/max_iso, the value will be snapped so it does lie within that range. + * If manual_iso i false, this value is ignored. + */ + public abstract void setManualISO(boolean manual_iso, int iso); + + /** + * @return Whether in manual ISO mode (as opposed to auto). + */ + public abstract boolean isManualISO(); + /** Specify a specific ISO value. Only supported if supports_iso_range is true. Callers should + * first switch to manual ISO mode using setManualISO(). + */ + public abstract boolean setISO(int iso); + public abstract String getISOKey(); + /** Returns the manual ISO value. Only supported if supports_iso_range is true. + */ + public abstract int getISO(); + public abstract long getExposureTime(); + public abstract boolean setExposureTime(long exposure_time); + public abstract void setAperture(float aperture); + public abstract CameraController.Size getPictureSize(); + public abstract void setPictureSize(int width, int height); + public abstract CameraController.Size getPreviewSize(); + public abstract void setPreviewSize(int width, int height); + + public abstract void setCameraExtension(boolean enabled, int extension); + public abstract boolean isCameraExtension(); + public abstract int getCameraExtension(); + // whether to take a burst of images, and if so, what type + public enum BurstType { + BURSTTYPE_NONE, // no burst + BURSTTYPE_EXPO, // enable expo bracketing mode + BURSTTYPE_FOCUS, // enable focus bracketing mode; + BURSTTYPE_NORMAL, // take a regular burst + BURSTTYPE_CONTINUOUS // as BURSTTYPE_NORMAL, but bursts will fire continually until stopContinuousBurst() is called. + } + public abstract void setBurstType(BurstType new_burst_type); + public abstract BurstType getBurstType(); + /** Only relevant if setBurstType() is also called with BURSTTYPE_NORMAL. Sets the number of + * images to take in the burst. 
+ */ + public abstract void setBurstNImages(int burst_requested_n_images); + /** Only relevant if setBurstType() is also called with BURSTTYPE_NORMAL. If this method is + * called with burst_for_noise_reduction, then the number of burst images, and other settings, + * will be set for noise reduction mode (and setBurstNImages() is ignored). + */ + public abstract void setBurstForNoiseReduction(boolean burst_for_noise_reduction, boolean noise_reduction_low_light); + public abstract boolean isContinuousBurstInProgress(); + public abstract void stopContinuousBurst(); + public abstract void stopFocusBracketingBurst(); + /** Only relevant if setBurstType() is also called with BURSTTYPE_EXPO. Sets the number of + * images to take in the expo burst. + * @param n_images Must be an odd number greater than 1. + */ + public abstract void setExpoBracketingNImages(int n_images); + /** Only relevant if setBurstType() is also called with BURSTTYPE_EXPO. + */ + public abstract void setExpoBracketingStops(double stops); + public abstract void setUseExpoFastBurst(boolean use_expo_fast_burst); + /** Whether to enable a workaround hack for some Galaxy devices - take an additional dummy photo + * when taking an expo/HDR burst, to avoid problem where manual exposure is ignored for the + * first image. + */ + public abstract void setDummyCaptureHack(boolean dummy_capture_hack); + + /** Whether the current BurstType is one that requires the camera driver to capture the images + * as a burst at a fast rate. If true, we should not use high resolutions that don't support a + * capture burst (for Camera2 API, see StreamConfigurationMap.getHighResolutionOutputSizes()). + */ + public abstract boolean isCaptureFastBurst(); + /** If true, then the camera controller is currently capturing a burst of images. + */ + public abstract boolean isCapturingBurst(); + /** If isCapturingBurst() is true, then this returns the number of images in the current burst + * captured so far. 
+ */ + public abstract int getNBurstTaken(); + /** If isCapturingBurst() is true, then this returns the total number of images in the current + * burst if known. If not known (e.g., for continuous burst mode), returns 0. + */ + public abstract int getBurstTotal(); + + /** + * @param want_jpeg_r Whether to enable taking photos in JPEG_R (Ultra HDR) format. + */ + public abstract void setJpegR(boolean want_jpeg_r); + + /** + * @param want_raw Whether to enable taking photos in RAW (DNG) format. + * @param max_raw_images The maximum number of unclosed DNG images that may be held in memory at any one + * time. Trying to take a photo, when the number of unclosed DNG images is already + * equal to this number, will result in an exception (java.lang.IllegalStateException + * - note, the exception will come from a CameraController2 callback, so can't be + * caught by the callera). + */ + public abstract void setRaw(boolean want_raw, int max_raw_images); + + /** Request a capture session compatible with high speed frame rates. + * This should be called only when the preview is paused or not yet started. + */ + public abstract void setVideoHighSpeed(boolean setVideoHighSpeed); + /** + * setUseCamera2FakeFlash() should be called after creating the CameraController, and before calling getCameraFeatures() or + * starting the preview (as it changes the available flash modes). + * "Fake flash" is an alternative mode for handling flash, for devices that have poor Camera2 support - typical symptoms + * include precapture never starting, flash not firing, photos being over or under exposed. + * Instead, we fake the precapture and flash simply by turning on the torch. After turning on torch, we wait for ae to stop + * scanning (and af too, as it can start scanning in continuous mode) - this is effectively the equivalent of precapture - + * before taking the photo. + * In auto-focus mode, we make the decision ourselves based on the current ISO. 
+ * We also handle the flash firing for autofocus by turning the torch on and off too. Advantages are: + * - The flash tends to be brighter, and the photo can end up overexposed as a result if capture follows the autofocus. + * - Some devices also don't seem to fire flash for autofocus in Camera2 mode (e.g., Samsung S7) + * - When capture follows autofocus, we need to make the same decision for firing flash for both the autofocus and the capture. + */ + public void setUseCamera2FakeFlash(boolean use_fake_precapture) { + } + public boolean getUseCamera2FakeFlash() { + return false; + } + public abstract boolean getOpticalStabilization(); + /** Whether to enable digital video stabilization. Should only be set to true when intending to + * capture video. + */ + public abstract void setVideoStabilization(boolean enabled); + public abstract boolean getVideoStabilization(); + public enum TonemapProfile { + TONEMAPPROFILE_OFF, + TONEMAPPROFILE_REC709, + TONEMAPPROFILE_SRGB, + TONEMAPPROFILE_LOG, + TONEMAPPROFILE_GAMMA, + TONEMAPPROFILE_JTVIDEO, + TONEMAPPROFILE_JTLOG, + TONEMAPPROFILE_JTLOG2 + } + + /** Sets a tonemap profile. + * @param tonemap_profile The type of the tonemap profile. + * @param log_profile_strength Only relevant if tonemap_profile set to TONEMAPPROFILE_LOG. + * @param gamma Only relevant if tonemap_profile set to TONEMAPPROFILE_GAMMA + */ + public abstract void setTonemapProfile(TonemapProfile tonemap_profile, float log_profile_strength, float gamma); + public abstract TonemapProfile getTonemapProfile(); + public abstract int getJpegQuality(); + public abstract void setJpegQuality(int quality); + /** Returns the current zoom. The returned value is an index into the CameraFeatures.zoom_ratios + * array. + */ + public abstract int getZoom(); + /** Set the zoom. + * @param value The index into the CameraFeatures.zoom_ratios array. + */ + public abstract void setZoom(int value); + /** Set the zoom. 
Unlike setZoom(value), this allows specifying any zoom level within the + * supported range. + * @param value The index into the CameraFeatures.zoom_ratios array. + * @param smooth_zoom The desired zoom. With CameraController1 (old Camera API), this is ignored. + * With CameraController2 (Camera2 API), this is used instead of the zoom_ratios + * value. Note that getZoom() will return the value passed to this method, so + * passing an appropriate value (e.g., whatever zoom_ratio is closest to the + * smooth_zoom) is still useful if you want to make use of getZoom(). + * smooth_zoom must still be within the supported range of zoom values. + */ + public abstract void setZoom(int value, float smooth_zoom); + public abstract void resetZoom(); // resets to zoom 1x + public abstract int getExposureCompensation(); + public abstract boolean setExposureCompensation(int new_exposure); + public abstract void setPreviewFpsRange(int min, int max); + public abstract void clearPreviewFpsRange(); + public abstract List getSupportedPreviewFpsRange(); // result depends on setting of setVideoHighSpeed() + + public abstract void setFocusValue(String focus_value); + public abstract String getFocusValue(); + public abstract float getFocusDistance(); + public abstract boolean setFocusDistance(float focus_distance); + /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the number of + * images to take in the focus burst. + */ + public abstract void setFocusBracketingNImages(int n_images); + /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. If set to true, an + * additional image will be included at infinite distance. + */ + public abstract void setFocusBracketingAddInfinity(boolean focus_bracketing_add_infinity); + /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the source focus + * distance for focus bracketing. 
+ */ + public abstract void setFocusBracketingSourceDistance(float focus_bracketing_source_distance); + public abstract float getFocusBracketingSourceDistance(); + /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the source focus + * distance to match the camera's current focus distance (typically useful if running in a + * non-manual focus mode). + */ + public abstract void setFocusBracketingSourceDistanceFromCurrent(); + /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the target focus + * distance for focus bracketing. + */ + public abstract void setFocusBracketingTargetDistance(float focus_bracketing_target_distance); + public abstract float getFocusBracketingTargetDistance(); + public abstract void setFlashValue(String flash_value); + public abstract String getFlashValue(); + public abstract void setRecordingHint(boolean hint); + public abstract void setAutoExposureLock(boolean enabled); + public abstract boolean getAutoExposureLock(); + public abstract void setAutoWhiteBalanceLock(boolean enabled); + public abstract boolean getAutoWhiteBalanceLock(); + public abstract void setRotation(int rotation); + public abstract void setLocationInfo(Location location); + public abstract void removeLocationInfo(); + public abstract void enableShutterSound(boolean enabled); + public abstract boolean setFocusAndMeteringArea(List areas); + public abstract void clearFocusAndMetering(); + public abstract List getFocusAreas(); + public abstract List getMeteringAreas(); + public abstract boolean supportsAutoFocus(); + public abstract boolean supportsMetering(); + public abstract boolean focusIsContinuous(); + public abstract boolean focusIsVideo(); + public abstract void reconnect() throws CameraControllerException; + public abstract void setPreviewDisplay(SurfaceHolder holder) throws CameraControllerException; + public abstract void setPreviewTexture(TextureView texture) throws CameraControllerException; + /** This 
should be called when using a TextureView, and the texture view has reported a change + * in size via onSurfaceTextureSizeChanged. + */ + public void updatePreviewTexture() { + // dummy implementation + } + /** Starts the camera preview. + * @throws CameraControllerException if the camera preview fails to start. + */ + /** Starts the camera preview. + * @param wait_until_started Whether to wait until the preview is started. Only relevant for + * CameraController2; CameraController1 will always wait. + * @param runnable If non-null, a runnable to be called once preview is started. If + * wait_until_started==true, or using CameraController1, this will be + * called on the current thread, before this method exits. Otherwise, + * this will be called on the UI thread, after this method exits (once + * the preview has started). + * @param on_failed If non-null, a runnable to be called if the preview fails to start. + * Only relevant for wait_until_started==false and when using + * CameraController2. In such cases, failing to start the camera preview + * may result in either CameraControllerException being thrown, or + * on_failed being called on the UI thread after this method exits + * (depending on when the failure occurs). If either of these happens, + * the "runnable" runnable will not be called. + * @throws CameraControllerException Failed to start preview. In this case, the runnable will not + * be called. + */ + public abstract void startPreview(boolean wait_until_started, Runnable runnable, Runnable on_failed) throws CameraControllerException; + /** Only relevant for CameraController2: stops the repeating burst for the previous (so effectively + * stops the preview), but does not close the capture session for the preview (for that, using + * stopPreview() instead of stopRepeating()). 
+ */ + public abstract void stopRepeating(); + public abstract void stopPreview(); + public abstract boolean startFaceDetection(); + public abstract void setFaceDetectionListener(final CameraController.FaceDetectionListener listener); + + /** + * @param cb Callback to be called when autofocus completes. + * @param capture_follows_autofocus_hint Set to true if you intend to take a photo immediately after autofocus. If the + * decision changes after autofocus has started (e.g., user initiates autofocus, + * then takes photo before autofocus has completed), use setCaptureFollowAutofocusHint(). + */ + public abstract void autoFocus(final CameraController.AutoFocusCallback cb, boolean capture_follows_autofocus_hint); + /** See autoFocus() for details - used to update the capture_follows_autofocus_hint setting. + */ + public abstract void setCaptureFollowAutofocusHint(boolean capture_follows_autofocus_hint); + public abstract void cancelAutoFocus(); + public abstract void setContinuousFocusMoveCallback(ContinuousFocusMoveCallback cb); + public abstract void takePicture(final CameraController.PictureCallback picture, final ErrorCallback error); + public abstract void setDisplayOrientation(int degrees); + public abstract int getDisplayOrientation(); + public abstract int getCameraOrientation(); + public enum Facing { + FACING_BACK, + FACING_FRONT, + FACING_EXTERNAL, + FACING_UNKNOWN // returned if the Camera API returned an error or an unknown type + } + /** Returns whether the camera is front, back or external. + */ + public abstract Facing getFacing(); + public abstract void unlock(); + /** Call to initialise video recording, should call before MediaRecorder.prepare(). + * @param video_recorder The media recorder object. + */ + public abstract void initVideoRecorderPrePrepare(MediaRecorder video_recorder); + /** Call to initialise video recording, should call after MediaRecorder.prepare(), but before MediaRecorder.start(). 
+ * @param video_recorder The media recorder object. + * @param want_photo_video_recording Whether support for taking photos whilst video recording is required. If this feature isn't supported, the option has no effect. + */ + public abstract void initVideoRecorderPostPrepare(MediaRecorder video_recorder, boolean want_photo_video_recording) throws CameraControllerException; + public abstract String getParametersString(); + public boolean captureResultIsAEScanning() { + return false; + } + /** + * @return whether flash will fire; returns false if not known + */ + public boolean needsFlash() { + return false; + } + /** + * @return whether front screen "flash" will fire; returns false if not known + */ + public boolean needsFrontScreenFlash() { + return false; + } + public boolean captureResultHasWhiteBalanceTemperature() { + return false; + } + public int captureResultWhiteBalanceTemperature() { + return 0; + } + public boolean captureResultHasIso() { + return false; + } + public int captureResultIso() { + return 0; + } + public boolean captureResultHasExposureTime() { + return false; + } + public long captureResultExposureTime() { + return 0; + } + public boolean captureResultHasFrameDuration() { + return false; + } + public long captureResultFrameDuration() { + return 0; + } + public boolean captureResultHasFocusDistance() { + return false; + } + public float captureResultFocusDistance() { + return 0.0f; + } + public boolean captureResultHasAperture() { + return false; + } + public float captureResultAperture() { + return 0.0f; + } + /*public boolean captureResultHasFocusDistance() { + return false; + }*/ + /*public float captureResultFocusDistanceMin() { + return 0.0f; + }*/ + /*public float captureResultFocusDistanceMax() { + return 0.0f; + }*/ + + // gets the available values of a generic mode, e.g., scene, color etc, and makes sure the requested mode is available + SupportedValues checkModeIsSupported(List values, String value, String default_value) { + if( 
values != null && values.size() > 1 ) { // n.b., if there is only 1 supported value, we also return null, as no point offering the choice to the user (there are some devices, e.g., Samsung, that only have a scene mode of "auto") + if( MyDebug.LOG ) { + for(int i=0;i pending_burst_images = new ArrayList<>(); // burst images that have been captured so far, but not yet sent to the application + private List burst_exposures; + private boolean want_expo_bracketing; + private final static int max_expo_bracketing_n_images = 3; // seem to have problems with 5 images in some cases, e.g., images coming out same brightness on OnePlus 3T + private int expo_bracketing_n_images = 3; + private double expo_bracketing_stops = 2.0; + + private Handler autofocus_timeout_handler; // handler for tracking autofocus timeout + private Runnable autofocus_timeout_runnable; // runnable set for tracking autofocus timeout + + // we keep track of some camera settings rather than reading from Camera.getParameters() every time. Firstly this is important + // for performance (affects UI rendering times, e.g., see profiling of GPU rendering). Secondly runtimeexceptions from + // Camera.getParameters() seem to be common in Google Play, particularly for getZoom(). + private int current_zoom_value; + private int current_exposure_compensation; + private int picture_width; + private int picture_height; + + /** Opens the camera device. + * @param cameraId Which camera to open (must be between 0 and CameraControllerManager1.getNumberOfCameras()-1). + * @param camera_error_cb onError() will be called if the camera closes due to serious error. No more calls to the CameraController1 object should be made (though a new one can be created, to try reopening the camera). + * @throws CameraControllerException if the camera device fails to open. 
+ */ + public CameraController1(int cameraId, final ErrorCallback camera_error_cb) throws CameraControllerException { + super(cameraId); + if( MyDebug.LOG ) + Log.d(TAG, "create new CameraController1: " + cameraId); + this.camera_error_cb = camera_error_cb; + try { + camera = Camera.open(cameraId); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to open camera", e); + throw new CameraControllerException(); + } + if( camera == null ) { + // Although the documentation says Camera.open() should throw a RuntimeException, it seems that it some cases it can return null + // I've seen this in some crashes reported in Google Play; also see: + // http://stackoverflow.com/questions/12054022/camera-open-returns-null + if( MyDebug.LOG ) + Log.e(TAG, "camera.open returned null"); + throw new CameraControllerException(); + } + try { + Camera.getCameraInfo(cameraId, camera_info); + } + catch(RuntimeException e) { + // Had reported RuntimeExceptions from Google Play + // also see http://stackoverflow.com/questions/22383708/java-lang-runtimeexception-fail-to-get-camera-info + MyDebug.logStackTrace(TAG, "failed to get camera info", e); + this.release(); + throw new CameraControllerException(); + } + + final CameraErrorCallback camera_error_callback = new CameraErrorCallback(); + camera.setErrorCallback(camera_error_callback); + + /*{ + // test error handling + final Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "test camera error"); + camera_error_callback.onError(Camera.CAMERA_ERROR_SERVER_DIED, camera); + } + }, 5000); + }*/ + } + + @Override + public void onError() { + Log.e(TAG, "onError"); + if( this.camera != null ) { // I got Google Play crash reports due to camera being null in v1.36 + this.camera.release(); + this.camera = null; + } + if( this.camera_error_cb != null ) { + // need to communicate the problem to the application + this.camera_error_cb.onError(); 
+ } + } + + private class CameraErrorCallback implements Camera.ErrorCallback { + @Override + public void onError(int error, Camera cam) { + // n.b., as this is potentially serious error, we always log even if MyDebug.LOG is false + Log.e(TAG, "camera onError: " + error); + if( error == Camera.CAMERA_ERROR_SERVER_DIED ) { + Log.e(TAG, " CAMERA_ERROR_SERVER_DIED"); + CameraController1.this.onError(); + } + else if( error == Camera.CAMERA_ERROR_UNKNOWN ) { + Log.e(TAG, " CAMERA_ERROR_UNKNOWN "); + } + } + } + + public void release() { + if( camera != null ) { + // have had crashes when this is called from Preview/CloseCameraTask. + camera.release(); + camera = null; + } + } + + private Camera.Parameters getParameters() { + if( MyDebug.LOG ) + Log.d(TAG, "getParameters"); + return camera.getParameters(); + } + + private void setCameraParameters(Camera.Parameters parameters) { + if( MyDebug.LOG ) + Log.d(TAG, "setCameraParameters"); + try { + camera.setParameters(parameters); + if( MyDebug.LOG ) + Log.d(TAG, "done"); + } + catch(RuntimeException e) { + // just in case something has gone wrong + MyDebug.logStackTrace(TAG, "failed to set parameters", e); + count_camera_parameters_exception++; + } + } + + private List convertFlashModesToValues(List supported_flash_modes) { + if( MyDebug.LOG ) { + Log.d(TAG, "convertFlashModesToValues()"); + Log.d(TAG, "supported_flash_modes: " + supported_flash_modes); + } + List output_modes = new ArrayList<>(); + if( supported_flash_modes != null ) { + // also resort as well as converting + if( supported_flash_modes.contains(Camera.Parameters.FLASH_MODE_OFF) ) { + output_modes.add("flash_off"); + if( MyDebug.LOG ) + Log.d(TAG, " supports flash_off"); + } + if( supported_flash_modes.contains(Camera.Parameters.FLASH_MODE_AUTO) ) { + output_modes.add("flash_auto"); + if( MyDebug.LOG ) + Log.d(TAG, " supports flash_auto"); + } + if( supported_flash_modes.contains(Camera.Parameters.FLASH_MODE_ON) ) { + output_modes.add("flash_on"); + if( 
MyDebug.LOG ) + Log.d(TAG, " supports flash_on"); + } + if( supported_flash_modes.contains(Camera.Parameters.FLASH_MODE_TORCH) ) { + output_modes.add("flash_torch"); + if( MyDebug.LOG ) + Log.d(TAG, " supports flash_torch"); + } + if( supported_flash_modes.contains(Camera.Parameters.FLASH_MODE_RED_EYE) ) { + output_modes.add("flash_red_eye"); + if( MyDebug.LOG ) + Log.d(TAG, " supports flash_red_eye"); + } + } + + // Samsung Galaxy S7 at least for front camera has supported_flash_modes: auto, beach, portrait?! + // so rather than checking supported_flash_modes, we should check output_modes here + // this is always why we check whether the size is greater than 1, rather than 0 (this also matches + // the check we do in Preview.setupCameraParameters()). + if( output_modes.size() > 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "flash supported"); + } + else { + if( getFacing() == Facing.FACING_FRONT ) { + if( MyDebug.LOG ) + Log.d(TAG, "front-screen with no flash"); + output_modes.clear(); // clear any pre-existing mode (see note above about Samsung Galaxy S7) + output_modes.add("flash_off"); + output_modes.add("flash_frontscreen_on"); + output_modes.add("flash_frontscreen_torch"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "no flash"); + // probably best to not return any modes, rather than one mode (see note about about Samsung Galaxy S7) + output_modes.clear(); + } + } + + return output_modes; + } + + private List convertFocusModesToValues(List supported_focus_modes) { + if( MyDebug.LOG ) + Log.d(TAG, "convertFocusModesToValues()"); + List output_modes = new ArrayList<>(); + if( supported_focus_modes != null ) { + // also resort as well as converting + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_AUTO) ) { + output_modes.add("focus_mode_auto"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_auto"); + } + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_INFINITY) ) { + output_modes.add("focus_mode_infinity"); + if( 
MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_infinity"); + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_MACRO) ) { + output_modes.add("focus_mode_macro"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_macro"); + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_AUTO) ) { + output_modes.add("focus_mode_locked"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_locked"); + } + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_FIXED) ) { + output_modes.add("focus_mode_fixed"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_fixed"); + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_EDOF) ) { + output_modes.add("focus_mode_edof"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_edof"); + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) ) { + output_modes.add("focus_mode_continuous_picture"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_continuous_picture"); + } + if( supported_focus_modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) ) { + output_modes.add("focus_mode_continuous_video"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_continuous_video"); + } + } + return output_modes; + } + + public String getAPI() { + return "Camera"; + } + + public CameraFeatures getCameraFeatures() throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "getCameraFeatures()"); + Camera.Parameters parameters; + try { + parameters = this.getParameters(); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to get camera parameters", e); + throw new CameraControllerException(); + } + CameraFeatures camera_features = new CameraFeatures(); + camera_features.is_zoom_supported = parameters.isZoomSupported(); + if( camera_features.is_zoom_supported ) { + camera_features.max_zoom = parameters.getMaxZoom(); + try { + camera_features.zoom_ratios = parameters.getZoomRatios(); + } + 
catch(NumberFormatException e) { + // crash java.lang.NumberFormatException: Invalid int: " 500" reported in v1.4 on device "es209ra", Android 4.1, 3 Jan 2014 + // this is from java.lang.Integer.invalidInt(Integer.java:138) - unclear if this is a bug in Open Camera, all we can do for now is catch it + MyDebug.logStackTrace(TAG, "NumberFormatException in getZoomRatios()", e); + camera_features.is_zoom_supported = false; + camera_features.max_zoom = 0; + camera_features.zoom_ratios = null; + } + } + + camera_features.supports_face_detection = parameters.getMaxNumDetectedFaces() > 0; + + // get available sizes + List camera_picture_sizes = parameters.getSupportedPictureSizes(); + if( camera_picture_sizes == null ) { + // Google Play crashes suggest that getSupportedPictureSizes() can be null?! Better to fail gracefully + // instead of crashing + Log.e(TAG, "getSupportedPictureSizes() returned null!"); + throw new CameraControllerException(); + } + camera_features.picture_sizes = new ArrayList<>(); + //camera_features.picture_sizes.add(new CameraController.Size(1920, 1080)); // test + for(Camera.Size camera_size : camera_picture_sizes) { + // we leave supports_burst as true - strictly speaking it should be false, but we'll never use a fast burst mode + // with CameraController1 anyway + camera_features.picture_sizes.add(new CameraController.Size(camera_size.width, camera_size.height)); + } + // sizes are usually already sorted from high to low, but sort just in case + // note some devices do have sizes in a not fully sorted order (e.g., Nokia 8) + Collections.sort(camera_features.picture_sizes, new CameraController.SizeSorter()); + + //camera_features.supported_flash_modes = parameters.getSupportedFlashModes(); // Android format + List supported_flash_modes = parameters.getSupportedFlashModes(); // Android format + camera_features.supported_flash_values = convertFlashModesToValues(supported_flash_modes); // convert to our format (also resorts) + + List 
supported_focus_modes = parameters.getSupportedFocusModes(); // Android format + camera_features.supported_focus_values = convertFocusModesToValues(supported_focus_modes); // convert to our format (also resorts) + camera_features.max_num_focus_areas = parameters.getMaxNumFocusAreas(); + + camera_features.is_exposure_lock_supported = parameters.isAutoExposureLockSupported(); + + camera_features.is_white_balance_lock_supported = parameters.isAutoWhiteBalanceLockSupported(); + + camera_features.is_video_stabilization_supported = parameters.isVideoStabilizationSupported(); + + camera_features.is_photo_video_recording_supported = parameters.isVideoSnapshotSupported(); + + camera_features.min_exposure = parameters.getMinExposureCompensation(); + camera_features.max_exposure = parameters.getMaxExposureCompensation(); + camera_features.exposure_step = getExposureCompensationStep(); + camera_features.supports_expo_bracketing = ( camera_features.min_exposure != 0 && camera_features.max_exposure != 0 ); // require both a darker and brighter exposure, in order to support expo bracketing + camera_features.max_expo_bracketing_n_images = max_expo_bracketing_n_images; + + List camera_video_sizes = parameters.getSupportedVideoSizes(); + if( camera_video_sizes == null ) { + // if null, we should use the preview sizes - see http://stackoverflow.com/questions/14263521/android-getsupportedvideosizes-allways-returns-null + if( MyDebug.LOG ) + Log.d(TAG, "take video_sizes from preview sizes"); + camera_video_sizes = parameters.getSupportedPreviewSizes(); + } + camera_features.video_sizes = new ArrayList<>(); + //camera_features.video_sizes.add(new CameraController.Size(1920, 1080)); // test + for(Camera.Size camera_size : camera_video_sizes) { + camera_features.video_sizes.add(new CameraController.Size(camera_size.width, camera_size.height)); + } + // sizes are usually already sorted from high to low, but sort just in case + Collections.sort(camera_features.video_sizes, new 
CameraController.SizeSorter()); + + List camera_preview_sizes = parameters.getSupportedPreviewSizes(); + camera_features.preview_sizes = new ArrayList<>(); + for(Camera.Size camera_size : camera_preview_sizes) { + camera_features.preview_sizes.add(new CameraController.Size(camera_size.width, camera_size.height)); + } + + if( MyDebug.LOG ) + Log.d(TAG, "camera parameters: " + parameters.flatten()); + + camera_features.can_disable_shutter_sound = camera_info.canDisableShutterSound; + + // Determine view angles. Note that these can vary based on the resolution - and since we read these before the caller has + // set the desired resolution, this isn't strictly correct. However these are presumably view angles for the photo anyway, + // when some callers (e.g., DrawPreview) want view angles for the preview anyway - so these will only be an approximation for + // what we want anyway. + final float default_view_angle_x = 55.0f; + final float default_view_angle_y = 43.0f; + try { + camera_features.view_angle_x = parameters.getHorizontalViewAngle(); + camera_features.view_angle_y = parameters.getVerticalViewAngle(); + } + catch(Exception e) { + // apparently some devices throw exceptions... 
+ MyDebug.logStackTrace(TAG, "exception reading horizontal or vertical view angles", e); + camera_features.view_angle_x = default_view_angle_x; + camera_features.view_angle_y = default_view_angle_y; + } + if( MyDebug.LOG ) { + Log.d(TAG, "view_angle_x: " + camera_features.view_angle_x); + Log.d(TAG, "view_angle_y: " + camera_features.view_angle_y); + } + // need to handle some devices reporting rubbish + if( camera_features.view_angle_x > 150.0f || camera_features.view_angle_y > 150.0f ) { + Log.e(TAG, "camera API reporting stupid view angles, set to sensible defaults"); + camera_features.view_angle_x = default_view_angle_x; + camera_features.view_angle_y = default_view_angle_y; + } + + return camera_features; + } + + /** Important, from docs: + * "Changing scene mode may override other parameters (such as flash mode, focus mode, white balance). + * For example, suppose originally flash mode is on and supported flash modes are on/off. In night + * scene mode, both flash mode and supported flash mode may be changed to off. After setting scene + * mode, applications should call getParameters to know if some parameters are changed." 
+ */ + @Override + public SupportedValues setSceneMode(String value) { + Camera.Parameters parameters; + try { + parameters = this.getParameters(); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "exception from getParameters", e); + count_camera_parameters_exception++; + return null; + } + List values = parameters.getSupportedSceneModes(); + /*{ + // test + values = new ArrayList<>(); + values.add(ISO_DEFAULT); + }*/ + SupportedValues supported_values = checkModeIsSupported(values, value, SCENE_MODE_DEFAULT); + if( supported_values != null ) { + String scene_mode = parameters.getSceneMode(); + // if scene mode is null, it should mean scene modes aren't supported anyway + if( scene_mode != null && !scene_mode.equals(supported_values.selected_value) ) { + parameters.setSceneMode(supported_values.selected_value); + setCameraParameters(parameters); + } + } + return supported_values; + } + + @Override + public String getSceneMode() { + Camera.Parameters parameters = this.getParameters(); + return parameters.getSceneMode(); + } + + @Override + public boolean sceneModeAffectsFunctionality() { + // see https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setSceneMode(java.lang.String) + // "Changing scene mode may override other parameters ... After setting scene mode, applications should call + // getParameters to know if some parameters are changed." 
+ return true; + } + + public SupportedValues setColorEffect(String value) { + Camera.Parameters parameters = this.getParameters(); + List values = parameters.getSupportedColorEffects(); + SupportedValues supported_values = checkModeIsSupported(values, value, COLOR_EFFECT_DEFAULT); + if( supported_values != null ) { + String color_effect = parameters.getColorEffect(); + // have got nullpointerexception from Google Play, so now check for null + if( color_effect == null || !color_effect.equals(supported_values.selected_value) ) { + parameters.setColorEffect(supported_values.selected_value); + setCameraParameters(parameters); + } + } + return supported_values; + } + + public String getColorEffect() { + Camera.Parameters parameters = this.getParameters(); + return parameters.getColorEffect(); + } + + public SupportedValues setWhiteBalance(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setWhiteBalance: " + value); + Camera.Parameters parameters = this.getParameters(); + List values = parameters.getSupportedWhiteBalance(); + if( values != null ) { + // Some devices (e.g., OnePlus 3T) claim to support a "manual" mode, even though this + // isn't one of the possible white balances defined in Camera.Parameters. + // Since the old API doesn't support white balance temperatures, and this mode seems to + // have no useful effect, we remove it to avoid confusion. 
+ while( values.contains("manual") ) { + values.remove("manual"); + } + } + SupportedValues supported_values = checkModeIsSupported(values, value, WHITE_BALANCE_DEFAULT); + if( supported_values != null ) { + String white_balance = parameters.getWhiteBalance(); + // if white balance is null, it should mean white balances aren't supported anyway + if( white_balance != null && !white_balance.equals(supported_values.selected_value) ) { + parameters.setWhiteBalance(supported_values.selected_value); + setCameraParameters(parameters); + } + } + return supported_values; + } + + public String getWhiteBalance() { + Camera.Parameters parameters = this.getParameters(); + return parameters.getWhiteBalance(); + } + + @Override + public boolean setWhiteBalanceTemperature(int temperature) { + // not supported for CameraController1 + return false; + } + + @Override + public int getWhiteBalanceTemperature() { + // not supported for CameraController1 + return 0; + } + + @Override + public SupportedValues setAntiBanding(String value) { + Camera.Parameters parameters = this.getParameters(); + List values = parameters.getSupportedAntibanding(); + SupportedValues supported_values = checkModeIsSupported(values, value, ANTIBANDING_DEFAULT); + if( supported_values != null ) { + // for antibanding, if the requested value isn't available, we don't modify it at all + // (so we stick with the device's default setting) + if( supported_values.selected_value.equals(value) ) { + String antibanding = parameters.getAntibanding(); + if( antibanding == null || !antibanding.equals(supported_values.selected_value) ) { + parameters.setAntibanding(supported_values.selected_value); + setCameraParameters(parameters); + } + } + } + return supported_values; + } + + @Override + public String getAntiBanding() { + Camera.Parameters parameters = this.getParameters(); + return parameters.getAntibanding(); + } + + @Override + public SupportedValues setEdgeMode(String value) { + return null; + } + + @Override + 
public String getEdgeMode() { + return null; + } + + @Override + public SupportedValues setNoiseReductionMode(String value) { + return null; + } + + @Override + public String getNoiseReductionMode() { + return null; + } + + @Override + public SupportedValues setISO(String value) { + Camera.Parameters parameters = this.getParameters(); + // get available isos - no standard value for this, see http://stackoverflow.com/questions/2978095/android-camera-api-iso-setting + String iso_values = parameters.get("iso-values"); + if( iso_values == null ) { + iso_values = parameters.get("iso-mode-values"); // Galaxy Nexus + if( iso_values == null ) { + iso_values = parameters.get("iso-speed-values"); // Micromax A101 + if( iso_values == null ) + iso_values = parameters.get("nv-picture-iso-values"); // LG dual P990 + } + } + List values = null; + if( iso_values != null && !iso_values.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "iso_values: " + iso_values); + String [] isos_array = iso_values.split(","); + // split shouldn't return null + if( isos_array.length > 0 ) { + // remove duplicates (OnePlus 3T has several duplicate "auto" entries) + HashSet hashSet = new HashSet<>(); + values = new ArrayList<>(); + // use hashset for efficiency + // make sure we alo preserve the order + for(String iso : isos_array) { + if( !hashSet.contains(iso) ) { + values.add(iso); + hashSet.add(iso); + } + } + } + } + + iso_key = "iso"; + if( parameters.get(iso_key) == null ) { + iso_key = "iso-speed"; // Micromax A101 + if( parameters.get(iso_key) == null ) { + iso_key = "nv-picture-iso"; // LG dual P990 + if( parameters.get(iso_key) == null ) { + if ( Build.MODEL.contains("Z00") ) + iso_key = "iso"; // Asus Zenfone 2 Z00A and Z008: see https://sourceforge.net/p/opencamera/tickets/183/ + else + iso_key = null; // not supported + } + } + } + /*values = new ArrayList<>(); + //values.add(ISO_DEFAULT); + //values.add("ISO_HJR"); + values.add("ISO50"); + values.add("ISO64"); + values.add("ISO80"); + 
values.add("ISO100"); + values.add("ISO125"); + values.add("ISO160"); + values.add("ISO200"); + values.add("ISO250"); + values.add("ISO320"); + values.add("ISO400"); + values.add("ISO500"); + values.add("ISO640"); + values.add("ISO800"); + values.add("ISO1000"); + values.add("ISO1250"); + values.add("ISO1600"); + values.add("ISO2000"); + values.add("ISO2500"); + values.add("ISO3200"); + values.add(ISO_DEFAULT); + //values.add("400"); + //values.add("800"); + //values.add("1600"); + iso_key = "iso";*/ + if( iso_key != null ){ + if( values == null ) { + // set a default for some devices which have an iso_key, but don't give a list of supported ISOs + values = new ArrayList<>(); + values.add(ISO_DEFAULT); + values.add("50"); + values.add("100"); + values.add("200"); + values.add("400"); + values.add("800"); + values.add("1600"); + } + SupportedValues supported_values = checkModeIsSupported(values, value, ISO_DEFAULT); + if( supported_values != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "set: " + iso_key + " to: " + supported_values.selected_value); + parameters.set(iso_key, supported_values.selected_value); + setCameraParameters(parameters); + } + return supported_values; + } + return null; + } + + @Override + public String getISOKey() { + if( MyDebug.LOG ) + Log.d(TAG, "getISOKey"); + return this.iso_key; + } + + @Override + public void setManualISO(boolean manual_iso, int iso) { + // not supported for CameraController1 + } + + @Override + public boolean isManualISO() { + // not supported for CameraController1 + return false; + } + + @Override + public boolean setISO(int iso) { + // not supported for CameraController1 + return false; + } + + @Override + public int getISO() { + // not supported for CameraController1 + return 0; + } + + @Override + public long getExposureTime() { + // not supported for CameraController1 + return 0L; + } + + @Override + public boolean setExposureTime(long exposure_time) { + // not supported for CameraController1 + return false; + } + + 
@Override + public void setAperture(float aperture) { + // not supported for CameraController1 + } + + @Override + public CameraController.Size getPictureSize() { + /*Camera.Parameters parameters = this.getParameters(); + Camera.Size camera_size = parameters.getPictureSize(); + return new CameraController.Size(camera_size.width, camera_size.height);*/ + return new CameraController.Size(picture_width, picture_height); + } + + @Override + public void setPictureSize(int width, int height) { + Camera.Parameters parameters = this.getParameters(); + this.picture_width = width; + this.picture_height = height; + parameters.setPictureSize(width, height); + if( MyDebug.LOG ) + Log.d(TAG, "set picture size: " + parameters.getPictureSize().width + ", " + parameters.getPictureSize().height); + setCameraParameters(parameters); + } + + @Override + public CameraController.Size getPreviewSize() { + Camera.Parameters parameters = this.getParameters(); + Camera.Size camera_size = parameters.getPreviewSize(); + return new CameraController.Size(camera_size.width, camera_size.height); + } + + @Override + public void setPreviewSize(int width, int height) { + Camera.Parameters parameters = this.getParameters(); + if( MyDebug.LOG ) + Log.d(TAG, "current preview size: " + parameters.getPreviewSize().width + ", " + parameters.getPreviewSize().height); + parameters.setPreviewSize(width, height); + if( MyDebug.LOG ) + Log.d(TAG, "new preview size: " + parameters.getPreviewSize().width + ", " + parameters.getPreviewSize().height); + setCameraParameters(parameters); + } + + @Override + public void setCameraExtension(boolean enabled, int extension) { + // not supported + } + + @Override + public boolean isCameraExtension() { + return false; + } + + @Override + public int getCameraExtension() { + return -1; + } + + @Override + public void setBurstType(BurstType burst_type) { + if( MyDebug.LOG ) + Log.d(TAG, "setBurstType: " + burst_type); + if( camera == null ) { + if( MyDebug.LOG ) + Log.e(TAG, 
"no camera"); + return; + } + if( burst_type != BurstType.BURSTTYPE_NONE && burst_type != BurstType.BURSTTYPE_EXPO ) { + Log.e(TAG, "burst type not supported"); + return; + } + this.want_expo_bracketing = burst_type == BurstType.BURSTTYPE_EXPO; + } + + @Override + public BurstType getBurstType() { + return want_expo_bracketing ? BurstType.BURSTTYPE_EXPO : BurstType.BURSTTYPE_NONE; + } + + @Override + public void setBurstNImages(int burst_requested_n_images) { + // not supported + } + + @Override + public void setBurstForNoiseReduction(boolean burst_for_noise_reduction, boolean noise_reduction_low_light) { + // not supported + } + + @Override + public boolean isContinuousBurstInProgress() { + // not supported + return false; + } + + @Override + public void stopContinuousBurst() { + // not supported + } + + @Override + public void stopFocusBracketingBurst() { + // not supported + } + + @Override + public void setExpoBracketingNImages(int n_images) { + if( MyDebug.LOG ) + Log.d(TAG, "setExpoBracketingNImages: " + n_images); + if( n_images <= 1 || (n_images % 2) == 0 ) { + if( MyDebug.LOG ) + Log.e(TAG, "n_images should be an odd number greater than 1"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + if( n_images > max_expo_bracketing_n_images ) { + n_images = max_expo_bracketing_n_images; + if( MyDebug.LOG ) + Log.e(TAG, "limiting n_images to max of " + n_images); + } + this.expo_bracketing_n_images = n_images; + } + + @Override + public void setExpoBracketingStops(double stops) { + if( MyDebug.LOG ) + Log.d(TAG, "setExpoBracketingStops: " + stops); + if( stops <= 0.0 ) { + if( MyDebug.LOG ) + Log.e(TAG, "stops should be positive"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + this.expo_bracketing_stops = stops; + } + + @Override + public void setDummyCaptureHack(boolean dummy_capture_hack) { + // not supported for CameraController1 + } + + @Override + public void 
setUseExpoFastBurst(boolean use_expo_fast_burst) { + // not supported for CameraController1 + } + + @Override + public boolean isCaptureFastBurst() { + // not supported for CameraController1 + return false; + } + + @Override + public boolean isCapturingBurst() { + return getBurstTotal() > 1 && getNBurstTaken() < getBurstTotal(); + } + + @Override + public int getNBurstTaken() { + return pending_burst_images.size(); + } + + @Override + public int getBurstTotal() { + return n_burst; + } + + @Override + public void setJpegR(boolean want_jpeg_r) { + // not supported for CameraController1 + } + + @Override + public void setRaw(boolean want_raw, int max_raw_images) { + // not supported for CameraController1 + } + + @Override + public void setVideoHighSpeed(boolean setVideoHighSpeed) { + // not supported for CameraController1 + } + + @Override + public boolean getOpticalStabilization() { + // not supported for CameraController1 + return false; + } + + @Override + public void setVideoStabilization(boolean enabled) { + Camera.Parameters parameters = this.getParameters(); + parameters.setVideoStabilization(enabled); + setCameraParameters(parameters); + } + + public boolean getVideoStabilization() { + try { + Camera.Parameters parameters = this.getParameters(); + return parameters.getVideoStabilization(); + } + catch(RuntimeException e) { + // have had crashes from Google Play for getParameters - assume video stabilization not enabled + MyDebug.logStackTrace(TAG, "failed to get parameters for video stabilization", e); + count_camera_parameters_exception++; + return false; + } + } + + @Override + public void setTonemapProfile(TonemapProfile tonemap_profile, float log_profile_strength, float gamma) { + // not supported for CameraController1! + } + + @Override + public TonemapProfile getTonemapProfile() { + // not supported for CameraController1! 
+ return TonemapProfile.TONEMAPPROFILE_OFF; + } + + public int getJpegQuality() { + Camera.Parameters parameters = this.getParameters(); + return parameters.getJpegQuality(); + } + + public void setJpegQuality(int quality) { + Camera.Parameters parameters = this.getParameters(); + parameters.setJpegQuality(quality); + setCameraParameters(parameters); + } + + public int getZoom() { + /*Camera.Parameters parameters = this.getParameters(); + return parameters.getZoom();*/ + return this.current_zoom_value; + } + + public void setZoom(int value) { + try { + Camera.Parameters parameters = this.getParameters(); + if( MyDebug.LOG ) + Log.d(TAG, "zoom was: " + parameters.getZoom()); + this.current_zoom_value = value; + parameters.setZoom(value); + setCameraParameters(parameters); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to set parameters for zoom", e); + count_camera_parameters_exception++; + } + } + + @Override + public void setZoom(int value, float smooth_zoom) { + setZoom(value); + } + + @Override + public void resetZoom() { + setZoom(0); + } + + public int getExposureCompensation() { + /*Camera.Parameters parameters = this.getParameters(); + return parameters.getExposureCompensation();*/ + return this.current_exposure_compensation; + } + + private float getExposureCompensationStep() { + float exposure_step; + Camera.Parameters parameters = this.getParameters(); + try { + exposure_step = parameters.getExposureCompensationStep(); + } + catch(Exception e) { + // received a NullPointerException from StringToReal.parseFloat() beneath getExposureCompensationStep() on Google Play! 
+ MyDebug.logStackTrace(TAG, "exception from getExposureCompensationStep()", e); + exposure_step = 1.0f/3.0f; // make up a typical example + } + return exposure_step; + } + + // Returns whether exposure was modified + public boolean setExposureCompensation(int new_exposure) { + /*Camera.Parameters parameters = this.getParameters(); + int current_exposure = parameters.getExposureCompensation(); + if( new_exposure != current_exposure ) {*/ + if( new_exposure != current_exposure_compensation ) { + if( MyDebug.LOG ) + Log.d(TAG, "change exposure from " + current_exposure_compensation + " to " + new_exposure); + Camera.Parameters parameters = this.getParameters(); + this.current_exposure_compensation = new_exposure; + parameters.setExposureCompensation(new_exposure); + setCameraParameters(parameters); + return true; + } + return false; + } + + @Override + public void setPreviewFpsRange(int min, int max) { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewFpsRange: " + min + " to " + max); + try { + Camera.Parameters parameters = this.getParameters(); + parameters.setPreviewFpsRange(min, max); + setCameraParameters(parameters); + } + catch(RuntimeException e) { + // can get RuntimeException from getParameters - we don't catch within that function because callers may not be able to recover, + // but here it doesn't really matter if we fail to set the fps range + MyDebug.logStackTrace(TAG, "setPreviewFpsRange failed to get parameters", e); + count_camera_parameters_exception++; + } + } + + @Override + public void clearPreviewFpsRange() { + if( MyDebug.LOG ) + Log.d(TAG, "clearPreviewFpsRange"); + // not supported for old API + } + + public List getSupportedPreviewFpsRange() { + try { + Camera.Parameters parameters = this.getParameters(); + return parameters.getSupportedPreviewFpsRange(); + } + catch(RuntimeException e) { + /* N.B, have had reports of StringIndexOutOfBoundsException on Google Play on Sony Xperia M devices + at 
android.hardware.Camera$Parameters.splitRange(Camera.java:4098) + at android.hardware.Camera$Parameters.getSupportedPreviewFpsRange(Camera.java:2799) + But that's a subclass of RuntimeException which we now catch anyway. + */ + MyDebug.logStackTrace(TAG, "exception from getSupportedPreviewFpsRange", e); + count_camera_parameters_exception++; + } + return null; + } + + @Override + public void setFocusValue(String focus_value) { + Camera.Parameters parameters = this.getParameters(); + switch(focus_value) { + case "focus_mode_auto": + case "focus_mode_locked": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); + break; + case "focus_mode_infinity": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY); + break; + case "focus_mode_macro": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO); + break; + case "focus_mode_fixed": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED); + break; + case "focus_mode_edof": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_EDOF); + break; + case "focus_mode_continuous_picture": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); + break; + case "focus_mode_continuous_video": + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + break; + default: + if (MyDebug.LOG) + Log.d(TAG, "setFocusValue() received unknown focus value " + focus_value); + break; + } + setCameraParameters(parameters); + } + + private String convertFocusModeToValue(String focus_mode) { + // focus_mode may be null on some devices; we return "" + if( MyDebug.LOG ) + Log.d(TAG, "convertFocusModeToValue: " + focus_mode); + String focus_value = ""; + if( focus_mode == null ) { + // ignore, leave focus_value at "" + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) ) { + focus_value = "focus_mode_auto"; + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_INFINITY) ) { + focus_value = "focus_mode_infinity"; + } + else if( 
focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO) ) { + focus_value = "focus_mode_macro"; + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_FIXED) ) { + focus_value = "focus_mode_fixed"; + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_EDOF) ) { + focus_value = "focus_mode_edof"; + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) ) { + focus_value = "focus_mode_continuous_picture"; + } + else if( focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) ) { + focus_value = "focus_mode_continuous_video"; + } + return focus_value; + } + + @Override + public String getFocusValue() { + // returns "" if Parameters.getFocusMode() returns null + Camera.Parameters parameters = this.getParameters(); + String focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play + return convertFocusModeToValue(focus_mode); + } + + @Override + public float getFocusDistance() { + // not supported for CameraController1! + return 0.0f; + } + + @Override + public boolean setFocusDistance(float focus_distance) { + // not supported for CameraController1! + return false; + } + + @Override + public void setFocusBracketingNImages(int n_images) { + // not supported for CameraController1 + } + + @Override + public void setFocusBracketingAddInfinity(boolean focus_bracketing_add_infinity) { + // not supported for CameraController1 + } + + @Override + public void setFocusBracketingSourceDistance(float focus_bracketing_source_distance) { + // not supported for CameraController1! + } + + @Override + public float getFocusBracketingSourceDistance() { + // not supported for CameraController1! + return 0.0f; + } + + @Override + public void setFocusBracketingSourceDistanceFromCurrent() { + // not supported for CameraController1! 
+ } + + @Override + public void setFocusBracketingTargetDistance(float focus_bracketing_target_distance) { + // not supported for CameraController1! + } + + @Override + public float getFocusBracketingTargetDistance() { + // not supported for CameraController1! + return 0.0f; + } + + private String convertFlashValueToMode(String flash_value) { + String flash_mode = ""; + switch(flash_value) { + case "flash_off": + case "flash_frontscreen_on": + case "flash_frontscreen_torch": + flash_mode = Camera.Parameters.FLASH_MODE_OFF; + break; + case "flash_auto": + flash_mode = Camera.Parameters.FLASH_MODE_AUTO; + break; + case "flash_on": + flash_mode = Camera.Parameters.FLASH_MODE_ON; + break; + case "flash_torch": + flash_mode = Camera.Parameters.FLASH_MODE_TORCH; + break; + case "flash_red_eye": + flash_mode = Camera.Parameters.FLASH_MODE_RED_EYE; + break; + } + return flash_mode; + } + + public void setFlashValue(String flash_value) { + Camera.Parameters parameters = this.getParameters(); + if( MyDebug.LOG ) + Log.d(TAG, "setFlashValue: " + flash_value); + + this.frontscreen_flash = false; + if( flash_value.equals("flash_frontscreen_on") ) { + // we do this check first due to weird behaviour on Samsung Galaxy S7 front camera where parameters.getFlashMode() returns values (auto, beach, portrait) + this.frontscreen_flash = true; + return; + } + + if( parameters.getFlashMode() == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "flash mode not supported"); + return; + } + + final String flash_mode = convertFlashValueToMode(flash_value); + if( !flash_mode.isEmpty() && !flash_mode.equals(parameters.getFlashMode()) ) { + if( parameters.getFlashMode().equals(Camera.Parameters.FLASH_MODE_TORCH) && !flash_mode.equals(Camera.Parameters.FLASH_MODE_OFF) ) { + // workaround for bug on Nexus 5 and Nexus 6 where torch doesn't switch off until we set FLASH_MODE_OFF + if( MyDebug.LOG ) + Log.d(TAG, "first turn torch off"); + parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); + 
setCameraParameters(parameters); + // need to set the correct flash mode after a delay + Handler handler = new Handler(); + handler.postDelayed(new Runnable(){ + @Override + public void run(){ + if( MyDebug.LOG ) + Log.d(TAG, "now set actual flash mode after turning torch off"); + if( camera != null ) { // make sure camera wasn't released in the meantime (has a Google Play crash as a result of this) + Camera.Parameters parameters = getParameters(); + parameters.setFlashMode(flash_mode); + setCameraParameters(parameters); + } + } + }, 100); + } + else { + parameters.setFlashMode(flash_mode); + setCameraParameters(parameters); + } + } + } + + private String convertFlashModeToValue(String flash_mode) { + // flash_mode may be null, meaning flash isn't supported; we return "" + if( MyDebug.LOG ) + Log.d(TAG, "convertFlashModeToValue: " + flash_mode); + String flash_value = ""; + if( flash_mode == null ) { + // ignore, leave focus_value at "" + } + else if( flash_mode.equals(Camera.Parameters.FLASH_MODE_OFF) ) { + flash_value = "flash_off"; + } + else if( flash_mode.equals(Camera.Parameters.FLASH_MODE_AUTO) ) { + flash_value = "flash_auto"; + } + else if( flash_mode.equals(Camera.Parameters.FLASH_MODE_ON) ) { + flash_value = "flash_on"; + } + else if( flash_mode.equals(Camera.Parameters.FLASH_MODE_TORCH) ) { + flash_value = "flash_torch"; + } + else if( flash_mode.equals(Camera.Parameters.FLASH_MODE_RED_EYE) ) { + flash_value = "flash_red_eye"; + } + return flash_value; + } + + public String getFlashValue() { + // returns "" if flash isn't supported + Camera.Parameters parameters = this.getParameters(); + String flash_mode = parameters.getFlashMode(); // will be null if flash mode not supported + return convertFlashModeToValue(flash_mode); + } + + public void setRecordingHint(boolean hint) { + if( MyDebug.LOG ) + Log.d(TAG, "setRecordingHint: " + hint); + try { + Camera.Parameters parameters = this.getParameters(); + // Calling setParameters here with continuous video 
focus mode causes preview to not restart after taking a photo on Galaxy Nexus?! (fine on my Nexus 7). + // The issue seems to specifically be with setParameters (i.e., the problem occurs even if we don't setRecordingHint). + // In addition, I had a report of a bug on HTC Desire X, Android 4.0.4 where the saved video was corrupted. + // This worked fine in 1.7, then not in 1.8 and 1.9, then was fixed again in 1.10 + // The only thing in common to 1.7->1.8 and 1.9-1.10, that seems relevant, was adding this code to setRecordingHint() and setParameters() (unclear which would have been the problem), + // so we should be very careful about enabling this code again! + // Update for v1.23: the bug with Galaxy Nexus has come back (see comments in Preview.setPreviewFps()) and is now unavoidable, + // but I've still kept this check here - if nothing else, because it apparently caused video recording problems on other devices too. + // Update for v1.29: this doesn't seem to happen on Galaxy Nexus with continuous picture focus mode, which is what we now use; but again, still keepin the check here due to possible problems on other devices + String focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play + if( focus_mode != null && !focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) ) { + parameters.setRecordingHint(hint); + setCameraParameters(parameters); + } + } + catch(RuntimeException e) { + // can get RuntimeException from getParameters - we don't catch within that function because callers may not be able to recover, + // but here it doesn't really matter if we fail to set the recording hint + MyDebug.logStackTrace(TAG, "setRecordingHint failed to get parameters", e); + count_camera_parameters_exception++; + } + } + + @Override + public void setAutoExposureLock(boolean enabled) { + Camera.Parameters parameters = this.getParameters(); + 
parameters.setAutoExposureLock(enabled); + setCameraParameters(parameters); + } + + @Override + public boolean getAutoExposureLock() { + Camera.Parameters parameters = this.getParameters(); + if( !parameters.isAutoExposureLockSupported() ) + return false; + return parameters.getAutoExposureLock(); + } + + @Override + public void setAutoWhiteBalanceLock(boolean enabled) { + Camera.Parameters parameters = this.getParameters(); + parameters.setAutoWhiteBalanceLock(enabled); + setCameraParameters(parameters); + } + + @Override + public boolean getAutoWhiteBalanceLock() { + Camera.Parameters parameters = this.getParameters(); + if( !parameters.isAutoWhiteBalanceLockSupported() ) + return false; + return parameters.getAutoWhiteBalanceLock(); + } + + public void setRotation(int rotation) { + Camera.Parameters parameters = this.getParameters(); + parameters.setRotation(rotation); + setCameraParameters(parameters); + } + + public void setLocationInfo(Location location) { + // don't log location, in case of privacy! 
+ if( MyDebug.LOG ) + Log.d(TAG, "setLocationInfo"); + Camera.Parameters parameters = this.getParameters(); + parameters.removeGpsData(); + parameters.setGpsTimestamp(System.currentTimeMillis() / 1000); // initialise to a value (from Android camera source) + parameters.setGpsLatitude(location.getLatitude()); + parameters.setGpsLongitude(location.getLongitude()); + parameters.setGpsProcessingMethod(location.getProvider()); // from http://boundarydevices.com/how-to-write-an-android-camera-app/ + if( location.hasAltitude() ) { + parameters.setGpsAltitude(location.getAltitude()); + } + else { + // Android camera source claims we need to fake one if not present + // and indeed, this is needed to fix crash on Nexus 7 + parameters.setGpsAltitude(0); + } + if( location.getTime() != 0 ) { // from Android camera source + parameters.setGpsTimestamp(location.getTime() / 1000); + } + setCameraParameters(parameters); + } + + public void removeLocationInfo() { + Camera.Parameters parameters = this.getParameters(); + parameters.removeGpsData(); + setCameraParameters(parameters); + } + + public void enableShutterSound(boolean enabled) { + camera.enableShutterSound(enabled); + sounds_enabled = enabled; + } + + public boolean setFocusAndMeteringArea(List areas) { + List camera_areas = new ArrayList<>(); + for(CameraController.Area area : areas) { + camera_areas.add(new Camera.Area(area.rect, area.weight)); + } + try { + Camera.Parameters parameters = this.getParameters(); + String focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play + if( parameters.getMaxNumFocusAreas() != 0 && focus_mode != null && ( focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) ) ) { + 
parameters.setFocusAreas(camera_areas); + + // also set metering areas + if( parameters.getMaxNumMeteringAreas() == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "metering areas not supported"); + } + else { + parameters.setMeteringAreas(camera_areas); + } + + setCameraParameters(parameters); + + return true; + } + else if( parameters.getMaxNumMeteringAreas() != 0 ) { + parameters.setMeteringAreas(camera_areas); + + setCameraParameters(parameters); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "metering areas not supported"); + } + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to set focus or metering areas", e); + count_camera_parameters_exception++; + } + return false; + } + + public void clearFocusAndMetering() { + try { + Camera.Parameters parameters = this.getParameters(); + boolean update_parameters = false; + if( parameters.getMaxNumFocusAreas() > 0 ) { + parameters.setFocusAreas(null); + update_parameters = true; + } + if( parameters.getMaxNumMeteringAreas() > 0 ) { + parameters.setMeteringAreas(null); + update_parameters = true; + } + if( update_parameters ) { + setCameraParameters(parameters); + } + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to clear focus or metering areas", e); + count_camera_parameters_exception++; + } + } + + public List getFocusAreas() { + Camera.Parameters parameters = this.getParameters(); + List camera_areas = parameters.getFocusAreas(); + if( camera_areas == null ) + return null; + List areas = new ArrayList<>(); + for(Camera.Area camera_area : camera_areas) { + areas.add(new CameraController.Area(camera_area.rect, camera_area.weight)); + } + return areas; + } + + public List getMeteringAreas() { + Camera.Parameters parameters = this.getParameters(); + List camera_areas = parameters.getMeteringAreas(); + if( camera_areas == null ) + return null; + List areas = new ArrayList<>(); + for(Camera.Area camera_area : camera_areas) { + areas.add(new CameraController.Area(camera_area.rect, 
camera_area.weight)); + } + return areas; + } + + @Override + public boolean supportsAutoFocus() { + try { + Camera.Parameters parameters = this.getParameters(); + String focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play from the below line (v1.7), + // on Galaxy Tab 10.1 (GT-P7500), Android 4.0.3 - 4.0.4; HTC EVO 3D X515m (shooteru), Android 4.0.3 - 4.0.4 + if( focus_mode != null && ( focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO) ) ) { + return true; + } + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to get focus mode", e); + count_camera_parameters_exception++; + } + return false; + } + + @Override + public boolean supportsMetering() { + try { + Camera.Parameters parameters = this.getParameters(); + return parameters.getMaxNumMeteringAreas() > 0; + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to get metering support", e); + count_camera_parameters_exception++; + } + return false; + } + + @Override + public boolean focusIsContinuous() { + try { + Camera.Parameters parameters = this.getParameters(); + String focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play from the below line (v1.7), + // on Galaxy Tab 10.1 (GT-P7500), Android 4.0.3 - 4.0.4; HTC EVO 3D X515m (shooteru), Android 4.0.3 - 4.0.4 + if( focus_mode != null && ( focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO) ) ) { + return true; + } + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to get focus mode", e); + count_camera_parameters_exception++; + } + return false; + } + + public boolean focusIsVideo() { + Camera.Parameters parameters = this.getParameters(); + String 
current_focus_mode = parameters.getFocusMode(); + // getFocusMode() is documented as never returning null, however I've had null pointer exceptions reported in Google Play + boolean focus_is_video = current_focus_mode != null && current_focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + if( MyDebug.LOG ) { + Log.d(TAG, "current_focus_mode: " + current_focus_mode); + Log.d(TAG, "focus_is_video: " + focus_is_video); + } + return focus_is_video; + } + + @Override + public void reconnect() throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "reconnect"); + try { + camera.reconnect(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "reconnect threw IOException", e); + throw new CameraControllerException(); + } + } + + @Override + public void setPreviewDisplay(SurfaceHolder holder) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewDisplay"); + try { + camera.setPreviewDisplay(holder); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to set preview display", e); + throw new CameraControllerException(); + } + } + + @Override + public void setPreviewTexture(TextureView texture) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewTexture"); + try { + camera.setPreviewTexture(texture.getSurfaceTexture()); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to set preview texture", e); + throw new CameraControllerException(); + } + } + + @Override + public void startPreview(boolean wait_until_started, Runnable runnable, Runnable on_failed) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "startPreview"); + try { + camera.startPreview(); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "failed to start preview", e); + throw new CameraControllerException(); + } + if( runnable != null ) { + runnable.run(); + } + } + + @Override + public void stopRepeating() { + // not relevant for old camera API + } + + @Override + 
public void stopPreview() { + if( camera != null ) { + // have had crashes when this is called from Preview/CloseCameraTask. + camera.stopPreview(); + } + } + + // returns false if RuntimeException thrown (may include if face-detection already started) + public boolean startFaceDetection() { + if( MyDebug.LOG ) + Log.d(TAG, "startFaceDetection"); + try { + camera.startFaceDetection(); + } + catch(RuntimeException e) { + if( MyDebug.LOG ) + Log.d(TAG, "face detection failed or already started"); + count_camera_parameters_exception++; + return false; + } + return true; + } + + public void setFaceDetectionListener(final CameraController.FaceDetectionListener listener) { + if( listener != null ) { + class CameraFaceDetectionListener implements Camera.FaceDetectionListener { + @Override + public void onFaceDetection(Camera.Face[] camera_faces, Camera camera) { + Face [] faces = new Face[camera_faces.length]; + for(int i=0;i 1 ) { + pending_burst_images.add(data); + if( pending_burst_images.size() >= n_burst ) { // shouldn't ever be greater, but just in case + if( MyDebug.LOG ) + Log.d(TAG, "all burst images available"); + if( pending_burst_images.size() > n_burst ) { + Log.e(TAG, "pending_burst_images size " + pending_burst_images.size() + " is greater than n_burst " + n_burst); + } + + // set exposure compensation back to original + setExposureCompensation(burst_exposures.get(0)); + + // take a copy, so that we can clear pending_burst_images + // also allows us to reorder from dark to light + // since we took the images with the base exposure being first + int n_half_images = pending_burst_images.size()/2; + List images = new ArrayList<>(); + // darker images + for(int i=0;i requests = new ArrayList<>(); + + // do the current exposure first, so we can take the first shot immediately + // if we change the order, remember to update the code that re-orders for passing resultant images back to picture.onBurstPictureTaken() + requests.add(exposure_current); + + // darker 
images + for(int i=0;i camera_features_caches; // used to improve performance for subsequent CameraController2 objects; key is the cameraIdS string, value is a CameraFeaturesCache object + private CameraDevice camera; + private final String cameraIdS; // ID string of logical camera + private final String cameraIdSPhysical; // if non-null, ID string of underlying physical camera + + private final boolean is_samsung; + private final boolean is_samsung_s7; // Galaxy S7 or Galaxy S7 Edge + private final boolean is_samsung_galaxy_s; + private final boolean is_samsung_galaxy_f; // Galaxy fold or flip series + + // characteristics of camera - if a specific physical camera is being used, these are characteristics for the physical camera + private CameraCharacteristics characteristics; + private CameraExtensionCharacteristics extension_characteristics; + private CameraFeaturesCache camera_features_cache; // if non-null, this is the cache obtained from camera_features_caches + // cached characteristics (use this for values that need to be frequently accessed, e.g., per frame, to improve performance); + private int characteristics_sensor_orientation; + private Facing characteristics_facing; + + // camera features that we save (either to avoid repeatedly accessing, or we do our own modification) + private List zoom_ratios; + private int current_zoom_value; + private int zoom_value_1x; // index into zoom_ratios list that is for zoom 1x + private List supported_extensions_zoom; // if non-null, list of camera vendor extensions that support zoom + private boolean supports_face_detect_mode_simple; + private boolean supports_face_detect_mode_full; + private boolean supports_optical_stabilization; + private boolean supports_photo_video_recording; + private boolean supports_white_balance_temperature; + private String initial_focus_mode; // if non-null, focus mode to use if not set by Preview (rather than relying on the Builder template's default, which can be one that isn't supported, 
at least on Android emulator with its LIMITED camera!) + private boolean supports_exposure_time; + private long min_exposure_time; + private long max_exposure_time; + private float minimum_focus_distance; // for manual focus + //private boolean supports_low_light_boost; + + private boolean supports_tonemap_preset_curve; + private final static int tonemap_log_max_curve_points_c = 64; + private final static float [] jtvideo_values_base = new float[] { + 0.00f, 0.00f, + 0.01f, 0.055f, + 0.02f, 0.1f, + 0.05f, 0.21f, + 0.09f, 0.31f, + 0.13f, 0.38f, + 0.18f, 0.45f, + 0.28f, 0.57f, + 0.35f, 0.64f, + 0.45f, 0.72f, + 0.51f, 0.76f, + 0.60f, 0.82f, + 0.67f, 0.86f, + 0.77f, 0.91f, + 0.88f, 0.96f, + 0.97f, 0.99f, + 1.00f, 1.00f + }; + private final float [] jtvideo_values; + private final static float [] jtlog_values_base = new float[] { + 0.00f, 0.00f, + 0.01f, 0.07f, + 0.03f, 0.17f, + 0.05f, 0.25f, + 0.07f, 0.31f, + 0.09f, 0.36f, + 0.13f, 0.44f, + 0.18f, 0.51f, + 0.24f, 0.57f, + 0.31f, 0.64f, + 0.38f, 0.70f, + 0.46f, 0.76f, + 0.58f, 0.83f, + 0.70f, 0.89f, + 0.86f, 0.95f, + 0.99f, 0.99f, + 1.00f, 1.00f + }; + private final float [] jtlog_values; + private final static float [] jtlog2_values_base = new float[] { + 0.00f, 0.00f, + 0.01f, 0.09f, + 0.03f, 0.23f, + 0.07f, 0.37f, + 0.12f, 0.48f, + 0.17f, 0.56f, + 0.25f, 0.64f, + 0.32f, 0.70f, + 0.39f, 0.75f, + 0.50f, 0.81f, + 0.59f, 0.85f, + 0.66f, 0.88f, + 0.72f, 0.9f, + 0.78f, 0.92f, + 0.88f, 0.95f, + 0.92f, 0.96f, + 0.99f, 0.98f, + 1.00f, 1.00f + }; + private final float [] jtlog2_values; + + private final ErrorCallback preview_error_cb; + private final ErrorCallback camera_error_cb; + + private enum SessionType { + SESSIONTYPE_NORMAL, // standard use of Camera2 API, via CameraCaptureSession + SESSIONTYPE_EXTENSION, // use of vendor extension, via CameraExtensionSession + } + private SessionType sessionType = SessionType.SESSIONTYPE_NORMAL; + //private SessionType sessionType = SessionType.SESSIONTYPE_EXTENSION; // test + private 
CameraCaptureSession captureSession; // used if sessionType == SESSIONTYPE_NORMAL + private CameraExtensionSession extensionSession; // used if sessionType == SESSIONTYPE_EXTENSION + private int camera_extension = 0; // used if sessionType == SESSIONTYPE_EXTENSION + + private CaptureRequest.Builder previewBuilder; + private boolean previewIsVideoMode; + private AutoFocusCallback autofocus_cb; + private long autofocus_time_ms = -1; // time we set autofocus_cb to non-null + private static final long autofocus_timeout_c = 1000; // timeout for calling autofocus_cb (applies for both auto and continuous focus) + private boolean capture_follows_autofocus_hint; + private boolean ready_for_capture; + private FaceDetectionListener face_detection_listener; + private int last_faces_detected = -1; + private final Object open_camera_lock = new Object(); // lock to wait for camera to be opened from CameraDevice.StateCallback + private final Object background_camera_lock = new Object(); // lock to synchronize between UI thread and the background "CameraBackground" thread/handler + + private ImageReader imageReader; + + private BurstType burst_type = BurstType.BURSTTYPE_NONE; + // for BURSTTYPE_EXPO: + private final static int max_expo_bracketing_n_images = 5; // could be more, but limit to 5 for now + private int expo_bracketing_n_images = 3; + private double expo_bracketing_stops = 2.0; + private boolean use_expo_fast_burst = true; + // for BURSTTYPE_FOCUS: + private boolean focus_bracketing_in_progress; // whether focus bracketing in progress; set back to false to cancel + private int focus_bracketing_n_images = 3; + private float focus_bracketing_source_distance = 0.0f; + private float focus_bracketing_target_distance = 0.0f; + private boolean focus_bracketing_add_infinity = false; + // for BURSTTYPE_NORMAL: + private boolean burst_for_noise_reduction; // chooses number of burst images and other settings for Open Camera's noise reduction (NR) photo mode + private boolean 
noise_reduction_low_light; // if burst_for_noise_reduction==true, whether to optimise for low light scenes + private int burst_requested_n_images; // if burst_for_noise_reduction==false, this gives the number of images for the burst + //for BURSTTYPE_CONTINUOUS: + private boolean continuous_burst_in_progress; // whether we're currently taking a continuous burst + private boolean continuous_burst_requested_last_capture; // whether we've requested the last capture + + // Whether to enable a workaround hack for some Galaxy devices - take an additional dummy photo + // when taking an expo/HDR burst, to avoid problem where manual exposure is ignored for the + // first image. + private boolean dummy_capture_hack = false; + //private boolean dummy_capture_hack = true; // test + + private boolean want_jpeg_r; + private boolean want_raw; + //private boolean want_raw = true; + private int max_raw_images; + private android.util.Size raw_size; + private ImageReader imageReaderRaw; + private OnImageAvailableListener onImageAvailableListener; + private OnRawImageAvailableListener onRawImageAvailableListener; + private PictureCallback picture_cb; + private boolean jpeg_todo; // whether we are still waiting for JPEG images + private boolean raw_todo; // whether we are still waiting for RAW images + private boolean done_all_captures; // whether we've received the capture for the image (or all images if a burst) + //private CaptureRequest pending_request_when_ready; + private int n_burst; // number of expected (remaining) burst JPEG images in this capture + private int n_burst_taken; // number of burst JPEG images taken so far in this capture + private int n_burst_total; // total number of expected burst images in this capture (if known) (same for JPEG and RAW) + private int n_burst_raw; // number of expected (remaining) burst RAW images in this capture + private boolean burst_single_request; // if true then the burst images are returned in a single call to onBurstPictureTaken(), if 
false, then multiple calls to onPictureTaken() are made as soon as the image is available + private final List pending_burst_images = new ArrayList<>(); // burst images that have been captured so far, but not yet sent to the application + private final List pending_burst_images_raw = new ArrayList<>(); + private List slow_burst_capture_requests; // the set of burst capture requests - used when not using captureBurst() (e.g., when use_expo_fast_burst==false, or for focus bracketing) + private long slow_burst_start_ms = 0; // time when burst started (used for measuring performance of captures when not using captureBurst()) + private RawImage pending_raw_image; // used to ensure that when taking JPEG+RAW, the JPEG picture callback is called first (only used for non-burst cases) + private ErrorCallback take_picture_error_cb; + private boolean want_video_high_speed; + private boolean is_video_high_speed; // whether we're actually recording in high speed + private List ae_fps_ranges; + private List hs_fps_ranges; + //private ImageReader previewImageReader; + private SurfaceTexture texture; + private Surface surface_texture; + private HandlerThread thread; + private Handler handler; + private Executor executor; + private Surface video_recorder_surface; + + private int preview_width; + private int preview_height; + + private int picture_width; + private int picture_height; + + private static final int STATE_NORMAL = 0; + private static final int STATE_WAITING_AUTOFOCUS = 1; + private static final int STATE_WAITING_PRECAPTURE_START = 2; + private static final int STATE_WAITING_PRECAPTURE_DONE = 3; + private static final int STATE_WAITING_FAKE_PRECAPTURE_START = 4; + private static final int STATE_WAITING_FAKE_PRECAPTURE_DONE = 5; + private int state = STATE_NORMAL; + private long precapture_state_change_time_ms = -1; // time we changed state for precapture modes + private static final long precapture_start_timeout_c = 2000; + private static final long 
precapture_done_timeout_c = 3000; + + private boolean use_fake_precapture; // see CameraController.setUseCamera2FakeFlash() for details - this is the user/application setting, see use_fake_precapture_mode for whether fake precapture is enabled (as we may do this for other purposes, e.g., front screen flash) + private boolean use_fake_precapture_mode; // true if either use_fake_precapture is true, or we're temporarily using fake precapture mode (e.g., for front screen flash or exposure bracketing) + private boolean fake_precapture_torch_performed; // whether we turned on torch to do a fake precapture + private boolean fake_precapture_torch_focus_performed; // whether we turned on torch to do an autofocus, in fake precapture mode + private boolean fake_precapture_use_flash; // whether we decide to use flash in auto mode (if fake_precapture_use_autoflash_time_ms != -1) + private long fake_precapture_use_flash_time_ms = -1; // when we last checked to use flash in auto mode + + private ContinuousFocusMoveCallback continuous_focus_move_callback; + + private final MediaActionSound media_action_sound = new MediaActionSound(); + private final int shutter_click_sound; // which sound to use for shutter click + private boolean sounds_enabled = true; + + private boolean has_received_frame; + private boolean capture_result_is_ae_scanning; + private Integer capture_result_ae; // latest ae_state, null if not available + private boolean is_flash_required; // whether capture_result_ae suggests FLASH_REQUIRED? 
Or in neither FLASH_REQUIRED nor CONVERGED, this stores the last known result + private boolean modified_from_camera_settings; + // if modified_from_camera_settings set to true, then we've temporarily requested captures with settings such as + // exposure modified from the normal ones in camera_settings + private boolean capture_result_has_white_balance_rggb; + private RggbChannelVector capture_result_white_balance_rggb; + private boolean capture_result_has_iso; + private int capture_result_iso; + private boolean capture_result_has_exposure_time; + private long capture_result_exposure_time; + private boolean capture_result_has_frame_duration; + private long capture_result_frame_duration; + private boolean capture_result_has_focus_distance; + private float capture_result_focus_distance; + private boolean capture_result_has_aperture; + private float capture_result_aperture; + /*private boolean capture_result_has_focus_distance; + private float capture_result_focus_distance_min; + private float capture_result_focus_distance_max;*/ + /** Even if using long exposure, we want to set a maximum for the preview to avoid very low + * frame rates. + * Originally this was 1/12s, but I think we can get away with 1/5s - for this range, having + * a WYSIWYG preview is probably still better than the reduced framerate. Also as a side-benefit, + * it reduces the impact of the Samsung Galaxy Android 11 bug where manual exposure is ignored if + * different to the preview. 
+ */ + private final static long max_preview_exposure_time_c = 1000000000L/5; + + private void resetCaptureResultInfo() { + capture_result_is_ae_scanning = false; + capture_result_ae = null; + is_flash_required = false; + capture_result_has_white_balance_rggb = false; + capture_result_has_iso = false; + capture_result_has_exposure_time = false; + capture_result_has_frame_duration = false; + capture_result_has_focus_distance = false; + capture_result_has_aperture = false; + } + + /* Callback to be called when we receive a capture with tag RUN_POST_CAPTURE. + */ + private abstract static class PostCapture { + public abstract void call() throws CameraAccessException; + } + private PostCapture run_post_capture; + + private enum RequestTagType { + RUN_POST_CAPTURE, // calls run_post_capture.call(), , if run_post_capture!=null + CAPTURE, // request is either for a regular non-burst capture, or the last of a burst capture sequence + CAPTURE_BURST_IN_PROGRESS // request is for a burst capture, but isn't the last of the burst capture sequence + //NONE // should be treated the same as if no tag had been set on the request - but allows the request tag type to be changed later + } + + /* The class that we use for setTag() and getTag() for capture requests. + We use this class instead of assigning the RequestTagType directly, so we can modify it + (even though CaptureRequest only has a getTag() method). 
+ */ + private static class RequestTagObject { + private RequestTagType type; + + private RequestTagObject(RequestTagType type) { + this.type = type; + } + + private RequestTagType getType() { + return type; + } + + private void setType(RequestTagType type) { + this.type = type; + } + } + + private final static int min_white_balance_temperature_c = 1000; + private final static int max_white_balance_temperature_c = 15000; + + private class CameraSettings { + // keys that we need to store, to pass to the stillBuilder, but doesn't need to be passed to previewBuilder (should set sensible defaults) + private int rotation; + private Location location; + private byte jpeg_quality = 90; + + // keys that we have passed to the previewBuilder, that we need to store to also pass to the stillBuilder (should set sensible defaults, or use a has_ boolean if we don't want to set a default) + private int scene_mode = CameraMetadata.CONTROL_SCENE_MODE_DISABLED; + private int color_effect = CameraMetadata.CONTROL_EFFECT_MODE_OFF; + private int white_balance = CameraMetadata.CONTROL_AWB_MODE_AUTO; + private boolean has_default_color_correction; + private Integer default_color_correction; + private boolean has_antibanding; + private int antibanding = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO; + private boolean has_edge_mode; + private int edge_mode = CameraMetadata.EDGE_MODE_FAST; + private boolean has_default_edge_mode; + private Integer default_edge_mode; + private boolean has_noise_reduction_mode; + private int noise_reduction_mode = CameraMetadata.NOISE_REDUCTION_MODE_FAST; + private boolean has_default_noise_reduction_mode; + private Integer default_noise_reduction_mode; + private int white_balance_temperature = 5000; // used for white_balance == CONTROL_AWB_MODE_OFF + private String flash_value = "flash_off"; + private boolean has_iso; + //private int ae_mode = CameraMetadata.CONTROL_AE_MODE_ON; + //private int flash_mode = CameraMetadata.FLASH_MODE_OFF; + private int iso; + 
private long exposure_time = EXPOSURE_TIME_DEFAULT; + private boolean has_aperture; + private float aperture; + private boolean has_control_zoom_ratio; // zoom for Android 11+ + private float control_zoom_ratio; // zoom for Android 11+ + private Rect scalar_crop_region; // zoom for older Android versions; no need for has_scalar_crop_region, as we can set to null instead + private boolean has_ae_exposure_compensation; + private int ae_exposure_compensation; + private boolean has_af_mode; + private int af_mode = CaptureRequest.CONTROL_AF_MODE_AUTO; + private float focus_distance; // actual value passed to camera device (set to 0.0 if in infinity mode) + private float focus_distance_manual; // saved setting when in manual mode (so if user switches to infinity mode and back, we'll still remember the manual focus distance) + private boolean ae_lock; + private boolean wb_lock; + private MeteringRectangle [] af_regions; // no need for has_af_regions, as we can set to null instead + private MeteringRectangle [] ae_regions; // no need for has_ae_regions, as we can set to null instead + private boolean has_face_detect_mode; + private int face_detect_mode = CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF; + private Integer default_optical_stabilization; + private boolean video_stabilization; + private TonemapProfile tonemap_profile = TonemapProfile.TONEMAPPROFILE_OFF; + private float log_profile_strength; // for TONEMAPPROFILE_LOG + private float gamma_profile; // for TONEMAPPROFILE_GAMMA + private Integer default_tonemap_mode; // since we don't know what a device's tonemap mode is, we save it so we can switch back to it + private Range ae_target_fps_range; + private long sensor_frame_duration; + + private int getExifOrientation() { + int exif_orientation = ExifInterface.ORIENTATION_NORMAL; + switch( (rotation + 360) % 360 ) { + case 0: + exif_orientation = ExifInterface.ORIENTATION_NORMAL; + break; + case 90: + exif_orientation = (getFacing() == Facing.FACING_FRONT) ? 
+ ExifInterface.ORIENTATION_ROTATE_270 : + ExifInterface.ORIENTATION_ROTATE_90; + break; + case 180: + exif_orientation = ExifInterface.ORIENTATION_ROTATE_180; + break; + case 270: + exif_orientation = (getFacing() == Facing.FACING_FRONT) ? + ExifInterface.ORIENTATION_ROTATE_90 : + ExifInterface.ORIENTATION_ROTATE_270; + break; + default: + // leave exif_orientation unchanged + if( MyDebug.LOG ) + Log.e(TAG, "unexpected rotation: " + rotation); + break; + } + if( MyDebug.LOG ) { + Log.d(TAG, "rotation: " + rotation); + Log.d(TAG, "exif_orientation: " + exif_orientation); + } + return exif_orientation; + } + + private void setupBuilder(CaptureRequest.Builder builder, boolean is_still) { + //builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + //builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + //builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + //builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); + //builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH); + + if( sessionType != SessionType.SESSIONTYPE_EXTENSION ) { + builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE); + } + + setSceneMode(builder); + setColorEffect(builder); + setWhiteBalance(builder); + setAntiBanding(builder); + setAEMode(builder, is_still); + setControlZoomRatio(builder); + setCropRegion(builder); + setExposureCompensation(builder); + setFocusMode(builder); + setFocusDistance(builder); + setAutoExposureLock(builder); + setAutoWhiteBalanceLock(builder); + setAFRegions(builder); + setAERegions(builder); + setFaceDetectMode(builder); + setRawMode(builder); + setStabilization(builder); + setTonemapProfile(builder); + + if( is_still ) { + if( location != null && sessionType != SessionType.SESSIONTYPE_EXTENSION ) { + // JPEG_GPS_LOCATION not supported for camera extensions, so 
instead this must + // be set by the caller when receiving the image data (see ImageSaver.modifyExif(), + // where we do this using ExifInterface.setGpsInfo()). + builder.set(CaptureRequest.JPEG_GPS_LOCATION, location); + } + builder.set(CaptureRequest.JPEG_ORIENTATION, rotation); + builder.set(CaptureRequest.JPEG_QUALITY, jpeg_quality); + } + + setEdgeMode(builder); + setNoiseReductionMode(builder); + + /*builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_OFF); + builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_OFF); + builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_OFF); + builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_OFF);*/ + + /*builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF); + builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_OFF); + builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_GAMMA_VALUE); + builder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f); + }*/ + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 0); + }*/ + /*builder.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureRequest.CONTROL_EFFECT_MODE_OFF); + builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF); + builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_OFF); + builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_DISABLED); + builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY); + builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, 
CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY); + builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF); + builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_OFF); + builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_OFF);*/ + /*if( MyDebug.LOG ) { + builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY); + TonemapCurve original_curve = builder.get(CaptureRequest.TONEMAP_CURVE); + for(int c=0;c<3;c++) { + Log.d(TAG, "color c = " + c); + for(int i=0;i= Build.VERSION_CODES.M ) { + builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_PRESET_CURVE); + builder.set(CaptureRequest.TONEMAP_PRESET_CURVE, CaptureRequest.TONEMAP_PRESET_CURVE_SRGB); + }*/ + + if( MyDebug.LOG ) { + if( is_still ) { + Integer nr_mode = builder.get(CaptureRequest.NOISE_REDUCTION_MODE); + Log.d(TAG, "nr_mode: " + (nr_mode==null ? "null" : nr_mode)); + Integer edge_mode = builder.get(CaptureRequest.EDGE_MODE); + Log.d(TAG, "edge_mode: " + (edge_mode==null ? "null" : edge_mode)); + Integer control_mode = builder.get(CaptureRequest.CONTROL_MODE); + Log.d(TAG, "control_mode: " + (control_mode==null ? "null" : control_mode)); + Integer scene_mode = builder.get(CaptureRequest.CONTROL_SCENE_MODE); + Log.d(TAG, "scene_mode: " + (scene_mode==null ? "null" : scene_mode)); + Integer cc_mode = builder.get(CaptureRequest.COLOR_CORRECTION_MODE); + Log.d(TAG, "cc_mode: " + (cc_mode==null ? "null" : cc_mode)); + Integer cca_mode = builder.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE); + Log.d(TAG, "cca_mode: " + (cc_mode==null ? "null" : cca_mode)); + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + Integer raw_sensitivity_boost = builder.get(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST); + Log.d(TAG, "raw_sensitivity_boost: " + (raw_sensitivity_boost==null ? 
"null" : raw_sensitivity_boost)); + }*/ + } + //Integer ois_mode = builder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE); + //Log.d(TAG, "ois_mode: " + (ois_mode==null ? "null" : ois_mode)); + } + } + + private boolean setSceneMode(CaptureRequest.Builder builder) { + if( MyDebug.LOG ) { + Log.d(TAG, "setSceneMode"); + Log.d(TAG, "builder: " + builder); + Log.d(TAG, "has_face_detect_mode: " + has_face_detect_mode); + } + + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + // don't set for extensions + return false; + } + + Integer current_mode = builder.get(CaptureRequest.CONTROL_MODE); + Integer current_scene_mode = builder.get(CaptureRequest.CONTROL_SCENE_MODE); + if( MyDebug.LOG ) + Log.d(TAG, "current_scene_mode: " + current_scene_mode); + if( has_face_detect_mode ) { + // face detection mode overrides scene mode + if( MyDebug.LOG ) + Log.d(TAG, "setting scene mode for face detection"); + builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE); + builder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY); + if( current_mode == null || current_mode != CameraMetadata.CONTROL_MODE_USE_SCENE_MODE || current_scene_mode == null || current_scene_mode != CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY ) + return true; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "setting scene mode: " + scene_mode); + int new_mode; + if( scene_mode == CameraMetadata.CONTROL_SCENE_MODE_DISABLED ) { + // note we set CONTROL_MODE_AUTO even if using manual exposure, focus or awb, as we set that separately via + // CONTROL_AE_MODE_OFF etc + new_mode = CameraMetadata.CONTROL_MODE_AUTO; + } + else { + new_mode = CameraMetadata.CONTROL_MODE_USE_SCENE_MODE; + } + builder.set(CaptureRequest.CONTROL_MODE, new_mode); + builder.set(CaptureRequest.CONTROL_SCENE_MODE, scene_mode); + if( current_mode == null || current_mode != new_mode || current_scene_mode == null || current_scene_mode != scene_mode ) + return true; + } 
        // (tail of a method whose start is outside this hunk)
        return false;
    }

    /** Applies the stored color_effect to CONTROL_EFFECT_MODE on the builder.
     *  Skipped for extension sessions.
     *  @return true if the builder was modified.
     */
    private boolean setColorEffect(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        /*else if( builder.get(CaptureRequest.CONTROL_EFFECT_MODE) == null && color_effect == CameraMetadata.CONTROL_EFFECT_MODE_OFF ) {
            // can leave off
        }*/
        // only touch the builder when the mode actually differs (or was never set)
        else if( builder.get(CaptureRequest.CONTROL_EFFECT_MODE) == null || builder.get(CaptureRequest.CONTROL_EFFECT_MODE) != color_effect ) {
            if( MyDebug.LOG )
                Log.d(TAG, "setting color effect: " + color_effect);
            builder.set(CaptureRequest.CONTROL_EFFECT_MODE, color_effect);
            return true;
        }
        return false;
    }

    /** Applies the stored white_balance to CONTROL_AWB_MODE. For manual white balance
     *  (CONTROL_AWB_MODE_OFF) it additionally applies colour-correction gains derived
     *  from white_balance_temperature, saving/restoring the device's default
     *  COLOR_CORRECTION_MODE when switching into/out of manual mode.
     *  @return true if the builder was modified.
     */
    private boolean setWhiteBalance(CaptureRequest.Builder builder) {
        boolean changed = false;
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        /*else if( builder.get(CaptureRequest.CONTROL_AWB_MODE) == null && white_balance == CameraMetadata.CONTROL_AWB_MODE_AUTO ) {
            // can leave off
        }*/
        else if( builder.get(CaptureRequest.CONTROL_AWB_MODE) == null || builder.get(CaptureRequest.CONTROL_AWB_MODE) != white_balance ) {
            if( MyDebug.LOG )
                Log.d(TAG, "setting white balance: " + white_balance);

            // if we'd set COLOR_CORRECTION_MODE to non-default, now put it back to default
            if( has_default_color_correction ) {
                if( builder.get(CaptureRequest.COLOR_CORRECTION_MODE) != null && !builder.get(CaptureRequest.COLOR_CORRECTION_MODE).equals(default_color_correction) ) {
                    builder.set(CaptureRequest.COLOR_CORRECTION_MODE, default_color_correction);
                }
                has_default_color_correction = false; // set to false, as only need to set COLOR_CORRECTION_MODE back to default when changing from manual back to non-manual white balance
            }

            builder.set(CaptureRequest.CONTROL_AWB_MODE, white_balance);
            changed = true;
        }
        if( white_balance == CameraMetadata.CONTROL_AWB_MODE_OFF ) {
            if( MyDebug.LOG )
                Log.d(TAG, "setting white balance temperature: " + white_balance_temperature);
            // manual white balance

            if( !has_default_color_correction ) {
                // save the default COLOR_CORRECTION_MODE
                has_default_color_correction = true;
                default_color_correction = builder.get(CaptureRequest.COLOR_CORRECTION_MODE);
                if( MyDebug.LOG )
                    Log.d(TAG, "default_color_correction: " + default_color_correction);
            }

            RggbChannelVector rggbChannelVector = convertTemperatureToRggbVector(white_balance_temperature);
            builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
            builder.set(CaptureRequest.COLOR_CORRECTION_GAINS, rggbChannelVector);
            if( MyDebug.LOG ) {
                Log.d(TAG, "original color_correction_transform: " + builder.get(CaptureRequest.COLOR_CORRECTION_TRANSFORM));
            }
            // need to set COLOR_CORRECTION_TRANSFORM on some devices (e.g. Pixel 6 Pro) as they don't have it set by default
            // (the rational pairs below - numerator, denominator - form the identity matrix)
            ColorSpaceTransform color_space_transform = new ColorSpaceTransform(new int[]
                {
                    1, 1, 0, 1, 0, 1,
                    0, 1, 1, 1, 0, 1,
                    0, 1, 0, 1, 1, 1
                });
            builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, color_space_transform);
            changed = true;
        }
        return changed;
    }

    /** Applies the stored antibanding mode to CONTROL_AE_ANTIBANDING_MODE, when one has
     *  been requested (has_antibanding). Skipped for extension sessions.
     *  @return true if the builder was modified.
     */
    private boolean setAntiBanding(CaptureRequest.Builder builder) {
        boolean changed = false;
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_antibanding ) {
            if( builder.get(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE) == null || builder.get(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE) != antibanding ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "setting antibanding: " + antibanding);
                builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, antibanding);
                changed = true;
            }
        }
        return changed;
    }

    /** Applies the stored edge mode to EDGE_MODE. When the user has requested a specific
     *  mode (has_edge_mode) the device default is saved first so it can be restored later;
     *  otherwise a Samsung Galaxy S7 workaround forces EDGE_MODE_OFF, or a previously
     *  saved default is restored.
     *  @return true if the builder was modified.
     */
    private boolean setEdgeMode(CaptureRequest.Builder builder) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setEdgeMode");
            Log.d(TAG, "has_default_edge_mode: " + has_default_edge_mode);
            Log.d(TAG, "default_edge_mode: " + default_edge_mode);
        }
        boolean changed = false;
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_edge_mode ) {
            if( !has_default_edge_mode ) {
                // save the default_edge_mode edge_mode
                has_default_edge_mode = true;
                default_edge_mode = builder.get(CaptureRequest.EDGE_MODE);
                if( MyDebug.LOG )
                    Log.d(TAG, "default_edge_mode: " + default_edge_mode);
            }
            if( builder.get(CaptureRequest.EDGE_MODE) == null || builder.get(CaptureRequest.EDGE_MODE) != edge_mode ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "setting edge_mode: " + edge_mode);
                builder.set(CaptureRequest.EDGE_MODE, edge_mode);
                changed = true;
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "edge_mode was already set: " + edge_mode);
            }
        }
        else if( is_samsung_s7 ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set EDGE_MODE_OFF");
            // see https://sourceforge.net/p/opencamera/discussion/general/thread/48bd836b/ ,
            // https://stackoverflow.com/questions/36028273/android-camera-api-glossy-effect-on-galaxy-s7
            // need EDGE_MODE_OFF to avoid a "glow" effect
            builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
        }
        else if( has_default_edge_mode ) {
            // user no longer requests a specific mode: restore the saved device default
            if( builder.get(CaptureRequest.EDGE_MODE) != null && !builder.get(CaptureRequest.EDGE_MODE).equals(default_edge_mode) ) {
                builder.set(CaptureRequest.EDGE_MODE, default_edge_mode);
                changed = true;
            }
        }
        return changed;
    }

    /** Applies the stored noise reduction mode to NOISE_REDUCTION_MODE. Mirrors the
     *  structure of setEdgeMode(): save-default / apply-requested / Samsung S7
     *  workaround / restore-default.
     *  @return true if the builder was modified.
     */
    private boolean setNoiseReductionMode(CaptureRequest.Builder builder) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setNoiseReductionMode");
            Log.d(TAG, "has_default_noise_reduction_mode: " + has_default_noise_reduction_mode);
            Log.d(TAG, "default_noise_reduction_mode: " + default_noise_reduction_mode);
        }
        boolean changed = false;
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_noise_reduction_mode ) {
            if( !has_default_noise_reduction_mode ) {
                // save the default_noise_reduction_mode noise_reduction_mode
                has_default_noise_reduction_mode = true;
                default_noise_reduction_mode = builder.get(CaptureRequest.NOISE_REDUCTION_MODE);
                if( MyDebug.LOG )
                    Log.d(TAG, "default_noise_reduction_mode: " + default_noise_reduction_mode);
            }
            if( builder.get(CaptureRequest.NOISE_REDUCTION_MODE) == null || builder.get(CaptureRequest.NOISE_REDUCTION_MODE) != noise_reduction_mode ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "setting noise_reduction_mode: " + noise_reduction_mode);
                builder.set(CaptureRequest.NOISE_REDUCTION_MODE, noise_reduction_mode);
                changed = true;
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "noise_reduction_mode was already set: " + noise_reduction_mode);
            }
        }
        else if( is_samsung_s7 ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set NOISE_REDUCTION_MODE_OFF");
            // see https://sourceforge.net/p/opencamera/discussion/general/thread/48bd836b/ ,
            // https://stackoverflow.com/questions/36028273/android-camera-api-glossy-effect-on-galaxy-s7
            // need NOISE_REDUCTION_MODE_OFF to avoid excessive blurring
            builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
        }
        else if( has_default_noise_reduction_mode ) {
            // restore the saved device default
            if( builder.get(CaptureRequest.NOISE_REDUCTION_MODE) != null && !builder.get(CaptureRequest.NOISE_REDUCTION_MODE).equals(default_noise_reduction_mode)) {
                builder.set(CaptureRequest.NOISE_REDUCTION_MODE, default_noise_reduction_mode);
                changed = true;
            }
        }
        return changed;
    }

    /** Applies the stored aperture to LENS_APERTURE when has_aperture is set; otherwise
     *  (and for extension sessions) the builder is left untouched.
     *  @return true if the builder was modified.
     */
    private boolean setAperture(CaptureRequest.Builder builder) {
        if( MyDebug.LOG )
            Log.d(TAG, "setAperture");
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_aperture ) {
            if( MyDebug.LOG )
                Log.d(TAG, "    aperture: " + aperture);
            builder.set(CaptureRequest.LENS_APERTURE, aperture);
            return true;
        }
        // don't set at all if has_aperture==false
        return false;
    }

    /** Configures auto-exposure: manual ISO/exposure-time when has_iso is set, otherwise
     *  an AE mode chosen from flash_value. Continues in the next hunk.
     */
    @SuppressWarnings("SameReturnValue")
    private boolean setAEMode(CaptureRequest.Builder builder, boolean is_still) {
        if( MyDebug.LOG )
            Log.d(TAG,
                    "setAEMode");

        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
            /*
            // except for low light boost for night mode, if supported
            if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.VANILLA_ICE_CREAM && camera_extension == CameraExtensionCharacteristics.EXTENSION_NIGHT && supports_low_light_boost && !is_still ) {
                builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY);
                return true;
            }*/
            return false;
        }

        if( has_iso ) {
            // manual exposure: AE off, explicit sensitivity and exposure time
            if( MyDebug.LOG ) {
                Log.d(TAG, "manual mode");
                Log.d(TAG, "iso: " + iso);
                Log.d(TAG, "exposure_time: " + exposure_time);
            }
            builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
            builder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);
            long actual_exposure_time = exposure_time;
            if( !is_still ) {
                // if this isn't for still capture, have a max exposure time of 1/12s
                actual_exposure_time = Math.min(exposure_time, max_preview_exposure_time_c);
                if( MyDebug.LOG )
                    Log.d(TAG, "actually using exposure_time of: " + actual_exposure_time);
            }
            builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, actual_exposure_time);
            if (sensor_frame_duration > 0) {
                builder.set(CaptureRequest.SENSOR_FRAME_DURATION, sensor_frame_duration);
            }
            //builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 1000000000L);
            //builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 0L);
            // only need to account for FLASH_MODE_TORCH, otherwise we use fake flash mode for manual ISO
            if( flash_value.equals("flash_torch") ) {
                builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
            }
            else {
                builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
            }
        }
        else {
            if( MyDebug.LOG ) {
                Log.d(TAG, "auto mode");
                Log.d(TAG, "flash_value: " + flash_value);
            }
            if( ae_target_fps_range != null ) {
                // NOTE(review): unlike the surrounding logging, this Log.d is not guarded by
                // MyDebug.LOG - confirm whether the unconditional log is intended
                Log.d(TAG, "set ae_target_fps_range: " + ae_target_fps_range);
                builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, ae_target_fps_range);
            }

            // prefer to set flash via the ae mode (otherwise get even worse results), except for torch which we can't
            switch(flash_value) {
                case "flash_off":
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                    break;
                case "flash_auto":
                    // note we set this even in fake flash mode (where we manually turn torch on and off to simulate flash) so we
                    // can read the FLASH_REQUIRED state to determine if flash is required
                    /*if( use_fake_precapture || CameraController2.this.want_expo_bracketing )
                        builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    else*/
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                    break;
                case "flash_on":
                    // see note above for "flash_auto" for why we set this even fake flash mode - arguably we don't need to know
                    // about FLASH_REQUIRED in flash_on mode, but we set it for consistency...
                    /*if( use_fake_precapture || CameraController2.this.want_expo_bracketing )
                        builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    else*/
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                    break;
                case "flash_torch":
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
                    break;
                case "flash_red_eye":
                    // not supported for expo bracketing or burst
                    if( CameraController2.this.burst_type != BurstType.BURSTTYPE_NONE )
                        builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    else
                        builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                    break;
                case "flash_frontscreen_auto":
                case "flash_frontscreen_on":
                case "flash_frontscreen_torch":
                    // front-"flash" is simulated by brightening the screen, so the real flash stays off
                    //noinspection DuplicateBranchesInSwitch
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                    builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                    break;
            }
        }
        return true;
    }

    /** Applies the stored zoom ratio via CONTROL_ZOOM_RATIO (Android R+ only). */
    private void setControlZoomRatio(CaptureRequest.Builder builder) {
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.R && has_control_zoom_ratio ) {
            builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, control_zoom_ratio);
        }
    }

    /** Applies the stored crop region via SCALER_CROP_REGION on pre-R devices
     *  (on R+ zoom is handled by setControlZoomRatio() instead). Skipped for extensions.
     */
    private void setCropRegion(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( scalar_crop_region != null && Build.VERSION.SDK_INT < Build.VERSION_CODES.R ) {
            builder.set(CaptureRequest.SCALER_CROP_REGION, scalar_crop_region);
        }
    }

    /** Applies the stored AE exposure compensation; no-op in manual ISO mode and for
     *  extension sessions. Continues in the next hunk.
     *  @return true if the builder was modified.
     */
    private boolean setExposureCompensation(CaptureRequest.Builder builder) {
        if( !has_ae_exposure_compensation )
            return false;
        if( has_iso ) {
            if(
                MyDebug.LOG )
                Log.d(TAG, "don't set exposure compensation in manual iso mode");
            return false;
        }
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
            return false;
        }
        // only touch the builder when the value actually differs (or was never set)
        if( builder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) == null || ae_exposure_compensation != builder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "change exposure to " + ae_exposure_compensation);
            builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, ae_exposure_compensation);
            return true;
        }
        return false;
    }

    /** Applies the stored autofocus mode (CONTROL_AF_MODE) when one has been chosen;
     *  otherwise the builder's existing AF mode is left alone. Skipped for extensions. */
    private void setFocusMode(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_af_mode ) {
            if( MyDebug.LOG )
                Log.d(TAG, "change af mode to " + af_mode);
            builder.set(CaptureRequest.CONTROL_AF_MODE, af_mode);
        }
        else {
            if( MyDebug.LOG ) {
                Log.d(TAG, "af mode left at " + builder.get(CaptureRequest.CONTROL_AF_MODE));
            }
        }
    }

    /** Applies the stored manual focus distance (LENS_FOCUS_DISTANCE). Skipped for extensions. */
    private void setFocusDistance(CaptureRequest.Builder builder) {
        if( MyDebug.LOG )
            Log.d(TAG, "change focus distance to " + focus_distance);
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else {
            builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, focus_distance);
        }
    }

    /** Applies the stored auto-exposure lock flag (CONTROL_AE_LOCK). Skipped for extensions. */
    private void setAutoExposureLock(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else {
            builder.set(CaptureRequest.CONTROL_AE_LOCK, ae_lock);
        }
    }

    /** Applies the stored auto-white-balance lock flag (CONTROL_AWB_LOCK). Skipped for extensions. */
    private void setAutoWhiteBalanceLock(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else {
            builder.set(CaptureRequest.CONTROL_AWB_LOCK, wb_lock);
        }
    }

    /** Applies the stored AF metering regions, if set and the device supports at least
     *  one AF region. Skipped for extensions. */
    private void setAFRegions(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( af_regions != null && characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) > 0 ) {
            builder.set(CaptureRequest.CONTROL_AF_REGIONS, af_regions);
        }
    }

    /** Applies the stored AE metering regions, if set and the device supports at least
     *  one AE region. Skipped for extensions. */
    private void setAERegions(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( ae_regions != null && characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0 ) {
            builder.set(CaptureRequest.CONTROL_AE_REGIONS, ae_regions);
        }
    }

    /** Applies the stored face-detection mode, defaulting to OFF when none was chosen.
     *  Skipped for extensions. */
    private void setFaceDetectMode(CaptureRequest.Builder builder) {
        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( has_face_detect_mode )
            builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, face_detect_mode);
        else
            builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
    }

    /** Enables the lens shading map output when RAW capture is wanted (photo mode only). */
    private void setRawMode(CaptureRequest.Builder builder) {
        // DngCreator says "For best quality DNG files, it is strongly recommended that lens shading map output is enabled if supported"
        // docs also say "ON is always supported on devices with the RAW capability", so we don't check for STATISTICS_LENS_SHADING_MAP_MODE_ON being available
        if( want_raw && !previewIsVideoMode ) {
            builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        }
    }

    /** Applies electronic video stabilization (and disables OIS whilst it is enabled).
     *  Continues in the next hunk. */
    private void setStabilization(CaptureRequest.Builder builder) {
        if( MyDebug.LOG )
            Log.d(TAG, "setStabilization: " + video_stabilization);

        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
            return;
        }

        builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, video_stabilization ?
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON : CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
        if( supports_optical_stabilization ) {
            if( video_stabilization ) {
                // should also disable OIS
                if( default_optical_stabilization == null ) {
                    // save the default optical_stabilization
                    default_optical_stabilization = builder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE);
                    if( MyDebug.LOG )
                        Log.d(TAG, "default_optical_stabilization: " + default_optical_stabilization);
                }
                builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
            }
            else if( default_optical_stabilization != null ) {
                // EIS turned off again: restore the saved OIS default
                if( builder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE) != null && !builder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE).equals(default_optical_stabilization) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "set optical stabilization back to: " + default_optical_stabilization);
                    builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, default_optical_stabilization);
                }
            }
        }
    }

    /** Maps a linear value in [0,1] through the log tonemap profile
     *  out = log(1 + A*in) / log(1 + A), with A = log_profile_strength. */
    private float getLogProfile(float in) {
        //final float black_level = 4.0f/255.0f;
        //final float power = 1.0f/2.2f;
        final float log_A = log_profile_strength;
        /*float out;
        if( in <= black_level ) {
            out = in;
        }
        else {
            float in_m = (in - black_level) / (1.0f - black_level);
            out = (float) (Math.log1p(log_A * in_m) / Math.log1p(log_A));
            out = black_level + (1.0f - black_level)*out;
        }*/
        float out = (float) (Math.log1p(log_A * in) / Math.log1p(log_A));

        // apply gamma
        // update: no longer need to do this with improvements made in 1.48 onwards
        //out = (float)Math.pow(out, power);
        //out = Math.max(out, 0.5f);

        return out;
    }

    /** Maps a linear value in [0,1] through the gamma profile: out = in^(1/gamma_profile). */
    private float getGammaProfile(float in) {
        return (float)Math.pow(in, 1.0f/gamma_profile);
    }

    /** Applies the requested tonemap profile (off / REC709 / sRGB / log / gamma) to the
     *  builder, via TONEMAP_PRESET_CURVE where supported, otherwise via an explicit
     *  TONEMAP_CURVE. The device default TONEMAP_MODE is saved the first time a profile
     *  is applied. NOTE(review): this hunk is truncated part-way through (see below). */
    private void setTonemapProfile(CaptureRequest.Builder builder) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setTonemapProfile");
            Log.d(TAG, "tonemap_profile: " + tonemap_profile);
            Log.d(TAG, "log_profile_strength: " + log_profile_strength);
            Log.d(TAG, "gamma_profile: " + gamma_profile);
            Log.d(TAG, "default_tonemap_mode: " + default_tonemap_mode);
        }
        boolean have_tonemap_profile = tonemap_profile != TonemapProfile.TONEMAPPROFILE_OFF;
        // a log/gamma profile with zero strength is equivalent to no profile at all
        if( tonemap_profile == TonemapProfile.TONEMAPPROFILE_LOG && log_profile_strength == 0.0f )
            have_tonemap_profile = false;
        else if( tonemap_profile == TonemapProfile.TONEMAPPROFILE_GAMMA && gamma_profile == 0.0f )
            have_tonemap_profile = false;

        // to use test_new, also need to uncomment the test code in setFocusValue() to call setTonemapProfile()
        //boolean test_new = this.af_mode == CaptureRequest.CONTROL_AF_MODE_AUTO; // testing

        //if( test_new )
        //    have_tonemap_profile = false;

        if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) {
            // don't set for extensions
        }
        else if( have_tonemap_profile ) {
            if( default_tonemap_mode == null ) {
                // save the default tonemap_mode
                default_tonemap_mode = builder.get(CaptureRequest.TONEMAP_MODE);
                if( MyDebug.LOG )
                    Log.d(TAG, "default_tonemap_mode: " + default_tonemap_mode);
            }

            final boolean use_preset_curve = supports_tonemap_preset_curve;
            //final boolean use_preset_curve = false; // test
            if( use_preset_curve && tonemap_profile == TonemapProfile.TONEMAPPROFILE_REC709 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "set TONEMAP_PRESET_CURVE_REC709");
                builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_PRESET_CURVE);
                builder.set(CaptureRequest.TONEMAP_PRESET_CURVE, CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
            }
            else if( use_preset_curve && tonemap_profile == TonemapProfile.TONEMAPPROFILE_SRGB && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "set TONEMAP_PRESET_CURVE_SRGB");
                builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_PRESET_CURVE);
                builder.set(CaptureRequest.TONEMAP_PRESET_CURVE, CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "handle via TONEMAP_MODE_CONTRAST_CURVE / TONEMAP_CURVE");
                float [] values = null;
                switch( tonemap_profile ) {
                    case TONEMAPPROFILE_REC709:
                        // y = 4.5x if x < 0.018, else y = 1.099*x^0.45 - 0.099
                        float [] x_values = new float[] {
                                0.0000f, 0.0667f, 0.1333f, 0.2000f,
                                0.2667f, 0.3333f, 0.4000f, 0.4667f,
                                0.5333f, 0.6000f, 0.6667f, 0.7333f,
                                0.8000f, 0.8667f, 0.9333f, 1.0000f
                        };
                        values = new float[2*x_values.length];
                        int c = 0;
                        for(float x_value : x_values) {
                            float out;
                            if( x_value < 0.018f ) {
                                out = 4.5f * x_value;
                            }
                            else {
                                out = (float)(1.099*Math.pow(x_value, 0.45) - 0.099);
                            }
                            values[c++] = x_value;
                            values[c++] = out;
                        }
                        break;
                    case TONEMAPPROFILE_SRGB:
                        values = new float [] {
                                0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
                                0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
                                0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
                                0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
                        };
                        break;
                    case TONEMAPPROFILE_LOG:
                    case TONEMAPPROFILE_GAMMA:
                    {
                        // better to use uniformly spaced values, otherwise we get a weird looking effect - this can be
                        // seen most prominently when using gamma 1.0f, which should look linear (and hence be independent
                        // of the x values we use)
                        // can be reproduced on at least OnePlus 3T and Galaxy S10e (although the exact behaviour of the
                        // poor results is different on those devices)
                        int n_values = tonemap_log_max_curve_points_c;
                        if( is_samsung ) {
                            // unfortunately odd bug on Samsung devices (at least S7 and S10e) where if more than 32 control points,
                            // the maximum brightness value is reduced (can best be seen with 64 points, and using gamma==1.0)
                            // note that Samsung devices also need at least 16 control points - or in some cases 32, see comments for
                            // enforceMinTonemapCurvePoints().
                            // 32 is better than 16 anyway, as better to have more points for finer curve where possible.
                            n_values = 32;
                        }
                        //int n_values = test_new ? 32 : 128;
                        //int n_values = 32;
                        if( MyDebug.LOG )
                            Log.d(TAG, "n_values: " + n_values);
                        values = new float [2*n_values];
                        // NOTE(review): the patch text is truncated/garbled from here. The body of this
                        // loop, the remainder of setTonemapProfile(), and the start of the temperature ->
                        // RGGB conversion method (which declares 'red', 'green', 'blue' and 'temperature')
                        // are missing from this hunk, and '<' characters appear to have been stripped by
                        // the extraction. The tokens below are preserved exactly as found.
                        for(int i=0;i 255 )
                red = 255;
        }

        if( temperature <= 66 ) {
            // green channel: two fitted curves either side of 6600K (temperature is in units of 100K)
            green = temperature;
            green = (float)(99.4708025861 * Math.log(green) - 161.1195681661);
            if( green < 0 )
                green = 0;
            if( green > 255 )
                green = 255;
        }
        else {
            green = temperature - 60;
            green = (float)(288.1221695283 * (Math.pow(green, -0.0755148492)));
            if (green < 0)
                green = 0;
            if (green > 255)
                green = 255;
        }

        if( temperature >= 66 )
            blue = 255;
        else if( temperature <= 19 )
            blue = 0;
        else {
            blue = temperature - 10;
            blue = (float)(138.5177312231 * Math.log(blue) - 305.0447927307);
            if( blue < 0 )
                blue = 0;
            if( blue > 255 )
                blue = 255;
        }

        if( MyDebug.LOG ) {
            Log.d(TAG, "red: " + red);
            Log.d(TAG, "green: " + green);
            Log.d(TAG, "blue: " + blue);
        }

        // normalise 0..255 channel values to 0..1, then invert to gains
        red = (red/255.0f);
        green = (green/255.0f);
        blue = (blue/255.0f);

        red = RGBtoGain(red);
        green = RGBtoGain(green);
        blue = RGBtoGain(blue);
        if( MyDebug.LOG ) {
            Log.d(TAG, "red gain: " + red);
            Log.d(TAG, "green gain: " + green);
            Log.d(TAG, "blue gain: " + blue);
        }

        // green gain is split equally between the even and odd green channels
        return new float[]{red,green/2,green/2,blue};
    }

    /** Converts a normalised RGB channel value (0..1) to a colour-correction gain
     *  (1/value), clamped to a maximum gain of 10. */
    private static float RGBtoGain(float value) {
        final float max_gain_c = 10.0f;
        if( value < 1.0e-5f ) {
            // avoid division by (near) zero - return the maximum allowed gain
            return max_gain_c;
        }
        value = 1.0f/value;
        value = Math.min(max_gain_c, value);
        return value;
    }

    /** Convenience wrapper: converts an RggbChannelVector to a white balance temperature. */
    public static int convertRggbVectorToTemperature(RggbChannelVector rggbChannelVector) {
        return convertRggbToTemperature(new float[]{rggbChannelVector.getRed(), rggbChannelVector.getGreenEven(), rggbChannelVector.getGreenOdd(), rggbChannelVector.getBlue()});
    }

    /** Converts a red, green even, green odd and blue components to a white balance temperature.
     * Note that this is not necessarily an inverse of convertTemperatureToRggb, since many rggb
     * values can map to the same temperature.
     */
    public static int convertRggbToTemperature(float [] rggb) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "temperature:");
            Log.d(TAG, "    red: " + rggb[0]);
            Log.d(TAG, "    green even: " + rggb[1]);
            Log.d(TAG, "    green odd: " + rggb[2]);
            Log.d(TAG, "    blue: " + rggb[3]);
        }
        float red = rggb[0];
        float green_even = rggb[1];
        float green_odd = rggb[2];
        float blue = rggb[3];
        // the forward conversion splits the green gain across the two green channels,
        // so summing them recovers the full green gain
        float green = (green_even + green_odd);

        // invert gains back to normalised RGB, then scale to 0..255
        red = GaintoRGB(red);
        green = GaintoRGB(green);
        blue = GaintoRGB(blue);

        red *= 255.0f;
        green *= 255.0f;
        blue *= 255.0f;

        int red_i = (int)(red+0.5f);
        int green_i = (int)(green+0.5f);
        int blue_i = (int)(blue+0.5f);
        int temperature;
        if( red_i == blue_i ) {
            // red==blue only happens at the crossover point of the two fitted curves
            temperature = 6600;
        }
        else if( red_i > blue_i ) {
            // temperature <= 6600
            // invert the forward curves: green = 99.47*ln(t) - 161.12, blue = 138.52*ln(t-10) - 305.04
            // (t in units of 100K); average the two estimates where both are available
            float t_g = (float)( 100 * Math.exp((green + 161.1195681661) / 99.4708025861) );
            if( blue_i == 0 ) {
                temperature = (int)(t_g+0.5f);
            }
            else {
                float t_b = (float)( 100 * (Math.exp((blue + 305.0447927307) / 138.5177312231) + 10) );
                temperature = (int)((t_g + t_b)/2+0.5f);
            }
        }
        else {
            // temperature >= 6600
            if( red_i <= 1 || green_i <= 1 ) {
                // power-curve inversion is unstable for tiny channel values - clamp to the maximum
                temperature = max_white_balance_temperature_c;
            }
            else {
                // invert the forward power curves for red and green and average the estimates
                float t_r = (float)(100 * (Math.pow(red / 329.698727446, 1.0 / -0.1332047592) + 60.0));
                float t_g = (float)(100 * (Math.pow(green / 288.1221695283, 1.0 / -0.0755148492) + 60.0));
                temperature = (int)((t_r + t_g)/2+0.5f);
            }
        }
        temperature = Math.max(temperature, min_white_balance_temperature_c);
        temperature = Math.min(temperature, max_white_balance_temperature_c);
        if( MyDebug.LOG ) {
            Log.d(TAG, "    temperature: " + temperature);
        }
        return temperature;
    }

    /** Converts a colour-correction gain back to a normalised RGB channel value (1/gain);
     *  gains of 1 or less map to 1 (channel at full value). */
    private static float GaintoRGB(float value) {
        if( value <= 1.0f ) {
            return 1.0f;
        }
        value = 1.0f/value;
        return value;
    }

    /** Issues the next slow burst capture, on a post delayed on the handler.
     */
    private void postNextSlowBurst() {
        if( MyDebug.LOG )
            Log.d(TAG, "postNextSlowBurst");
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "take picture after delay for next slow burst");
                if( camera != null && hasCaptureSession() ) { // make sure camera wasn't released in the meantime
                    // check for imageQueueWouldBlock needed for focus bracketing
                    if( picture_cb.imageQueueWouldBlock(imageReaderRaw != null ? 1 : 0, 1) ) {
                        if( MyDebug.LOG ) {
                            Log.d(TAG, "...but wait for next bracket, as image queue would block");
                        }
                        // retry shortly rather than dropping the bracket
                        handler.postDelayed(this, 100);
                        //throw new RuntimeException(); // test
                    }
                    else {
                        if( burst_type == BurstType.BURSTTYPE_FOCUS ) {
                            // For focus bracketing mode, we play the shutter sound per shot (so the user can tell when the sequence is complete).
                            // From a user mode, the gap between shots in focus bracketing mode makes this more analogous to the auto-repeat mode
                            // (at the Preview level), which makes the shutter sound per shot.
                            playSound(shutter_click_sound);
                        }
                        try {
                            captureSession.capture(slow_burst_capture_requests.get(n_burst_taken), previewCaptureCallback, handler);
                        }
                        catch(CameraAccessException e) {
                            // abandon the burst and report the error to the caller
                            MyDebug.logStackTrace(TAG, "failed to take next focus bracket", e);
                            jpeg_todo = false;
                            raw_todo = false;
                            picture_cb = null;
                            if( take_picture_error_cb != null ) {
                                take_picture_error_cb.onError();
                                take_picture_error_cb = null;
                            }
                        }
                    }
                }
            }
        }, 500);
    }

    /** Listener for completed JPEG (non-RAW) still images; dispatches single shots,
     *  accumulates burst images, and drives the next step of slow bursts. */
    private class OnImageAvailableListener implements ImageReader.OnImageAvailableListener {
        private boolean skip_next_image = false; // whether to ignore the next image (used for dummy_capture_hack)

        @Override
        public void onImageAvailable(ImageReader reader) {
            if( MyDebug.LOG )
                Log.d(TAG, "new still image available");
            if( picture_cb == null || !jpeg_todo ) {
                // in theory this shouldn't happen - but if this happens, still free the image to avoid risk of memory leak,
                // or strange behaviour where an old image appears when the user next takes a photo
                Log.e(TAG, "no picture callback available");
                Image image = reader.acquireNextImage();
                if( image != null )
                    image.close();
                return;
            }
            if( skip_next_image ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "skipping image");
                skip_next_image = false;
                Image image = reader.acquireNextImage();
                if( image != null )
                    image.close();
                return;
            }

            // NOTE(review): the generic type parameter appears to have been stripped by the
            // patch extraction here - presumably List<byte[]>; confirm against upstream source
            List single_burst_complete_images = null;
            boolean call_takePhotoPartial = false;
            boolean call_takePhotoCompleted = false;

            Image image = reader.acquireNextImage();
            if( image == null ) {
                // can happen if camera closed whilst taking photo - this happens in testTakePhotoAutoFocusReleaseDuringPhoto() on Pixel 6 Pro
                Log.e(TAG, "onImageAvailable: image is null");
                return;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "image timestamp: " + image.getTimestamp());
            // copy the JPEG bytes out so the Image can be closed promptly
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte [] bytes = new byte[buffer.remaining()];
            if( MyDebug.LOG )
                Log.d(TAG, "read " +
                        bytes.length + " bytes");
            buffer.get(bytes);
            image.close();

            synchronized( background_camera_lock ) {
                n_burst_taken++;
                if( MyDebug.LOG ) {
                    Log.d(TAG, "n_burst_taken is now: " + n_burst_taken);
                    Log.d(TAG, "n_burst: " + n_burst);
                    Log.d(TAG, "burst_single_request: " + burst_single_request);
                }
                if( burst_single_request ) {
                    // accumulate images until the whole burst has arrived, then deliver in one callback
                    pending_burst_images.add(bytes);
                    if( MyDebug.LOG ) {
                        Log.d(TAG, "pending_burst_images size is now: " + pending_burst_images.size());
                    }
                    if( pending_burst_images.size() >= n_burst ) { // shouldn't ever be greater, but just in case
                        if( MyDebug.LOG )
                            Log.d(TAG, "all burst images available");
                        if( pending_burst_images.size() > n_burst ) {
                            Log.e(TAG, "pending_burst_images size " + pending_burst_images.size() + " is greater than n_burst " + n_burst);
                        }
                        // take a copy, so that we can clear pending_burst_images
                        single_burst_complete_images = new ArrayList<>(pending_burst_images);
                        // continued below after lock...
                    }
                    else {
                        if( MyDebug.LOG )
                            Log.d(TAG, "number of burst images is now: " + pending_burst_images.size());
                        call_takePhotoPartial = true;
                    }
                }
                // case for burst_single_request==false handled below
            }

            // need to call without a lock
            if( single_burst_complete_images != null ) {
                picture_cb.onBurstPictureTaken(single_burst_complete_images);
            }
            else if( !burst_single_request ) {
                picture_cb.onPictureTaken(bytes);
            }

            synchronized( background_camera_lock ) {
                if( single_burst_complete_images != null ) {
                    pending_burst_images.clear();

                    call_takePhotoCompleted = true;
                }
                else if( !burst_single_request ) {
                    n_burst--;
                    if( MyDebug.LOG )
                        Log.d(TAG, "n_burst is now " + n_burst);
                    if( burst_type == BurstType.BURSTTYPE_CONTINUOUS && !continuous_burst_requested_last_capture ) {
                        // even if n_burst is 0, we don't want to give up if we're still in continuous burst mode
                        // also note if we do have continuous_burst_requested_last_capture==true, we still check for
                        // n_burst==0 below (as there may have been more than one image still to be received)
                        if( MyDebug.LOG )
                            Log.d(TAG, "continuous burst mode still in progress");
                        call_takePhotoPartial = true;
                    }
                    else if( n_burst == 0 ) {
                        call_takePhotoCompleted = true;
                    }
                    else {
                        call_takePhotoPartial = true;
                    }
                }
            }

            // need to call outside of lock (because they can lead to calls to external callbacks)
            if( call_takePhotoPartial ) {
                takePhotoPartial();
            }
            else if( call_takePhotoCompleted ) {
                takePhotoCompleted();
            }

            if( MyDebug.LOG )
                Log.d(TAG, "done onImageAvailable");
        }

        /** Called when an image has been received, but we're in a burst mode, and not all images have
         *  been received.
         */
        private void takePhotoPartial() {
            if( MyDebug.LOG )
                Log.d(TAG, "takePhotoPartial");
            BLOCK_FOR_EXTENSIONS(); // not supported for extension sessions

            ErrorCallback push_take_picture_error_cb = null;

            synchronized( background_camera_lock ) {
                if( slow_burst_capture_requests != null ) {
                    if( MyDebug.LOG ) {
                        Log.d(TAG, "need to execute the next capture");
                        Log.d(TAG, "time since start: " + (System.currentTimeMillis() - slow_burst_start_ms));
                    }
                    if( burst_type != BurstType.BURSTTYPE_FOCUS ) {
                        /*try {
                            if( camera != null && hasCaptureSession() ) { // make sure camera wasn't released in the meantime
                                captureSession.capture(slow_burst_capture_requests.get(n_burst_taken), previewCaptureCallback, handler);
                            }
                        }
                        catch(CameraAccessException e) {
                            MyDebug.logStackTrace(TAG, "failed to take next burst", e);
                            jpeg_todo = false;
                            raw_todo = false;
                            picture_cb = null;
                            push_take_picture_error_cb = take_picture_error_cb;
                        }*/
                        // see note in takePictureBurstBracketing() for why we also set preview for slow burst with expo bracketing -
                        // helps Samsung Galaxy devices
                        if( previewBuilder != null ) { // make sure camera wasn't released in the meantime
                            try {
                                long exposure_time = slow_burst_capture_requests.get(n_burst_taken).get(CaptureRequest.SENSOR_EXPOSURE_TIME);
                                if( MyDebug.LOG ) {
                                    Log.d(TAG, "prepare preview for next exposure: " + exposure_time);
                                }
                                previewBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposure_time);

                                setRepeatingRequest(previewBuilder.build());
                            }
                            catch(CameraAccessException e) {
                                MyDebug.logStackTrace(TAG, "failed to take set exposure for next expo bracketing burst", e);
                                jpeg_todo = false;
                                raw_todo = false;
                                picture_cb = null;
                                push_take_picture_error_cb = take_picture_error_cb;
                            }
                            // the actual capture is issued after a delay, from postNextSlowBurst()
                            postNextSlowBurst();
                        }
                    }
                    else if( previewBuilder != null ) { // make sure camera wasn't released in the meantime
                        if( MyDebug.LOG )
                            Log.d(TAG, "focus bracketing");

                        if( !focus_bracketing_in_progress ) {
                            if( MyDebug.LOG )
                                Log.d(TAG, "focus bracketing was cancelled");
                            // ideally we'd stop altogether, but instead we take one last shot, so that we can mark it with the
                            // RequestTagType.CAPTURE tag, so onCaptureCompleted() is called knowing it's for the last image
                            if( MyDebug.LOG ) {
                                Log.d(TAG, "slow_burst_capture_requests size was: " + slow_burst_capture_requests.size());
                                Log.d(TAG, "n_burst size was: " + n_burst);
                                Log.d(TAG, "n_burst_taken: " + n_burst_taken);
                            }
                            slow_burst_capture_requests.subList(n_burst_taken+1, slow_burst_capture_requests.size()).clear(); // resize to n_burst_taken
                            // if burst_single_request==true, n_burst is constant and we stop when pending_burst_images.size() >= n_burst
                            // if burst_single_request==false, n_burst counts down and we stop when n_burst==0
                            if( burst_single_request ) {
                                n_burst = slow_burst_capture_requests.size();
                                if( n_burst_raw > 0 ) {
                                    n_burst_raw = slow_burst_capture_requests.size();
                                }
                            }
                            else {
                                n_burst = 1;
                                if( n_burst_raw > 0 ) {
                                    n_burst_raw = 1;
                                }
                            }
                            if( MyDebug.LOG ) {
                                Log.d(TAG, "size is now: " + slow_burst_capture_requests.size());
                                Log.d(TAG, "n_burst is now: " + n_burst);
                                Log.d(TAG, "n_burst_raw is now: " + n_burst_raw);
                            }
                            // tag the (now) final request so onCaptureCompleted() treats it as the last image
                            RequestTagObject requestTag = (RequestTagObject)slow_burst_capture_requests.get(slow_burst_capture_requests.size()-1).getTag();
                            requestTag.setType(RequestTagType.CAPTURE);
                        }

                        // code for focus bracketing
                        try {
                            float focus_distance = slow_burst_capture_requests.get(n_burst_taken).get(CaptureRequest.LENS_FOCUS_DISTANCE);
                            if( MyDebug.LOG ) {
                                Log.d(TAG, "prepare preview for next focus_distance: " + focus_distance);
                            }
                            previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF);
                            previewBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, focus_distance);

                            setRepeatingRequest(previewBuilder.build());
                        }
                        catch(CameraAccessException e) {
                            MyDebug.logStackTrace(TAG, "failed to take set focus distance for next focus bracketing burst", e);
                            jpeg_todo = false;
                            raw_todo = false;
                            picture_cb = null;
                            push_take_picture_error_cb = take_picture_error_cb;
                        }
                        postNextSlowBurst();
                    }
                }
            }

            // need to call callbacks without a lock
            if( push_take_picture_error_cb != null ) {
                push_take_picture_error_cb.onError();
            }
        }

        /** Called when an image has been received, but either we're not in a burst mode, or we are
         *  but all images have been received.
         */
        private void takePhotoCompleted() {
            if( MyDebug.LOG )
                Log.d(TAG, "takePhotoCompleted");
            // need to set jpeg_todo to false before calling onCompleted, as that may reenter CameraController to take another photo (if in auto-repeat burst mode) - see testTakePhotoRepeat()
            synchronized( background_camera_lock ) {
                jpeg_todo = false;
            }
            checkImagesCompleted();
        }
    }

    /** Listener for RAW still images: pairs each Image with its CaptureResult before
     *  processing (DngCreator needs both). Continues beyond this hunk.
     *  NOTE(review): the Queue generic parameters appear to have been stripped by the
     *  patch extraction - presumably Queue<CaptureResult> and Queue<Image>; confirm
     *  against upstream source. */
    private class OnRawImageAvailableListener implements ImageReader.OnImageAvailableListener {
        private final Queue capture_results = new LinkedList<>();
        private final Queue images = new LinkedList<>();
        private boolean skip_next_image = false; // whether to ignore the next image (used for dummy_capture_hack)

        void setCaptureResult(CaptureResult capture_result) {
            if( MyDebug.LOG )
                Log.d(TAG, "setCaptureResult()");
            synchronized( background_camera_lock ) {
                /* synchronize, as we don't want to set the capture_result, at the same time that onImageAvailable() is called, as
                 * we'll end up calling processImage() both in onImageAvailable() and here.
                 */
                this.capture_results.add(capture_result);
                if( !images.isEmpty() ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "can now process the image");
                    // should call processImage() on UI thread, to be consistent with onImageAvailable()->processImage()
                    // important to avoid crash when pause preview is option, tested in testTakePhotoRawWaitCaptureResult()
                    final Activity activity = (Activity)context;
                    activity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            if( MyDebug.LOG )
                                Log.d(TAG, "setCaptureResult UI thread call processImage()");
                            // n.b., intentionally don't set the lock again
                            processImage();
                        }
                    });
                }
            }
        }

        void clear() {
            if( MyDebug.LOG )
                Log.d(TAG, "clear()");
            synchronized( background_camera_lock ) {
                // synchronize just to be safe?
+ capture_results.clear(); + images.clear(); + } + } + + private void processImage() { + if( MyDebug.LOG ) + Log.d(TAG, "processImage()"); + + List single_burst_complete_images = null; + boolean call_takePhotoCompleted = false; + DngCreator dngCreator; + CaptureResult capture_result; + Image image; + + synchronized( background_camera_lock ) { + if( capture_results.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't yet have still_capture_result"); + return; + } + if( images.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't have image?!"); + return; + } + capture_result = capture_results.remove(); + image = images.remove(); + if( MyDebug.LOG ) { + Log.d(TAG, "now have all info to process raw image"); + Log.d(TAG, "image timestamp: " + image.getTimestamp()); + } + dngCreator = new DngCreator(characteristics, capture_result); + // set fields + dngCreator.setOrientation(camera_settings.getExifOrientation()); + if( camera_settings.location != null ) { + dngCreator.setLocation(camera_settings.location); + } + + if( n_burst_total == 1 && burst_type != BurstType.BURSTTYPE_CONTINUOUS ) { + // Rather than call onRawPictureTaken straight away, we set pending_raw_image so that + // it's called in checkImagesCompleted, to ensure the RAW callback is taken after the JPEG callback. + // This isn't required, but can give an appearance of better performance to the user, as the thumbnail + // animation for a photo having been taken comes from the JPEG. + // We don't do this for burst mode, as it would get too complicated trying to enforce an ordering... 
+ pending_raw_image = new RawImage(dngCreator, image); + } + else if( burst_single_request ) { + pending_burst_images_raw.add(new RawImage(dngCreator, image)); + if( MyDebug.LOG ) { + Log.d(TAG, "pending_burst_images_raw size is now: " + pending_burst_images_raw.size()); + } + if( pending_burst_images_raw.size() >= n_burst_raw ) { // shouldn't ever be greater, but just in case + if( MyDebug.LOG ) + Log.d(TAG, "all raw burst images available"); + if( pending_burst_images_raw.size() > n_burst_raw ) { + Log.e(TAG, "pending_burst_images_raw size " + pending_burst_images_raw.size() + " is greater than n_burst_raw " + n_burst_raw); + } + // take a copy, so that we can clear pending_burst_images_raw + single_burst_complete_images = new ArrayList<>(pending_burst_images_raw); + // continued below after lock... + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "number of raw burst images is now: " + pending_burst_images_raw.size()); + } + } + // case for burst_single_request==false handled below + } + + if( pending_raw_image != null ) { + //takePendingRaw(); // test not waiting for JPEG callback + + checkImagesCompleted(); + } + else { + // burst-only code + // need to call without a lock + if( single_burst_complete_images != null ) { + picture_cb.onRawBurstPictureTaken(single_burst_complete_images); + } + else if( !burst_single_request ) { + picture_cb.onRawPictureTaken(new RawImage(dngCreator, image)); + } + + synchronized( background_camera_lock ) { + if( single_burst_complete_images != null ) { + pending_burst_images_raw.clear(); + + call_takePhotoCompleted = true; + } + else if( !burst_single_request ) { + n_burst_raw--; + if( MyDebug.LOG ) + Log.d(TAG, "n_burst_raw is now " + n_burst_raw); + if( burst_type == BurstType.BURSTTYPE_CONTINUOUS && !continuous_burst_requested_last_capture ) { + // even if n_burst_raw is 0, we don't want to give up if we're still in continuous burst mode + // also note if we do have continuous_burst_requested_last_capture==true, we still check 
for + // n_burst_raw==0 below (as there may have been more than one image still to be received) + if( MyDebug.LOG ) + Log.d(TAG, "continuous burst mode still in progress"); + } + else if( n_burst_raw == 0 ) { + call_takePhotoCompleted = true; + } + } + } + + // need to call outside of lock (because they can lead to calls to external callbacks) + if( call_takePhotoCompleted ) { + synchronized( background_camera_lock ) { + raw_todo = false; + } + checkImagesCompleted(); + } + } + + if( MyDebug.LOG ) + Log.d(TAG, "done processImage"); + } + + @Override + public void onImageAvailable(ImageReader reader) { + if( MyDebug.LOG ) + Log.d(TAG, "new still raw image available"); + if( picture_cb == null || !raw_todo ) { + // in theory this shouldn't happen - but if this happens, still free the image to avoid risk of memory leak, + // or strange behaviour where an old image appears when the user next takes a photo + Log.e(TAG, "no picture callback available"); + Image this_image = reader.acquireNextImage(); + if( this_image != null ) + this_image.close(); + return; + } + if( skip_next_image ) { + if( MyDebug.LOG ) + Log.d(TAG, "skipping image"); + skip_next_image = false; + Image image = reader.acquireNextImage(); + if( image != null ) + image.close(); + return; + } + synchronized( background_camera_lock ) { + // see comment above in setCaptureResult() for why we synchronize + Image image = reader.acquireNextImage(); + if( image == null ) { + Log.e(TAG, "RAW onImageAvailable: image is null"); + return; + } + images.add(image); + } + processImage(); + if( MyDebug.LOG ) + Log.d(TAG, "done (RAW) onImageAvailable"); + } + } + + private final CameraSettings camera_settings = new CameraSettings(); + private boolean push_repeating_request_when_torch_off = false; + private CaptureRequest push_repeating_request_when_torch_off_id = null; + /*private boolean push_set_ae_lock = false; + private CaptureRequest push_set_ae_lock_id = null;*/ + + private CaptureRequest 
fake_precapture_turn_on_torch_id = null; // the CaptureRequest used to turn on torch when starting the "fake" precapture + + @Override + public void onError() { + Log.e(TAG, "onError"); + if( camera != null ) { + onError(camera); + } + } + + private void onError(@NonNull CameraDevice cam) { + Log.e(TAG, "onError"); + boolean camera_already_opened = this.camera != null; + // need to set the camera to null first, as closing the camera may take some time, and we don't want any other operations to continue (if called from main thread) + this.camera = null; + if( MyDebug.LOG ) + Log.d(TAG, "onError: camera is now set to null"); + cam.close(); + if( MyDebug.LOG ) + Log.d(TAG, "onError: camera is now closed"); + + if( camera_already_opened ) { + // need to communicate the problem to the application + // n.b., as this is potentially serious error, we always log even if MyDebug.LOG is false + Log.e(TAG, "error occurred after camera was opened"); + // important to run on UI thread to avoid synchronisation issues in the Preview + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "onError: call camera_error_cb.onError() on UI thread"); + camera_error_cb.onError(); + } + }); + } + } + + /** Opens the camera device. + * @param context Application context. + * @param cameraId Which camera to open (must be between 0 and CameraControllerManager2.getNumberOfCameras()-1). + * @param cameraIdSPhysical If non-null, specifies a physical camera to use (must be a member of CameraFeatures.physical_camera_ids for this camera or the corresponding logical camera) + * @param camera_features_caches This should be supplied as an initially empty map, which CameraController2 can use to improve performance on subsequent creations of CameraController2. + * The same camera_features_caches should be supplied to future new CameraController2 objects in order to benefit. 
+ * @param preview_error_cb onError() will be called if the preview stops due to error. + * @param camera_error_cb onError() will be called if the camera closes due to serious error. No more calls to the CameraController2 object should be made (though a new one can be created, to try reopening the camera). + * @throws CameraControllerException if the camera device fails to open. + */ + public CameraController2(Context context, int cameraId, String cameraIdSPhysical, Map camera_features_caches, final ErrorCallback preview_error_cb, final ErrorCallback camera_error_cb) throws CameraControllerException { + super(cameraId); + if( MyDebug.LOG ) { + Log.d(TAG, "create new CameraController2: " + cameraId + " / " + cameraIdSPhysical); + Log.d(TAG, "this: " + this); + } + + this.camera_features_caches = camera_features_caches; + this.cameraIdSPhysical = cameraIdSPhysical; + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + this.previewExtensionCaptureCallback = new MyExtensionCaptureCallback(); + } + else { + this.previewExtensionCaptureCallback = null; + } + + this.context = context; + this.preview_error_cb = preview_error_cb; + this.camera_error_cb = camera_error_cb; + + //this.is_oneplus = Build.MANUFACTURER.toLowerCase(Locale.US).contains("oneplus"); + this.is_samsung = Build.MANUFACTURER.toLowerCase(Locale.US).contains("samsung"); + this.is_samsung_s7 = Build.MODEL.toLowerCase(Locale.US).contains("sm-g93"); + this.is_samsung_galaxy_s = is_samsung && ( Build.MODEL.toLowerCase(Locale.US).contains("sm-g") || Build.MODEL.toLowerCase(Locale.US).contains("sm-s") ); + this.is_samsung_galaxy_f = is_samsung && Build.MODEL.toLowerCase(Locale.US).contains("sm-f"); + if( MyDebug.LOG ) { + Log.d(TAG, "is_samsung: " + is_samsung); + Log.d(TAG, "is_samsung_s7: " + is_samsung_s7); + Log.d(TAG, "is_samsung_galaxy_s: " + is_samsung_galaxy_s); + Log.d(TAG, "is_samsung_galaxy_f: " + is_samsung_galaxy_f); + } + + thread = new HandlerThread("CameraBackground"); + thread.start(); + 
handler = new Handler(thread.getLooper()); + executor = new Executor() { + @Override + public void execute(Runnable command) { + handler.post(command); + } + }; + + final CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE); + + class MyStateCallback extends CameraDevice.StateCallback { + boolean callback_done; // must synchronize on this and notifyAll when setting to true + boolean first_callback = true; // Google Camera says we may get multiple callbacks, but only the first indicates the status of the camera opening operation + @Override + public void onOpened(@NonNull CameraDevice cam) { + if( MyDebug.LOG ) + Log.d(TAG, "camera opened, first_callback? " + first_callback); + /*if( true ) // uncomment to test timeout code + return;*/ + if( first_callback ) { + first_callback = false; + + try { + // we should be able to get characteristics at any time, but Google Camera only does so when camera opened - so do so similarly to be safe + if( MyDebug.LOG ) + Log.d(TAG, "try to get camera characteristics"); + characteristics = manager.getCameraCharacteristics(cameraIdSPhysical != null ? 
cameraIdSPhysical : cameraIdS); + if( MyDebug.LOG ) + Log.d(TAG, "successfully obtained camera characteristics"); + // now read cached values + characteristics_sensor_orientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); + + switch( characteristics.get(CameraCharacteristics.LENS_FACING) ) { + case CameraMetadata.LENS_FACING_FRONT: + characteristics_facing = Facing.FACING_FRONT; + break; + case CameraMetadata.LENS_FACING_BACK: + characteristics_facing = Facing.FACING_BACK; + break; + case CameraMetadata.LENS_FACING_EXTERNAL: + characteristics_facing = Facing.FACING_EXTERNAL; + break; + default: + Log.e(TAG, "unknown camera_facing: " + characteristics.get(CameraCharacteristics.LENS_FACING)); + characteristics_facing = Facing.FACING_UNKNOWN; + break; + } + + if( MyDebug.LOG ) { + Log.d(TAG, "characteristics_sensor_orientation: " + characteristics_sensor_orientation); + Log.d(TAG, "characteristics_facing: " + characteristics_facing); + } + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && cameraIdSPhysical == null ) { + // n.b., getCameraExtensionCharacteristics is documented as saying this must be the standalone cameraID that can be directly opened with openCamera() + // however on Pixel 6 Pro at least, night mode extension only ever uses the wide camera, even if telephoto or ultrawide is set as a physical camera, + // so don't support for now + extension_characteristics = manager.getCameraExtensionCharacteristics(cameraIdS); + if( MyDebug.LOG ) + Log.d(TAG, "successfully obtained camera characteristics"); + + // if we update the key used for camera_features_caches, remember to also update the code + // for adding to the camera_features_caches + camera_features_cache = camera_features_caches.get(cameraIdS); + } + + CameraController2.this.camera = cam; + + // note, this won't start the preview yet, but we create the previewBuilder in order to start setting camera parameters + createPreviewRequest(); + } + catch(CameraAccessException e) { + 
MyDebug.logStackTrace(TAG, "failed to get camera characteristics", e); + // don't throw CameraControllerException here - instead error is handled by setting callback_done to callback_done, and the fact that camera will still be null + } + + if( MyDebug.LOG ) + Log.d(TAG, "about to synchronize to say callback done"); + synchronized( open_camera_lock ) { + callback_done = true; + if( MyDebug.LOG ) + Log.d(TAG, "callback done, about to notify"); + open_camera_lock.notifyAll(); + if( MyDebug.LOG ) + Log.d(TAG, "callback done, notify done"); + } + } + } + + @Override + public void onClosed(@NonNull CameraDevice cam) { + if( MyDebug.LOG ) + Log.d(TAG, "camera closed, first_callback? " + first_callback); + // caller should ensure camera variables are set to null + if( first_callback ) { + first_callback = false; + } + } + + @Override + public void onDisconnected(@NonNull CameraDevice cam) { + if( MyDebug.LOG ) + Log.d(TAG, "camera disconnected, first_callback? " + first_callback); + if( first_callback ) { + first_callback = false; + // must call close() if disconnected before camera was opened + // need to set the camera to null first, as closing the camera may take some time, and we don't want any other operations to continue (if called from main thread) + CameraController2.this.camera = null; + if( MyDebug.LOG ) + Log.d(TAG, "onDisconnected: camera is now set to null"); + cam.close(); + if( MyDebug.LOG ) + Log.d(TAG, "onDisconnected: camera is now closed"); + if( MyDebug.LOG ) + Log.d(TAG, "about to synchronize to say callback done"); + synchronized( open_camera_lock ) { + callback_done = true; + if( MyDebug.LOG ) + Log.d(TAG, "callback done, about to notify"); + open_camera_lock.notifyAll(); + if( MyDebug.LOG ) + Log.d(TAG, "callback done, notify done"); + } + } + } + + @Override + public void onError(@NonNull CameraDevice cam, int error) { + // n.b., as this is potentially serious error, we always log even if MyDebug.LOG is false + Log.e(TAG, "camera error: " + 
error); + if( MyDebug.LOG ) { + Log.d(TAG, "received camera: " + cam); + Log.d(TAG, "actual camera: " + CameraController2.this.camera); + Log.d(TAG, "first_callback? " + first_callback); + } + if( first_callback ) { + first_callback = false; + } + CameraController2.this.onError(cam); + if( MyDebug.LOG ) + Log.d(TAG, "about to synchronize to say callback done"); + synchronized( open_camera_lock ) { + callback_done = true; + if( MyDebug.LOG ) + Log.d(TAG, "callback done, about to notify"); + open_camera_lock.notifyAll(); + if( MyDebug.LOG ) + Log.d(TAG, "callback done, notify done"); + } + } + } + final MyStateCallback myStateCallback = new MyStateCallback(); + + try { + if( MyDebug.LOG ) + Log.d(TAG, "get camera id list"); + this.cameraIdS = manager.getCameraIdList()[cameraId]; + if( MyDebug.LOG ) + Log.d(TAG, "about to open camera: " + cameraIdS); + manager.openCamera(cameraIdS, myStateCallback, handler); + if( MyDebug.LOG ) + Log.d(TAG, "open camera request complete"); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to open camera: CameraAccessException", e); + throw new CameraControllerException(); + } + catch(UnsupportedOperationException e) { + // Google Camera catches UnsupportedOperationException + MyDebug.logStackTrace(TAG, "failed to open camera: UnsupportedOperationException", e); + throw new CameraControllerException(); + } + catch(SecurityException e) { + // Google Camera catches SecurityException + MyDebug.logStackTrace(TAG, "failed to open camera: SecurityException", e); + throw new CameraControllerException(); + } + catch(IllegalArgumentException e) { + // have seen this from Google Play + MyDebug.logStackTrace(TAG, "failed to open camera: IllegalArgumentException", e); + throw new CameraControllerException(); + } + catch(ArrayIndexOutOfBoundsException e) { + // Have seen this from Google Play - even though the Preview should have checked the + // cameraId is within the valid range! 
Although potentially this could happen if + // getCameraIdList() returns an empty list. + MyDebug.logStackTrace(TAG, "failed to open camera: ArrayIndexOutOfBoundsException", e); + throw new CameraControllerException(); + } + + // set up a timeout - sometimes if the camera has got in a state where it can't be opened until after a reboot, we'll never even get a myStateCallback callback called + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "check if camera has opened in reasonable time: " + this); + synchronized( open_camera_lock ) { + if( MyDebug.LOG ) { + Log.d(TAG, "synchronized on open_camera_lock"); + Log.d(TAG, "callback_done: " + myStateCallback.callback_done); + } + if( !myStateCallback.callback_done ) { + // n.b., as this is potentially serious error, we always log even if MyDebug.LOG is false + Log.e(TAG, "timeout waiting for camera callback"); + myStateCallback.first_callback = true; + myStateCallback.callback_done = true; + open_camera_lock.notifyAll(); + } + } + } + }, 10000); + + if( MyDebug.LOG ) + Log.d(TAG, "wait until camera opened..."); + // need to wait until camera is opened + // whilst this blocks, this should be running on a background thread anyway (see Preview.openCamera()) - due to maintaining + // compatibility with the way the old camera API works, it's easier to handle running on a background thread at a higher level, + // rather than exiting here + synchronized( open_camera_lock ) { + while( !myStateCallback.callback_done ) { + try { + // release the lock, and wait until myStateCallback calls notifyAll() + open_camera_lock.wait(); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "interrupted while waiting until camera opened", e); + } + } + } + if( camera == null ) { + // n.b., as this is potentially serious error, we always log even if MyDebug.LOG is false + Log.e(TAG, "camera failed to open"); + throw new CameraControllerException(); + } + if( MyDebug.LOG ) + 
Log.d(TAG, "camera now opened: " + camera); + + /*{ + // test error handling on background thread + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "test camera error"); + myStateCallback.onError(camera, CameraDevice.StateCallback.ERROR_CAMERA_DEVICE); + } + }, 5000); + }*/ + + /*CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIdSPhysical != null ? cameraIdSPhysical : cameraIdS); + StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + android.util.Size [] camera_picture_sizes = configs.getOutputSizes(ImageFormat.JPEG); + imageReader = ImageReader.newInstance(camera_picture_sizes[0].getWidth(), , ImageFormat.JPEG, 2);*/ + + // preload sounds to reduce latency - important so that START_VIDEO_RECORDING sound doesn't play after video has started (which means it'll be heard in the resultant video) + media_action_sound.load(MediaActionSound.START_VIDEO_RECORDING); + media_action_sound.load(MediaActionSound.STOP_VIDEO_RECORDING); + media_action_sound.load(MediaActionSound.SHUTTER_CLICK); + // Samsung Galaxy devices have bug where MediaActionSound always plays at 100% volume - the SHUTTER_CLICK sounds + // really harsh/loud, so the video recording beep reduces this problem + shutter_click_sound = is_samsung ? MediaActionSound.START_VIDEO_RECORDING : MediaActionSound.SHUTTER_CLICK; + + // expand tonemap curves + jtvideo_values = enforceMinTonemapCurvePoints(jtvideo_values_base); + jtlog_values = enforceMinTonemapCurvePoints(jtlog_values_base); + jtlog2_values = enforceMinTonemapCurvePoints(jtlog2_values_base); + } + + /** Closes the captureSession, if it exists. 
+ */ + private void closeCaptureSession() { + synchronized( background_camera_lock ) { + if( captureSession != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "close capture session"); + captureSession.close(); + captureSession = null; + } + if( extensionSession != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "close extension session"); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + try { + extensionSession.close(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to close extensionSession", e); + } + } + extensionSession = null; + } + } + } + + @Override + public void release() { + if( MyDebug.LOG ) + Log.d(TAG, "release: " + this); + closeCaptureSession(); + CameraDevice camera_to_close = this.camera; + synchronized( background_camera_lock ) { + // set all to null straight away, as this can be called on background thread, but also + // don't want to be in an incomplete state for other threads where camera is non-null but + // previewBuilder is null + previewBuilder = null; + previewIsVideoMode = false; + camera = null; + } + if( camera_to_close != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "close camera: " + camera_to_close); + camera_to_close.close(); + if( MyDebug.LOG ) + Log.d(TAG, "close camera complete: " + camera_to_close); + } + closePictureImageReader(); + /*if( previewImageReader != null ) { + previewImageReader.close(); + previewImageReader = null; + }*/ + if( thread != null ) { + // should only close thread after closing the camera, otherwise we get messages "sending message to a Handler on a dead thread" + // see https://sourceforge.net/p/opencamera/discussion/general/thread/32c2b01b/?limit=25 + thread.quitSafely(); + try { + thread.join(); + thread = null; + handler = null; + executor = null; + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "InterruptedException", e); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "release exit: " + this); + } + + /** Enforce a minimum number of points in tonemap curves - 
needed due to Galaxy S10e having wrong behaviour if fewer + * than 16 or in some cases 32 points?! OnePlus 3T meanwhile has more gradual behaviour where it gets better at 64 points. + */ + private float [] enforceMinTonemapCurvePoints(float[] in_values) { + if( MyDebug.LOG ) { + Log.d(TAG, "enforceMinTonemapCurvePoints: " + Arrays.toString(in_values)); + Log.d(TAG, "length: " + in_values.length/2); + } + int min_points_c = 64; + if( is_samsung ) { + // Unfortunately odd bug on Samsung devices (at least S7 and S10e) where if more than 32 control points, + // the maximum brightness value is reduced (can best be seen with 64 points, and using gamma==1.0). + // Also note that Samsung devices also need at least 16 control points, or in some cases 32, due to problem + // where things come out almost all black with some white. So choose 32! + //min_points_c = 16; + min_points_c = 32; + } + if( MyDebug.LOG ) + Log.d(TAG, "min_points_c: " + min_points_c); + if( in_values.length >= 2*min_points_c ) { + if( MyDebug.LOG ) + Log.d(TAG, "already enough points"); + return in_values; // fine + } + List> points = new ArrayList<>(); + for(int i=0;i point = new Pair<>(in_values[2*i], in_values[2*i+1]); + points.add(point); + } + if( points.size() < 2 ) { + Log.e(TAG, "less than 2 points?!"); + return in_values; + } + + while( points.size() < min_points_c ) { + // find largest interval, and subdivide + int largest_indx = 0; + float largest_dist = 0.0f; + for(int i=0;i p0 = points.get(i); + Pair p1 = points.get(i+1); + float dist = p1.first - p0.first; + if( dist > largest_dist ) { + largest_indx = i; + largest_dist = dist; + } + } + /*if( MyDebug.LOG ) + Log.d(TAG, "largest indx " + largest_indx + " dist: " + largest_dist);*/ + Pair p0 = points.get(largest_indx); + Pair p1 = points.get(largest_indx+1); + float mid_x = 0.5f*(p0.first + p1.first); + float mid_y = 0.5f*(p0.second + p1.second); + /*if( MyDebug.LOG ) + Log.d(TAG, " insert: " + mid_x + " , " + mid_y);*/ + 
points.add(largest_indx+1, new Pair<>(mid_x, mid_y)); + } + + float [] out_values = new float[2*points.size()]; + for(int i=0;i point = points.get(i); + out_values[2*i] = point.first; + out_values[2*i+1] = point.second; + /*if( MyDebug.LOG ) + Log.d(TAG, "out point[" + i + "]: " + point.first + " , " + point.second);*/ + } + return out_values; + } + + private void closePictureImageReader() { + if( MyDebug.LOG ) + Log.d(TAG, "closePictureImageReader()"); + if( imageReader != null ) { + imageReader.close(); + imageReader = null; + onImageAvailableListener = null; + } + if( imageReaderRaw != null ) { + imageReaderRaw.close(); + imageReaderRaw = null; + onRawImageAvailableListener = null; + } + } + + private List convertFocusModesToValues(int [] supported_focus_modes_arr) { + if( MyDebug.LOG ) { + Log.d(TAG, "convertFocusModesToValues()"); + Log.d(TAG, "supported_focus_modes_arr: " + Arrays.toString(supported_focus_modes_arr)); + } + if( supported_focus_modes_arr.length == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "no supported focus modes"); + return null; + } + List supported_focus_modes = new ArrayList<>(); + for(Integer supported_focus_mode : supported_focus_modes_arr) + supported_focus_modes.add(supported_focus_mode); + List output_modes = new ArrayList<>(); + // also resort as well as converting + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO) ) { + output_modes.add("focus_mode_auto"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_auto"); + } + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_MACRO) ) { + output_modes.add("focus_mode_macro"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_macro"); + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO) ) { + output_modes.add("focus_mode_locked"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_locked"); + } + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_OFF) ) { + 
output_modes.add("focus_mode_infinity"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_infinity"); + } + if( minimum_focus_distance > 0.0f ) { + output_modes.add("focus_mode_manual2"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports focus_mode_manual2"); + } + } + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_EDOF) ) { + output_modes.add("focus_mode_edof"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_edof"); + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) ) { + output_modes.add("focus_mode_continuous_picture"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_continuous_picture"); + } + if( supported_focus_modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) ) { + output_modes.add("focus_mode_continuous_video"); + if( MyDebug.LOG ) + Log.d(TAG, " supports focus_mode_continuous_video"); + } + return output_modes; + } + + public String getAPI() { + return "Camera2 (Android L)"; + } + + /** Computes the zoom ratios to use, for devices that support zoom. + * @param ratios List to be filled with zoom ratios. + * @param min_zoom Minimum zoom supported. + * @param max_zoom Maximum zoom supported. + * @return Index of ratios list that is for 1x zoom. 
+ */ + public static int computeZoomRatios(List ratios, float min_zoom, float max_zoom) { + int zoom_value_1x; + + // prepare zoom rations > 1x + // set 40 steps per 2x factor + final double scale_factor_c = 1.0174796921026863936352862847966; + List zoom_ratios_above_one = new ArrayList<>(); + double zoom = scale_factor_c; + while( zoom < max_zoom - 1.0e-5f ) { + int zoom_ratio = (int)(zoom*100+1.0e-5); + zoom_ratios_above_one.add(zoom_ratio); + zoom *= scale_factor_c; + } + int max_zoom_ratio = (int)(max_zoom*100); + if( zoom_ratios_above_one.isEmpty() || zoom_ratios_above_one.get(zoom_ratios_above_one.size()-1) != max_zoom_ratio ) { + zoom_ratios_above_one.add(max_zoom_ratio); + } + int n_steps_above_one = zoom_ratios_above_one.size(); + if( MyDebug.LOG ) { + Log.d(TAG, "n_steps_above_one: " + n_steps_above_one); + } + + // now populate full zoom ratios + + // add minimum zoom + ratios.add((int)(min_zoom*100)); + if( ratios.get(0)/100.0f < min_zoom ) { + // fix for rounding down to less than the min_zoom + // e.g. 
if min_zoom = 0.666, we'd have stored a zoom ratio of 66 which then would + // convert back to 0.66 + ratios.set(0, ratios.get(0) + 1); + } + + if( ratios.get(0) < 100 ) { + int n_steps_below_one = Math.max(1, n_steps_above_one/5); + // if the min zoom is < 1.0, we add multiple entries for 1x zoom, when using the zoom + // seekbar it's easy for the user to zoom to exactly 1x + int n_steps_one = Math.max(1, n_steps_above_one/10); + if( MyDebug.LOG ) { + Log.d(TAG, "n_steps_below_one: " + n_steps_below_one); + Log.d(TAG, "n_steps_one: " + n_steps_one); + } + + // add rest of zoom values < 1.0f + zoom = min_zoom; + final double scale_factor = Math.pow(1.0f / min_zoom, 1.0/(double)n_steps_below_one); + if( MyDebug.LOG ) { + Log.d(TAG, "scale_factor for below 1.0x: " + scale_factor); + } + for(int i=0;i ratios.get(0) ) { + // on some devices (e.g., Pixel 6 Pro), the second entry would equal the first entry, due to the rounding fix above + ratios.add(zoom_ratio); + } + } + + // add values for 1.0f (we add repeated values so for cameras with min_zoom < 1x, the zoom seekbar will snap to 1x) + zoom_value_1x = ratios.size(); + for(int i=0;i 1.0f + int n_steps_power_two = Math.max(1, (int)(0.5f+n_steps_above_one/15.0f)); + if( MyDebug.LOG ) { + Log.d(TAG, "n_steps_power_two: " + n_steps_power_two); + } + for(int zoom_ratio : zoom_ratios_above_one) { + ratios.add(zoom_ratio); + + if( zoom_ratio != zoom_ratios_above_one.get(zoom_ratios_above_one.size()-1) && zoom_ratio % 100 == 0 ) { + int zoom_ratio_int = zoom_ratio/100; + if( zoom_ratio_int != 0 && (zoom_ratio_int & (zoom_ratio_int-1)) == 0 ) { + // is power of 2 that isn't the max zoom + for(int i=0;i= Build.VERSION_CODES.R ) { + Capability [] capabilities = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES); + Log.d(TAG, "capabilities:"); + if( capabilities == null ) { + Log.d(TAG, " none"); + } + else { + for(int i=0;i= Build.VERSION_CODES.R ) { + // use CONTROL_ZOOM_RATIO_RANGE 
on Android 11+, to support multiple cameras with zoom ratios + // less than 1 + try { + Range zoom_ratio_range = characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); + if( zoom_ratio_range != null ) { + min_zoom = zoom_ratio_range.getLower(); + max_zoom = zoom_ratio_range.getUpper(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "zoom_ratio_range not supported"); + } + } + catch(AssertionError e) { + // have had this crash from characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) on Google Play for some older Samsung Galaxy A* and Nokia devices + MyDebug.logStackTrace(TAG, "failed to CONTROL_ZOOM_RATIO_RANGE", e); + } + } + else { + min_zoom = 1.0f; + max_zoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + } + camera_features.is_zoom_supported = max_zoom > 0.0f && min_zoom > 0.0f; + if( MyDebug.LOG ) { + Log.d(TAG, "min_zoom: " + min_zoom); + Log.d(TAG, "max_zoom: " + max_zoom); + } + if( camera_features.is_zoom_supported ) { + + List ratios = new ArrayList<>(); + this.zoom_value_1x = computeZoomRatios(ratios, min_zoom, max_zoom); + + camera_features.zoom_ratios = ratios; + camera_features.max_zoom = camera_features.zoom_ratios.size()-1; + this.zoom_ratios = camera_features.zoom_ratios; + if( MyDebug.LOG ) { + Log.d(TAG, "zoom_ratios: " + zoom_ratios); + } + } + else { + this.zoom_ratios = null; + } + + int [] face_modes = characteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); + camera_features.supports_face_detection = false; + supports_face_detect_mode_simple = false; + supports_face_detect_mode_full = false; + for(int face_mode : face_modes) { + if( MyDebug.LOG ) + Log.d(TAG, "face detection mode: " + face_mode); + // we currently only make use of the "SIMPLE" features, documented as: + // "Return face rectangle and confidence values only." 
+ // note that devices that support STATISTICS_FACE_DETECT_MODE_FULL (e.g., Nexus 6) don't return + // STATISTICS_FACE_DETECT_MODE_SIMPLE in the list, so we have check for either + if( face_mode == CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_SIMPLE ) { + camera_features.supports_face_detection = true; + supports_face_detect_mode_simple = true; + if( MyDebug.LOG ) + Log.d(TAG, "supports simple face detection mode"); + } + else if( face_mode == CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_FULL ) { + camera_features.supports_face_detection = true; + supports_face_detect_mode_full = true; + if( MyDebug.LOG ) + Log.d(TAG, "supports full face detection mode"); + } + } + if( camera_features.supports_face_detection ) { + int face_count = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT); + if( face_count <= 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't support face detection, as zero max face count"); + camera_features.supports_face_detection = false; + supports_face_detect_mode_simple = false; + supports_face_detect_mode_full = false; + } + } + if( camera_features.supports_face_detection ) { + // check we have scene mode CONTROL_SCENE_MODE_FACE_PRIORITY + int [] values2 = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES); + boolean has_face_priority = false; + for(int value2 : values2) { + if( value2 == CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY ) { + has_face_priority = true; + break; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "has_face_priority: " + has_face_priority); + if( !has_face_priority ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't support face detection, as no CONTROL_SCENE_MODE_FACE_PRIORITY"); + camera_features.supports_face_detection = false; + supports_face_detect_mode_simple = false; + supports_face_detect_mode_full = false; + } + } + + int [] capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); + + CameraCharacteristics logical_characteristics; + int [] 
logical_capabilities; + if( cameraIdSPhysical != null ) { + // If we have a physical camera ID, characteristics refer to the physical camera ID. But for some things, + // we want to query the characteristics of the logical camera. + CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE); + try { + logical_characteristics = manager.getCameraCharacteristics(cameraIdS); + logical_capabilities = logical_characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to get logical_characteristics for: " + cameraIdS, e); + throw new CameraControllerException(); + } + if( MyDebug.LOG ) + Log.d(TAG, "successfully obtained logical camera characteristics"); + } + else { + logical_characteristics = characteristics; + logical_capabilities = capabilities; + } + + //boolean capabilities_manual_sensor = false; + boolean capabilities_manual_post_processing = false; + boolean capabilities_raw = false; + boolean capabilities_high_speed_video = false; + boolean capabilities_10bit = false; + for(int capability : capabilities) { + /*if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR ) { + // At least some Huawei devices (at least, the Huawei device model FIG-LX3, device code-name hi6250) don't + // have REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR, but I had a user complain that HDR mode and manual ISO + // had previously worked for them. Note that we still check below for SENSOR_INFO_SENSITIVITY_RANGE and + // SENSOR_INFO_EXPOSURE_TIME_RANGE, so not checking REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR shouldn't + // enable manual ISO/exposure on devices that don't support it. + // Also may affect Samsung Galaxy A8(2018). + // Instead we just block LEGACY devices (probably don't need to, again because we check + // SENSOR_INFO_SENSITIVITY_RANGE and SENSOR_INFO_EXPOSURE_TIME_RANGE, but just in case). 
+ capabilities_manual_sensor = true; + } + else*/ if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING ) { + capabilities_manual_post_processing = true; + } + else if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW ) { + capabilities_raw = true; + } + /*else if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE ) { + // see note below + camera_features.supports_burst = true; + }*/ + else if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + // we test for at least Android M just to be safe (this is needed for createConstrainedHighSpeedCaptureSession()) + capabilities_high_speed_video = true; + } + else if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT ) { + capabilities_10bit = true; + } + else if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera supports ultra high resolution"); + } + } + boolean capabilities_logical_multi_camera = false; + for(int capability : logical_capabilities) { + // to be safe, we check the REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA from the logical camera + if( capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA && Build.VERSION.SDK_INT >= Build.VERSION_CODES.P ) { + // we test for at least Android 9 just to be safe (this is needed for getPhysicalCameraIds()) + if( MyDebug.LOG ) + Log.d(TAG, "camera is a logical multi-camera"); + capabilities_logical_multi_camera = true; + } + } + // At least some Huawei devices (at least, the Huawei device model FIG-LX3, device code-name hi6250) don't have + // REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE, but I had a user complain that NR mode at least had previously + // (before 
1.45) worked for them. It might be that this can still work, just not at 20fps. + // So instead set to true for all LIMITED devices. Still keep block for LEGACY devices (which definitely shouldn't + // support fast burst - and which Open Camera never allowed with Camera2 before 1.45). + // Also may affect Samsung Galaxy A8(2018). + camera_features.supports_burst = CameraControllerManager2.isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED); + + if( MyDebug.LOG ) { + //Log.d(TAG, "capabilities_manual_sensor?: " + capabilities_manual_sensor); + Log.d(TAG, "capabilities_manual_post_processing?: " + capabilities_manual_post_processing); + Log.d(TAG, "capabilities_raw?: " + capabilities_raw); + Log.d(TAG, "supports_burst?: " + camera_features.supports_burst); + Log.d(TAG, "capabilities_high_speed_video?: " + capabilities_high_speed_video); + Log.d(TAG, "capabilities_10bit?: " + capabilities_10bit); + } + + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + boolean supports_autoframing = false; + Boolean bool = characteristics.get(CameraCharacteristics.CONTROL_AUTOFRAMING_AVAILABLE); + if( bool != null && bool) { + supports_autoframing = true; + } + if( MyDebug.LOG ) + Log.d(TAG, "supports_autoframing?: " + supports_autoframing); + }*/ + + StreamConfigurationMap configs; + try { + configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + } + catch(IllegalArgumentException | NullPointerException e) { + // have had IllegalArgumentException crashes from Google Play - unclear what the cause is, but at least fail gracefully + // similarly for NullPointerException - note, these aren't from characteristics being null, but from + // com.android.internal.util.Preconditions.checkArrayElementsNotNull (Preconditions.java:395) - all are from + // Nexus 7 (2013)s running Android 8.1, but again better to fail gracefully + MyDebug.logStackTrace(TAG, "failed to get 
SCALER_STREAM_CONFIGURATION_MAP", e); + throw new CameraControllerException(); + } + + android.util.Size [] camera_picture_sizes = configs.getOutputSizes(ImageFormat.JPEG); + + camera_features.supports_jpeg_r = false; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE && capabilities_10bit ) { + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + + android.util.Size [] jpeg_r_camera_picture_sizes = configs.getOutputSizes(ImageFormat.JPEG_R); + if( jpeg_r_camera_picture_sizes != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "JPEG_R sizes: " + Arrays.toString(jpeg_r_camera_picture_sizes)); + camera_features.supports_jpeg_r = true; + // For simplicity, we only support JPEG_R if it has the same support as for JPEG. + // Further checks are done below for getHighResolutionOutputSizes. + // Note that extensions don't support JPEG_R (extension_characteristics.getExtensionSupportedSizes + // is documented that it throws IllegalArgumentException if not JPEG or YUV_420_888). 
+ if( !sizeSubset(camera_picture_sizes, jpeg_r_camera_picture_sizes) ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't support JPEG_R: some picture sizes not supported"); + camera_features.supports_jpeg_r = false; + } + + if( camera_features.supports_jpeg_r ) { + // documentation says HLG10 must be supported by all devices with REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT, + // but check just to be safe + DynamicRangeProfiles profiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES); + if( profiles == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't support JPEG_R: no DynamicRangeProfiles"); + camera_features.supports_jpeg_r = false; + } + else if( !profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10) ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't support JPEG_R: no HLG10"); + camera_features.supports_jpeg_r = false; + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "JPEG_R not supported"); + } + + if( MyDebug.LOG ) + Log.d(TAG, "time for jpeg_r testing: " + (System.currentTimeMillis() - debug_time)); + } + + camera_features.picture_sizes = new ArrayList<>(); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + android.util.Size [] camera_picture_sizes_hires = configs.getHighResolutionOutputSizes(ImageFormat.JPEG); + if( camera_picture_sizes_hires != null ) { + for(android.util.Size camera_size : camera_picture_sizes_hires) { + if( MyDebug.LOG ) + Log.d(TAG, "high resolution picture size: " + camera_size.getWidth() + " x " + camera_size.getHeight()); + // Check not already listed? If it's listed in both, we'll add it later on when scanning camera_picture_sizes + // (and we don't want to set supports_burst to false for such a resolution). 
+ boolean found = false; + for(android.util.Size sz : camera_picture_sizes) { + if( sz.equals(camera_size) ) { + found = true; + break; + } + } + if( !found ) { + if( MyDebug.LOG ) + Log.d(TAG, "high resolution [non-burst] picture size: " + camera_size.getWidth() + " x " + camera_size.getHeight()); + CameraController.Size size = new CameraController.Size(camera_size.getWidth(), camera_size.getHeight()); + size.supports_burst = false; + camera_features.picture_sizes.add(size); + } + } + + if( camera_features.supports_jpeg_r && Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + android.util.Size [] camera_picture_sizes_hires_jpeg_r = configs.getHighResolutionOutputSizes(ImageFormat.JPEG_R); + if( !sizeSubset(camera_picture_sizes_hires, camera_picture_sizes_hires_jpeg_r) ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't support JPEG_R: some high resolution (non-burst) picture sizes not supported"); + camera_features.supports_jpeg_r = false; + } + } + } + } + if( camera_picture_sizes == null ) { + // camera_picture_sizes is null on Samsung Galaxy Note 10+ and S20 for camera ID 4! 
+ Log.e(TAG, "no picture sizes returned by getOutputSizes"); + throw new CameraControllerException(); + } + else { + for(android.util.Size camera_size : camera_picture_sizes) { + if( MyDebug.LOG ) + Log.d(TAG, "picture size: " + camera_size.getWidth() + " x " + camera_size.getHeight()); + camera_features.picture_sizes.add(new CameraController.Size(camera_size.getWidth(), camera_size.getHeight())); + } + } + // sizes are usually already sorted from high to low, but sort just in case + // note some devices do have sizes in a not fully sorted order (e.g., Nokia 8) + Collections.sort(camera_features.picture_sizes, new CameraController.SizeSorter()); + // test high resolution modes not supporting burst: + //camera_features.picture_sizes.get(0).supports_burst = false; + + raw_size = null; + if( capabilities_raw ) { + android.util.Size [] raw_camera_picture_sizes = configs.getOutputSizes(ImageFormat.RAW_SENSOR); + if( raw_camera_picture_sizes == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "RAW not supported, failed to get RAW_SENSOR sizes"); + want_raw = false; // just in case it got set to true somehow + } + else { + for(android.util.Size size : raw_camera_picture_sizes) { + if( raw_size == null || size.getWidth()*size.getHeight() > raw_size.getWidth()*raw_size.getHeight() ) { + raw_size = size; + } + } + if( raw_size == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "RAW not supported, failed to find a raw size"); + want_raw = false; // just in case it got set to true somehow + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "raw supported, raw size: " + raw_size.getWidth() + " x " + raw_size.getHeight()); + camera_features.supports_raw = true; + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "RAW capability not supported"); + want_raw = false; // just in case it got set to true somehow + } + + if( MyDebug.LOG ) { + Log.d(TAG, "output_formats: " + Arrays.toString(configs.getOutputFormats())); + } + + ae_fps_ranges = new ArrayList<>(); + for (Range r : 
characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) { + ae_fps_ranges.add(new int[] {r.getLower(), r.getUpper()}); + } + Collections.sort(ae_fps_ranges, new CameraController.RangeSorter()); + if( MyDebug.LOG ) { + Log.d(TAG, "Supported AE video fps ranges: "); + for (int[] f : ae_fps_ranges) { + Log.d(TAG, " ae range: [" + f[0] + "-" + f[1] + "]"); + } + } + + android.util.Size[] camera_video_sizes = configs.getOutputSizes(MediaRecorder.class); + camera_features.video_sizes = new ArrayList<>(); + int min_fps = 9999; + for(int[] r : this.ae_fps_ranges) { + min_fps = Math.min(min_fps, r[0]); + } + if( camera_video_sizes == null ) { + // camera_video_sizes is null on Samsung Galaxy Note 10+ and S20 for camera ID 4! + Log.e(TAG, "no video sizes returned by getOutputSizes"); + throw new CameraControllerException(); + } + else { + for(android.util.Size camera_size : camera_video_sizes) { + if( camera_size.getWidth() > 4096 || camera_size.getHeight() > 2160 ) + continue; // Nexus 6 returns these, even though not supported?! + long mfd = configs.getOutputMinFrameDuration(MediaRecorder.class, camera_size); + int max_fps = (int)((1.0 / mfd) * 1000000000L); + ArrayList fr = new ArrayList<>(); + fr.add(new int[] {min_fps, max_fps}); + CameraController.Size normal_video_size = new CameraController.Size(camera_size.getWidth(), camera_size.getHeight(), fr, false); + camera_features.video_sizes.add(normal_video_size); + if( MyDebug.LOG ) { + Log.d(TAG, "normal video size: " + normal_video_size); + } + } + } + Collections.sort(camera_features.video_sizes, new CameraController.SizeSorter()); + + // don't support high speed if physical camera specified - seems unreliable on Pixel 6 Pro and Galaxy S24+ + if( capabilities_high_speed_video && cameraIdSPhysical == null ) { + hs_fps_ranges = new ArrayList<>(); + camera_features.video_sizes_high_speed = new ArrayList<>(); + + for (Range r : configs.getHighSpeedVideoFpsRanges()) { + // Some devices e.g. 
Pixel 6 Pro have high-speed fps ranges like [30-120]. We skip these because: + // Firstly we'd risk choosing this for 60fps, when 60fps shouldn't require high-speed. + // Secondly captureSessionHighSpeed.createHighSpeedRequestList() documentation says fps range + // should have min==max, so we don't want to include high speed ranges where this isn't true. + // Without this fix, Slow motion 0.5x (which uses 60fps) fails to start recording on Pixel 6 Pro. + if( r.getLower().intValue() != r.getUpper().intValue() ) { + if( MyDebug.LOG ) + Log.d(TAG, "skip high speed video fps range: " + r); + continue; + } + hs_fps_ranges.add(new int[] {r.getLower(), r.getUpper()}); + } + Collections.sort(hs_fps_ranges, new CameraController.RangeSorter()); + if( MyDebug.LOG ) { + Log.d(TAG, "Supported high speed video fps ranges: "); + for (int[] f : hs_fps_ranges) { + Log.d(TAG, " hs range: [" + f[0] + "-" + f[1] + "]"); + } + } + + android.util.Size[] camera_video_sizes_high_speed = configs.getHighSpeedVideoSizes(); + for(android.util.Size camera_size : camera_video_sizes_high_speed) { + ArrayList fr = new ArrayList<>(); + for (Range r : configs.getHighSpeedVideoFpsRangesFor(camera_size)) { + // see comment above for why we require min==max + if( r.getLower().intValue() != r.getUpper().intValue() ) { + continue; + } + int [] this_fps_range = new int[] { r.getLower(), r.getUpper()}; + // In theory, all fps ranges returned by getHighSpeedVideoFpsRangesFor() should surely be + // a subset of fps ranges returned by getHighSpeedVideoFpsRanges(), but we check just in case + // (when deciding whether slow motion or high speed frame rates are supported, this means we + // only need to check the frame rates of particular video sizes, as done in + // MyApplicationInterface.getSupportedVideoCaptureRates()). 
+ boolean found = false; + for(int [] hs_fps_range : hs_fps_ranges) { + if( Arrays.equals(hs_fps_range, this_fps_range) ) { + found = true; + break; + } + } + if( !found ) { + if( MyDebug.LOG ) + Log.e(TAG, "video size " + camera_size + " has high speed frame rate " + Arrays.toString(this_fps_range) + " that wasn't returned by configs.getHighSpeedVideoFpsRanges()"); + continue; + } + fr.add(this_fps_range); + } + if (camera_size.getWidth() > 4096 || camera_size.getHeight() > 2160) + continue; // just in case? see above + CameraController.Size hs_video_size = new CameraController.Size(camera_size.getWidth(), camera_size.getHeight(), fr, true); + if (MyDebug.LOG) { + Log.d(TAG, "high speed video size: " + hs_video_size); + } + camera_features.video_sizes_high_speed.add(hs_video_size); + } + Collections.sort(camera_features.video_sizes_high_speed, new CameraController.SizeSorter()); + } + + android.util.Size [] camera_preview_sizes = configs.getOutputSizes(SurfaceTexture.class); + camera_features.preview_sizes = new ArrayList<>(); + Point display_size = new Point(); + Activity activity = (Activity)context; + if( Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R ) { + // use non-deprecated equivalent of Display.getRealSize() + WindowMetrics window_metrics = activity.getWindowManager().getCurrentWindowMetrics(); + final Rect bounds = window_metrics.getBounds(); + display_size.x = bounds.width(); + display_size.y = bounds.height(); + } + else { + Display display = activity.getWindowManager().getDefaultDisplay(); + display.getRealSize(display_size); + } + // getRealSize() is adjusted based on the current rotation, but we still want + // display_size.x > display_size.y as preview resolutions also have width > height + if( display_size.x < display_size.y ) { + //noinspection SuspiciousNameCombination + display_size.set(display_size.y, display_size.x); + } + if( MyDebug.LOG ) + Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y); + if( 
camera_preview_sizes == null ) { + // camera_preview_sizes is null on Samsung Galaxy Note 10+ and S20 for camera ID 4! + Log.e(TAG, "no preview sizes returned by getOutputSizes"); + throw new CameraControllerException(); + } + else { + for(android.util.Size camera_size : camera_preview_sizes) { + if( MyDebug.LOG ) + Log.d(TAG, "preview size: " + camera_size.getWidth() + " x " + camera_size.getHeight()); + if( camera_size.getWidth() > Math.max(display_size.x, 1280) || camera_size.getHeight() > Math.max(display_size.y, 720) ) { + // Nexus 6 returns these, even though not supported?! (get green corruption lines if we allow these) + // Google Camera filters anything larger than height 1080, with a todo saying to use device's measurements + // But we only considering filtering if also greater than width 1280 or height 720, as these should be fine - also + // need to account for running in multi-window mode - we don't want to exclude all preview resolutions just because the + // window size was small! 
+ continue; + } + camera_features.preview_sizes.add(new CameraController.Size(camera_size.getWidth(), camera_size.getHeight())); + } + } + + final boolean use_cache = true; + //final boolean use_cache = false; + if( extension_characteristics == null ) { + // no extension characteristics + } + else if( use_cache && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && camera_features_cache != null ) { + // read extensions info from cache for performance + if( MyDebug.LOG ) + Log.d(TAG, "read vendor extensions info from cache"); + if( camera_features_cache.supported_extensions != null ) + camera_features.supported_extensions = new ArrayList<>(camera_features_cache.supported_extensions); + if( camera_features_cache.supported_extensions_zoom != null ) + camera_features.supported_extensions_zoom = new ArrayList<>(camera_features_cache.supported_extensions_zoom); + + if( camera_features.supported_extensions != null ) { + for(int extension : camera_features.supported_extensions) { + if( MyDebug.LOG ) + Log.d(TAG, "vendor extension: " + extension); + List extension_picture_sizes = camera_features_cache.extension_picture_sizes_map.get(extension); + List extension_preview_sizes = camera_features_cache.extension_preview_sizes_map.get(extension); + boolean has_picture_resolution = updatePictureSizesForExtension(camera_features.picture_sizes, extension_picture_sizes, extension); + boolean has_preview_resolution = updatePreviewSizesForExtension(camera_features.preview_sizes, extension_preview_sizes, extension); + if( has_picture_resolution && has_preview_resolution ) { + // fine + } + else { + if( MyDebug.LOG ) + Log.e(TAG, "cached extension not actually supported?!: " + extension); + camera_features.supported_extensions.remove((Integer)extension); + camera_features.supported_extensions_zoom.remove((Integer)extension); + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "done read vendor extensions info from cache"); + } + else if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + if( 
MyDebug.LOG ) + Log.d(TAG, "check for vendor extensions"); + Map> extension_picture_sizes_map = new Hashtable<>(); + Map> extension_preview_sizes_map = new Hashtable<>(); + + List extensions = null; + try { + extensions = extension_characteristics.getSupportedExtensions(); + } + catch(Exception e) { + // have IllegalArgumentException at least from Google Play crashes + if( MyDebug.LOG ) + Log.e(TAG, "exception from getSupportedExtensions"); + } + if( extensions != null ) { + camera_features.supported_extensions = new ArrayList<>(); + camera_features.supported_extensions_zoom = new ArrayList<>(); + for(int extension : extensions) { + if( MyDebug.LOG ) + Log.d(TAG, "vendor extension: " + extension); + + try { + // we assume that the allowed extension sizes are a subset of the full sizes - makes things easier to manage + + List extension_picture_sizes = extension_characteristics.getExtensionSupportedSizes(extension, ImageFormat.JPEG); + if( MyDebug.LOG ) + Log.d(TAG, " extension_picture_sizes: " + extension_picture_sizes); + boolean has_picture_resolution = updatePictureSizesForExtension(camera_features.picture_sizes, extension_picture_sizes, extension); + + List extension_preview_sizes = extension_characteristics.getExtensionSupportedSizes(extension, SurfaceTexture.class); + if( MyDebug.LOG ) + Log.d(TAG, " extension_preview_sizes: " + extension_preview_sizes); + boolean has_preview_resolution = updatePreviewSizesForExtension(camera_features.preview_sizes, extension_preview_sizes, extension); + + if( has_picture_resolution && has_preview_resolution ) { + if( MyDebug.LOG ) + Log.d(TAG, " extension is supported: " + extension); + camera_features.supported_extensions.add(extension); + extension_picture_sizes_map.put(extension, extension_picture_sizes); + extension_preview_sizes_map.put(extension, extension_preview_sizes); + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) { + Set extension_supported_request_keys = 
extension_characteristics.getAvailableCaptureRequestKeys(extension); + for(CaptureRequest.Key key : extension_supported_request_keys) { + if( MyDebug.LOG ) + Log.d(TAG, " supported capture request key: " + key.getName()); + if( key == CaptureRequest.CONTROL_ZOOM_RATIO ) { + camera_features.supported_extensions_zoom.add(extension); + } + } + Set extension_supported_result_keys = extension_characteristics.getAvailableCaptureResultKeys(extension); + for(CaptureResult.Key key : extension_supported_result_keys) { + if( MyDebug.LOG ) + Log.d(TAG, " supported capture result key: " + key.getName()); + } + } + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + if( MyDebug.LOG ) { + Log.d(TAG, " isCaptureProcessProgressAvailable: " + extension_characteristics.isCaptureProcessProgressAvailable(extension)); + } + } + } + } + catch(Exception exception) { + // have IllegalArgumentException from getExtensionSupportedSizes() and getAvailableCaptureRequestKeys() at least from Google Play crashes + if( MyDebug.LOG ) + Log.e(TAG, "exception trying to query extension: " + extension); + camera_features.supported_extensions.remove((Integer)extension); + camera_features.supported_extensions_zoom.remove((Integer)extension); + extension_picture_sizes_map.remove(extension); + extension_preview_sizes_map.remove(extension); + } + } + } + + // add to cache + CameraFeaturesCache cache = new CameraFeaturesCache(camera_features, extension_picture_sizes_map, extension_preview_sizes_map); + camera_features_caches.put(cameraIdS, cache); + if( MyDebug.LOG ) + Log.d(TAG, "done check for vendor extensions"); + } + // save to local fields: + this.supported_extensions_zoom = camera_features.supported_extensions_zoom; + + if( characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ) { + int [] supported_flash_modes_arr = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES); // Android format + List supported_flash_modes = new ArrayList<>(); + for(Integer 
supported_flash_mode : supported_flash_modes_arr) + supported_flash_modes.add(supported_flash_mode); + + camera_features.supported_flash_values = new ArrayList<>(); + // also resort as well as converting + + // documentation for CONTROL_AE_AVAILABLE_MODES says the following modes are always supported: + camera_features.supported_flash_values.add("flash_off"); + camera_features.supported_flash_values.add("flash_auto"); + camera_features.supported_flash_values.add("flash_on"); + camera_features.supported_flash_values.add("flash_torch"); + + if( !use_fake_precapture ) { + if( supported_flash_modes.contains(CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) ) { + camera_features.supported_flash_values.add("flash_red_eye"); + if( MyDebug.LOG ) { + Log.d(TAG, " supports flash_red_eye"); + } + } + } + + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.VANILLA_ICE_CREAM && supported_flash_modes.contains(CaptureRequest.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY) ) { + this.supports_low_light_boost = true; + if( MyDebug.LOG ) { + Log.d(TAG, " supports low light boost"); + } + }*/ + } + else if( (getFacing() == Facing.FACING_FRONT) ) { + camera_features.supported_flash_values = new ArrayList<>(); + camera_features.supported_flash_values.add("flash_off"); + camera_features.supported_flash_values.add("flash_frontscreen_auto"); + camera_features.supported_flash_values.add("flash_frontscreen_on"); + camera_features.supported_flash_values.add("flash_frontscreen_torch"); + } + + Float minimum_focus_distance_f = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); // may be null on some devices + if( minimum_focus_distance_f != null ) { + camera_features.minimum_focus_distance = minimum_focus_distance_f; + if( MyDebug.LOG ) + Log.d(TAG, "minimum_focus_distance: " + camera_features.minimum_focus_distance); + } + else { + camera_features.minimum_focus_distance = 0.0f; + } + // save to local fields: + this.minimum_focus_distance = 
camera_features.minimum_focus_distance; + + int [] supported_focus_modes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES); // Android format + camera_features.supported_focus_values = convertFocusModesToValues(supported_focus_modes); // convert to our format (also resorts) + if( camera_features.supported_focus_values != null && camera_features.supported_focus_values.contains("focus_mode_manual2") ) { + camera_features.supports_focus_bracketing = true; + } + if( camera_features.supported_focus_values != null ) { + // prefer continuous focus mode + if( camera_features.supported_focus_values.contains("focus_mode_continuous_picture") ) { + initial_focus_mode = "focus_mode_continuous_picture"; + } + else { + // just go with the first one + initial_focus_mode = camera_features.supported_focus_values.get(0); + } + if( MyDebug.LOG ) + Log.d(TAG, "initial_focus_mode: " + initial_focus_mode); + } + else { + initial_focus_mode = null; + } + + camera_features.max_num_focus_areas = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF); + + camera_features.is_exposure_lock_supported = true; + + camera_features.is_white_balance_lock_supported = true; + + camera_features.is_optical_stabilization_supported = false; + int [] supported_optical_stabilization_modes = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION); + if( supported_optical_stabilization_modes != null ) { + for(int supported_optical_stabilization_mode : supported_optical_stabilization_modes) { + if( supported_optical_stabilization_mode == CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON ) { + camera_features.is_optical_stabilization_supported = true; + break; + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "is_optical_stabilization_supported: " + camera_features.is_optical_stabilization_supported); + supports_optical_stabilization = camera_features.is_optical_stabilization_supported; + + camera_features.is_video_stabilization_supported = false; 
+ int [] supported_video_stabilization_modes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); + if( supported_video_stabilization_modes != null ) { + for(int supported_video_stabilization_mode : supported_video_stabilization_modes) { + if( supported_video_stabilization_mode == CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON ) { + camera_features.is_video_stabilization_supported = true; + break; + } + } + } + if( MyDebug.LOG ) + Log.d(TAG, "is_video_stabilization_supported: " + camera_features.is_video_stabilization_supported); + + camera_features.is_photo_video_recording_supported = CameraControllerManager2.isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED); + supports_photo_video_recording = camera_features.is_photo_video_recording_supported; + + int [] white_balance_modes = characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES); + if( white_balance_modes != null ) { + for(int value : white_balance_modes) { + // n.b., Galaxy S10e for front and ultra-wide cameras offers CONTROL_AWB_MODE_OFF despite + // capabilities_manual_post_processing==false; if we don't check for capabilities_manual_post_processing, + // adjusting white balance temperature seems to work, but seems safest to require + // capabilities_manual_post_processing anyway + if( value == CameraMetadata.CONTROL_AWB_MODE_OFF && capabilities_manual_post_processing && allowManualWB() ) { + camera_features.supports_white_balance_temperature = true; + camera_features.min_temperature = min_white_balance_temperature_c; + camera_features.max_temperature = max_white_balance_temperature_c; + } + } + } + supports_white_balance_temperature = camera_features.supports_white_balance_temperature; + + // see note above + //if( capabilities_manual_sensor ) + if( CameraControllerManager2.isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) ) + { + Range iso_range = 
characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE); // may be null on some devices + if( iso_range != null ) { + camera_features.supports_iso_range = true; + camera_features.min_iso = iso_range.getLower(); + camera_features.max_iso = iso_range.getUpper(); + // we only expose exposure_time if iso_range is supported + Range exposure_time_range = characteristics.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE); // may be null on some devices + if( exposure_time_range != null ) { + camera_features.supports_exposure_time = true; + camera_features.supports_expo_bracketing = true; + camera_features.max_expo_bracketing_n_images = max_expo_bracketing_n_images; + camera_features.min_exposure_time = exposure_time_range.getLower(); + camera_features.max_exposure_time = exposure_time_range.getUpper(); + if( ( is_samsung_galaxy_s || is_samsung_galaxy_f ) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ) { + // seems we can get away with longer exposure on some devices (e.g., Galaxy S10e claims only max of 0.1s, but works with 1/3s) + // but Android 11 on Samsung devices also introduces a bug where manual exposure gets ignored if different to the preview, + // and since the max preview rate is limited to 1/5s (see max_preview_exposure_time_c), there's no point + // going above this! 
+ // update: as of 1.54, we now can go above the max_preview_exposure_time_c, by using RequestTagType.RUN_POST_CAPTURE + // (see adjustPreviewToStill()) + if( MyDebug.LOG ) + Log.d(TAG, "boost max_exposure_time, was: " + max_exposure_time); + camera_features.max_exposure_time = Math.max(camera_features.max_exposure_time, 1000000000L/2); + } + } + } + } + // save to local fields: + this.supports_exposure_time = camera_features.supports_exposure_time; + this.min_exposure_time = camera_features.min_exposure_time; + this.max_exposure_time = camera_features.max_exposure_time; + + Range exposure_range = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE); + camera_features.min_exposure = exposure_range.getLower(); + camera_features.max_exposure = exposure_range.getUpper(); + camera_features.exposure_step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP).floatValue(); + + camera_features.can_disable_shutter_sound = true; + + if( capabilities_manual_post_processing ) { + Integer tonemap_max_curve_points = characteristics.get(CameraCharacteristics.TONEMAP_MAX_CURVE_POINTS); + if( tonemap_max_curve_points != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "tonemap_max_curve_points: " + tonemap_max_curve_points); + + int [] tonemap_modes = characteristics.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES); + if( tonemap_modes == null ) { + // if no tonemap modes, can't support tonemapping + if( MyDebug.LOG ) + Log.d(TAG, "tonemap_modes is null"); + } + else { + boolean supports_tonemap_contrast_curve = false; + for(int tonemap_mode : tonemap_modes) { + if( tonemap_mode == CaptureRequest.TONEMAP_MODE_PRESET_CURVE ) { + supports_tonemap_preset_curve = true; + } + else if( tonemap_mode == CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE ) { + supports_tonemap_contrast_curve = true; + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "supports_tonemap_preset_curve: " + supports_tonemap_preset_curve); + Log.d(TAG, "supports_tonemap_contrast_curve: 
" + supports_tonemap_contrast_curve); + } + + // if supports_tonemap_contrast_curve==false, don't bother supporting tonemapping (in theory we could support the preset curves alone, but not supported for simplicity) + // if supports_tonemap_contrast_curve==true but supports_tonemap_preset_curve==false, we'll still support tonemapping, but always use contrast curves + if( supports_tonemap_contrast_curve ) { + camera_features.tonemap_max_curve_points = tonemap_max_curve_points; + // for now we only expose supporting of custom tonemap curves if there are enough curve points for all the + // profiles we support + // remember to divide by 2 if we're comparing against the raw array length! + camera_features.supports_tonemap_curve = + tonemap_max_curve_points >= tonemap_log_max_curve_points_c && + tonemap_max_curve_points >= jtvideo_values.length/2 && + tonemap_max_curve_points >= jtlog_values.length/2 && + tonemap_max_curve_points >= jtlog2_values.length/2; + } + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "tonemap_max_curve_points is null"); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "supports_tonemap_curve?: " + camera_features.supports_tonemap_curve); + + float [] apertures = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES); + //float [] apertures = new float[]{1.5f, 1.9f, 2.0f, 2.2f, 2.4f, 4.0f, 8.0f, 16.0f}; // test + if( MyDebug.LOG ) + Log.d(TAG, "apertures: " + Arrays.toString(apertures)); + // no point supporting if only a single aperture + if( apertures != null && apertures.length > 1 ) { + camera_features.apertures = apertures; + } + + SizeF view_angle = CameraControllerManager2.computeViewAngles(characteristics); + camera_features.view_angle_x = view_angle.getWidth(); + camera_features.view_angle_y = view_angle.getHeight(); + + if( capabilities_logical_multi_camera && Build.VERSION.SDK_INT >= Build.VERSION_CODES.P ) { + // to be safe, read from the logical camera characteristics + camera_features.physical_camera_ids = 
logical_characteristics.getPhysicalCameraIds(); + if( MyDebug.LOG ) + Log.d(TAG, "physical_camera_ids: " + camera_features.physical_camera_ids); + if( camera_features.physical_camera_ids.size() <= 1 ) { + // no point supporting + camera_features.physical_camera_ids = null; + } + } + + if( !camera_features.supports_jpeg_r ) { + want_jpeg_r = false; // just in case it got set to true somehow + } + + return camera_features; + } + + /** Returns true iff every entry in camera_sizes is also a member of alt_camera_sizes (order + * doesn't matter). + */ + public static boolean sizeSubset(int [] camera_widths, int [] camera_heights, int [] alt_camera_widths, int [] alt_camera_heights) { + if( camera_widths == null && camera_heights == null) + return true; + if( alt_camera_widths == null && alt_camera_heights == null) + return false; + for(int i=0;i picture_sizes, List extension_picture_sizes, int extension) { + boolean has_picture_resolution = false; + for(CameraController.Size size : picture_sizes) { + if( extension_picture_sizes.contains(new android.util.Size(size.width, size.height)) ) { + if( MyDebug.LOG ) { + Log.d(TAG, " picture size supports extension: " + size.width + " , " + size.height); + } + has_picture_resolution = true; + if( size.supported_extensions == null ) { + size.supported_extensions = new ArrayList<>(); + } + size.supported_extensions.add(extension); + } + else { + if( MyDebug.LOG ) { + Log.d(TAG, " picture size does NOT support extension: " + size.width + " , " + size.height); + } + } + } + return has_picture_resolution; + } + + /** For each of the preview_sizes, update the CameraController.Size.supported_extensions field to record if that resolution + * supports the supplied extension. + * @param preview_sizes Preview sizes to update. + * @param extension_preview_sizes Preview sizes supported by the extension. + * @param extension Extension to test. + * @return If false, then none of the preview_sizes are supported by this extension. 
+ */ + private boolean updatePreviewSizesForExtension(List preview_sizes, List extension_preview_sizes, int extension) { + boolean has_preview_resolution = false; + for(CameraController.Size size : preview_sizes) { + if( extension_preview_sizes.contains(new android.util.Size(size.width, size.height)) ) { + if( MyDebug.LOG ) { + Log.d(TAG, " preview size supports extension: " + size.width + " , " + size.height); + } + has_preview_resolution = true; + if( size.supported_extensions == null ) { + size.supported_extensions = new ArrayList<>(); + } + size.supported_extensions.add(extension); + } + else { + if( MyDebug.LOG ) { + Log.d(TAG, " preview size does NOT support extension: " + size.width + " , " + size.height); + } + } + } + return has_preview_resolution; + } + + public boolean shouldCoverPreview() { + return !has_received_frame; + } + + public void resetCoverPreview() { + this.has_received_frame = false; + } + + private String convertSceneMode(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.CONTROL_SCENE_MODE_ACTION: + value = "action"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_BARCODE: + value = "barcode"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_BEACH: + value = "beach"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_CANDLELIGHT: + value = "candlelight"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_DISABLED: + value = SCENE_MODE_DEFAULT; + break; + case CameraMetadata.CONTROL_SCENE_MODE_FIREWORKS: + value = "fireworks"; + break; + // "hdr" no longer available in Camera2 + /*case CameraMetadata.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO: + // new for Camera2 + value = "high-speed-video"; + break;*/ + case CameraMetadata.CONTROL_SCENE_MODE_LANDSCAPE: + value = "landscape"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_NIGHT: + value = "night"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_NIGHT_PORTRAIT: + value = "night-portrait"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_PARTY: + value = "party"; + 
break; + case CameraMetadata.CONTROL_SCENE_MODE_PORTRAIT: + value = "portrait"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_SNOW: + value = "snow"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_SPORTS: + value = "sports"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_STEADYPHOTO: + value = "steadyphoto"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_SUNSET: + value = "sunset"; + break; + case CameraMetadata.CONTROL_SCENE_MODE_THEATRE: + value = "theatre"; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown scene mode: " + value2); + value = null; + break; + } + return value; + } + + @Override + public SupportedValues setSceneMode(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setSceneMode: " + value); + // we convert to/from strings to be compatible with original Android Camera API + int [] values2 = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES); + boolean has_disabled = false; + List values = new ArrayList<>(); + if( values2 != null ) { + // CONTROL_AVAILABLE_SCENE_MODES is supposed to always be available, but have had some (rare) crashes from Google Play due to being null + for(int value2 : values2) { + if( value2 == CameraMetadata.CONTROL_SCENE_MODE_DISABLED ) + has_disabled = true; + String this_value = convertSceneMode(value2); + if( this_value != null ) { + values.add(this_value); + } + } + } + if( !has_disabled ) { + values.add(0, SCENE_MODE_DEFAULT); + } + SupportedValues supported_values = checkModeIsSupported(values, value, SCENE_MODE_DEFAULT); + if( supported_values != null ) { + int selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_DISABLED; + switch(supported_values.selected_value) { + case "action": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_ACTION; + break; + case "barcode": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_BARCODE; + break; + case "beach": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_BEACH; + break; + case "candlelight": + selected_value2 = 
CameraMetadata.CONTROL_SCENE_MODE_CANDLELIGHT; + break; + case SCENE_MODE_DEFAULT: + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_DISABLED; + break; + case "fireworks": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_FIREWORKS; + break; + // "hdr" no longer available in Camera2 + case "landscape": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_LANDSCAPE; + break; + case "night": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_NIGHT; + break; + case "night-portrait": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_NIGHT_PORTRAIT; + break; + case "party": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_PARTY; + break; + case "portrait": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_PORTRAIT; + break; + case "snow": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_SNOW; + break; + case "sports": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_SPORTS; + break; + case "steadyphoto": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_STEADYPHOTO; + break; + case "sunset": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_SUNSET; + break; + case "theatre": + selected_value2 = CameraMetadata.CONTROL_SCENE_MODE_THEATRE; + break; + default: + if (MyDebug.LOG) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + + camera_settings.scene_mode = selected_value2; + if( camera_settings.setSceneMode(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set scene mode", e); + } + } + } + return supported_values; + } + + @Override + public String getSceneMode() { + if( previewBuilder.get(CaptureRequest.CONTROL_SCENE_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.CONTROL_SCENE_MODE); + return convertSceneMode(value2); + } + + @Override + public boolean sceneModeAffectsFunctionality() { + // Camera2 API doesn't seem to have any warnings that changing scene mode can affect 
available functionality + return false; + } + + private String convertColorEffect(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.CONTROL_EFFECT_MODE_AQUA: + value = "aqua"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_BLACKBOARD: + value = "blackboard"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_MONO: + value = "mono"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_NEGATIVE: + value = "negative"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_OFF: + value = COLOR_EFFECT_DEFAULT; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_POSTERIZE: + value = "posterize"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_SEPIA: + value = "sepia"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_SOLARIZE: + value = "solarize"; + break; + case CameraMetadata.CONTROL_EFFECT_MODE_WHITEBOARD: + value = "whiteboard"; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown effect mode: " + value2); + value = null; + break; + } + return value; + } + + @Override + public SupportedValues setColorEffect(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setColorEffect: " + value); + // we convert to/from strings to be compatible with original Android Camera API + int [] values2 = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS); + if( values2 == null ) { + return null; + } + List values = new ArrayList<>(); + for(int value2 : values2) { + String this_value = convertColorEffect(value2); + if( this_value != null ) { + values.add(this_value); + } + } + SupportedValues supported_values = checkModeIsSupported(values, value, COLOR_EFFECT_DEFAULT); + if( supported_values != null ) { + int selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_OFF; + switch(supported_values.selected_value) { + case "aqua": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_AQUA; + break; + case "blackboard": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_BLACKBOARD; + break; + case "mono": + selected_value2 = 
CameraMetadata.CONTROL_EFFECT_MODE_MONO; + break; + case "negative": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_NEGATIVE; + break; + case COLOR_EFFECT_DEFAULT: + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_OFF; + break; + case "posterize": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_POSTERIZE; + break; + case "sepia": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_SEPIA; + break; + case "solarize": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_SOLARIZE; + break; + case "whiteboard": + selected_value2 = CameraMetadata.CONTROL_EFFECT_MODE_WHITEBOARD; + break; + default: + if (MyDebug.LOG) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + + camera_settings.color_effect = selected_value2; + if( camera_settings.setColorEffect(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set color effect", e); + } + } + } + return supported_values; + } + + @Override + public String getColorEffect() { + if( previewBuilder.get(CaptureRequest.CONTROL_EFFECT_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.CONTROL_EFFECT_MODE); + return convertColorEffect(value2); + } + + private String convertWhiteBalance(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.CONTROL_AWB_MODE_AUTO: + value = WHITE_BALANCE_DEFAULT; + break; + case CameraMetadata.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: + value = "cloudy-daylight"; + break; + case CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT: + value = "daylight"; + break; + case CameraMetadata.CONTROL_AWB_MODE_FLUORESCENT: + value = "fluorescent"; + break; + case CameraMetadata.CONTROL_AWB_MODE_INCANDESCENT: + value = "incandescent"; + break; + case CameraMetadata.CONTROL_AWB_MODE_SHADE: + value = "shade"; + break; + case CameraMetadata.CONTROL_AWB_MODE_TWILIGHT: + value = "twilight"; + break; + case 
CameraMetadata.CONTROL_AWB_MODE_WARM_FLUORESCENT: + value = "warm-fluorescent"; + break; + case CameraMetadata.CONTROL_AWB_MODE_OFF: + value = "manual"; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown white balance: " + value2); + value = null; + break; + } + return value; + } + + /** Whether we should allow manual white balance, even if the device supports CONTROL_AWB_MODE_OFF. + */ + private boolean allowManualWB() { + boolean is_nexus6 = Build.MODEL.toLowerCase(Locale.US).contains("nexus 6"); + // manual white balance doesn't seem to work on Nexus 6! + return !is_nexus6; + } + + @Override + public SupportedValues setWhiteBalance(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setWhiteBalance: " + value); + // we convert to/from strings to be compatible with original Android Camera API + int [] values2 = characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES); + if( values2 == null ) { + return null; + } + List values = new ArrayList<>(); + for(int value2 : values2) { + String this_value = convertWhiteBalance(value2); + if( this_value != null ) { + if( value2 == CameraMetadata.CONTROL_AWB_MODE_OFF && !supports_white_balance_temperature ) { + // filter + } + else { + values.add(this_value); + } + } + } + { + // re-order so that auto is first, manual is second + boolean has_auto = values.remove(WHITE_BALANCE_DEFAULT); + boolean has_manual = values.remove("manual"); + if( has_manual ) + values.add(0, "manual"); + if( has_auto ) + values.add(0, WHITE_BALANCE_DEFAULT); + } + SupportedValues supported_values = checkModeIsSupported(values, value, WHITE_BALANCE_DEFAULT); + if( supported_values != null ) { + int selected_value2 = CameraMetadata.CONTROL_AWB_MODE_AUTO; + switch(supported_values.selected_value) { + case WHITE_BALANCE_DEFAULT: + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_AUTO; + break; + case "cloudy-daylight": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT; + break; + case "daylight": + 
selected_value2 = CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT; + break; + case "fluorescent": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_FLUORESCENT; + break; + case "incandescent": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_INCANDESCENT; + break; + case "shade": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_SHADE; + break; + case "twilight": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_TWILIGHT; + break; + case "warm-fluorescent": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_WARM_FLUORESCENT; + break; + case "manual": + selected_value2 = CameraMetadata.CONTROL_AWB_MODE_OFF; + break; + default: + if (MyDebug.LOG) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + + camera_settings.white_balance = selected_value2; + if( camera_settings.setWhiteBalance(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set white balance", e); + } + } + } + return supported_values; + } + + @Override + public String getWhiteBalance() { + if( previewBuilder.get(CaptureRequest.CONTROL_AWB_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.CONTROL_AWB_MODE); + return convertWhiteBalance(value2); + } + + @Override + // Returns whether white balance temperature was modified + public boolean setWhiteBalanceTemperature(int temperature) { + if( MyDebug.LOG ) + Log.d(TAG, "setWhiteBalanceTemperature: " + temperature); + if( camera_settings.white_balance == temperature ) { + if( MyDebug.LOG ) + Log.d(TAG, "already set"); + return false; + } + try { + temperature = Math.max(temperature, min_white_balance_temperature_c); + temperature = Math.min(temperature, max_white_balance_temperature_c); + camera_settings.white_balance_temperature = temperature; + if( camera_settings.setWhiteBalance(previewBuilder) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set 
white balance temperature", e); + } + return true; + } + + @Override + public int getWhiteBalanceTemperature() { + return camera_settings.white_balance_temperature; + } + + private String convertAntiBanding(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO: + value = ANTIBANDING_DEFAULT; + break; + case CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ: + value = "50hz"; + break; + case CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_60HZ: + value = "60hz"; + break; + case CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF: + value = "off"; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown antibanding: " + value2); + value = null; + break; + } + return value; + } + + @Override + public SupportedValues setAntiBanding(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setAntiBanding: " + value); + // we convert to/from strings to be compatible with original Android Camera API + int [] values2 = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES ); + if( values2 == null ) { + return null; + } + List values = new ArrayList<>(); + for(int value2 : values2) { + String this_value = convertAntiBanding(value2); + if( this_value != null ) { + values.add(this_value); + } + } + SupportedValues supported_values = checkModeIsSupported(values, value, ANTIBANDING_DEFAULT); + if( supported_values != null ) { + // for antibanding, if the requested value isn't available, we don't modify it at all + // (so we stick with the device's default setting) + if( supported_values.selected_value.equals(value) ) { + int selected_value2 = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO; + switch(supported_values.selected_value) { + case ANTIBANDING_DEFAULT: + selected_value2 = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO; + break; + case "50hz": + selected_value2 = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ; + break; + case "60hz": + selected_value2 = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_60HZ; + 
break; + case "off": + selected_value2 = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_OFF; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + + camera_settings.has_antibanding = true; + camera_settings.antibanding = selected_value2; + if( camera_settings.setAntiBanding(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set antibanding", e); + } + } + } + } + return supported_values; + } + + @Override + public String getAntiBanding() { + if( previewBuilder.get(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE); + return convertAntiBanding(value2); + } + + private String convertEdgeMode(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.EDGE_MODE_FAST: + value = "fast"; + break; + case CameraMetadata.EDGE_MODE_HIGH_QUALITY: + value = "high_quality"; + break; + case CameraMetadata.EDGE_MODE_OFF: + value = "off"; + break; + case CameraMetadata.EDGE_MODE_ZERO_SHUTTER_LAG: + // we don't make use of zero shutter lag + value = null; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown edge_mode: " + value2); + value = null; + break; + } + return value; + } + + @Override + public SupportedValues setEdgeMode(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setEdgeMode: " + value); + int [] values2 = characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES); + if( values2 == null ) { + return null; + } + List values = new ArrayList<>(); + values.add(EDGE_MODE_DEFAULT); + for(int value2 : values2) { + String this_value = convertEdgeMode(value2); + if( this_value != null ) { + values.add(this_value); + } + } + SupportedValues supported_values = checkModeIsSupported(values, value, EDGE_MODE_DEFAULT); + if( supported_values != null ) { + // for edge mode, if the requested value isn't 
available, we don't modify it at all + if( supported_values.selected_value.equals(value) ) { + boolean has_edge_mode = false; + int selected_value2 = CameraMetadata.EDGE_MODE_FAST; + // if EDGE_MODE_DEFAULT, this means to stick with the device default + if( !value.equals(EDGE_MODE_DEFAULT) ) { + switch(supported_values.selected_value) { + case "fast": + has_edge_mode = true; + selected_value2 = CameraMetadata.EDGE_MODE_FAST; + break; + case "high_quality": + has_edge_mode = true; + selected_value2 = CameraMetadata.EDGE_MODE_HIGH_QUALITY; + break; + case "off": + has_edge_mode = true; + selected_value2 = CameraMetadata.EDGE_MODE_OFF; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + } + + if( camera_settings.has_edge_mode != has_edge_mode || camera_settings.edge_mode != selected_value2 ) { + camera_settings.has_edge_mode = has_edge_mode; + camera_settings.edge_mode = selected_value2; + if( camera_settings.setEdgeMode(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set edge_mode", e); + } + } + } + } + } + return supported_values; + } + + @Override + public String getEdgeMode() { + if( previewBuilder.get(CaptureRequest.EDGE_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.EDGE_MODE); + return convertEdgeMode(value2); + } + + private String convertNoiseReductionMode(int value2) { + String value; + switch( value2 ) { + case CameraMetadata.NOISE_REDUCTION_MODE_FAST: + value = "fast"; + break; + case CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY: + value = "high_quality"; + break; + case CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL: + value = "minimal"; + break; + case CameraMetadata.NOISE_REDUCTION_MODE_OFF: + value = "off"; + break; + case CameraMetadata.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG: + // we don't make use of zero shutter lag + value = null; + break; + default: + 
if( MyDebug.LOG ) + Log.d(TAG, "unknown noise_reduction_mode: " + value2); + value = null; + break; + } + return value; + } + + @Override + public SupportedValues setNoiseReductionMode(String value) { + if( MyDebug.LOG ) + Log.d(TAG, "setNoiseReductionMode: " + value); + int [] values2 = characteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES ); + if( values2 == null ) { + return null; + } + List values = new ArrayList<>(); + values.add(NOISE_REDUCTION_MODE_DEFAULT); + for(int value2 : values2) { + String this_value = convertNoiseReductionMode(value2); + if( this_value != null ) { + values.add(this_value); + } + } + SupportedValues supported_values = checkModeIsSupported(values, value, NOISE_REDUCTION_MODE_DEFAULT); + if( supported_values != null ) { + // for noise reduction, if the requested value isn't available, we don't modify it at all + if( supported_values.selected_value.equals(value) ) { + boolean has_noise_reduction_mode = false; + int selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_FAST; + // if NOISE_REDUCTION_MODE_DEFAULT, this means to stick with the device default + if( !value.equals(NOISE_REDUCTION_MODE_DEFAULT) ) { + switch(supported_values.selected_value) { + case "fast": + has_noise_reduction_mode = true; + selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_FAST; + break; + case "high_quality": + has_noise_reduction_mode = true; + selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY; + break; + case "minimal": + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + has_noise_reduction_mode = true; + selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL; + } + else { + // shouldn't ever be here, as NOISE_REDUCTION_MODE_MINIMAL shouldn't be a supported value! 
+ // treat as fast instead + Log.e(TAG, "noise reduction minimal, but pre-Android M!"); + has_noise_reduction_mode = true; + selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_FAST; + } + break; + case "off": + has_noise_reduction_mode = true; + selected_value2 = CameraMetadata.NOISE_REDUCTION_MODE_OFF; + break; + default: + if( MyDebug.LOG ) + Log.d(TAG, "unknown selected_value: " + supported_values.selected_value); + break; + } + } + + if( camera_settings.has_noise_reduction_mode != has_noise_reduction_mode || camera_settings.noise_reduction_mode != selected_value2 ) { + camera_settings.has_noise_reduction_mode = has_noise_reduction_mode; + camera_settings.noise_reduction_mode = selected_value2; + if( camera_settings.setNoiseReductionMode(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set noise_reduction_mode", e); + } + } + } + } + } + return supported_values; + } + + @Override + public String getNoiseReductionMode() { + if( previewBuilder.get(CaptureRequest.NOISE_REDUCTION_MODE) == null ) + return null; + int value2 = previewBuilder.get(CaptureRequest.NOISE_REDUCTION_MODE); + return convertNoiseReductionMode(value2); + } + + @Override + public SupportedValues setISO(String value) { + // not supported for CameraController2 - but Camera2 devices that don't support manual ISO can call this, + // so assume this is for auto ISO + this.setManualISO(false, 0); + return null; + } + + @Override + public String getISOKey() { + return ""; + } + + @Override + public void setManualISO(boolean manual_iso, int iso) { + if( MyDebug.LOG ) + Log.d(TAG, "setManualISO: " + manual_iso); + try { + if( manual_iso ) { + if( MyDebug.LOG ) + Log.d(TAG, "switch to iso: " + iso); + Range iso_range = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE); // may be null on some devices + if( iso_range == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "iso not supported"); + return; + } + 
if( MyDebug.LOG ) + Log.d(TAG, "iso range from " + iso_range.getLower() + " to " + iso_range.getUpper()); + + camera_settings.has_iso = true; + iso = Math.max(iso, iso_range.getLower()); + iso = Math.min(iso, iso_range.getUpper()); + camera_settings.iso = iso; + } + else { + camera_settings.has_iso = false; + camera_settings.iso = 0; + } + updateUseFakePrecaptureMode(camera_settings.flash_value); + + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set ISO", e); + } + } + + @Override + public boolean isManualISO() { + return camera_settings.has_iso; + } + + @Override + // Returns whether ISO was modified + // N.B., use setManualISO() to switch between auto and manual mode + public boolean setISO(int iso) { + if( MyDebug.LOG ) + Log.d(TAG, "setISO: " + iso); + if( camera_settings.iso == iso ) { + if( MyDebug.LOG ) + Log.d(TAG, "already set"); + return false; + } + try { + camera_settings.iso = iso; + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set ISO", e); + } + return true; + } + + @Override + public int getISO() { + return camera_settings.iso; + } + + @Override + public long getExposureTime() { + return camera_settings.exposure_time; + } + + @Override + // Returns whether exposure time was modified + // N.B., use setISO(String) to switch between auto and manual mode + public boolean setExposureTime(long exposure_time) { + if( MyDebug.LOG ) { + Log.d(TAG, "setExposureTime: " + exposure_time); + Log.d(TAG, "current exposure time: " + camera_settings.exposure_time); + } + if( camera_settings.exposure_time == exposure_time ) { + if( MyDebug.LOG ) + Log.d(TAG, "already set"); + return false; + } + try { + camera_settings.exposure_time = exposure_time; + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + 
} + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set exposure time", e); + } + return true; + } + + @Override + public void setAperture(float aperture) { + if( MyDebug.LOG ) { + Log.d(TAG, "setAperture: " + aperture); + Log.d(TAG, "current aperture: " + camera_settings.aperture); + } + if( camera_settings.has_aperture && camera_settings.aperture == aperture ) { + if( MyDebug.LOG ) + Log.d(TAG, "already set"); + } + try { + camera_settings.has_aperture = true; + camera_settings.aperture = aperture; + if( camera_settings.setAperture(previewBuilder) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set aperture", e); + } + } + + @Override + public Size getPictureSize() { + return new Size(picture_width, picture_height); + } + + @Override + public void setPictureSize(int width, int height) { + if( MyDebug.LOG ) + Log.d(TAG, "setPictureSize: " + width + " x " + height); + if( camera == null ) { + if( MyDebug.LOG ) + Log.e(TAG, "no camera"); + return; + } + if( hasCaptureSession() ) { + // can only call this when captureSession not created - as the surface of the imageReader we create has to match the surface we pass to the captureSession + if( MyDebug.LOG ) + Log.e(TAG, "can't set picture size when captureSession running!"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + this.picture_width = width; + this.picture_height = height; + } + + @Override + public void setJpegR(boolean want_jpeg_r) { + if( MyDebug.LOG ) { + Log.d(TAG, "setJpegR: " + want_jpeg_r); + } + if( camera == null ) { + if( MyDebug.LOG ) + Log.e(TAG, "no camera"); + return; + } + if( this.want_jpeg_r == want_jpeg_r ) { + return; + } + if( hasCaptureSession() ) { + // can only call this when captureSession not created - as it affects how we create the imageReader + if( MyDebug.LOG ) + Log.e(TAG, "can't set jpeg_r when captureSession running!"); + throw new 
            RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        this.want_jpeg_r = want_jpeg_r;
    }

    // Enables/disables RAW (DNG) capture alongside JPEG; max_raw_images sizes the RAW ImageReader.
    // Must not be called while a capture session exists (affects imageReader creation).
    @Override
    public void setRaw(boolean want_raw, int max_raw_images) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setRaw: " + want_raw);
            Log.d(TAG, "max_raw_images: " + max_raw_images);
        }
        if( camera == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "no camera");
            return;
        }
        if( this.want_raw == want_raw && this.max_raw_images == max_raw_images ) {
            return; // no change
        }
        if( want_raw && this.raw_size == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "can't set raw when raw not supported");
            return;
        }
        if( hasCaptureSession() ) {
            // can only call this when captureSession not created - as it affects how we create the imageReader
            if( MyDebug.LOG )
                Log.e(TAG, "can't set raw when captureSession running!");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        this.want_raw = want_raw;
        this.max_raw_images = max_raw_images;
    }

    // Requests high-speed (slow-motion) video; takes effect when the capture session is (re)created.
    @Override
    public void setVideoHighSpeed(boolean want_video_high_speed) {
        if( MyDebug.LOG )
            Log.d(TAG, "setVideoHighSpeed: " + want_video_high_speed);
        if( camera == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "no camera");
            return;
        }
        if( this.want_video_high_speed == want_video_high_speed ) {
            return; // no change
        }
        if( hasCaptureSession() ) {
            // can only call this when captureSession not created - as it affects how we create the session
            if( MyDebug.LOG )
                Log.e(TAG, "can't set high speed when captureSession running!");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        this.want_video_high_speed = want_video_high_speed;
        this.is_video_high_speed = false; // reset just to be safe
    }

    // Switches between a normal capture session and a vendor extension session.
    // extension is ignored when enabled is false. Recreates the previewBuilder when toggling,
    // so capture-request parameters start from their defaults.
    @Override
    public void setCameraExtension(boolean enabled, int extension) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setCameraExtension?: " + enabled);
            Log.d(TAG, "extension: " + extension);
        }

        if( camera == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "no camera");
            return;
        }
        if( sessionType == (enabled ? SessionType.SESSIONTYPE_EXTENSION : SessionType.SESSIONTYPE_NORMAL) && this.camera_extension == (enabled ? extension : 0) ) {
            // quick exit
            if( MyDebug.LOG )
                Log.d(TAG, " no change");
            return;
        }
        if( hasCaptureSession() ) {
            // can only call this when captureSession not created - as it affects how we create the imageReader
            if( MyDebug.LOG )
                Log.e(TAG, "can't set extension when captureSession running!");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }

        if( enabled != (sessionType == SessionType.SESSIONTYPE_EXTENSION) ) {
            if( MyDebug.LOG )
                Log.d(TAG, "turning extension session on or off");
            // Ideally we'd probably only create the previewBuilder when starting the preview (so we
            // start off with a "fresh" one), but for now at least ensure we start off with a fresh
            // previewBuilder when enabling extensions (and might as well do so when disabling
            // extensions too).
            // This saves us having to set capture request parameters back to their defaults, and is
            // also useful for modes like CONTROL_AE_ANTIBANDING_MODE where there isn't an obvious
            // "default" to set (in theory extensions mode should just ignore such keys, but it'd be
            // nicer to never set them).
            previewBuilder = null;
            createPreviewRequest();
        }

        if( enabled ) {
            this.sessionType = SessionType.SESSIONTYPE_EXTENSION;
            this.camera_extension = extension;
        }
        else {
            this.sessionType = SessionType.SESSIONTYPE_NORMAL;
            this.camera_extension = 0;
        }
    }

    @Override
    public boolean isCameraExtension() {
        return this.sessionType == SessionType.SESSIONTYPE_EXTENSION;
    }

    // Returns the active extension type, or -1 if not in an extension session.
    @Override
    public int getCameraExtension() {
        if( isCameraExtension() )
            return camera_extension;
        return -1;
    }

    // Sets the burst mode; also refreshes fake-precapture and AE mode, since flash is
    // disabled for burst modes.
    @Override
    public void setBurstType(BurstType burst_type) {
        if( MyDebug.LOG )
            Log.d(TAG, "setBurstType: " + burst_type);
        if( camera == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "no camera");
            return;
        }
        if( this.burst_type == burst_type ) {
            return; // no change
        }
        /*if( hasCaptureSession() ) {
            // can only call this when captureSession not created - as it affects how we create the imageReader
            if( MyDebug.LOG )
                Log.e(TAG, "can't set burst type when captureSession running!");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }*/
        this.burst_type = burst_type;
        updateUseFakePrecaptureMode(camera_settings.flash_value);
        camera_settings.setAEMode(previewBuilder, false); // may need to set the ae mode, as flash is disabled for burst modes
    }

    @Override
    public BurstType getBurstType() {
        return burst_type;
    }

    // Sets the number of images for expo bracketing; must be an odd number greater than 1,
    // and is clamped to max_expo_bracketing_n_images.
    @Override
    public void setExpoBracketingNImages(int n_images) {
        if( MyDebug.LOG )
            Log.d(TAG, "setExpoBracketingNImages: " + n_images);
        if( n_images <= 1 || (n_images % 2) == 0 ) {
            if( MyDebug.LOG )
                Log.e(TAG, "n_images should be an odd number greater than 1");
            throw new RuntimeException("n_images should be an odd number greater than 1"); // throw as RuntimeException, as this is a programming error
        }
        if( n_images > max_expo_bracketing_n_images ) {
            n_images = max_expo_bracketing_n_images;
            if( MyDebug.LOG )
                Log.e(TAG, "limiting n_images to max of " + n_images);
        }
        this.expo_bracketing_n_images = n_images;
    }

    // Sets the exposure step (in stops) between expo-bracketed images; must be positive.
    @Override
    public void setExpoBracketingStops(double stops) {
        if( MyDebug.LOG )
            Log.d(TAG, "setExpoBracketingStops: " + stops);
        if( stops <= 0.0 ) {
            if( MyDebug.LOG )
                Log.e(TAG, "stops should be positive");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        this.expo_bracketing_stops = stops;
    }

    @Override
    public void setDummyCaptureHack(boolean dummy_capture_hack) {
        if( MyDebug.LOG )
            Log.d(TAG, "setDummyCaptureHack: " + dummy_capture_hack);
        this.dummy_capture_hack = dummy_capture_hack;
    }

    @Override
    public void setUseExpoFastBurst(boolean use_expo_fast_burst) {
        if( MyDebug.LOG )
            Log.d(TAG, "setUseExpoFastBurst: " + use_expo_fast_burst);
        this.use_expo_fast_burst = use_expo_fast_burst;
    }

    @Override
    public boolean isCaptureFastBurst() {
        // BURSTTYPE_FOCUS photos are captured at a slow rate, so fine to return false for that (means
        // devices can still use highest resolutions)
        return this.burst_type != BurstType.BURSTTYPE_NONE && this.burst_type != BurstType.BURSTTYPE_FOCUS;
    }

    // Whether a burst capture is currently underway (more images still expected).
    @Override
    public boolean isCapturingBurst() {
        if( this.burst_type == BurstType.BURSTTYPE_NONE )
            return false;
        if( burst_type == BurstType.BURSTTYPE_CONTINUOUS )
            return continuous_burst_in_progress || n_burst > 0 || n_burst_raw > 0;
        return getBurstTotal() > 1 && getNBurstTaken() < getBurstTotal();
    }

    @Override
    public int getNBurstTaken() {
        return n_burst_taken;
    }

    @Override
    public int getBurstTotal() {
        if( burst_type == BurstType.BURSTTYPE_CONTINUOUS )
            return 0; // total burst size is unknown
        return n_burst_total;
    }

    @Override
    public void setBurstNImages(int burst_requested_n_images) {
        if( MyDebug.LOG )
            Log.d(TAG, "setBurstNImages: " + burst_requested_n_images);
        this.burst_requested_n_images = burst_requested_n_images;
    }

    @Override
    public void setBurstForNoiseReduction(boolean burst_for_noise_reduction, boolean noise_reduction_low_light) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setBurstForNoiseReduction: " + burst_for_noise_reduction);
            Log.d(TAG, "noise_reduction_low_light: " + noise_reduction_low_light);
        }
        this.burst_for_noise_reduction = burst_for_noise_reduction;
        this.noise_reduction_low_light = noise_reduction_low_light;
    }

    @Override
    public boolean isContinuousBurstInProgress() {
        return continuous_burst_in_progress;
    }

    @Override
    public void stopContinuousBurst() {
        if( MyDebug.LOG )
            Log.d(TAG, "stopContinuousBurst");
        continuous_burst_in_progress = false;
    }

    // Stops an in-progress focus bracketing burst; logs an error if not in focus bracketing mode.
    @Override
    public void stopFocusBracketingBurst() {
        if( MyDebug.LOG )
            Log.d(TAG, "stopFocusBracketingBurst");
        if( burst_type == BurstType.BURSTTYPE_FOCUS ) {
            focus_bracketing_in_progress = false;
        }
        else {
            Log.e(TAG, "stopFocusBracketingBurst burst_type is: " + burst_type);
        }
    }

    // Enables the "fake flash" workaround (app-driven precapture) for devices with broken
    // flash behaviour under Camera2.
    @Override
    public void setUseCamera2FakeFlash(boolean use_fake_precapture) {
        if( MyDebug.LOG )
            Log.d(TAG, "setUseCamera2FakeFlash: " + use_fake_precapture);
        if( camera == null ) {
            if( MyDebug.LOG )
                Log.e(TAG, "no camera");
            return;
        }
        if( this.use_fake_precapture == use_fake_precapture ) {
            return; // no change
        }
        this.use_fake_precapture = use_fake_precapture;
        this.use_fake_precapture_mode = use_fake_precapture;
        // no need to call updateUseFakePrecaptureMode(), as this method should only be called after first creating camera controller
    }

    @Override
    public boolean getUseCamera2FakeFlash() {
        return this.use_fake_precapture;
    }

    // (Re)creates the JPEG (and optionally RAW) ImageReaders for still capture.
    private void createPictureImageReader() {
        if( MyDebug.LOG )
            Log.d(TAG, "createPictureImageReader");
        if( hasCaptureSession() ) {
            // can only call this when captureSession not created - as the surface of the imageReader we create has to match the surface we pass to the captureSession
            if( MyDebug.LOG )
                Log.e(TAG, "can't create picture image reader when captureSession running!");
            throw new
            RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        closePictureImageReader();
        if( picture_width == 0 || picture_height == 0 ) {
            if( MyDebug.LOG )
                Log.e(TAG, "application needs to call setPictureSize()");
            throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
        }
        // maxImages only needs to be 2, as we always read the JPEG data and close the image straight away in the imageReader
        imageReader = ImageReader.newInstance(picture_width, picture_height, Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE && want_jpeg_r ? ImageFormat.JPEG_R : ImageFormat.JPEG, 2);
        //imageReader = ImageReader.newInstance(picture_width, picture_height, ImageFormat.YUV_420_888, 2);
        if( MyDebug.LOG ) {
            Log.d(TAG, "created new imageReader: " + imageReader);
            Log.d(TAG, "imageReader surface: " + imageReader.getSurface().toString());
        }
        // It's intentional that we pass a handler of null, so the OnImageAvailableListener runs on the UI thread.
        // If ever we want to change this in future, we should ensure that all image available listeners (JPEG+RAW) are
        // using the same handler/thread.
        imageReader.setOnImageAvailableListener(onImageAvailableListener = new OnImageAvailableListener(), null);
        if( want_raw && raw_size != null && !previewIsVideoMode ) {
            // unlike the JPEG imageReader, we can't read the data and close the image straight away, so we need to allow a larger
            // value for maxImages
            imageReaderRaw = ImageReader.newInstance(raw_size.getWidth(), raw_size.getHeight(), ImageFormat.RAW_SENSOR, max_raw_images);
            if( MyDebug.LOG ) {
                Log.d(TAG, "created new imageReaderRaw: " + imageReaderRaw);
                Log.d(TAG, "imageReaderRaw surface: " + imageReaderRaw.getSurface().toString());
            }
            // see note above for imageReader.setOnImageAvailableListener for why we use a null handler
            imageReaderRaw.setOnImageAvailableListener(onRawImageAvailableListener = new OnRawImageAvailableListener(), null);
        }
    }

    // Resets all per-capture burst/pending-image state ready for the next capture.
    private void clearPending() {
        if( MyDebug.LOG )
            Log.d(TAG, "clearPending");
        pending_burst_images.clear();
        pending_burst_images_raw.clear();
        pending_raw_image = null;
        if( onImageAvailableListener != null ) {
            onImageAvailableListener.skip_next_image = false;
        }
        if( onRawImageAvailableListener != null ) {
            onRawImageAvailableListener.clear();
            onRawImageAvailableListener.skip_next_image = false;
        }
        slow_burst_capture_requests = null;
        n_burst = 0;
        n_burst_taken = 0;
        n_burst_total = 0;
        n_burst_raw = 0;
        burst_single_request = false;
        slow_burst_start_ms = 0;
    }

    // Delivers a pending RAW image to the application's callback; the callback is invoked
    // outside the lock (only raw_todo is cleared under it).
    private void takePendingRaw() {
        if( MyDebug.LOG )
            Log.d(TAG, "takePendingRaw");
        // takePendingRaw() always called on UI thread, and pending_raw_image only used on UI thread, so shouldn't need to
        // synchronize for that
        if( pending_raw_image != null ) {
            synchronized( background_camera_lock ) {
                raw_todo = false;
            }
            // don't call callback with lock
            picture_cb.onRawPictureTaken(pending_raw_image);
            // pending_raw_image should be closed by the application (we don't do it here, so that applications can keep hold of the data, e.g., in a queue for background processing)
            pending_raw_image = null;
            if( onRawImageAvailableListener != null ) {
                onRawImageAvailableListener.clear();
            }
        }
    }

    // Checks whether all expected JPEG/RAW callbacks have completed; if so, fires the pending
    // RAW callback and/or the overall onCompleted() callback. State is inspected under the
    // lock, but the callbacks themselves run outside it.
    private void checkImagesCompleted() {
        if( MyDebug.LOG )
            Log.d(TAG, "checkImagesCompleted");
        boolean completed = false;
        boolean take_pending_raw = false;
        synchronized( background_camera_lock ) {
            if( !done_all_captures ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "still waiting for captures");
            }
            else if( picture_cb == null ) {
                // just in case?
                if( MyDebug.LOG )
                    Log.d(TAG, "no picture_cb");
            }
            else if( !jpeg_todo && !raw_todo ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "all image callbacks now completed");
                completed = true;
            }
            else if( !jpeg_todo && pending_raw_image != null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "jpeg callback already done, can now call pending raw callback");
                take_pending_raw = true;
                completed = true;
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "need to wait for jpeg and/or raw callback");
            }
        }

        // need to call callbacks without a lock
        if( take_pending_raw ) {
            takePendingRaw();
            if( MyDebug.LOG )
                Log.d(TAG, "all image callbacks now completed");
        }
        if( completed ) {
            // need to set picture_cb to null before calling onCompleted, as that may reenter CameraController to take another photo (if in auto-repeat burst mode) - see testTakePhotoRepeat()
            PictureCallback cb = picture_cb;
            picture_cb = null;
            cb.onCompleted();
            synchronized( background_camera_lock ) {
                if( burst_type == BurstType.BURSTTYPE_FOCUS )
                    focus_bracketing_in_progress = false;
            }
        }
    }

    @Override
    public Size getPreviewSize() {
        return new Size(preview_width, preview_height);
    }

    @Override
    public void setPreviewSize(int width, int height) {
        if( MyDebug.LOG )
            Log.d(TAG, "setPreviewSize: " + width + " , " + height);
        preview_width = width;
        preview_height = height;
        /*if( previewImageReader != null ) {
            previewImageReader.close();
        }
        previewImageReader =
ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2); + */ + } + + @Override + public void setVideoStabilization(boolean enabled) { + if( MyDebug.LOG ) + Log.d(TAG, "setVideoStabilization: " + enabled); + camera_settings.video_stabilization = enabled; + camera_settings.setStabilization(previewBuilder); + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set video stabilization", e); + } + } + + @Override + public boolean getOpticalStabilization() { + Integer ois_mode = previewBuilder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE); + if( ois_mode == null ) + return false; + return( ois_mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON ); + } + + @Override + public boolean getVideoStabilization() { + return camera_settings.video_stabilization; + } + + @Override + public void setTonemapProfile(TonemapProfile tonemap_profile, float log_profile_strength, float gamma) { + if( MyDebug.LOG ) { + Log.d(TAG, "setTonemapProfile: " + tonemap_profile); + Log.d(TAG, "log_profile_strength: " + log_profile_strength); + Log.d(TAG, "gamma: " + gamma); + } + if( camera_settings.tonemap_profile == tonemap_profile && + camera_settings.log_profile_strength == log_profile_strength && + camera_settings.gamma_profile == gamma ) + return; // no change + + camera_settings.tonemap_profile = tonemap_profile; + + if( tonemap_profile == TonemapProfile.TONEMAPPROFILE_LOG ) + camera_settings.log_profile_strength = log_profile_strength; + else + camera_settings.log_profile_strength = 0.0f; + + if( tonemap_profile == TonemapProfile.TONEMAPPROFILE_GAMMA ) + camera_settings.gamma_profile = gamma; + else + camera_settings.gamma_profile = 0.0f; + + camera_settings.setTonemapProfile(previewBuilder); + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set log profile", e); + } + } + + @Override + public TonemapProfile getTonemapProfile() { + return 
camera_settings.tonemap_profile; + } + + /** For testing. + */ + public CaptureRequest.Builder testGetPreviewBuilder() { + return previewBuilder; + } + + public TonemapCurve testGetTonemapCurve() { + return previewBuilder.get(CaptureRequest.TONEMAP_CURVE); + } + + @Override + public int getJpegQuality() { + return this.camera_settings.jpeg_quality; + } + + @Override + public void setJpegQuality(int quality) { + if( quality < 0 || quality > 100 ) { + if( MyDebug.LOG ) + Log.e(TAG, "invalid jpeg quality" + quality); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + this.camera_settings.jpeg_quality = (byte)quality; + } + + @Override + public int getZoom() { + return this.current_zoom_value; + } + + @Override + public void setZoom(int value) { + setZoom(value, -1.0f); + } + + @Override + public void setZoom(int value, float smooth_zoom) { + if( zoom_ratios == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "zoom not supported"); + return; + } + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + if( this.supported_extensions_zoom != null && this.supported_extensions_zoom.contains(camera_extension) ) { + // fine, camera extension supports zoom + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "zoom not supported for camera extension"); + return; + } + } + if( value < 0 || value > zoom_ratios.size() ) { + if( MyDebug.LOG ) + Log.e(TAG, "invalid zoom value" + value); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + if( smooth_zoom > 0.0f ) { + if( smooth_zoom < zoom_ratios.get(0)/100.0f ) { + if( MyDebug.LOG ) + Log.e(TAG, "invalid smooth_zoom: " + smooth_zoom); + throw new RuntimeException("smooth_zoom too small"); + } + else if( smooth_zoom > zoom_ratios.get(zoom_ratios.size()-1)/100.0f ) { + if( MyDebug.LOG ) + Log.e(TAG, "invalid smooth_zoom: " + smooth_zoom); + throw new RuntimeException("smooth_zoom too large"); + } + } + float zoom = smooth_zoom > 0.0f ? 
smooth_zoom : zoom_ratios.get(value)/100.0f; + if( MyDebug.LOG ) + Log.d(TAG, "zoom to: " + zoom); + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.R ) { + camera_settings.has_control_zoom_ratio = true; + camera_settings.control_zoom_ratio = zoom; + camera_settings.setControlZoomRatio(previewBuilder); + } + else { + Rect sensor_rect = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + int left = sensor_rect.width()/2; + int right = left; + int top = sensor_rect.height()/2; + int bottom = top; + int hwidth = (int)(sensor_rect.width() / (2.0*zoom)); + int hheight = (int)(sensor_rect.height() / (2.0*zoom)); + left -= hwidth; + right += hwidth; + top -= hheight; + bottom += hheight; + if( MyDebug.LOG ) { + Log.d(TAG, "zoom: " + zoom); + Log.d(TAG, "hwidth: " + hwidth); + Log.d(TAG, "hheight: " + hheight); + Log.d(TAG, "sensor_rect left: " + sensor_rect.left); + Log.d(TAG, "sensor_rect top: " + sensor_rect.top); + Log.d(TAG, "sensor_rect right: " + sensor_rect.right); + Log.d(TAG, "sensor_rect bottom: " + sensor_rect.bottom); + Log.d(TAG, "left: " + left); + Log.d(TAG, "top: " + top); + Log.d(TAG, "right: " + right); + Log.d(TAG, "bottom: " + bottom); + /*Rect current_rect = previewBuilder.get(CaptureRequest.SCALER_CROP_REGION); + Log.d(TAG, "current_rect left: " + current_rect.left); + Log.d(TAG, "current_rect top: " + current_rect.top); + Log.d(TAG, "current_rect right: " + current_rect.right); + Log.d(TAG, "current_rect bottom: " + current_rect.bottom);*/ + } + camera_settings.scalar_crop_region = new Rect(left, top, right, bottom); + camera_settings.setCropRegion(previewBuilder); + } + this.current_zoom_value = value; + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set zoom", e); + } + } + + @Override + public void resetZoom() { + setZoom(zoom_value_1x); + } + + @Override + public int getExposureCompensation() { + if( 
previewBuilder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) == null ) + return 0; + return previewBuilder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION); + } + + @Override + // Returns whether exposure was modified + public boolean setExposureCompensation(int new_exposure) { + camera_settings.has_ae_exposure_compensation = true; + camera_settings.ae_exposure_compensation = new_exposure; + if( camera_settings.setExposureCompensation(previewBuilder) ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set exposure compensation", e); + } + return true; + } + return false; + } + + @Override + public void setPreviewFpsRange(int min, int max) { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewFpsRange: " + min +"-" + max); + camera_settings.ae_target_fps_range = new Range<>(min / 1000, max / 1000); +// Frame duration is in nanoseconds. Using min to be safe. + camera_settings.sensor_frame_duration = + (long)(1.0 / (min / 1000.0) * 1000000000L); + + try { + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set preview fps range to " + min +"-" + max, e); + } + } + + @Override + public void clearPreviewFpsRange() { + if( MyDebug.LOG ) + Log.d(TAG, "clearPreviewFpsRange"); + // needed e.g. 
on Nokia 8 when switching back from slow motion to regular speed, in order to reset to the regular + // frame rate + if( camera_settings.ae_target_fps_range != null || camera_settings.sensor_frame_duration != 0 ) { + // set back to default + camera_settings.ae_target_fps_range = null; + camera_settings.sensor_frame_duration = 0; + createPreviewRequest(); + // createPreviewRequest() needed so that the values in the previewBuilder reset to default values, for + // CONTROL_AE_TARGET_FPS_RANGE and SENSOR_FRAME_DURATION + + try { + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to clear preview fps range", e); + } + } + } + + @Override + public List getSupportedPreviewFpsRange() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedPreviewFpsRange"); + List l = new ArrayList<>(); + + List rr = want_video_high_speed ? hs_fps_ranges : ae_fps_ranges; + for (int[] r : rr) { + int[] ir = { r[0] * 1000, r[1] * 1000 }; + if( MyDebug.LOG ) + Log.d(TAG, " : " + Arrays.toString(ir)); + l.add( ir ); + } + if( MyDebug.LOG ) { + Log.d(TAG, " using " + (want_video_high_speed ? 
"high speed" : "ae") + " preview fps ranges"); + } + + return l; + } + + @Override + public void setFocusValue(String focus_value) { + if( MyDebug.LOG ) + Log.d(TAG, "setFocusValue: " + focus_value); + BLOCK_FOR_EXTENSIONS(); + int focus_mode; + switch(focus_value) { + case "focus_mode_auto": + case "focus_mode_locked": + focus_mode = CaptureRequest.CONTROL_AF_MODE_AUTO; + break; + case "focus_mode_infinity": + focus_mode = CaptureRequest.CONTROL_AF_MODE_OFF; + camera_settings.focus_distance = 0.0f; + break; + case "focus_mode_manual2": + focus_mode = CaptureRequest.CONTROL_AF_MODE_OFF; + camera_settings.focus_distance = camera_settings.focus_distance_manual; + /*if( capture_result_has_focus_distance ) // test + camera_settings.focus_distance = capture_result_focus_distance;*/ + break; + case "focus_mode_macro": + focus_mode = CaptureRequest.CONTROL_AF_MODE_MACRO; + break; + case "focus_mode_edof": + focus_mode = CaptureRequest.CONTROL_AF_MODE_EDOF; + break; + case "focus_mode_continuous_picture": + focus_mode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE; + break; + case "focus_mode_continuous_video": + focus_mode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO; + break; + default: + if (MyDebug.LOG) + Log.d(TAG, "setFocusValue() received unknown focus value " + focus_value); + return; + } + camera_settings.has_af_mode = true; + camera_settings.af_mode = focus_mode; + camera_settings.setFocusMode(previewBuilder); + camera_settings.setFocusDistance(previewBuilder); // also need to set distance, in case changed between infinity, manual or other modes + //camera_settings.setTonemapProfile(previewBuilder); // testing - if using focus mode to test video profiles, see test_new flag + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set focus mode", e); + } + } + + private String convertFocusModeToValue(int focus_mode) { + if( MyDebug.LOG ) + Log.d(TAG, "convertFocusModeToValue: " + focus_mode); + 
        String focus_value = "";
        switch (focus_mode) {
            case CaptureRequest.CONTROL_AF_MODE_AUTO:
                focus_value = "focus_mode_auto";
                break;
            case CaptureRequest.CONTROL_AF_MODE_MACRO:
                focus_value = "focus_mode_macro";
                break;
            case CaptureRequest.CONTROL_AF_MODE_EDOF:
                focus_value = "focus_mode_edof";
                break;
            case CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                focus_value = "focus_mode_continuous_picture";
                break;
            case CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                focus_value = "focus_mode_continuous_video";
                break;
            case CaptureRequest.CONTROL_AF_MODE_OFF:
                focus_value = "focus_mode_manual2"; // n.b., could be infinity
                break;
        }
        return focus_value;
    }

    // Returns the current focus mode as a string; defaults to auto if the preview request has no AF mode set.
    @Override
    public String getFocusValue() {
        Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE);
        if( focus_mode == null )
            focus_mode = CaptureRequest.CONTROL_AF_MODE_AUTO;
        return convertFocusModeToValue(focus_mode);
    }

    @Override
    public float getFocusDistance() {
        return camera_settings.focus_distance;
    }

    // Sets the manual focus distance; returns whether the value actually changed.
    @Override
    public boolean setFocusDistance(float focus_distance) {
        if( MyDebug.LOG )
            Log.d(TAG, "setFocusDistance: " + focus_distance);
        if( camera_settings.focus_distance == focus_distance ) {
            if( MyDebug.LOG )
                Log.d(TAG, "already set");
            return false;
        }
        camera_settings.focus_distance = focus_distance;
        camera_settings.focus_distance_manual = focus_distance;
        camera_settings.setFocusDistance(previewBuilder);
        try {
            setRepeatingRequest();
        }
        catch(CameraAccessException e) {
            MyDebug.logStackTrace(TAG, "failed to set focus distance", e);
        }
        return true;
    }

    @Override
    public void setFocusBracketingNImages(int n_images) {
        if( MyDebug.LOG )
            Log.d(TAG, "setFocusBracketingNImages: " + n_images);
        this.focus_bracketing_n_images = n_images;
    }

    @Override
    public void setFocusBracketingAddInfinity(boolean focus_bracketing_add_infinity) {
        if( MyDebug.LOG )
            Log.d(TAG, "setFocusBracketingAddInfinity: " + focus_bracketing_add_infinity);
        this.focus_bracketing_add_infinity = focus_bracketing_add_infinity;
    }

    @Override
    public void setFocusBracketingSourceDistance(float focus_bracketing_source_distance) {
        if( MyDebug.LOG )
            Log.d(TAG, "setFocusBracketingSourceDistance: " + focus_bracketing_source_distance);
        this.focus_bracketing_source_distance = focus_bracketing_source_distance;
    }

    @Override
    public float getFocusBracketingSourceDistance() {
        return this.focus_bracketing_source_distance;
    }

    // Uses the most recent capture result's focus distance as the bracketing source, if one is available.
    @Override
    public void setFocusBracketingSourceDistanceFromCurrent() {
        if( capture_result_has_focus_distance ) {
            this.focus_bracketing_source_distance = capture_result_focus_distance;
        }
    }

    @Override
    public void setFocusBracketingTargetDistance(float focus_bracketing_target_distance) {
        if( MyDebug.LOG )
            Log.d(TAG, "setFocusBracketingTargetDistance: " + focus_bracketing_target_distance);
        this.focus_bracketing_target_distance = focus_bracketing_target_distance;
    }

    @Override
    public float getFocusBracketingTargetDistance() {
        return this.focus_bracketing_target_distance;
    }

    /** Decides whether we should be using fake precapture mode.
+ */ + private void updateUseFakePrecaptureMode(String flash_value) { + if( MyDebug.LOG ) + Log.d(TAG, "useFakePrecaptureMode: " + flash_value); + boolean frontscreen_flash = flash_value.equals("flash_frontscreen_auto") || flash_value.equals("flash_frontscreen_on"); + if( frontscreen_flash ) { + use_fake_precapture_mode = true; + } + else if( burst_type != BurstType.BURSTTYPE_NONE ) + use_fake_precapture_mode = true; + else if( camera_settings.has_iso ) + use_fake_precapture_mode = true; + else { + use_fake_precapture_mode = use_fake_precapture; + } + if( MyDebug.LOG ) + Log.d(TAG, "use_fake_precapture_mode set to: " + use_fake_precapture_mode); + } + + @Override + public void setFlashValue(String flash_value) { + if( MyDebug.LOG ) + Log.d(TAG, "setFlashValue: " + flash_value); + if( camera_settings.flash_value.equals(flash_value) ) { + if( MyDebug.LOG ) + Log.d(TAG, "flash value already set"); + return; + } + + try { + updateUseFakePrecaptureMode(flash_value); + + if( camera_settings.flash_value.equals("flash_torch") && !flash_value.equals("flash_off") ) { + // hack - if switching to something other than flash_off, we first need to turn torch off, otherwise torch remains on (at least on Nexus 6 and Nokia 8) + camera_settings.flash_value = "flash_off"; + camera_settings.setAEMode(previewBuilder, false); + CaptureRequest request = previewBuilder.build(); + + // need to wait until torch actually turned off + camera_settings.flash_value = flash_value; + camera_settings.setAEMode(previewBuilder, false); + push_repeating_request_when_torch_off = true; + push_repeating_request_when_torch_off_id = request; + + setRepeatingRequest(request); + } + else { + camera_settings.flash_value = flash_value; + if( camera_settings.setAEMode(previewBuilder, false) ) { + setRepeatingRequest(); + } + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set flash mode", e); + } + } + + @Override + public String getFlashValue() { + // returns "" if flash isn't 
supported + if( !characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ) { + return ""; + } + return camera_settings.flash_value; + } + + @Override + public void setRecordingHint(boolean hint) { + // not relevant for CameraController2 + } + + @Override + public void setAutoExposureLock(boolean enabled) { + if( enabled ) { + BLOCK_FOR_EXTENSIONS(); + } + camera_settings.ae_lock = enabled; + camera_settings.setAutoExposureLock(previewBuilder); + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set auto exposure lock", e); + } + } + + @Override + public boolean getAutoExposureLock() { + if( previewBuilder.get(CaptureRequest.CONTROL_AE_LOCK) == null ) + return false; + return previewBuilder.get(CaptureRequest.CONTROL_AE_LOCK); + } + + @Override + public void setAutoWhiteBalanceLock(boolean enabled) { + if( enabled ) { + BLOCK_FOR_EXTENSIONS(); + } + camera_settings.wb_lock = enabled; + camera_settings.setAutoWhiteBalanceLock(previewBuilder); + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set auto white balance lock", e); + } + } + + @Override + public boolean getAutoWhiteBalanceLock() { + if( previewBuilder.get(CaptureRequest.CONTROL_AWB_LOCK) == null ) + return false; + return previewBuilder.get(CaptureRequest.CONTROL_AWB_LOCK); + } + + @Override + public void setRotation(int rotation) { + this.camera_settings.rotation = rotation; + } + + @Override + public void setLocationInfo(Location location) { + // don't log location, in case of privacy! 
+ if( MyDebug.LOG ) + Log.d(TAG, "setLocationInfo"); + this.camera_settings.location = location; + } + + @Override + public void removeLocationInfo() { + this.camera_settings.location = null; + } + + @Override + public void enableShutterSound(boolean enabled) { + this.sounds_enabled = enabled; + } + + private void playSound(int soundName) { + if( sounds_enabled ) { + // on some devices (e.g., Samsung Galaxy S10e), need to check whether phone on silent! + AudioManager audioManager = (AudioManager)context.getSystemService(Context.AUDIO_SERVICE); + if( audioManager.getRingerMode() == AudioManager.RINGER_MODE_NORMAL ) { + media_action_sound.play(soundName); + } + } + } + + /** Returns the viewable rect - this is crop region if available. + * We need this as callers will pass in (or expect returned) CameraController.Area values that + * are relative to the current view (i.e., taking zoom into account) (the old Camera API in + * CameraController1 always works in terms of the current view, whilst Camera2 works in terms + * of the full view always). Similarly for the rect field in CameraController.Face. 
+ */ + private Rect getViewableRect() { + if( previewBuilder != null ) { + Rect crop_rect = previewBuilder.get(CaptureRequest.SCALER_CROP_REGION); + if( crop_rect != null ) { + return crop_rect; + } + } + Rect sensor_rect = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + sensor_rect.right -= sensor_rect.left; + sensor_rect.left = 0; + sensor_rect.bottom -= sensor_rect.top; + sensor_rect.top = 0; + return sensor_rect; + } + + private Rect convertRectToCamera2(Rect crop_rect, Rect rect) { + // CameraController.Area is always [-1000, -1000] to [1000, 1000] for the viewable region + // but for CameraController2, we must convert to be relative to the crop region + double left_f = (rect.left+1000)/2000.0; + double top_f = (rect.top+1000)/2000.0; + double right_f = (rect.right+1000)/2000.0; + double bottom_f = (rect.bottom+1000)/2000.0; + int left = (int)(crop_rect.left + left_f * (crop_rect.width()-1)); + int right = (int)(crop_rect.left + right_f * (crop_rect.width()-1)); + int top = (int)(crop_rect.top + top_f * (crop_rect.height()-1)); + int bottom = (int)(crop_rect.top + bottom_f * (crop_rect.height()-1)); + left = Math.max(left, crop_rect.left); + right = Math.max(right, crop_rect.left); + top = Math.max(top, crop_rect.top); + bottom = Math.max(bottom, crop_rect.top); + left = Math.min(left, crop_rect.right); + right = Math.min(right, crop_rect.right); + top = Math.min(top, crop_rect.bottom); + bottom = Math.min(bottom, crop_rect.bottom); + + return new Rect(left, top, right, bottom); + } + + private MeteringRectangle convertAreaToMeteringRectangle(Rect sensor_rect, Area area) { + Rect camera2_rect = convertRectToCamera2(sensor_rect, area.rect); + return new MeteringRectangle(camera2_rect, area.weight); + } + + private Rect convertRectFromCamera2(Rect crop_rect, Rect camera2_rect) { + // inverse of convertRectToCamera2() + double left_f = (camera2_rect.left-crop_rect.left)/(double)(crop_rect.width()-1); + double top_f = 
(camera2_rect.top-crop_rect.top)/(double)(crop_rect.height()-1); + double right_f = (camera2_rect.right-crop_rect.left)/(double)(crop_rect.width()-1); + double bottom_f = (camera2_rect.bottom-crop_rect.top)/(double)(crop_rect.height()-1); + int left = (int)(left_f * 2000) - 1000; + int right = (int)(right_f * 2000) - 1000; + int top = (int)(top_f * 2000) - 1000; + int bottom = (int)(bottom_f * 2000) - 1000; + + left = Math.max(left, -1000); + right = Math.max(right, -1000); + top = Math.max(top, -1000); + bottom = Math.max(bottom, -1000); + left = Math.min(left, 1000); + right = Math.min(right, 1000); + top = Math.min(top, 1000); + bottom = Math.min(bottom, 1000); + + return new Rect(left, top, right, bottom); + } + + private Area convertMeteringRectangleToArea(Rect sensor_rect, MeteringRectangle metering_rectangle) { + Rect area_rect = convertRectFromCamera2(sensor_rect, metering_rectangle.getRect()); + return new Area(area_rect, metering_rectangle.getMeteringWeight()); + } + + private CameraController.Face convertFromCameraFace(Rect sensor_rect, android.hardware.camera2.params.Face camera2_face) { + Rect area_rect = convertRectFromCamera2(sensor_rect, camera2_face.getBounds()); + return new CameraController.Face(camera2_face.getScore(), area_rect); + } + + @Override + public boolean setFocusAndMeteringArea(List areas) { + if( MyDebug.LOG ) + Log.d(TAG, "setFocusAndMeteringArea"); + BLOCK_FOR_EXTENSIONS(); + Rect sensor_rect = getViewableRect(); + if( MyDebug.LOG ) + Log.d(TAG, "sensor_rect: " + sensor_rect.left + " , " + sensor_rect.top + " x " + sensor_rect.right + " , " + sensor_rect.bottom); + boolean has_focus = false; + boolean has_metering = false; + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) > 0 ) { + has_focus = true; + camera_settings.af_regions = new MeteringRectangle[areas.size()]; + int i = 0; + for(CameraController.Area area : areas) { + camera_settings.af_regions[i++] = convertAreaToMeteringRectangle(sensor_rect, area); + 
} + camera_settings.setAFRegions(previewBuilder); + } + else + camera_settings.af_regions = null; + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0 ) { + has_metering = true; + camera_settings.ae_regions = new MeteringRectangle[areas.size()]; + int i = 0; + for(CameraController.Area area : areas) { + camera_settings.ae_regions[i++] = convertAreaToMeteringRectangle(sensor_rect, area); + } + camera_settings.setAERegions(previewBuilder); + } + else + camera_settings.ae_regions = null; + if( has_focus || has_metering ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set focus and/or metering regions", e); + } + } + return has_focus; + } + + @Override + public void clearFocusAndMetering() { + if( MyDebug.LOG ) + Log.d(TAG, "clearFocusAndMetering"); + BLOCK_FOR_EXTENSIONS(); + Rect sensor_rect = getViewableRect(); + boolean has_focus = false; + boolean has_metering = false; + if( sensor_rect.width() <= 0 || sensor_rect.height() <= 0 ) { + // had a crash on Google Play due to creating a MeteringRectangle with -ve width/height ?! 
+ camera_settings.af_regions = null; + camera_settings.ae_regions = null; + } + else { + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) > 0 ) { + has_focus = true; + camera_settings.af_regions = new MeteringRectangle[1]; + camera_settings.af_regions[0] = new MeteringRectangle(0, 0, sensor_rect.width()-1, sensor_rect.height()-1, 0); + camera_settings.setAFRegions(previewBuilder); + } + else + camera_settings.af_regions = null; + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0 ) { + has_metering = true; + camera_settings.ae_regions = new MeteringRectangle[1]; + camera_settings.ae_regions[0] = new MeteringRectangle(0, 0, sensor_rect.width()-1, sensor_rect.height()-1, 0); + camera_settings.setAERegions(previewBuilder); + } + else + camera_settings.ae_regions = null; + } + if( has_focus || has_metering ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to clear focus and metering regions", e); + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "af_regions: " + Arrays.toString(camera_settings.af_regions)); + Log.d(TAG, "ae_regions: " + Arrays.toString(camera_settings.ae_regions)); + } + } + + @Override + public List getFocusAreas() { + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) == 0 ) + return null; + if( camera_settings.af_regions == null ) { + // needed to fix failure on Android emulator in testTakePhotoContinuousNoTouch - can happen when CONTROL_MAX_REGIONS_AF > 0, but Camera only has 1 focus mode so Preview doesn't set focus areas + return null; + } + MeteringRectangle [] metering_rectangles = previewBuilder.get(CaptureRequest.CONTROL_AF_REGIONS); + if( metering_rectangles == null ) + return null; + Rect sensor_rect = getViewableRect(); + if( metering_rectangles.length == 1 && metering_rectangles[0].getRect().left == 0 && metering_rectangles[0].getRect().top == 0 && metering_rectangles[0].getRect().right == sensor_rect.width()-1 && 
metering_rectangles[0].getRect().bottom == sensor_rect.height()-1 ) { + // for compatibility with CameraController1 + return null; + } + List areas = new ArrayList<>(); + for(MeteringRectangle metering_rectangle : metering_rectangles) { + areas.add(convertMeteringRectangleToArea(sensor_rect, metering_rectangle)); + } + return areas; + } + + @Override + public List getMeteringAreas() { + if( characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) == 0 ) + return null; + if( camera_settings.ae_regions == null ) { + // needed to fix failure on Android emulator in testTakePhotoContinuousNoTouch - can happen when CONTROL_MAX_REGIONS_AF > 0, but Camera only has 1 focus mode so Preview doesn't set focus areas + return null; + } + MeteringRectangle [] metering_rectangles = previewBuilder.get(CaptureRequest.CONTROL_AE_REGIONS); + if( metering_rectangles == null ) + return null; + Rect sensor_rect = getViewableRect(); + if( metering_rectangles.length == 1 && metering_rectangles[0].getRect().left == 0 && metering_rectangles[0].getRect().top == 0 && metering_rectangles[0].getRect().right == sensor_rect.width()-1 && metering_rectangles[0].getRect().bottom == sensor_rect.height()-1 ) { + // for compatibility with CameraController1 + return null; + } + List areas = new ArrayList<>(); + for(MeteringRectangle metering_rectangle : metering_rectangles) { + areas.add(convertMeteringRectangleToArea(sensor_rect, metering_rectangle)); + } + return areas; + } + + @Override + public boolean supportsAutoFocus() { + if( previewBuilder == null ) + return false; + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) + return false; + Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE); + if( focus_mode == null ) + return false; + if( focus_mode == CaptureRequest.CONTROL_AF_MODE_AUTO || focus_mode == CaptureRequest.CONTROL_AF_MODE_MACRO ) + return true; + return false; + } + + @Override + public boolean supportsMetering() { + return 
characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0; + } + + @Override + public boolean focusIsContinuous() { + if( previewBuilder == null ) + return false; + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) + return false; + Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE); + if( focus_mode == null ) + return false; + if( focus_mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || focus_mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO ) + return true; + return false; + } + + @Override + public boolean focusIsVideo() { + if( previewBuilder == null ) + return false; + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) + return false; + Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE); + if( focus_mode == null ) + return false; + if( focus_mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO ) { + return true; + } + return false; + } + + @Override + public void setPreviewDisplay(SurfaceHolder holder) { + if( MyDebug.LOG ) { + Log.d(TAG, "setPreviewDisplay"); + Log.e(TAG, "SurfaceHolder not supported for CameraController2!"); + Log.e(TAG, "Should use setPreviewTexture() instead"); + } + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + + @Override + public void setPreviewTexture(TextureView texture) { + if( MyDebug.LOG ) { + Log.d(TAG, "setPreviewTexture: " + texture); + Log.d(TAG, "surface: " + texture.getSurfaceTexture()); + } + if( this.texture != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "preview texture already set"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + this.texture = texture.getSurfaceTexture(); + } + + private void setRepeatingRequest() throws CameraAccessException { + setRepeatingRequest(previewBuilder.build()); + } + + private void setRepeatingRequest(CaptureRequest request) throws CameraAccessException { + if( MyDebug.LOG ) + Log.d(TAG, "setRepeatingRequest"); 
+ synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + return; + } + try { + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + extensionSession.setRepeatingRequest(request, executor, previewExtensionCaptureCallback); + } + } + else if( is_video_high_speed && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + CameraConstrainedHighSpeedCaptureSession captureSessionHighSpeed = (CameraConstrainedHighSpeedCaptureSession) captureSession; + List mPreviewBuilderBurst = captureSessionHighSpeed.createHighSpeedRequestList(request); + captureSessionHighSpeed.setRepeatingBurst(mPreviewBuilderBurst, previewCaptureCallback, handler); + } + else { + captureSession.setRepeatingRequest(request, previewCaptureCallback, handler); + } + if( MyDebug.LOG ) + Log.d(TAG, "setRepeatingRequest done"); + } + catch(IllegalStateException e) { + MyDebug.logStackTrace(TAG, "captureSession already closed!", e); + // got this as a Google Play exception (from onCaptureCompleted->processCompleted) - this means the capture session is already closed + } + } + } + + private void capture() throws CameraAccessException { + capture(previewBuilder.build()); + } + + /** Performs a "capture" - note that in practice this isn't used for taking photos, but for + * one-off captures for the preview stream (e.g., to trigger focus). 
+ */ + private void capture(CaptureRequest request) throws CameraAccessException { + if( MyDebug.LOG ) + Log.d(TAG, "capture: " + request); + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + return; + } + BLOCK_FOR_EXTENSIONS(); // not yet supported for extension sessions + captureSession.capture(request, previewCaptureCallback, handler); + } + } + + private void createPreviewRequest() { + if( MyDebug.LOG ) + Log.d(TAG, "createPreviewRequest"); + if( camera == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not available!"); + return; + } + if( MyDebug.LOG ) + Log.d(TAG, "camera: " + camera); + try { + previewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + previewBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); + previewIsVideoMode = false; + camera_settings.setupBuilder(previewBuilder, false); + if( MyDebug.LOG ) + Log.d(TAG, "successfully created preview request"); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to create capture request", e); + } + } + + // should synchronize calls to this method using background_camera_lock + private Surface getPreviewSurface() { + return surface_texture; + } + + @Override + public void updatePreviewTexture() { + if( MyDebug.LOG ) + Log.d(TAG, "updatePreviewTexture"); + if( texture != null ) { + if( preview_width == 0 || preview_height == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "preview size not yet set"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "preview size: " + preview_width + " x " + preview_height); + this.test_texture_view_buffer_w = preview_width; + this.test_texture_view_buffer_h = preview_height; + texture.setDefaultBufferSize(preview_width, preview_height); + } + } + } + + @RequiresApi(api = Build.VERSION_CODES.P) + private List createOutputConfigurationList(List surfaces, Surface preview_surface) { + 
List outputs = new ArrayList<>(); + for(Surface surface : surfaces) { + OutputConfiguration config = new OutputConfiguration(surface); + if( cameraIdSPhysical != null ) { + config.setPhysicalCameraId(cameraIdSPhysical); + } + // On Galaxy S24+ at least, we seem to get Ultra HDR photos even without setting DynamicRangeProfiles.HLG10 + // furthermore, calling setDynamicRangeProfile with HLG10 gives photos with much lower saturation, so have + // disabled this + /*if( want_jpeg_r && surface == preview_surface && Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) { + config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10); + }*/ + outputs.add(config); + } + return outputs; + } + + private abstract static class CreateCaptureSessionFunction { + public abstract void call() throws CameraAccessException; + } + + /** Function to support calling a function either on background thread or not depending on wait_until_started. + */ + private void launchCameraSession(boolean wait_until_started, CreateCaptureSessionFunction function, Runnable on_failed) throws CameraAccessException { + if( wait_until_started ) { + if( test_force_slow_preview_start ) { + try { + Thread.sleep(6000); // test slow to start preview + //Thread.sleep(25000); // test slow to start preview + } + catch(InterruptedException e) { + throw new RuntimeException(e); + } + } + function.call(); + } + else { + handler.post(new Runnable() { + @Override + public void run() { + try { + if( test_force_slow_preview_start ) { + try { + Thread.sleep(6000); // test slow to start preview + //Thread.sleep(25000); // test slow to start preview + } + catch(InterruptedException e) { + throw new RuntimeException(e); + } + } + function.call(); + } + catch(CameraAccessException | NullPointerException | IllegalArgumentException | UnsupportedOperationException e) { + // see notes below in createCaptureSession() for why we also catch NullPointerException, IllegalArgumentException, UnsupportedOperationException + // need to catch 
separately when wait_until_started==false due to this running on a background thread + MyDebug.logStackTrace(TAG, "exception create extension session on background thread", e); + //myStateCallback.onConfigureFailed(); + if( on_failed != null ) { + // if waiting, failure will be indicated via CameraControllerException thrown from this method + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "call on_failed as preview failed to start"); + on_failed.run(); + } + }); + } + } + } + }); + } + } + + private void createCaptureSession(boolean wait_until_started, Runnable runnable, Runnable on_failed, final MediaRecorder video_recorder, boolean want_photo_video_recording) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "create capture session"); + + if( previewBuilder == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "previewBuilder not present!"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + if( camera == null ) { + if( MyDebug.LOG ) + Log.e(TAG, "no camera"); + return; + } + + closeCaptureSession(); + + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + // check parameters are compatible with extension sessions + // we check here rather than when setting those parameters, to avoid problems with + // ordering (e.g., the caller sets those parameters, and then switches to an + // extension session) + if( want_video_high_speed ) { + throw new RuntimeException("want_video_high_speed not supported for extension session"); + } + else if( burst_type != BurstType.BURSTTYPE_NONE ) { + throw new RuntimeException("burst_type not supported for extension session"); + } + else if( want_jpeg_r ) { + throw new RuntimeException("want_jpeg_r not supported for extension session"); + } + else if( want_raw ) { + throw new RuntimeException("want_raw not supported for extension session"); + } + else if( 
camera_settings.has_iso ) { + throw new RuntimeException("has_iso not supported for extension session"); + } + else if( camera_settings.ae_target_fps_range != null ) { + throw new RuntimeException("ae_target_fps_range not supported for extension session"); + } + else if( camera_settings.sensor_frame_duration > 0 ) { + throw new RuntimeException("sensor_frame_duration not supported for extension session"); + } + else if( camera_settings.ae_lock ) { + throw new RuntimeException("ae_lock not supported for extension session"); + } + else if( camera_settings.wb_lock ) { + throw new RuntimeException("wb_lock not supported for extension session"); + } + else if( camera_settings.has_face_detect_mode ) { + throw new RuntimeException("has_face_detect_mode not supported for extension session"); + } + else if( face_detection_listener != null ) { + throw new RuntimeException("face_detection_listener not supported for extension session"); + } + } + + try { + if( video_recorder != null ) { + if( supports_photo_video_recording && !want_video_high_speed && want_photo_video_recording ) { + createPictureImageReader(); + } + else { + closePictureImageReader(); + } + } + else { + // in some cases need to recreate picture imageReader and the texture default buffer size (e.g., see test testTakePhotoPreviewPaused()) + createPictureImageReader(); + } + if( texture != null ) { + // need to set the texture size + if( MyDebug.LOG ) + Log.d(TAG, "set size of preview texture: " + preview_width + " x " + preview_height); + if( preview_width == 0 || preview_height == 0 ) { + if( MyDebug.LOG ) + Log.e(TAG, "application needs to call setPreviewSize()"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + updatePreviewTexture(); + // also need to create a new surface for the texture, in case the size has changed - but make sure we remove the old one first! 
+ synchronized( background_camera_lock ) { + if( surface_texture != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "remove old target: " + surface_texture); + previewBuilder.removeTarget(surface_texture); + } + this.surface_texture = new Surface(texture); + if( MyDebug.LOG ) + Log.d(TAG, "created new target: " + surface_texture); + } + } + if( video_recorder != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "creating capture session for video recording"); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "picture size: " + imageReader.getWidth() + " x " + imageReader.getHeight()); + } + /*if( MyDebug.LOG ) + Log.d(TAG, "preview size: " + previewImageReader.getWidth() + " x " + previewImageReader.getHeight());*/ + if( MyDebug.LOG ) + Log.d(TAG, "set preview size: " + this.preview_width + " x " + this.preview_height); + + synchronized( background_camera_lock ) { + if( video_recorder != null ) + video_recorder_surface = video_recorder.getSurface(); + else + video_recorder_surface = null; + if( MyDebug.LOG ) + Log.d(TAG, "video_recorder_surface: " + video_recorder_surface); + } + + class MyStateCallback extends CameraCaptureSession.StateCallback { + private boolean callback_done; // must synchronize on this and notifyAll when setting to true + + private void onFailure() { + if( on_failed != null && !wait_until_started ) { + // if waiting, failure will be indicated on main thread below via CameraControllerException + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "call on_failed as preview failed to start"); + on_failed.run(); + } + }); + } + } + + void onConfigured(@NonNull CameraCaptureSession session, @NonNull CameraExtensionSession eSession) { + boolean success = false; // whether we successfully started the preview + /*try { + Thread.sleep(6000); // test slow to start preview + //Thread.sleep(25000); // test slow to start preview + } + catch(InterruptedException e) { + 
throw new RuntimeException(e); + }*/ + synchronized( background_camera_lock ) { + if( camera == null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "camera is closed"); + } + callback_done = true; + background_camera_lock.notifyAll(); + // don't call onFailure() - if camera has closed in the meantime, no need to report to user (e.g. this might be going to Settings + // whilst preview was starting) + return; + } + + if( MyDebug.LOG ) { + Log.d(TAG, "camera: " + camera); + Log.d(TAG, "previewBuilder: " + previewBuilder); + } + captureSession = session; + extensionSession = eSession; + previewBuilder.addTarget(surface_texture); + if( video_recorder != null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "add video recorder surface to previewBuilder: " + video_recorder_surface); + } + previewBuilder.addTarget(video_recorder_surface); + } + try { + setRepeatingRequest(); + success = true; + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to start preview", e); + // we indicate that we failed to start the preview by setting captureSession back to null + // this will cause a CameraControllerException to be thrown below (if wait_until_started==true), + // or via the on_failed callback (if wait_until_started==false) + captureSession = null; + extensionSession = null; + } + } + synchronized( background_camera_lock ) { + callback_done = true; + background_camera_lock.notifyAll(); + } + if( success && runnable != null && !wait_until_started ) { + // if not waiting, we run the runnable on UI thread now that preview is started + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "call runnable as preview now started"); + synchronized( background_camera_lock ) { + if( camera == null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "but camera is closed in the meantime"); + } + // don't call onFailure() - if camera has closed in the meantime, no need to report to user (e.g. 
this might be going to Settings + // whilst preview was starting) + return; + } + } + runnable.run(); + } + }); + } + else if( !success ) { + onFailure(); + } + } + + @Override + public void onConfigured(@NonNull CameraCaptureSession session) { + if( MyDebug.LOG ) { + Log.d(TAG, "onConfigured: " + session); + } + onConfigured(session, null); + } + + void onConfigureFailed() { + synchronized( background_camera_lock ) { + callback_done = true; + background_camera_lock.notifyAll(); + } + onFailure(); + // don't throw CameraControllerException here, as won't be caught - instead we throw CameraControllerException below (if wait_until_started==true), + // or via the on_failed callback (if wait_until_started==false) + } + + @Override + public void onConfigureFailed(@NonNull CameraCaptureSession session) { + if( MyDebug.LOG ) { + Log.d(TAG, "onConfigureFailed: " + session); + } + onConfigureFailed(); + } + + /*@Override + public void onReady(CameraCaptureSession session) { + if( MyDebug.LOG ) + Log.d(TAG, "onReady: " + session); + if( pending_request_when_ready != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "have pending_request_when_ready: " + pending_request_when_ready); + CaptureRequest request = pending_request_when_ready; + pending_request_when_ready = null; + try { + captureSession.capture(request, previewCaptureCallback, handler); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to take picture", e); + jpeg_todo = false; + raw_todo = false; + picture_cb = null; + if( take_picture_error_cb != null ) { + take_picture_error_cb.onError(); + take_picture_error_cb = null; + } + } + } + }*/ + } + final MyStateCallback myStateCallback = new MyStateCallback(); + + Surface preview_surface; + List surfaces; + synchronized( background_camera_lock ) { + preview_surface = getPreviewSurface(); + if( video_recorder != null ) { + if( supports_photo_video_recording && !want_video_high_speed && want_photo_video_recording ) { + surfaces = 
Arrays.asList(preview_surface, video_recorder_surface, imageReader.getSurface()); + } + else { + surfaces = Arrays.asList(preview_surface, video_recorder_surface); + } + // n.b., raw not supported for photo snapshots while video recording + } + else if( want_video_high_speed ) { + // future proofing - at the time of writing want_video_high_speed is only set when recording video, + // but if ever this is changed, can only support the preview_surface as a target + surfaces = Collections.singletonList(preview_surface); + } + else if( imageReaderRaw != null ) { + surfaces = Arrays.asList(preview_surface, imageReader.getSurface(), imageReaderRaw.getSurface()); + } + else { + surfaces = Arrays.asList(preview_surface, imageReader.getSurface()); + } + if( MyDebug.LOG ) { + Log.d(TAG, "texture: " + texture); + Log.d(TAG, "preview_surface: " + preview_surface); + Log.d(TAG, "handler: " + handler); + Log.d(TAG, "surfaces: " + surfaces); + } + } + if( MyDebug.LOG ) { + if( video_recorder == null ) { + if( imageReaderRaw != null ) { + Log.d(TAG, "imageReaderRaw: " + imageReaderRaw); + Log.d(TAG, "imageReaderRaw: " + imageReaderRaw.getWidth()); + Log.d(TAG, "imageReaderRaw: " + imageReaderRaw.getHeight()); + Log.d(TAG, "imageReaderRaw: " + imageReaderRaw.getImageFormat()); + } + else { + Log.d(TAG, "imageReader: " + imageReader); + Log.d(TAG, "imageReader width: " + imageReader.getWidth()); + Log.d(TAG, "imageReader height: " + imageReader.getHeight()); + Log.d(TAG, "imageReader format: " + imageReader.getImageFormat()); + } + } + } + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + resetCaptureResultInfo(); // important as extension modes don't receive capture result info + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + if( MyDebug.LOG ) + Log.d(TAG, "create extension capture session"); + //int extension = CameraExtensionCharacteristics.EXTENSION_AUTOMATIC; + //int extension = CameraExtensionCharacteristics.EXTENSION_BOKEH; + int extension = 
camera_extension; + List outputs = createOutputConfigurationList(surfaces, preview_surface); + ExtensionSessionConfiguration extensionConfiguration = new ExtensionSessionConfiguration( + extension, + outputs, + executor, + new CameraExtensionSession.StateCallback() { + @Override + public void onConfigured(@NonNull CameraExtensionSession session) { + if( MyDebug.LOG ) { + Log.d(TAG, "onConfigured: " + session); + } + myStateCallback.onConfigured(null, session); + } + + @Override + public void onConfigureFailed(@NonNull CameraExtensionSession session) { + if( MyDebug.LOG ) { + Log.d(TAG, "onConfigureFailed: " + session); + } + myStateCallback.onConfigureFailed(); + } + + @Override + public void onClosed(@NonNull CameraExtensionSession session) { + if( MyDebug.LOG ) { + Log.d(TAG, "onClosed: " + session); + } + } + } + ); + launchCameraSession(wait_until_started, new CreateCaptureSessionFunction() { + @Override + public void call() throws CameraAccessException { + if( camera == null ) { + // just in case - don't throw exception as we don't want to show error toast, as it may be that another request to start preview is already active + Log.e(TAG, "camera is no longer open"); + return; + } + /*if( true ) + throw new UnsupportedOperationException(); // test*/ + camera.createExtensionSession(extensionConfiguration); + } + }, on_failed); + } + is_video_high_speed = false; + } + else if( video_recorder != null && want_video_high_speed && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + //if( want_video_high_speed && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.d(TAG, "create high speed capture session"); + if( ( cameraIdSPhysical != null || want_jpeg_r ) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.P ) { + List outputs = createOutputConfigurationList(surfaces, preview_surface); + SessionConfiguration sessionConfiguration = new SessionConfiguration(SessionConfiguration.SESSION_HIGH_SPEED, outputs, executor, myStateCallback); + 
launchCameraSession(wait_until_started, new CreateCaptureSessionFunction() { + @Override + public void call() throws CameraAccessException { + if( camera == null ) { + // just in case - don't throw exception as we don't want to show error toast, as it may be that another request to start preview is already active + Log.e(TAG, "camera is no longer open"); + return; + } + camera.createCaptureSession(sessionConfiguration); + } + }, on_failed); + } + else { + launchCameraSession(wait_until_started, new CreateCaptureSessionFunction() { + @Override + public void call() throws CameraAccessException { + if( camera == null ) { + // just in case - don't throw exception as we don't want to show error toast, as it may be that another request to start preview is already active + Log.e(TAG, "camera is no longer open"); + return; + } + camera.createConstrainedHighSpeedCaptureSession(surfaces, + myStateCallback, + handler); + } + }, on_failed); + } + is_video_high_speed = true; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "create capture session"); + try { + if( ( cameraIdSPhysical != null || want_jpeg_r ) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.P ) { + List outputs = createOutputConfigurationList(surfaces, preview_surface); + /*camera.createCaptureSessionByOutputConfigurations(outputs, + myStateCallback, + handler);*/ + SessionConfiguration sessionConfiguration = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, executor, myStateCallback); + launchCameraSession(wait_until_started, new CreateCaptureSessionFunction() { + @Override + public void call() throws CameraAccessException { + if( camera == null ) { + // just in case - don't throw exception as we don't want to show error toast, as it may be that another request to start preview is already active + Log.e(TAG, "camera is no longer open"); + return; + } + camera.createCaptureSession(sessionConfiguration); + } + }, on_failed); + } + else { + launchCameraSession(wait_until_started, new 
CreateCaptureSessionFunction() { + @Override + public void call() throws CameraAccessException { + /*if( true ) + throw new CameraAccessException(CameraAccessException.CAMERA_ERROR); // test*/ + if( camera == null ) { + // just in case - don't throw exception as we don't want to show error toast, as it may be that another request to start preview is already active + Log.e(TAG, "camera is no longer open"); + return; + } + camera.createCaptureSession(surfaces, + myStateCallback, + handler); + } + }, on_failed); + } + is_video_high_speed = false; + } + catch(NullPointerException e) { + // have had this from some devices on Google Play, from deep within createCaptureSession + // note, we put the catch here rather than below, so as to not mask nullpointerexceptions + // from my code + MyDebug.logStackTrace(TAG, "NullPointerException trying to create capture session", e); + throw new CameraControllerException(); + } + } + + if( wait_until_started ) { + if( MyDebug.LOG ) + Log.d(TAG, "wait until session created..."); + // n.b., we use the background_camera_lock lock instead of a separate lock, so that it's safe to call this + // method under the background_camera_lock (if we did so but used a separate lock, we'd hang here, because + // MyStateCallback.onConfigured() needs to lock on background_camera_lock, before it completes and sets + // myStateCallback.callback_done to true. 
+ synchronized( background_camera_lock ) { + while( !myStateCallback.callback_done ) { + try { + // release the lock, and wait until myStateCallback calls notifyAll() + background_camera_lock.wait(); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "InterruptedException from background_camera_lock.wait()", e); + } + } + } + if( MyDebug.LOG ) { + if( captureSession != null ) + Log.d(TAG, "created captureSession: " + captureSession); + if( extensionSession != null ) + Log.d(TAG, "created extensionSession: " + extensionSession); + } + synchronized( background_camera_lock ) { + if( !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.e(TAG, "failed to create capture session"); + throw new CameraControllerException(); + } + } + + if( runnable != null ) { + runnable.run(); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "NOT waiting until session created"); + // runnable is instead run from callback once preview is started + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "CameraAccessException trying to create capture session", e); + throw new CameraControllerException(); + } + catch(IllegalArgumentException e) { + // have had crashes from Google Play, from both createConstrainedHighSpeedCaptureSession and + // createCaptureSession + MyDebug.logStackTrace(TAG, "IllegalArgumentException trying to create capture session", e); + throw new CameraControllerException(); + } + catch(UnsupportedOperationException e) { + // have had crashes from Google Play, from createExtensionSession + MyDebug.logStackTrace(TAG, "UnsupportedOperationException trying to create capture session", e); + throw new CameraControllerException(); + } + } + + @Override + public void startPreview(boolean wait_until_started, Runnable runnable, Runnable on_failed) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "startPreview"); + + if( !camera_settings.has_af_mode && initial_focus_mode != null && sessionType != SessionType.SESSIONTYPE_EXTENSION ) 
{ + if( MyDebug.LOG ) + Log.d(TAG, "user didn't specify focus, so set to: " + initial_focus_mode); + // If the caller hasn't set a focus mode, but focus modes are supported, it's still better to explicitly set one rather than leaving to the + // builder's default - e.g., problem on Android emulator with LIMITED camera where it only supported infinity focus (CONTROL_AF_MODE_OFF), but + // the preview builder defaults to CONTROL_AF_MODE_CONTINUOUS_PICTURE! This meant we froze when trying to take a photo, because we thought + // we were in continuous picture mode and so waited in state STATE_WAITING_AUTOFOCUS, but the focus never occurred. + // Ideally the caller to CameraController2 (Preview) should always explicitly set a focus mode if at least 1 focus mode is supported. At the + // time of writing, Preview only sets a focus if at least 2 focus modes are supported. But even if we fix that in future, still good to have + // well defined behaviour at the CameraController level. + setFocusValue(initial_focus_mode); + } + + synchronized( background_camera_lock ) { + if( hasCaptureSession() ) { + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to start preview", e); + // do via CameraControllerException instead of preview_error_cb, so caller immediately knows preview has failed + throw new CameraControllerException(); + } + if( runnable != null ) { + runnable.run(); + } + return; + } + } + createCaptureSession(wait_until_started, runnable, on_failed, null, false); + } + + @Override + public void stopRepeating() { + if( MyDebug.LOG ) + Log.d(TAG, "stopRepeating: " + this); + stopPreview(false); + } + + @Override + public void stopPreview() { + if( MyDebug.LOG ) + Log.d(TAG, "stopPreview: " + this); + stopPreview(true); + } + + public void stopPreview(boolean close_capture_session) { + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no 
camera or capture session"); + return; + } + try { + //pending_request_when_ready = null; + + try { + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + extensionSession.stopRepeating(); + } + } + else { + captureSession.stopRepeating(); + } + } + catch(IllegalStateException e) { + MyDebug.logStackTrace(TAG, "captureSession already closed!", e); + // got this as a Google Play exception + // we still call close() below, as it has no effect if captureSession is already closed + } + if( close_capture_session ) { + // although stopRepeating() alone will pause the preview, seems better to close captureSession altogether - this allows the app to make changes such as changing the picture size + closeCaptureSession(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to stop repeating", e); + } + // simulate CameraController1 behaviour where face detection is stopped when we stop preview + if( camera_settings.has_face_detect_mode && close_capture_session ) { + if( MyDebug.LOG ) + Log.d(TAG, "cancel face detection"); + camera_settings.has_face_detect_mode = false; + camera_settings.setFaceDetectMode(previewBuilder); + // no need to call setRepeatingRequest(), we're just setting the camera_settings for when we restart the preview + } + } + } + + @Override + public boolean startFaceDetection() { + if( MyDebug.LOG ) + Log.d(TAG, "startFaceDetection"); + BLOCK_FOR_EXTENSIONS(); + if( previewBuilder.get(CaptureRequest.STATISTICS_FACE_DETECT_MODE) != null && previewBuilder.get(CaptureRequest.STATISTICS_FACE_DETECT_MODE) != CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF ) { + if( MyDebug.LOG ) + Log.d(TAG, "face detection already enabled"); + return false; + } + if( supports_face_detect_mode_full ) { + if( MyDebug.LOG ) + Log.d(TAG, "use full face detection"); + camera_settings.has_face_detect_mode = true; + camera_settings.face_detect_mode = 
CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL; + } + else if( supports_face_detect_mode_simple ) { + if( MyDebug.LOG ) + Log.d(TAG, "use simple face detection"); + camera_settings.has_face_detect_mode = true; + camera_settings.face_detect_mode = CaptureRequest.STATISTICS_FACE_DETECT_MODE_SIMPLE; + } + else { + Log.e(TAG, "startFaceDetection() called but face detection not available"); + return false; + } + camera_settings.setFaceDetectMode(previewBuilder); + camera_settings.setSceneMode(previewBuilder); // also need to set the scene mode + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to start face detection", e); + return false; + } + return true; + } + + @Override + public void setFaceDetectionListener(final FaceDetectionListener listener) { + if( listener != null ) { + BLOCK_FOR_EXTENSIONS(); + } + this.face_detection_listener = listener; + this.last_faces_detected = -1; + } + + /* If do_af_trigger_for_continuous is false, doing an autoFocus() in continuous focus mode just + means we call the autofocus callback the moment focus is not scanning (as with old Camera API). + If do_af_trigger_for_continuous is true, we set CONTROL_AF_TRIGGER_START, and wait for + CONTROL_AF_STATE_FOCUSED_LOCKED or CONTROL_AF_STATE_NOT_FOCUSED_LOCKED, similar to other focus + methods. + do_af_trigger_for_continuous==true used to have advantages: + - On Nexus 6 for flash auto, it means ae state is set to FLASH_REQUIRED if it is required + when it comes to taking the photo. If do_af_trigger_for_continuous==false, sometimes + it's set to CONTROL_AE_STATE_CONVERGED even for dark scenes, so we think we can skip + the precapture, causing photos to come out dark (or we can force always doing precapture, + but that makes things slower when flash isn't needed) + Update: this now seems hard to reproduce. 
+ - On OnePlus 3T, with do_af_trigger_for_continuous==false photos come out with blue tinge + if the scene is not dark (but still dark enough that you'd want flash). + do_af_trigger_for_continuous==true fixes this for cases where the flash fires for autofocus. + Note that the problem is still not fixed for flash on where the scene is bright enough to + not need flash (and so we don't fire flash for autofocus). + Update: now fixed by setting TEMPLATE_PREVIEW for the precaptureBuilder. + do_af_trigger_for_continuous==true has disadvantage: + - On both Nexus 6 and OnePlus 3T, taking photos with flash is longer, as we have flash firing + for autofocus and precapture. Though note this is the case with autofocus mode anyway. + Note for fake flash mode, we still can use do_af_trigger_for_continuous==false (and doing the + af trigger for fake flash mode can sometimes mean flash fires for too long and we get a worse + result). + */ + private final static boolean do_af_trigger_for_continuous = false; + + @Override + public void autoFocus(final AutoFocusCallback cb, boolean capture_follows_autofocus_hint) { + if( MyDebug.LOG ) { + Log.d(TAG, "autoFocus"); + Log.d(TAG, "capture_follows_autofocus_hint? " + capture_follows_autofocus_hint); + } + AutoFocusCallback push_autofocus_cb = null; + synchronized( background_camera_lock ) { + fake_precapture_torch_focus_performed = false; + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + // should call the callback, so the application isn't left waiting (e.g., when we autofocus before trying to take a photo) + cb.onAutoFocus(false); + return; + } + Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE); + if( MyDebug.LOG ) + Log.d(TAG, "focus mode: " + (focus_mode == null ? 
"null" : focus_mode)); + if( focus_mode == null ) { + // we preserve the old Camera API where calling autoFocus() on a device without autofocus immediately calls the callback + // (unclear if Open Camera needs this, but just to be safe and consistent between camera APIs) + if( MyDebug.LOG ) + Log.d(TAG, "no focus mode"); + cb.onAutoFocus(true); + return; + } + else if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + if( MyDebug.LOG ) + Log.d(TAG, "no auto focus for extensions"); + cb.onAutoFocus(true); + return; + } + else if( (!do_af_trigger_for_continuous || use_fake_precapture_mode) && focus_mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ) { + // See note above for do_af_trigger_for_continuous + if( MyDebug.LOG ) + Log.d(TAG, "skip af trigger due to continuous mode"); + this.capture_follows_autofocus_hint = capture_follows_autofocus_hint; + this.autofocus_cb = cb; + this.autofocus_time_ms = System.currentTimeMillis(); + return; + } + else if( is_video_high_speed ) { + // CONTROL_AF_TRIGGER_IDLE/CONTROL_AF_TRIGGER_START not supported for high speed video + cb.onAutoFocus(true); + return; + } + /*if( state == STATE_WAITING_AUTOFOCUS ) { + if( MyDebug.LOG ) + Log.d(TAG, "already waiting for an autofocus"); + // need to update the callback! + this.capture_follows_autofocus_hint = capture_follows_autofocus_hint; + this.autofocus_cb = cb; + this.autofocus_time_ms = System.currentTimeMillis(); + return; + }*/ + CaptureRequest.Builder afBuilder = previewBuilder; + if( MyDebug.LOG ) { + { + MeteringRectangle [] areas = afBuilder.get(CaptureRequest.CONTROL_AF_REGIONS); + for(int i=0;areas != null && i actual_exposure_time ) { + adjust_preview = true; + } + } + else { + // preview is auto but still is manual + adjust_preview = true; + } + } + } + return adjust_preview; + } + + /** Adjusts the preview's manual exposure to match the stillRequest's manual exposure. Should only + * be called if adjustPreview() returns true. 
+ * We use RUN_POST_CAPTURE, so we can be sure that the request to adjust the preview's exposure has + * completed. + */ + private void adjustPreviewToStill(CaptureRequest stillRequest, PostCapture post_capture) throws CameraAccessException { + if( MyDebug.LOG ) + Log.d(TAG, "adjustPreviewToStill"); + previewBuilder.set(CaptureRequest.CONTROL_AE_MODE, stillRequest.get(CaptureRequest.CONTROL_AE_MODE)); + previewBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, stillRequest.get(CaptureRequest.SENSOR_EXPOSURE_TIME)); + this.run_post_capture = post_capture; + previewBuilder.setTag(new RequestTagObject(RequestTagType.RUN_POST_CAPTURE)); + captureSession.capture(previewBuilder.build(), previewCaptureCallback, handler); + previewBuilder.setTag(null); + setRepeatingRequest(); + } + + private void takePictureAfterPrecapture() { + if( MyDebug.LOG ) + Log.d(TAG, "takePictureAfterPrecapture"); + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + + if( !previewIsVideoMode ) { + // special burst modes not supported for photo snapshots when recording video + if( burst_type == BurstType.BURSTTYPE_EXPO || burst_type == BurstType.BURSTTYPE_FOCUS ) { + takePictureBurstBracketing(); + if( MyDebug.LOG ) { + Log.d(TAG, "takePictureAfterPrecapture() took: " + (System.currentTimeMillis() - debug_time)); + } + return; + } + else if( burst_type == BurstType.BURSTTYPE_NORMAL || burst_type == BurstType.BURSTTYPE_CONTINUOUS ) { + takePictureBurst(false); + if( MyDebug.LOG ) { + Log.d(TAG, "takePictureAfterPrecapture() took: " + (System.currentTimeMillis() - debug_time)); + } + return; + } + } + + CaptureRequest.Builder stillBuilder = null; + boolean ok = true; + ErrorCallback push_take_picture_error_cb = null; + + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + return; + } + try { + if( MyDebug.LOG ) { + if( imageReaderRaw != null ) { + 
Log.d(TAG, "imageReaderRaw: " + imageReaderRaw); + Log.d(TAG, "imageReaderRaw surface: " + imageReaderRaw.getSurface().toString()); + } + else { + Log.d(TAG, "imageReader: " + imageReader.toString()); + Log.d(TAG, "imageReader surface: " + imageReader.getSurface().toString()); + } + } + // important to use TEMPLATE_MANUAL for manual exposure: this fixes bug on Pixel 6 Pro where manual exposure is ignored when longer than the + // preview exposure time (oddly Galaxy S10e has the same bug since Android 11, but that isn't fixed with using TEMPLATE_MANUAL) + stillBuilder = camera.createCaptureRequest(previewIsVideoMode ? CameraDevice.TEMPLATE_VIDEO_SNAPSHOT : camera_settings.has_iso ? CameraDevice.TEMPLATE_MANUAL : CameraDevice.TEMPLATE_STILL_CAPTURE); + stillBuilder.setTag(new RequestTagObject(RequestTagType.CAPTURE)); + camera_settings.setupBuilder(stillBuilder, true); + if( use_fake_precapture_mode && fake_precapture_torch_performed ) { + if( MyDebug.LOG ) + Log.d(TAG, "setting torch for capture"); + if( !camera_settings.has_iso ) + stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); + stillBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH); + test_fake_flash_photo++; + } + // Versions previous to 1.51 would switch to manual mode and underexpose in bright scenes; however on more modern devices such as Samsung and + // Pixels, this means that we lose the benefit of manufacturer algorithms creating a worse result. So we're better off staying in auto mode. + // (Even on old versions, we didn't do this on OnePlus devices due to OnePlus 3T having preview corruption / camera freezing problems when + // using manual shutter speeds.) 
+ //stillBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + //stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && sessionType != SessionType.SESSIONTYPE_EXTENSION ) { + // unclear why we wouldn't want to request ZSL + // this is also required to enable HDR+ on Google Pixel devices when using Camera2: https://opensource.google.com/projects/pixelvisualcorecamera + // but don't set for extension sessions (in theory it should be ignored, but just in case) + stillBuilder.set(CaptureRequest.CONTROL_ENABLE_ZSL, true); + if( MyDebug.LOG ) { + Boolean zsl = stillBuilder.get(CaptureRequest.CONTROL_ENABLE_ZSL); + Log.d(TAG, "CONTROL_ENABLE_ZSL: " + (zsl==null ? "null" : zsl)); + } + } + clearPending(); + // shouldn't add preview surface as a target - no known benefit to doing so + stillBuilder.addTarget(imageReader.getSurface()); + if( imageReaderRaw != null ) + stillBuilder.addTarget(imageReaderRaw.getSurface()); + + n_burst = 1; + n_burst_taken = 0; + n_burst_total = n_burst; + n_burst_raw = raw_todo ? n_burst : 0; + burst_single_request = false; + if( !previewIsVideoMode ) { + // need to stop preview before capture (as done in Camera2Basic; otherwise we get bugs such as flash remaining on after taking a photo with flash) + // but don't do this in video mode - if we're taking photo snapshots while video recording, we don't want to pause video! 
+ // update: bug with flash may have been device specific (things are fine with Nokia 8) + if( sessionType != SessionType.SESSIONTYPE_EXTENSION ) + captureSession.stopRepeating(); + } + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to take picture", e); + ok = false; + jpeg_todo = false; + raw_todo = false; + picture_cb = null; + push_take_picture_error_cb = take_picture_error_cb; + take_picture_error_cb = null; + } + catch(IllegalStateException e) { + MyDebug.logStackTrace(TAG, "captureSession already closed!", e); + ok = false; + jpeg_todo = false; + raw_todo = false; + picture_cb = null; + // don't report error, as camera is closed or closing + } + } + + // need to call callbacks without a lock + if( ok && picture_cb != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "call onStarted() in callback"); + picture_cb.onStarted(); + } + + if( ok ) { + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + return; + } + if( test_release_during_photo ) { + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "test UI thread call release()"); + release(); + } + }); + try { + Thread.sleep(1000); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "InterruptedException from sleep", e); + } + } + try { + if( MyDebug.LOG ) + Log.d(TAG, "capture with stillBuilder"); + //pending_request_when_ready = stillBuilder.build(); + + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S ) { + extensionSession.capture(stillBuilder.build(), executor, previewExtensionCaptureCallback); + } + } + else { + final CaptureRequest capture = stillBuilder.build(); + boolean adjust_preview = adjustPreview(capture); + if( adjust_preview ) { + if( MyDebug.LOG ) + Log.d(TAG, "long manual exposure workaround: 
adjust preview first"); + + PostCapture post_capture = new PostCapture() { + @Override + public void call() throws CameraAccessException { + captureSession.capture(capture, previewCaptureCallback, handler); + } + }; + + adjustPreviewToStill(capture, post_capture); + } + else { + captureSession.capture(capture, previewCaptureCallback, handler); + } + //captureSession.capture(stillBuilder.build(), new CameraCaptureSession.CaptureCallback() { + //}, handler); + } + playSound(shutter_click_sound); // play shutter sound asap, otherwise user has the illusion of being slow to take photos + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to take picture", e); + //noinspection UnusedAssignment + ok = false; + jpeg_todo = false; + raw_todo = false; + picture_cb = null; + push_take_picture_error_cb = take_picture_error_cb; + } + catch(IllegalStateException e) { + MyDebug.logStackTrace(TAG, "captureSession already closed!", e); + //noinspection UnusedAssignment + ok = false; + jpeg_todo = false; + raw_todo = false; + picture_cb = null; + // don't report error, as camera is closed or closing + } + } + } + + // need to call callbacks without a lock + if( push_take_picture_error_cb != null ) { + push_take_picture_error_cb.onError(); + } + if( MyDebug.LOG ) { + Log.d(TAG, "takePictureAfterPrecapture() took: " + (System.currentTimeMillis() - debug_time)); + } + } + + public static List setupFocusBracketingDistances(float source, float target, int count) { + List focus_distances = new ArrayList<>(); + float focus_distance_s = source; + float focus_distance_e = target; + final float max_focus_bracket_distance_c = 0.1f; // 10m + focus_distance_s = Math.max(focus_distance_s, max_focus_bracket_distance_c); // since we'll dealing with 1/distance, use Math.max + focus_distance_e = Math.max(focus_distance_e, max_focus_bracket_distance_c); // since we'll dealing with 1/distance, use Math.max + if( MyDebug.LOG ) { + Log.d(TAG, "focus_distance_s: " + 
focus_distance_s); + Log.d(TAG, "focus_distance_e: " + focus_distance_e); + } + // we want to interpolate linearly in distance, not 1/distance + float real_focus_distance_s = 1.0f/focus_distance_s; + float real_focus_distance_e = 1.0f/focus_distance_e; + if( MyDebug.LOG ) { + Log.d(TAG, "real_focus_distance_s: " + real_focus_distance_s); + Log.d(TAG, "real_focus_distance_e: " + real_focus_distance_e); + } + for(int i=0;i real_focus_distance_e ) { + // if source is further than target, we still want the interpolation distances to be the same, but in reversed order + value = count-1-i; + } + float alpha = (float)(1.0-Math.log(count-value)/Math.log(count)); + if( real_focus_distance_s > real_focus_distance_e ) { + alpha = 1.0f-alpha; + } + float real_distance = (1.0f-alpha)*real_focus_distance_s + alpha*real_focus_distance_e; + if( MyDebug.LOG ) { + Log.d(TAG, " alpha: " + alpha); + Log.d(TAG, " real_distance: " + real_distance); + } + distance = 1.0f/real_distance; + } + if( MyDebug.LOG ) { + Log.d(TAG, " distance: " + distance); + } + focus_distances.add(distance); + } + return focus_distances; + } + + private void takePictureBurstBracketing() { + if( MyDebug.LOG ) + Log.d(TAG, "takePictureBurstBracketing"); + if( burst_type != BurstType.BURSTTYPE_EXPO && burst_type != BurstType.BURSTTYPE_FOCUS ) { + Log.e(TAG, "takePictureBurstBracketing called but unexpected burst_type: " + burst_type); + } + BLOCK_FOR_EXTENSIONS(); // not supported for extension sessions + + List requests = new ArrayList<>(); + boolean ok = true; + ErrorCallback push_take_picture_error_cb = null; + + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + return; + } + try { + if( MyDebug.LOG ) { + Log.d(TAG, "imageReader: " + imageReader.toString()); + Log.d(TAG, "imageReader surface: " + imageReader.getSurface().toString()); + } + int n_dummy_requests = 0; + + CaptureRequest.Builder 
stillBuilder = camera.createCaptureRequest((burst_type == BurstType.BURSTTYPE_EXPO || camera_settings.has_iso) ? CameraDevice.TEMPLATE_MANUAL : CameraDevice.TEMPLATE_STILL_CAPTURE); + // Needs to be TEMPLATE_MANUAL! Otherwise first image in burst may come out incorrectly (on Pixel 6 Pro, + // the first image incorrectly had HDR+ applied, which we don't want here). Also problem on Pixel 6 Pro + // where manual exposure is ignored when longer than the preview exposure. + // Update: but only when doing burst for expo bracketing, not focus bracketing (unless actually doing that + // in manual mode)! (Only manual exposure should use TEMPLATE_MANUAL, otherwise focus bracketing images + // come out underexposed on Pixel 6 Pro). + // n.b., don't set RequestTagType.CAPTURE here - we only do it for the last of the burst captures (see below) + camera_settings.setupBuilder(stillBuilder, true); + + if( MyDebug.LOG && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) { + Boolean zsl = stillBuilder.get(CaptureRequest.CONTROL_ENABLE_ZSL); + Log.d(TAG, "CONTROL_ENABLE_ZSL: " + (zsl==null ? 
"null" : zsl)); + } + + clearPending(); + // shouldn't add preview surface as a target - see note in takePictureAfterPrecapture() + // but also, adding the preview surface causes the dark/light exposures to be visible, which we don't want + stillBuilder.addTarget(imageReader.getSurface()); + if( raw_todo ) + stillBuilder.addTarget(imageReaderRaw.getSurface()); + + if( burst_type == BurstType.BURSTTYPE_EXPO ) { + + if( MyDebug.LOG ) + Log.d(TAG, "expo bracketing"); + + /*stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); + stillBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF); + + stillBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, -6); + requests.add( stillBuilder.build() ); + stillBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); + requests.add( stillBuilder.build() ); + stillBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 6); + requests.add( stillBuilder.build() );*/ + + stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF); + if( use_fake_precapture_mode && fake_precapture_torch_performed ) { + if( MyDebug.LOG ) + Log.d(TAG, "setting torch for capture"); + stillBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH); + test_fake_flash_photo++; + } + // else don't turn torch off, as user may be in torch on mode + + Range iso_range = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE); // may be null on some devices + if( iso_range == null ) { + Log.e(TAG, "takePictureBurstBracketing called but null iso_range"); + } + else { + // set ISO + int iso = 800; + // obtain current ISO/etc settings from the capture result - but if we're in manual ISO mode, + // might as well use the settings the user has actually requested (also useful for workaround for + // OnePlus 3T bug where the reported ISO and exposure_time are wrong in dark scenes) + if( camera_settings.has_iso ) + iso = camera_settings.iso; + else if( 
capture_result_has_iso ) + iso = capture_result_iso; + // see https://sourceforge.net/p/opencamera/tickets/321/ - some devices may have auto ISO that's + // outside of the allowed manual iso range! + iso = Math.max(iso, iso_range.getLower()); + iso = Math.min(iso, iso_range.getUpper()); + stillBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, iso ); + } + if( capture_result_has_frame_duration ) + stillBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, capture_result_frame_duration); + else + stillBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, 1000000000L/30); + + long base_exposure_time = 1000000000L/30; + if( camera_settings.has_iso ) + base_exposure_time = camera_settings.exposure_time; + else if( capture_result_has_exposure_time ) + base_exposure_time = capture_result_exposure_time; + + int n_half_images = expo_bracketing_n_images/2; + final double scale = Math.pow(2.0, expo_bracketing_stops/(double)n_half_images); + + if( MyDebug.LOG ) { + Log.d(TAG, "taking expo bracketing with n_images: " + expo_bracketing_n_images); + Log.d(TAG, "ISO: " + stillBuilder.get(CaptureRequest.SENSOR_SENSITIVITY)); + Log.d(TAG, "Frame duration: " + stillBuilder.get(CaptureRequest.SENSOR_FRAME_DURATION)); + Log.d(TAG, "Base exposure time: " + base_exposure_time); + Log.d(TAG, "Min exposure time: " + min_exposure_time); + Log.d(TAG, "Max exposure time: " + max_exposure_time); + } + + if( dummy_capture_hack && use_expo_fast_burst ) { + if( MyDebug.LOG ) + Log.d(TAG, "add dummy capture"); + // dummy_capture_hack only supported for use_expo_fast_burst==true - + // supporting for use_expo_fast_burst==false would complicate the code, and + // these are only special case hacks anyway + stillBuilder.setTag(null); + requests.add( stillBuilder.build() ); + n_dummy_requests++; + if( onImageAvailableListener != null ) + onImageAvailableListener.skip_next_image = true; + if( onRawImageAvailableListener != null ) + onRawImageAvailableListener.skip_next_image = true; + } + + // darker images + 
for(int i=0;i max_exposure_time ) + exposure_time = max_exposure_time; + if( MyDebug.LOG ) { + Log.d(TAG, "add burst request for " + i + "th light image:"); + Log.d(TAG, " this_scale: " + this_scale); + Log.d(TAG, " exposure_time: " + exposure_time); + } + stillBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposure_time); + if( i == n_half_images - 1 ) { + // RequestTagType.CAPTURE should only be set for the last request, otherwise we'll may do things like turning + // off torch (for fake flash) before all images are received + // More generally, doesn't seem a good idea to be doing the post-capture commands (resetting ae state etc) + // multiple times, and before all captures are complete! + if( MyDebug.LOG ) + Log.d(TAG, "set RequestTagType.CAPTURE for last burst request"); + stillBuilder.setTag(new RequestTagObject(RequestTagType.CAPTURE)); + } + else { + stillBuilder.setTag(new RequestTagObject(RequestTagType.CAPTURE_BURST_IN_PROGRESS)); + } + requests.add( stillBuilder.build() ); + } + } + + burst_single_request = true; + } + else { + // BURSTTYPE_FOCUS + if( MyDebug.LOG ) + Log.d(TAG, "focus bracketing"); + + if( use_fake_precapture_mode && fake_precapture_torch_performed ) { + if( MyDebug.LOG ) + Log.d(TAG, "setting torch for capture"); + if( !camera_settings.has_iso ) + stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); + stillBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH); + test_fake_flash_photo++; + } + + stillBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF); // just in case + + if( Math.abs(camera_settings.focus_distance - focus_bracketing_source_distance) < 1.0e-5 ) { + if( MyDebug.LOG ) + Log.d(TAG, "current focus matches source"); + } + else if( Math.abs(camera_settings.focus_distance - focus_bracketing_target_distance) < 1.0e-5 ) { + if( MyDebug.LOG ) + Log.d(TAG, "current focus matches target"); + } + else { + Log.d(TAG, "current focus matches neither 
source nor target"); + } + + List focus_distances = setupFocusBracketingDistances(focus_bracketing_source_distance, focus_bracketing_target_distance, focus_bracketing_n_images); + if( focus_bracketing_add_infinity ) { + focus_distances.add(0.0f); + } + for(int i=0;i= 10 || n_burst_raw >= 10 ) { + // Nokia 8 in std mode without post-processing options doesn't hit this limit (we only hit this + // if it's set to "n_burst >= 5") + if( MyDebug.LOG ) { + Log.d(TAG, "...but wait for continuous burst, as waiting for too many photos"); + } + //throw new RuntimeException(); // test + handler.postDelayed(this, continuous_burst_rate_ms); + } + else if( picture_cb.imageQueueWouldBlock(n_burst_raw, n_burst+1) ) { + if( MyDebug.LOG ) { + Log.d(TAG, "...but wait for continuous burst, as image queue would block"); + } + //throw new RuntimeException(); // test + handler.postDelayed(this, continuous_burst_rate_ms); + } + else { + takePictureBurst(true); + } + } + }, continuous_burst_rate_ms); + } + } + else { + List requests = new ArrayList<>(); + for(int i=0;i= iso_threshold; + } + + /** Used in use_fake_precapture mode when flash is auto, this returns whether we fire the flash. + * If the decision was recently calculated, we return that same decision - used to fix problem that if + * we fire flash during autofocus (for autofocus mode), we don't then want to decide the scene is too + * bright to not need flash for taking photo! 
+ */ + private boolean fireAutoFlash() { + if( MyDebug.LOG ) + Log.d(TAG, "fireAutoFlash"); + long time_now = System.currentTimeMillis(); + if( MyDebug.LOG && fake_precapture_use_flash_time_ms != -1 ) { + Log.d(TAG, "fake_precapture_use_flash_time_ms: " + fake_precapture_use_flash_time_ms); + Log.d(TAG, "time_now: " + time_now); + Log.d(TAG, "time since last flash auto decision: " + (time_now - fake_precapture_use_flash_time_ms)); + } + final long cache_time_ms = 3000; // needs to be at least the time of a typical autoflash, see comment for this function above + if( fake_precapture_use_flash_time_ms != -1 && time_now - fake_precapture_use_flash_time_ms < cache_time_ms ) { + if( MyDebug.LOG ) + Log.d(TAG, "use recent decision: " + fake_precapture_use_flash); + fake_precapture_use_flash_time_ms = time_now; + return fake_precapture_use_flash; + } + switch(camera_settings.flash_value) { + case "flash_auto": + fake_precapture_use_flash = is_flash_required; + break; + case "flash_frontscreen_auto": + fake_precapture_use_flash = fireAutoFlashFrontScreen(); + if(MyDebug.LOG) + Log.d(TAG, " ISO was: " + capture_result_iso); + break; + default: + // shouldn't really be calling this function if not flash auto... + fake_precapture_use_flash = false; + break; + } + if( MyDebug.LOG ) + Log.d(TAG, "fake_precapture_use_flash: " + fake_precapture_use_flash); + // We only cache the result if we decide to turn on torch, as that mucks up our ability to tell if we need the flash (since once the torch + // is on, the ae_state thinks it's bright enough to not need flash!) + // But if we don't turn on torch, this problem doesn't occur, so no need to cache - and good that the next time we should make an up-to-date + // decision. 
+ if( fake_precapture_use_flash ) { + fake_precapture_use_flash_time_ms = time_now; + } + else { + fake_precapture_use_flash_time_ms = -1; + } + return fake_precapture_use_flash; + } + + @Override + public void takePicture(final PictureCallback picture, final ErrorCallback error) { + if( MyDebug.LOG ) + Log.d(TAG, "takePicture"); + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + + boolean call_takePictureAfterPrecapture = false; + boolean call_runFakePrecapture = false; + boolean call_runPrecapture = false; + + synchronized( background_camera_lock ) { + if( camera == null || !hasCaptureSession() ) { + if( MyDebug.LOG ) + Log.d(TAG, "no camera or capture session"); + error.onError(); + return; + } + this.picture_cb = picture; + this.jpeg_todo = true; + this.raw_todo = imageReaderRaw != null; + this.done_all_captures = false; + this.take_picture_error_cb = error; + this.fake_precapture_torch_performed = false; // just in case still on? + if( sessionType == SessionType.SESSIONTYPE_NORMAL && !ready_for_capture ) { + if( MyDebug.LOG ) + Log.e(TAG, "takePicture: not ready for capture!"); + //throw new RuntimeException(); // debugging + } + + { + if( MyDebug.LOG ) { + Log.d(TAG, "current flash value: " + camera_settings.flash_value); + Log.d(TAG, "use_fake_precapture_mode: " + use_fake_precapture_mode); + } + if( sessionType == SessionType.SESSIONTYPE_EXTENSION ) { + // precapture not supported for extensions + call_takePictureAfterPrecapture = true; + } + else if( camera_settings.flash_value.equals("flash_off") || camera_settings.flash_value.equals("flash_torch") || camera_settings.flash_value.equals("flash_frontscreen_torch") ) { + // Don't need precapture if flash off or torch + call_takePictureAfterPrecapture = true; + } + else if( use_fake_precapture_mode ) { + // fake flash auto/on mode + // fake precapture works by turning on torch (or using a "front screen flash"), so we can't use the camera's own decision for flash auto 
+ // instead we check the current ISO value + boolean auto_flash = camera_settings.flash_value.equals("flash_auto") || camera_settings.flash_value.equals("flash_frontscreen_auto"); + Integer flash_mode = previewBuilder.get(CaptureRequest.FLASH_MODE); + if( MyDebug.LOG ) + Log.d(TAG, "flash_mode: " + flash_mode); + if( auto_flash && !fireAutoFlash() ) { + if( MyDebug.LOG ) + Log.d(TAG, "fake precapture flash auto: seems bright enough to not need flash"); + call_takePictureAfterPrecapture = true; + } + else if( flash_mode != null && flash_mode == CameraMetadata.FLASH_MODE_TORCH ) { + if( MyDebug.LOG ) + Log.d(TAG, "fake precapture flash: torch already on (presumably from autofocus)"); + // On some devices (e.g., OnePlus 3T), if we've already turned on torch for an autofocus immediately before + // taking the photo, ae convergence may have already occurred - so if we called runFakePrecapture(), we'd just get + // stuck waiting for CONTROL_AE_STATE_SEARCHING which will never happen, until we hit the timeout - it works, + // but it means taking photos is slower as we have to wait until the timeout + // Instead we assume that ae scanning has already started, so go straight to STATE_WAITING_FAKE_PRECAPTURE_DONE, + // which means wait until we're no longer CONTROL_AE_STATE_SEARCHING. + // (Note, we don't want to go straight to takePictureAfterPrecapture(), as it might be that ae scanning is still + // taking place.) + // An alternative solution would be to switch torch off and back on again to cause ae scanning to start - but + // at worst this is tricky to get working, and at best, taking photos would be slower. 
+ fake_precapture_torch_performed = true; // so we know to fire the torch when capturing + test_fake_flash_precapture++; // for testing, should treat this same as if we did do the precapture + state = STATE_WAITING_FAKE_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + } + else { + call_runFakePrecapture = true; + } + } + else { + // standard flash, flash auto or on + // note that we don't call needsFlash() (or use is_flash_required) - as if ae state is neither CONVERGED nor FLASH_REQUIRED, we err on the side + // of caution and don't skip the precapture + //boolean needs_flash = capture_result_ae != null && capture_result_ae == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED; + boolean needs_flash = capture_result_ae != null && capture_result_ae != CaptureResult.CONTROL_AE_STATE_CONVERGED; + if( camera_settings.flash_value.equals("flash_auto") && !needs_flash ) { + // if we call precapture anyway, flash wouldn't fire - but we tend to have a pause + // so skipping the precapture if flash isn't going to fire makes this faster + if( MyDebug.LOG ) + Log.d(TAG, "flash auto, but we don't need flash"); + call_takePictureAfterPrecapture = true; + } + else { + call_runPrecapture = true; + } + } + } + + /*camera_settings.setupBuilder(previewBuilder, false); + previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START); + state = STATE_WAITING_AUTOFOCUS; + precapture_started = -1; + //capture(); + setRepeatingRequest();*/ + } + + // important to call functions outside of locks, so that they can in turn call callbacks without a lock + if( call_takePictureAfterPrecapture ) { + takePictureAfterPrecapture(); + } + else if( call_runFakePrecapture ) { + runFakePrecapture(); + } + else if( call_runPrecapture ) { + runPrecapture(); + } + if( MyDebug.LOG ) { + Log.d(TAG, "takePicture() took: " + (System.currentTimeMillis() - debug_time)); + } + } + + @Override + public void setDisplayOrientation(int degrees) { + // for 
CameraController2, the preview display orientation is handled via the TextureView's transform + if( MyDebug.LOG ) + Log.d(TAG, "setDisplayOrientation not supported by this API"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + + @Override + public int getDisplayOrientation() { + if( MyDebug.LOG ) + Log.d(TAG, "getDisplayOrientation not supported by this API"); + throw new RuntimeException(); // throw as RuntimeException, as this is a programming error + } + + @Override + public int getCameraOrientation() { + // cached for performance, as this method is frequently called from Preview.onOrientationChanged + return characteristics_sensor_orientation; + } + + @Override + public Facing getFacing() { + // cached for performance, as this method is frequently called from Preview.onOrientationChanged + return characteristics_facing; + } + + @Override + public void unlock() { + // do nothing at this stage + } + + @Override + public void initVideoRecorderPrePrepare(MediaRecorder video_recorder) { + // if we change where we play the START_VIDEO_RECORDING sound, make sure it can't be heard in resultant video + BLOCK_FOR_EXTENSIONS(); // not supported for extension sessions + playSound(MediaActionSound.START_VIDEO_RECORDING); + } + + @Override + public void initVideoRecorderPostPrepare(MediaRecorder video_recorder, boolean want_photo_video_recording) throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "initVideoRecorderPostPrepare"); + if( camera == null ) { + Log.e(TAG, "no camera"); + throw new CameraControllerException(); + } + BLOCK_FOR_EXTENSIONS(); // not supported for extension sessions + try { + if( MyDebug.LOG ) + Log.d(TAG, "obtain video_recorder surface"); + previewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + if( MyDebug.LOG ) + Log.d(TAG, "done"); + previewIsVideoMode = true; + previewBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, 
CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD); + camera_settings.setupBuilder(previewBuilder, false); + /*if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) { + previewBuilder.set(CaptureRequest.CONTROL_AUTOFRAMING, CaptureRequest.CONTROL_AUTOFRAMING_ON); // test + }*/ + createCaptureSession(true, null, null, video_recorder, want_photo_video_recording); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to create capture request for video", e); + throw new CameraControllerException(); + } + } + + @Override + public void reconnect() throws CameraControllerException { + if( MyDebug.LOG ) + Log.d(TAG, "reconnect"); + // if we change where we play the STOP_VIDEO_RECORDING sound, make sure it can't be heard in resultant video + playSound(MediaActionSound.STOP_VIDEO_RECORDING); + createPreviewRequest(); + createCaptureSession(true, null, null, null, false); + /*if( MyDebug.LOG ) + Log.d(TAG, "add preview surface to previewBuilder"); + Surface surface = getPreviewSurface(); + previewBuilder.addTarget(surface);*/ + //setRepeatingRequest(); + } + + @Override + public String getParametersString() { + return null; + } + + @Override + public boolean captureResultIsAEScanning() { + return capture_result_is_ae_scanning; + } + + @Override + public boolean needsFlash() { + //boolean needs_flash = capture_result_ae != null && capture_result_ae == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED; + //return needs_flash; + return is_flash_required; + } + + @Override + public boolean needsFrontScreenFlash() { + return camera_settings.flash_value.equals("flash_frontscreen_on") || + ( camera_settings.flash_value.equals("flash_frontscreen_auto") && fireAutoFlashFrontScreen() ); + } + + @Override + public boolean captureResultHasWhiteBalanceTemperature() { + return capture_result_has_white_balance_rggb; + } + + @Override + public int captureResultWhiteBalanceTemperature() { + // for performance reasons, we don't convert from rggb to temperature 
in every frame, rather only when requested + return convertRggbVectorToTemperature(capture_result_white_balance_rggb); + } + + @Override + public boolean captureResultHasIso() { + return capture_result_has_iso; + } + + @Override + public int captureResultIso() { + return capture_result_iso; + } + + @Override + public boolean captureResultHasExposureTime() { + return capture_result_has_exposure_time; + } + + @Override + public long captureResultExposureTime() { + return capture_result_exposure_time; + } + + @Override + public boolean captureResultHasFrameDuration() { + return capture_result_has_frame_duration; + } + + @Override + public long captureResultFrameDuration() { + return capture_result_frame_duration; + } + + @Override + public boolean captureResultHasFocusDistance() { + return capture_result_has_focus_distance; + } + + @Override + public float captureResultFocusDistance() { + return capture_result_focus_distance; + } + + @Override + public boolean captureResultHasAperture() { + return capture_result_has_aperture; + } + + @Override + public float captureResultAperture() { + return capture_result_aperture; + } + + /* + @Override + public boolean captureResultHasFocusDistance() { + return capture_result_has_focus_distance; + } + + @Override + public float captureResultFocusDistanceMin() { + return capture_result_focus_distance_min; + } + + @Override + public float captureResultFocusDistanceMax() { + return capture_result_focus_distance_max; + } + */ + + private final CameraExtensionSession.ExtensionCaptureCallback previewExtensionCaptureCallback; + + @RequiresApi(api = Build.VERSION_CODES.S) + private class MyExtensionCaptureCallback extends CameraExtensionSession.ExtensionCaptureCallback { + + @Override + public void onCaptureStarted(@NonNull CameraExtensionSession session, + @NonNull CaptureRequest request, long timestamp) { + /*if( MyDebug.LOG ) + Log.d(TAG, "onCaptureStarted");*/ + if( MyDebug.LOG ) { + if( 
previewCaptureCallback.getRequestTagType(request) == RequestTagType.CAPTURE ) { + Log.d(TAG, "onCaptureStarted: capture"); + } + else if( previewCaptureCallback.getRequestTagType(request) == RequestTagType.CAPTURE_BURST_IN_PROGRESS ) { + Log.d(TAG, "onCaptureStarted: capture burst in progress"); + } + } + + // for previewCaptureCallback, we set has_received_frame in onCaptureCompleted(), but + // that method doesn't exist for ExtensionCaptureCallback, and the other methods such as + // onCaptureSequenceCompleted aren't called for the preview captures; + // onCaptureResultAvailable meanwhile is only called if + // CameraExtensionCharacteristics.getAvailableCaptureResultKeys() returns a non-empty + // list + if( !has_received_frame ) { + has_received_frame = true; + if( MyDebug.LOG ) + Log.d(TAG, "has_received_frame now set to true"); + } + + super.onCaptureStarted(session, request, timestamp); + } + + @Override + public void onCaptureProcessStarted(@NonNull CameraExtensionSession session, + @NonNull CaptureRequest request) { + super.onCaptureProcessStarted(session, request); + } + + @Override + public void onCaptureFailed(@NonNull CameraExtensionSession session, + @NonNull CaptureRequest request) { + if( MyDebug.LOG ) { + Log.e(TAG, "onCaptureFailed"); + } + super.onCaptureFailed(session, request); + } + + @Override + public void onCaptureSequenceCompleted(@NonNull CameraExtensionSession session, + int sequenceId) { + if( MyDebug.LOG ) { + Log.d(TAG, "onCaptureSequenceCompleted"); + Log.d(TAG, "sequenceId: " + sequenceId); + } + + // since we don't receive the request, we can't check for a request tag type of + // RequestTagType.CAPTURE - but this method should only be called for photo captures + // anyway + test_capture_results++; + modified_from_camera_settings = false; + + previewCaptureCallback.callCheckImagesCompleted(); + + super.onCaptureSequenceCompleted(session, sequenceId); + } + + @Override + public void onCaptureSequenceAborted(@NonNull 
CameraExtensionSession session, + int sequenceId) { + if( MyDebug.LOG ) { + Log.d(TAG, "onCaptureSequenceAborted"); + Log.d(TAG, "sequenceId: " + sequenceId); + } + super.onCaptureSequenceAborted(session, sequenceId); + } + + @Override + public void onCaptureResultAvailable(@NonNull CameraExtensionSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + previewCaptureCallback.updateCachedCaptureResult(result); + } + + @Override + public void onCaptureProcessProgressed(@NonNull CameraExtensionSession session, + @NonNull CaptureRequest request, @IntRange(from = 0, to = 100) int progress) { + if( MyDebug.LOG ) + Log.d(TAG, "onCaptureProcessProgressed: " + progress); + + final Activity activity = (Activity)context; + activity.runOnUiThread(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "onCaptureProcessProgressed UI thread: " + progress); + if( picture_cb != null ) { + picture_cb.onExtensionProgress(progress); + } + } + }); + } + } + + private final MyCaptureCallback previewCaptureCallback = new MyCaptureCallback(); + + private class MyCaptureCallback extends CameraCaptureSession.CaptureCallback { + private long last_process_frame_number = 0; + private int last_af_state = -1; + + private RequestTagType getRequestTagType(@NonNull CaptureRequest request) { + Object tag = request.getTag(); + if( tag == null ) + return null; + RequestTagObject requestTag = (RequestTagObject)tag; + return requestTag.getType(); + } + + @Override + public void onCaptureBufferLost(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull Surface target, long frameNumber) { + if( MyDebug.LOG ) + Log.d(TAG, "onCaptureBufferLost: " + frameNumber); + super.onCaptureBufferLost(session, request, target, frameNumber); + } + + @Override + public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) { + if( MyDebug.LOG ) { + Log.e(TAG, 
"onCaptureFailed: " + failure); + Log.d(TAG, "reason: " + failure.getReason()); + Log.d(TAG, "was image captured?: " + failure.wasImageCaptured()); + Log.d(TAG, "sequenceId: " + failure.getSequenceId()); + } + super.onCaptureFailed(session, request, failure); // API docs say this does nothing, but call it just to be safe + } + + @Override + public void onCaptureSequenceAborted(@NonNull CameraCaptureSession session, int sequenceId) { + if( MyDebug.LOG ) { + Log.d(TAG, "onCaptureSequenceAborted"); + Log.d(TAG, "sequenceId: " + sequenceId); + } + super.onCaptureSequenceAborted(session, sequenceId); // API docs say this does nothing, but call it just to be safe + } + + @Override + public void onCaptureSequenceCompleted(@NonNull CameraCaptureSession session, int sequenceId, long frameNumber) { + if( MyDebug.LOG ) { + Log.d(TAG, "onCaptureSequenceCompleted"); + Log.d(TAG, "sequenceId: " + sequenceId); + Log.d(TAG, "frameNumber: " + frameNumber); + } + super.onCaptureSequenceCompleted(session, sequenceId, frameNumber); // API docs say this does nothing, but call it just to be safe + } + + @Override + public void onCaptureStarted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, long timestamp, long frameNumber) { + if( MyDebug.LOG ) { + if( getRequestTagType(request) == RequestTagType.CAPTURE ) { + Log.d(TAG, "onCaptureStarted: capture"); + Log.d(TAG, "frameNumber: " + frameNumber); + Log.d(TAG, "exposure time: " + request.get(CaptureRequest.SENSOR_EXPOSURE_TIME)); + } + else if( getRequestTagType(request) == RequestTagType.CAPTURE_BURST_IN_PROGRESS ) { + Log.d(TAG, "onCaptureStarted: capture burst in progress"); + Log.d(TAG, "frameNumber: " + frameNumber); + Log.d(TAG, "exposure time: " + request.get(CaptureRequest.SENSOR_EXPOSURE_TIME)); + } + } + // n.b., we don't play the shutter sound here for RequestTagType.CAPTURE, as it typically sounds "too late" + // (if ever we changed this, would also need to fix for burst, where we only set the 
RequestTagType.CAPTURE for the last image) + super.onCaptureStarted(session, request, timestamp, frameNumber); + } + + @Override + public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) { + /*if( MyDebug.LOG ) + Log.d(TAG, "onCaptureProgressed");*/ + //process(request, partialResult); + // Note that we shouldn't try to process partial results - or if in future we decide to, remember that it's documented that + // not all results may be available. E.g., OnePlus 3T on Android 7 (OxygenOS 4.0.2) reports null for AF_STATE from this method. + // We'd also need to fix up the discarding of old frames in process(), as we probably don't want to be discarding the + // complete results from onCaptureCompleted()! + super.onCaptureProgressed(session, request, partialResult); // API docs say this does nothing, but call it just to be safe (as with Google Camera) + } + + @Override + public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + /*if( MyDebug.LOG ) + Log.d(TAG, "onCaptureCompleted");*/ + if( MyDebug.LOG ) { + if( getRequestTagType(request) == RequestTagType.CAPTURE ) { + Log.d(TAG, "onCaptureCompleted: capture"); + Log.d(TAG, "sequenceId: " + result.getSequenceId()); + Log.d(TAG, "frameNumber: " + result.getFrameNumber()); + Log.d(TAG, "exposure time: " + request.get(CaptureRequest.SENSOR_EXPOSURE_TIME)); + Log.d(TAG, "frame duration: " + request.get(CaptureRequest.SENSOR_FRAME_DURATION)); + } + else if( getRequestTagType(request) == RequestTagType.CAPTURE_BURST_IN_PROGRESS ) { + Log.d(TAG, "onCaptureCompleted: capture burst in progress"); + Log.d(TAG, "sequenceId: " + result.getSequenceId()); + Log.d(TAG, "frameNumber: " + result.getFrameNumber()); + Log.d(TAG, "exposure time: " + request.get(CaptureRequest.SENSOR_EXPOSURE_TIME)); + Log.d(TAG, "frame duration: " + 
request.get(CaptureRequest.SENSOR_FRAME_DURATION)); + } + } + process(request, result); + processCompleted(request, result); + super.onCaptureCompleted(session, request, result); // API docs say this does nothing, but call it just to be safe (as with Google Camera) + } + + /** Updates cached information regarding the capture result status related to auto-exposure. + */ + private void updateCachedAECaptureStatus(CaptureResult result) { + Integer ae_state = result.get(CaptureResult.CONTROL_AE_STATE); + /*if( MyDebug.LOG ) { + if( ae_state == null ) + Log.d(TAG, "CONTROL_AE_STATE is null"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_INACTIVE ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_INACTIVE"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_SEARCHING ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_SEARCHING"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_CONVERGED ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_CONVERGED"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_LOCKED ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_LOCKED"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_FLASH_REQUIRED"); + else if( ae_state == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ) + Log.d(TAG, "CONTROL_AE_STATE = CONTROL_AE_STATE_PRECAPTURE"); + else + Log.d(TAG, "CONTROL_AE_STATE = " + ae_state); + }*/ + Integer flash_mode = result.get(CaptureResult.FLASH_MODE); + /*if( MyDebug.LOG ) { + if( flash_mode == null ) + Log.d(TAG, "FLASH_MODE is null"); + else if( flash_mode == CaptureResult.FLASH_MODE_OFF ) + Log.d(TAG, "FLASH_MODE = FLASH_MODE_OFF"); + else if( flash_mode == CaptureResult.FLASH_MODE_SINGLE ) + Log.d(TAG, "FLASH_MODE = FLASH_MODE_SINGLE"); + else if( flash_mode == CaptureResult.FLASH_MODE_TORCH ) + Log.d(TAG, "FLASH_MODE = FLASH_MODE_TORCH"); + else + Log.d(TAG, "FLASH_MODE = " + flash_mode); + }*/ + + if( use_fake_precapture_mode && ( 
fake_precapture_torch_focus_performed || fake_precapture_torch_performed ) && flash_mode != null && flash_mode == CameraMetadata.FLASH_MODE_TORCH ) { + // don't change ae state while torch is on for fake flash + } + else if( ae_state == null ) { + capture_result_ae = null; + is_flash_required = false; + } + else if( !ae_state.equals(capture_result_ae) ) { + // need to store this before calling the autofocus callbacks below + if( MyDebug.LOG ) + Log.d(TAG, "CONTROL_AE_STATE changed from " + capture_result_ae + " to " + ae_state); + capture_result_ae = ae_state; + // capture_result_ae should always be non-null here, as we've already handled ae_state separately + if( capture_result_ae == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED && !is_flash_required ) { + is_flash_required = true; + if( MyDebug.LOG ) + Log.d(TAG, "flash now required"); + } + else if( capture_result_ae == CaptureResult.CONTROL_AE_STATE_CONVERGED && is_flash_required ) { + is_flash_required = false; + if( MyDebug.LOG ) + Log.d(TAG, "flash no longer required"); + } + } + + if( ae_state != null && ae_state == CaptureResult.CONTROL_AE_STATE_SEARCHING ) { + /*if( MyDebug.LOG && !capture_result_is_ae_scanning ) + Log.d(TAG, "ae_state now searching");*/ + capture_result_is_ae_scanning = true; + } + else { + /*if( MyDebug.LOG && capture_result_is_ae_scanning ) + Log.d(TAG, "ae_state stopped searching");*/ + capture_result_is_ae_scanning = false; + } + } + + private void handleStateChange(CaptureRequest request, CaptureResult result) { + // use Integer instead of int, so can compare to null: Google Play crashes confirmed that this can happen; Google Camera also ignores cases with null af state + Integer af_state = result.get(CaptureResult.CONTROL_AF_STATE); + /*if( MyDebug.LOG ) { + if( af_state == null ) + Log.d(TAG, "CONTROL_AF_STATE is null"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_INACTIVE ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_INACTIVE"); + else if( af_state == 
CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_PASSIVE_SCAN"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_PASSIVE_FOCUSED"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_ACTIVE_SCAN"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_FOCUSED_LOCKED"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_NOT_FOCUSED_LOCKED"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED ) + Log.d(TAG, "CONTROL_AF_STATE = CONTROL_AF_STATE_PASSIVE_UNFOCUSED"); + else + Log.d(TAG, "CONTROL_AF_STATE = " + af_state); + }*/ + // CONTROL_AE_STATE can be null on some devices, so as with af_state, use Integer + Integer ae_state = result.get(CaptureResult.CONTROL_AE_STATE); + /*Integer awb_state = result.get(CaptureResult.CONTROL_AWB_STATE); + if( MyDebug.LOG ) { + if( awb_state == null ) + Log.d(TAG, "CONTROL_AWB_STATE is null"); + else if( awb_state == CaptureResult.CONTROL_AWB_STATE_INACTIVE ) + Log.d(TAG, "CONTROL_AWB_STATE = CONTROL_AWB_STATE_INACTIVE"); + else if( awb_state == CaptureResult.CONTROL_AWB_STATE_SEARCHING ) + Log.d(TAG, "CONTROL_AWB_STATE = CONTROL_AWB_STATE_SEARCHING"); + else if( awb_state == CaptureResult.CONTROL_AWB_STATE_CONVERGED ) + Log.d(TAG, "CONTROL_AWB_STATE = CONTROL_AWB_STATE_CONVERGED"); + else if( awb_state == CaptureResult.CONTROL_AWB_STATE_LOCKED ) + Log.d(TAG, "CONTROL_AWB_STATE = CONTROL_AWB_STATE_LOCKED"); + else + Log.d(TAG, "CONTROL_AWB_STATE = " + awb_state); + }*/ + + boolean autofocus_timeout = autofocus_time_ms != -1 && System.currentTimeMillis() > autofocus_time_ms + autofocus_timeout_c; + if( MyDebug.LOG && autofocus_timeout ) + Log.d(TAG, "autofocus timeout!"); + if( af_state != 
null && af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN && !autofocus_timeout ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "not ready for capture: " + af_state);*/ + ready_for_capture = false; + } + else { + /*if( MyDebug.LOG ) + Log.d(TAG, "ready for capture: " + af_state);*/ + ready_for_capture = true; + if( autofocus_cb != null && (!do_af_trigger_for_continuous || use_fake_precapture_mode) && focusIsContinuous() ) { + Integer focus_mode = previewBuilder.get(CaptureRequest.CONTROL_AF_MODE); + if( focus_mode != null && focus_mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE ) { + if( MyDebug.LOG ) + Log.d(TAG, "call autofocus callback, as continuous mode and not focusing: " + af_state); + // need to check af_state != null, I received Google Play crash in 1.33 where it was null + boolean focus_success = af_state != null && ( af_state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED ); + if( MyDebug.LOG ) { + if( focus_success ) + Log.d(TAG, "autofocus success"); + else + Log.d(TAG, "autofocus failed"); + if( af_state == null ) + Log.e(TAG, "continuous focus mode but af_state is null"); + else + Log.d(TAG, "af_state: " + af_state); + } + if( af_state == null ) { + test_af_state_null_focus++; + } + autofocus_cb.onAutoFocus(focus_success); + autofocus_cb = null; + autofocus_time_ms = -1; + capture_follows_autofocus_hint = false; + } + } + } + + /*if( MyDebug.LOG ) { + if( autofocus_cb == null ) { + if( af_state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ) + Log.d(TAG, "processAF: autofocus success but no callback set"); + else if( af_state == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED ) + Log.d(TAG, "processAF: autofocus failed but no callback set"); + } + }*/ + + if( fake_precapture_turn_on_torch_id != null && fake_precapture_turn_on_torch_id == request ) { + if( MyDebug.LOG ) + Log.d(TAG, "torch turned on for fake precapture"); + fake_precapture_turn_on_torch_id = null; + } + + if( 
state == STATE_NORMAL ) { + // do nothing + } + else if( state == STATE_WAITING_AUTOFOCUS ) { + if( af_state == null ) { + // autofocus shouldn't really be requested if af not available, but still allow this rather than getting stuck waiting for autofocus to complete + if( MyDebug.LOG ) + Log.e(TAG, "waiting for autofocus but af_state is null"); + test_af_state_null_focus++; + state = STATE_NORMAL; + precapture_state_change_time_ms = -1; + if( autofocus_cb != null ) { + autofocus_cb.onAutoFocus(false); + autofocus_cb = null; + } + autofocus_time_ms = -1; + capture_follows_autofocus_hint = false; + } + else if( af_state != last_af_state || autofocus_timeout ) { + // check for autofocus completing + if( autofocus_timeout || af_state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || af_state == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED /*|| + af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED || af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED*/ + ) { + boolean focus_success = af_state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED; + if( MyDebug.LOG ) { + if( focus_success ) + Log.d(TAG, "onCaptureCompleted: autofocus success"); + else + Log.d(TAG, "onCaptureCompleted: autofocus failed"); + Log.d(TAG, "af_state: " + af_state); + } + state = STATE_NORMAL; + precapture_state_change_time_ms = -1; + if( use_fake_precapture_mode && fake_precapture_torch_focus_performed ) { + fake_precapture_torch_focus_performed = false; + if( !capture_follows_autofocus_hint ) { + // If we're going to be taking a photo immediately after the autofocus, it's better for the fake flash + // mode to leave the torch on. 
If we don't do this, one of the following issues can happen: + // - On OnePlus 3T, the torch doesn't get turned off, but because we've switched off the torch flag + // in previewBuilder, we go ahead with the precapture routine instead of + if( MyDebug.LOG ) + Log.d(TAG, "turn off torch after focus (fake precapture code)"); + + // same hack as in setFlashValue() - for fake precapture we need to turn off the torch mode that was set, but + // at least on Nexus 6, we need to turn to flash_off to turn off the torch! + String saved_flash_value = camera_settings.flash_value; + camera_settings.flash_value = "flash_off"; + camera_settings.setAEMode(previewBuilder, false); + try { + capture(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to do capture to turn off torch after autofocus", e); + } + + // now set the actual (should be flash auto or flash on) mode + camera_settings.flash_value = saved_flash_value; + camera_settings.setAEMode(previewBuilder, false); + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set repeating request to turn off torch after autofocus", e); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "torch was enabled for autofocus, leave it on for capture (fake precapture code)"); + } + } + if( autofocus_cb != null ) { + autofocus_cb.onAutoFocus(focus_success); + autofocus_cb = null; + } + autofocus_time_ms = -1; + capture_follows_autofocus_hint = false; + } + } + } + else if( state == STATE_WAITING_PRECAPTURE_START ) { + if( MyDebug.LOG ) + Log.d(TAG, "waiting for precapture start..."); + if( MyDebug.LOG ) { + if( ae_state != null ) + Log.d(TAG, "CONTROL_AE_STATE = " + ae_state); + else + Log.d(TAG, "CONTROL_AE_STATE is null"); + } + if( ae_state == null || ae_state == CaptureResult.CONTROL_AE_STATE_PRECAPTURE /*|| ae_state == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED*/ ) { + // we have to wait for CONTROL_AE_STATE_PRECAPTURE; if we allow 
CONTROL_AE_STATE_FLASH_REQUIRED, then on Nexus 6 at least we get poor quality results with flash: + // varying levels of brightness, sometimes too bright or too dark, sometimes with blue tinge, sometimes even with green corruption + // similarly photos with flash come out too dark on OnePlus 3T + if( MyDebug.LOG ) { + Log.d(TAG, "precapture started after: " + (System.currentTimeMillis() - precapture_state_change_time_ms)); + } + state = STATE_WAITING_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + } + else if( precapture_state_change_time_ms != -1 && System.currentTimeMillis() - precapture_state_change_time_ms > precapture_start_timeout_c ) { + // hack - give up waiting - sometimes we never get a CONTROL_AE_STATE_PRECAPTURE so would end up stuck + // always log error, so we can look for it when manually testing with logging disabled + Log.e(TAG, "precapture start timeout"); + count_precapture_timeout++; + state = STATE_WAITING_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + } + } + else if( state == STATE_WAITING_PRECAPTURE_DONE ) { + if( MyDebug.LOG ) + Log.d(TAG, "waiting for precapture done..."); + if( MyDebug.LOG ) { + if( ae_state != null ) + Log.d(TAG, "CONTROL_AE_STATE = " + ae_state); + else + Log.d(TAG, "CONTROL_AE_STATE is null"); + } + if( ae_state == null || ae_state != CaptureResult.CONTROL_AE_STATE_PRECAPTURE ) { + if( MyDebug.LOG ) { + Log.d(TAG, "precapture completed after: " + (System.currentTimeMillis() - precapture_state_change_time_ms)); + } + state = STATE_NORMAL; + precapture_state_change_time_ms = -1; + takePictureAfterPrecapture(); + } + else if( precapture_state_change_time_ms != -1 && System.currentTimeMillis() - precapture_state_change_time_ms > precapture_done_timeout_c ) { + // just in case + // always log error, so we can look for it when manually testing with logging disabled + Log.e(TAG, "precapture done timeout"); + count_precapture_timeout++; + state = 
STATE_NORMAL; + precapture_state_change_time_ms = -1; + takePictureAfterPrecapture(); + } + } + else if( state == STATE_WAITING_FAKE_PRECAPTURE_START ) { + if( MyDebug.LOG ) + Log.d(TAG, "waiting for fake precapture start..."); + if( MyDebug.LOG ) { + if( ae_state != null ) + Log.d(TAG, "CONTROL_AE_STATE = " + ae_state); + else + Log.d(TAG, "CONTROL_AE_STATE is null"); + } + if( fake_precapture_turn_on_torch_id != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "still waiting for torch to come on for fake precapture"); + } + + if( fake_precapture_turn_on_torch_id == null && (ae_state == null || ae_state == CaptureResult.CONTROL_AE_STATE_SEARCHING) ) { + if( MyDebug.LOG ) { + Log.d(TAG, "fake precapture started after: " + (System.currentTimeMillis() - precapture_state_change_time_ms)); + } + state = STATE_WAITING_FAKE_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + } + else if( fake_precapture_turn_on_torch_id == null && camera_settings.has_iso && precapture_state_change_time_ms != -1 && System.currentTimeMillis() - precapture_state_change_time_ms > 100 ) { + // When using manual ISO, we can't make use of changes to the ae_state - but at the same time, we don't + // need ISO/exposure to re-adjust anyway. + // If fake_precapture_turn_on_torch_id != null, we still wait for the physical torch to turn on. + // But if fake_precapture_turn_on_torch_id==null (i.e., for flash_frontscreen_torch), just wait a short + // period to ensure the frontscreen flash has enabled. 
+ if( MyDebug.LOG ) { + Log.d(TAG, "fake precapture started after: " + (System.currentTimeMillis() - precapture_state_change_time_ms)); + } + state = STATE_WAITING_FAKE_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + } + else if( precapture_state_change_time_ms != -1 && System.currentTimeMillis() - precapture_state_change_time_ms > precapture_start_timeout_c ) { + // just in case + // always log error, so we can look for it when manually testing with logging disabled + Log.e(TAG, "fake precapture start timeout"); + count_precapture_timeout++; + state = STATE_WAITING_FAKE_PRECAPTURE_DONE; + precapture_state_change_time_ms = System.currentTimeMillis(); + fake_precapture_turn_on_torch_id = null; + } + } + else if( state == STATE_WAITING_FAKE_PRECAPTURE_DONE ) { + if( MyDebug.LOG ) + Log.d(TAG, "waiting for fake precapture done..."); + if( MyDebug.LOG ) { + if( ae_state != null ) + Log.d(TAG, "CONTROL_AE_STATE = " + ae_state); + else + Log.d(TAG, "CONTROL_AE_STATE is null"); + Log.d(TAG, "ready_for_capture? 
" + ready_for_capture); + } + // wait for af and ae scanning to end (need to check af too, as in continuous focus mode, a focus may start again after switching torch on for the fake precapture) + if( ready_for_capture && ( ae_state == null || ae_state != CaptureResult.CONTROL_AE_STATE_SEARCHING) ) { + if( MyDebug.LOG ) { + Log.d(TAG, "fake precapture completed after: " + (System.currentTimeMillis() - precapture_state_change_time_ms)); + } + state = STATE_NORMAL; + precapture_state_change_time_ms = -1; + takePictureAfterPrecapture(); + } + else if( precapture_state_change_time_ms != -1 && System.currentTimeMillis() - precapture_state_change_time_ms > precapture_done_timeout_c ) { + // sometimes camera can take a while to stop ae/af scanning, better to just go ahead and take photo + // always log error, so we can look for it when manually testing with logging disabled + Log.e(TAG, "fake precapture done timeout"); + count_precapture_timeout++; + state = STATE_NORMAL; + precapture_state_change_time_ms = -1; + takePictureAfterPrecapture(); + } + } + } + + private void handleContinuousFocusMove(CaptureResult result) { + Integer af_state = result.get(CaptureResult.CONTROL_AF_STATE); + if( af_state != null && af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN && af_state != last_af_state ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "continuous focusing started");*/ + if( continuous_focus_move_callback != null ) { + continuous_focus_move_callback.onContinuousFocusMove(true); + } + } + else if( af_state != null && last_af_state == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN && af_state != last_af_state ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "continuous focusing stopped");*/ + if( continuous_focus_move_callback != null ) { + continuous_focus_move_callback.onContinuousFocusMove(false); + } + } + } + + /** Processes either a partial or total result. 
+ */ + private void process(CaptureRequest request, CaptureResult result) { + /*if( MyDebug.LOG ) + Log.d(TAG, "process, state: " + state);*/ + if( result.getFrameNumber() < last_process_frame_number ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "processAF discarded outdated frame " + result.getFrameNumber() + " vs " + last_process_frame_number);*/ + return; + } + /*long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + }*/ + last_process_frame_number = result.getFrameNumber(); + + updateCachedAECaptureStatus(result); + + handleStateChange(request, result); + + handleContinuousFocusMove(result); + + Integer af_state = result.get(CaptureResult.CONTROL_AF_STATE); + if( af_state != null && af_state != last_af_state ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "CONTROL_AF_STATE changed from " + last_af_state + " to " + af_state);*/ + last_af_state = af_state; + } + + /*if( MyDebug.LOG ) { + Log.d(TAG, "process() took: " + (System.currentTimeMillis() - debug_time)); + }*/ + } + + /** Updates cached information regarding the capture result. + */ + private void updateCachedCaptureResult(CaptureResult result) { + if( modified_from_camera_settings ) { + // don't update capture results! + // otherwise have problem taking HDR photos twice in a row, the second one will pick up the exposure time as + // being from the long exposure of the previous HDR/expo burst! + } + else if( result.get(CaptureResult.SENSOR_SENSITIVITY) != null ) { + capture_result_has_iso = true; + capture_result_iso = result.get(CaptureResult.SENSOR_SENSITIVITY); + /*if( MyDebug.LOG ) + Log.d(TAG, "capture_result_iso: " + capture_result_iso);*/ + /*if( camera_settings.has_iso && Math.abs(camera_settings.iso - capture_result_iso) > 10 && previewBuilder != null ) { + // ugly hack: problem (on Nexus 6 at least) that when we start recording video (video_recorder.start() call), this often causes the ISO setting to reset to the wrong value! 
+ // seems to happen more often with shorter exposure time + // seems to happen on other camera apps with Camera2 API too + // update: allow some tolerance, as on OnePlus 3T it's normal to have some slight difference between requested and actual + // this workaround still means a brief flash with incorrect ISO, but is best we can do for now! + // check previewBuilder != null as we have had Google Play crashes from the setRepeatingRequest() call via here + // Update 20180326: can no longer reproduce original problem on Nexus 6 (at FullHD or 4K); no evidence of + // problems on OnePlus 3T or Nokia 8. + // Also note that this code was being activated whenever manual ISO is changed (since we don't immediately + // update to the new ISO). At the least, this should be restricted to when recording video, but best to + // disable completely now that we don't seem to need it. + if( MyDebug.LOG ) { + Log.d(TAG, "ISO " + capture_result_iso + " different to requested ISO " + camera_settings.iso); + Log.d(TAG, " requested ISO was: " + request.get(CaptureRequest.SENSOR_SENSITIVITY)); + Log.d(TAG, " requested AE mode was: " + request.get(CaptureRequest.CONTROL_AE_MODE)); + } + try { + setRepeatingRequest(); + } + catch(CameraAccessException e) { + MyDebug.logStackTrace(TAG, "failed to set repeating request after ISO hack", e); + } + }*/ + } + else { + capture_result_has_iso = false; + } + + if( modified_from_camera_settings ) { + // see note above + } + else if( result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null ) { + capture_result_has_exposure_time = true; + capture_result_exposure_time = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); + + // If using manual exposure time longer than max_preview_exposure_time_c, the preview will be fixed to + // max_preview_exposure_time_c, so we should just use the requested manual exposure time. 
+ // (This affects the exposure time shown on on-screen preview - whilst showing the preview exposure time + // isn't necessarily wrong, it tended to confuse people, thinking that manual exposure time wasn't working + // when set above max_preview_exposure_time_c.) + // Update: but on some devices (e.g., Galaxy S10e) the reported exposure time can become inaccurate when + // we set longer preview exposure times (fine at 1/15s, 1/10s, but wrong at 0.2s and 0.3s), possibly this is + // by design if the preview along supports certain rates(?), but best to fall back to the requested exposure + // time in manual mode if requested exposure is longer than 1/12s OR the max_preview_exposure_time_c. + if( camera_settings.has_iso && camera_settings.exposure_time > Math.min(max_preview_exposure_time_c, 1000000000L/12) ) + capture_result_exposure_time = camera_settings.exposure_time; + + if( capture_result_exposure_time <= 0 ) { + // wierd bug seen on Nokia 8 + capture_result_has_exposure_time = false; + } + } + else { + capture_result_has_exposure_time = false; + } + + if( modified_from_camera_settings ) { + // see note above + } + else if( result.get(CaptureResult.SENSOR_FRAME_DURATION) != null ) { + capture_result_has_frame_duration = true; + capture_result_frame_duration = result.get(CaptureResult.SENSOR_FRAME_DURATION); + } + else { + capture_result_has_frame_duration = false; + } + /*if( MyDebug.LOG ) { + if( result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null ) { + long capture_result_exposure_time = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); + Log.d(TAG, "capture_result_exposure_time: " + capture_result_exposure_time); + } + if( result.get(CaptureResult.SENSOR_FRAME_DURATION) != null ) { + long capture_result_frame_duration = result.get(CaptureResult.SENSOR_FRAME_DURATION); + Log.d(TAG, "capture_result_frame_duration: " + capture_result_frame_duration); + } + }*/ + /*if( modified_from_camera_settings ) { + // see note above + } + else if( 
result.get(CaptureResult.LENS_FOCUS_RANGE) != null ) { + Pair focus_range = result.get(CaptureResult.LENS_FOCUS_RANGE); + capture_result_has_focus_distance = true; + capture_result_focus_distance_min = focus_range.first; + capture_result_focus_distance_max = focus_range.second; + } + else { + capture_result_has_focus_distance = false; + }*/ + + if( modified_from_camera_settings ) { + // see note above + } + else if( result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null ) { + capture_result_has_focus_distance = true; + capture_result_focus_distance = result.get(CaptureResult.LENS_FOCUS_DISTANCE); + /*if( MyDebug.LOG ) { + Log.d(TAG, "capture_result_focus_distance: " + capture_result_focus_distance); + if( capture_result_focus_distance > 0.0f ) { + float real_focus_distance = 1.0f / capture_result_focus_distance; + Log.d(TAG, "real_focus_distance: " + real_focus_distance); + } + }*/ + // ensure within the valid range for manual focus, just in case + if( capture_result_focus_distance < 0.0f ) + capture_result_focus_distance = 0.0f; + else if( capture_result_focus_distance > minimum_focus_distance ) + capture_result_focus_distance = minimum_focus_distance; + } + else { + capture_result_has_focus_distance = false; + } + + if( modified_from_camera_settings ) { + // see note above + } + else if( result.get(CaptureResult.LENS_APERTURE) != null ) { + capture_result_has_aperture = true; + capture_result_aperture = result.get(CaptureResult.LENS_APERTURE); + /*if( MyDebug.LOG ) { + Log.d(TAG, "capture_result_aperture: " + capture_result_aperture); + }*/ + } + else { + capture_result_has_aperture = false; + } + { + RggbChannelVector vector = result.get(CaptureResult.COLOR_CORRECTION_GAINS); + if( modified_from_camera_settings ) { + // see note above + } + else if( vector != null ) { + capture_result_has_white_balance_rggb = true; + capture_result_white_balance_rggb = vector; + } + } + + /*if( MyDebug.LOG ) { + RggbChannelVector vector = 
result.get(CaptureResult.COLOR_CORRECTION_GAINS); + if( vector != null ) { + convertRggbVectorToTemperature(vector); // logging will occur in this function + } + }*/ + } + + private void handleFaceDetection(CaptureResult result) { + if( face_detection_listener != null && previewBuilder != null ) { + Integer face_detect_mode = previewBuilder.get(CaptureRequest.STATISTICS_FACE_DETECT_MODE); + if( face_detect_mode != null && face_detect_mode != CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF ) { + Rect sensor_rect = getViewableRect(); + android.hardware.camera2.params.Face [] camera_faces = result.get(CaptureResult.STATISTICS_FACES); + if( camera_faces != null ) { + if( camera_faces.length == 0 && last_faces_detected == 0 ) { + // no point continually calling the callback if 0 faces detected (same behaviour as CameraController1) + } + else { + last_faces_detected = camera_faces.length; + CameraController.Face [] faces = new CameraController.Face[camera_faces.length]; + for(int i=0;i 90.5f ) { + // count as ultra-wide + if( !description.isEmpty() ) + description += ", "; + description += context.getResources().getString(R.string.ultrawide); + } + else if( view_angle.getWidth() < 29.5f ) { + // count as telephoto + // Galaxy S24+ telephoto is 29x22 degrees + if( !description.isEmpty() ) + description += ", "; + description += context.getResources().getString(R.string.telephoto); + } + + if( include_angles ) { + if( !description.isEmpty() ) + description += ", "; + description += ((int)(view_angle.getWidth()+0.5f)) + String.valueOf((char)0x00B0) + " x " + ((int)(view_angle.getHeight()+0.5f)) + (char) 0x00B0; + } + } + catch(Throwable e) { + // see note under isFrontFacing() why we catch anything, not just CameraAccessException + MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e); + } + return description; + } + + /** Helper class to compute view angles from the CameraCharacteristics. 
+ * @return The width and height of the returned size represent the x and y view angles in + * degrees. + */ + static SizeF computeViewAngles(CameraCharacteristics characteristics) { + // Note this is an approximation (see http://stackoverflow.com/questions/39965408/what-is-the-android-camera2-api-equivalent-of-camera-parameters-gethorizontalvie ). + // This does not take into account the aspect ratio of the preview or camera, it's up to the caller to do this (e.g., see Preview.getViewAngleX(), getViewAngleY()). + Rect active_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + SizeF physical_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE); + android.util.Size pixel_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + float [] focal_lengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS); + if( active_size == null || physical_size == null || pixel_size == null || focal_lengths == null || focal_lengths.length == 0 ) { + // in theory this should never happen according to the documentation, but I've had a report of physical_size (SENSOR_INFO_PHYSICAL_SIZE) + // being null on an EXTERNAL Camera2 device, see https://sourceforge.net/p/opencamera/tickets/754/ + if( MyDebug.LOG ) { + Log.e(TAG, "can't get camera view angles"); + } + // fall back to a default + return new SizeF(55.0f, 43.0f); + } + //camera_features.view_angle_x = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getWidth(), (2.0 * focal_lengths[0]))); + //camera_features.view_angle_y = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getHeight(), (2.0 * focal_lengths[0]))); + float frac_x = ((float)active_size.width())/(float)pixel_size.getWidth(); + float frac_y = ((float)active_size.height())/(float)pixel_size.getHeight(); + float view_angle_x = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getWidth() * frac_x, (2.0 * focal_lengths[0]))); + float view_angle_y = 
(float)Math.toDegrees(2.0 * Math.atan2(physical_size.getHeight() * frac_y, (2.0 * focal_lengths[0]))); + if( MyDebug.LOG ) { + Log.d(TAG, "frac_x: " + frac_x); + Log.d(TAG, "frac_y: " + frac_y); + Log.d(TAG, "view_angle_x: " + view_angle_x); + Log.d(TAG, "view_angle_y: " + view_angle_y); + } + return new SizeF(view_angle_x, view_angle_y); + } + + /* Returns true if the device supports the required hardware level, or better. + * See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL . + * From Android N, higher levels than "FULL" are possible, that will have higher integer values. + * Also see https://sourceforge.net/p/opencamera/tickets/141/ . + */ + static boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) { + int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); + if( MyDebug.LOG ) { + switch (deviceLevel) { + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + Log.d(TAG, "Camera has LEGACY Camera2 support"); + break; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL: + Log.d(TAG, "Camera has EXTERNAL Camera2 support"); + break; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + Log.d(TAG, "Camera has LIMITED Camera2 support"); + break; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + Log.d(TAG, "Camera has FULL Camera2 support"); + break; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3: + Log.d(TAG, "Camera has Level 3 Camera2 support"); + break; + default: + Log.d(TAG, "Camera has unknown Camera2 support: " + deviceLevel); + break; + } + } + + // need to treat legacy and external as special cases; otherwise can then use numerical comparison + + if( deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY ) { + return requiredLevel == deviceLevel; + } + + if( deviceLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL ) { + deviceLevel = 
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + } + if( requiredLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL ) { + requiredLevel = CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + } + + return requiredLevel <= deviceLevel; + } + + /* Rather than allowing Camera2 API on all Android 5+ devices, we restrict it to certain cases. + * This returns whether the specified camera has at least LIMITED support. + */ + public boolean allowCamera2Support(int cameraId) { + CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE); + try { + String cameraIdS = manager.getCameraIdList()[cameraId]; + CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIdS); + //return isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY); + return isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED); + } + catch(Throwable e) { + // in theory we should only get CameraAccessException, but Google Play shows we can get a variety of exceptions + // from some devices, e.g., AssertionError, IllegalArgumentException, RuntimeException, so just catch everything! + // We don't want users to experience a crash just because of buggy camera2 drivers - instead the user can switch + // back to old camera API. 
+ MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e); + } + return false; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/cameracontroller/RawImage.java b/app/src/main/java/net/sourceforge/opencamera/cameracontroller/RawImage.java new file mode 100644 index 0000000..b23126b --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/cameracontroller/RawImage.java @@ -0,0 +1,56 @@ +package net.sourceforge.opencamera.cameracontroller; + +import android.hardware.camera2.DngCreator; +import android.media.Image; +import android.util.Log; + +import net.sourceforge.opencamera.MyDebug; + +import java.io.IOException; +import java.io.OutputStream; + +/** Wrapper class to store DngCreator and Image. + */ +public class RawImage { + private static final String TAG = "RawImage"; + + private final DngCreator dngCreator; + private final Image image; + + public RawImage(DngCreator dngCreator, Image image) { + this.dngCreator = dngCreator; + this.image = image; + } + + /** Writes the dng file to the supplied output. + */ + public void writeImage(OutputStream dngOutput) throws IOException { + if( MyDebug.LOG ) + Log.d(TAG, "writeImage"); + try { + dngCreator.writeImage(dngOutput, image); + } + catch(AssertionError e) { + // have had AssertionError from OnePlus 5 on Google Play; rethrow as an IOException so it's handled + // in the same way + MyDebug.logStackTrace(TAG, "failed to write SNG image", e); + throw new IOException(); + } + catch(IllegalStateException e) { + // have had IllegalStateException from Galaxy Note 8 on Google Play; rethrow as an IOException so it's handled + // in the same way + MyDebug.logStackTrace(TAG, "failed to write SNG image", e); + throw new IOException(); + } + } + + /** Closes the image. Must be called to free up resources when no longer needed. After calling + * this method, this object should not be used. 
+ */ + public void close() { + if( MyDebug.LOG ) + Log.d(TAG, "close"); + image.close(); + dngCreator.close(); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/ApplicationInterface.java b/app/src/main/java/net/sourceforge/opencamera/preview/ApplicationInterface.java new file mode 100644 index 0000000..5e2e080 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/ApplicationInterface.java @@ -0,0 +1,277 @@ +package net.sourceforge.opencamera.preview; + +import java.io.File; +import java.io.IOException; +import java.io.Serial; +import java.util.Date; +import java.util.List; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Point; +import android.location.Location; +import android.net.Uri; +import android.os.Build; +import android.util.Log; +import android.util.Pair; +import android.view.MotionEvent; + +import androidx.annotation.RequiresApi; + +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.RawImage; + +/** Provides communication between the Preview and the rest of the application + * - so in theory one can drop the Preview/ (and CameraController/) classes + * into a new application, by providing an appropriate implementation of this + * ApplicationInterface. 
+ */ +public interface ApplicationInterface { + class NoFreeStorageException extends Exception { + @Serial + private static final long serialVersionUID = -2021932609486148748L; + } + class VideoMaxFileSize { + public long max_filesize; // maximum file size in bytes for video (return 0 for device default - typically this is ~2GB) + public boolean auto_restart; // whether to automatically restart on hitting max filesize (this setting is still relevant for max_filesize==0, as typically there will still be a device max filesize) + } + + enum VideoMethod { + FILE, // video will be saved to a file + SAF, // video will be saved using Android 5's Storage Access Framework + MEDIASTORE, // video will be saved to the supplied MediaStore Uri + URI // video will be written to the supplied Uri + } + + // methods that request information + Context getContext(); // get the application context + boolean useCamera2(); // should Android 5's Camera 2 API be used? + Location getLocation(); // get current location - null if not available (or you don't care about geotagging) + VideoMethod createOutputVideoMethod(); // return a VideoMethod value to specify how to create a video file + File createOutputVideoFile(String extension) throws IOException; // will be called if createOutputVideoUsingSAF() returns VideoMethod.FILE; extension is the recommended filename extension for the chosen video type + Uri createOutputVideoSAF(String extension) throws IOException; // will be called if createOutputVideoUsingSAF() returns VideoMethod.SAF; extension is the recommended filename extension for the chosen video type + Uri createOutputVideoMediaStore(String extension) throws IOException; // will be called if createOutputVideoUsingSAF() returns VideoMethod.MEDIASTORE; extension is the recommended filename extension for the chosen video type + Uri createOutputVideoUri(); // will be called if createOutputVideoUsingSAF() returns VideoMethod.URI + // for all of the get*Pref() methods, you can use Preview 
methods to get the supported values (e.g., getSupportedSceneModes()) + // if you just want a default or don't really care, see the comments for each method for a default or possible options + // if Preview doesn't support the requested setting, it will check this, and choose its own + int getCameraIdPref(); // camera to use, from 0 to getCameraControllerManager().getNumberOfCameras() + String getCameraIdSPhysicalPref(); // if non-null, the Camera2 physical camera ID (must be one of Preview.getPhysicalCameras()) + String getFlashPref(); // flash_off, flash_auto, flash_on, flash_torch, flash_red_eye + String getFocusPref(boolean is_video); // focus_mode_auto, focus_mode_infinity, focus_mode_macro, focus_mode_locked, focus_mode_fixed, focus_mode_manual2, focus_mode_edof, focus_mode_continuous_picture, focus_mode_continuous_video + boolean isVideoPref(); // start up in video mode? + String getSceneModePref(); // "auto" for default (strings correspond to Android's scene mode constants in android.hardware.Camera.Parameters) + String getColorEffectPref(); // "node" for default (strings correspond to Android's color effect constants in android.hardware.Camera.Parameters) + String getWhiteBalancePref(); // "auto" for default (strings correspond to Android's white balance constants in android.hardware.Camera.Parameters) + int getWhiteBalanceTemperaturePref(); + String getAntiBandingPref(); // "auto" for default (strings correspond to Android's antibanding constants in android.hardware.Camera.Parameters) + String getEdgeModePref(); // CameraController.EDGE_MODE_DEFAULT for device default, or "off", "fast", "high_quality" + String getCameraNoiseReductionModePref(); // CameraController.NOISE_REDUCTION_MODE_DEFAULT for device default, or "off", "minimal", "fast", "high_quality" + String getISOPref(); // "auto" for auto-ISO, otherwise a numerical value; see documentation for Preview.supportsISORange(). 
+ int getExposureCompensationPref(); // 0 for default + + class CameraResolutionConstraints { + private static final String TAG = "CameraResConstraints"; + + public boolean has_max_mp; + public int max_mp; + + boolean hasConstraints() { + return has_max_mp; + } + + boolean satisfies(CameraController.Size size) { + if( this.has_max_mp && size.width * size.height > this.max_mp ) { + if( MyDebug.LOG ) + Log.d(TAG, "size index larger than max_mp: " + this.max_mp); + return false; + } + return true; + } + } + /** The resolution to use for photo mode. + * If the returned resolution is not supported by the device, or this method returns null, then + * the preview will choose a size, and then call setCameraResolutionPref() with the chosen + * size. + * If the returned resolution is supported by the device, setCameraResolutionPref() will be + * called with the returned resolution. + * Note that even if the device supports the resolution in general, the Preview may choose a + * different resolution in some circumstances: + * * A burst mode as been requested, but the resolution does not support burst. + * * A constraint has been set via constraints. + * In such cases, the resolution actually in use should be found by calling + * Preview.getCurrentPictureSize() rather than relying on the setCameraResolutionPref(). (The + * logic behind this is that if a resolution is not supported by the device at all, it's good + * practice to correct the preference stored in user settings; but this shouldn't be done if + * the resolution is changed for something more temporary such as enabling burst mode.) + * @param constraints Optional constraints that may be set. If the returned resolution does not + * satisfy these constraints, then the preview will choose the closest + * resolution that does. 
+ */ + Pair getCameraResolutionPref(CameraResolutionConstraints constraints); // return null to let Preview choose size + int getImageQualityPref(); // jpeg quality for taking photos; "90" is a recommended default + boolean getFaceDetectionPref(); // whether to use face detection mode + String getVideoQualityPref(); // should be one of Preview.getSupportedVideoQuality() (use Preview.getCamcorderProfile() or Preview.getCamcorderProfileDescription() for details); or return "" to let Preview choose quality + boolean getVideoStabilizationPref(); // whether to use video stabilization for video + boolean getForce4KPref(); // whether to force 4K mode - experimental, only really available for some devices that allow 4K recording but don't return it as an available resolution - not recommended for most uses + String getRecordVideoOutputFormatPref(); // preference_video_output_format_default, preference_video_output_format_mpeg4_h264, preference_video_output_format_mpeg4_hevc, preference_video_output_format_3gpp, preference_video_output_format_webm + String getVideoBitratePref(); // return "default" to let Preview choose + String getVideoFPSPref(); // return "default" to let Preview choose; if getVideoCaptureRateFactor() returns a value other than 1.0, this is the capture fps; the resultant video's fps will be getVideoFPSPref()*getVideoCaptureRateFactor() + float getVideoCaptureRateFactor(); // return 1.0f for standard operation, less than 1.0 for slow motion, more than 1.0 for timelapse; consider using a higher fps for slow motion, see getVideoFPSPref() + CameraController.TonemapProfile getVideoTonemapProfile(); // tonemap profile to use for video mode + float getVideoLogProfileStrength(); // strength of the log profile for video mode, if getVideoTonemapProfile() returns TONEMAPPROFILE_LOG + float getVideoProfileGamma(); // gamma for video mode, if getVideoTonemapProfile() returns TONEMAPPROFILE_GAMMA + long getVideoMaxDurationPref(); // time in ms after which to 
automatically stop video recording (return 0 for off) + int getVideoRestartTimesPref(); // number of times to restart video recording after hitting max duration (return 0 for never auto-restarting) + VideoMaxFileSize getVideoMaxFileSizePref() throws NoFreeStorageException; // see VideoMaxFileSize class for details + boolean getVideoFlashPref(); // option to switch flash on/off while recording video (should be false in most cases!) + boolean getVideoLowPowerCheckPref(); // whether to stop video automatically on critically low battery + String getPreviewSizePref(); // "preference_preview_size_wysiwyg" is recommended (preview matches aspect ratio of photo resolution as close as possible), but can also be "preference_preview_size_display" to maximise the preview size + String getLockOrientationPref(); // return "none" for default; use "portrait" or "landscape" to lock photos/videos to that orientation + boolean getTouchCapturePref(); // whether to enable touch to capture + boolean getDoubleTapCapturePref(); // whether to enable double-tap to capture + boolean getPausePreviewPref(); // whether to pause the preview after taking a photo + boolean getShowToastsPref(); + boolean getShutterSoundPref(); // whether to play sound when taking photo + boolean getStartupFocusPref(); // whether to do autofocus on startup + long getTimerPref(); // time in ms for timer (so 0 for off) + String getRepeatPref(); // return number of times to repeat photo in a row (as a string), so "1" for default; return "unlimited" for unlimited + long getRepeatIntervalPref(); // time in ms between repeat + boolean getGeotaggingPref(); // whether to geotag photos + boolean getRequireLocationPref(); // if getGeotaggingPref() returns true, and this method returns true, then phot/video will only be taken if location data is available + boolean getRecordAudioPref(); // whether to record audio when recording video + String getRecordAudioChannelsPref(); // either "audio_default", "audio_mono" or 
"audio_stereo" + String getRecordAudioSourcePref(); // "audio_src_camcorder" is recommended, but other options are: "audio_src_mic", "audio_src_default", "audio_src_voice_communication", "audio_src_unprocessed" (unprocessed requires Android 7+); see corresponding values in android.media.MediaRecorder.AudioSource + int getZoomPref(); // index into Preview.getSupportedZoomRatios() array (each entry is the zoom factor, scaled by 100; array is sorted from min to max zoom); return -1 for default 1x zoom + double getCalibratedLevelAngle(); // set to non-zero to calibrate the accelerometer used for the level angles + boolean canTakeNewPhoto(); // whether taking new photos is allowed (e.g., can return false if queue for processing images would become full) + boolean imageQueueWouldBlock(int n_raw, int n_jpegs); // called during some burst operations, whether we can allow taking the supplied number of extra photos + /** Same behaviour as Activity.getWindowManager().getDefaultDisplay().getRotation() (including + * returning a member of Surface.ROTATION_*), but allows application to modify e.g. for + * upside-down preview. + * @param prefer_later When the device orientation changes, there can be some ambiguity if this + * is called during this rotation, since getRotation() may be updated shortly + * before the UI appears to rotate. If prefer_later==false, then prefer the + * previous rotation in such cases. This can be implemented by caching the + * value. prefer_later should be set to false when this is being called + * frequently e.g. as part of a UI that should smoothly rotate as the device + * rotates. prefer_later should be set to true for "one-off" calls. 
+ */ + int getDisplayRotation(boolean prefer_later); + // Camera2 only modes: + long getExposureTimePref(); // only called if getISOPref() is not "default" + float getFocusDistancePref(boolean is_target_distance); // if isFocusBracketingPref()==true, returns the source or target focus distance + boolean isFocusBracketingSourceAutoPref(); // if isFocusBracketingPref()==true, returns whether the source focus distance should be set by calling CameraController.setFocusBracketingSourceDistanceFromCurrent() + boolean isExpoBracketingPref(); // whether to enable burst photos with expo bracketing + int getExpoBracketingNImagesPref(); // how many images to take for exposure bracketing + double getExpoBracketingStopsPref(); // stops per image for exposure bracketing + int getFocusBracketingNImagesPref(); // how many images to take for focus bracketing + boolean getFocusBracketingAddInfinityPref(); // whether to include an additional image at infinite focus distance, for focus bracketing + boolean isFocusBracketingPref(); // whether to enable burst photos with focus bracketing + boolean isCameraBurstPref(); // whether to shoot the camera in burst mode (n.b., not the same as the "auto-repeat" mode) + int getBurstNImages(); // only relevant if isCameraBurstPref() returns true; see CameraController doc for setBurstNImages(). + boolean getBurstForNoiseReduction(); // only relevant if isCameraBurstPref() returns true; see CameraController doc for setBurstForNoiseReduction(). 
+ enum NRModePref { + NRMODE_NORMAL, + NRMODE_LOW_LIGHT + } + NRModePref getNRModePref(); // only relevant if getBurstForNoiseReduction() returns true; if this changes without reopening the preview's camera, call Preview.setupBurstMode() + boolean isCameraExtensionPref(); // whether to use camera vendor extension (see https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics ) + @RequiresApi(api = Build.VERSION_CODES.S) + int getCameraExtensionPref(); // if isCameraExtensionPref() returns true, the camera extension mode to use + float getAperturePref(); // get desired aperture (called if Preview.getSupportedApertures() returns non-null); return -1.0f for no preference + boolean getJpegRPref(); // whether to request JPEG_R (Ultra HDR) photos + enum RawPref { + RAWPREF_JPEG_ONLY, // JPEG only + RAWPREF_JPEG_DNG // JPEG and RAW (DNG) + } + RawPref getRawPref(); // whether to enable RAW photos + int getMaxRawImages(); // see documentation of CameraController.setRaw(), corresponds to max_raw_images + boolean useCamera2DummyCaptureHack(); // whether to enable CameraController.setDummyCaptureHack() for Camera2 API + boolean useCamera2FakeFlash(); // whether to enable CameraController.setUseCamera2FakeFlash() for Camera2 API + boolean useCamera2FastBurst(); // whether to enable Camera2's captureBurst() for faster taking of expo-bracketing photos (generally should be true, but some devices have problems with captureBurst()) + boolean usePhotoVideoRecording(); // whether to enable support for taking photos when recording video (if not supported, this won't be called) + boolean isPreviewInBackground(); // if true, then Preview can disable real-time effects (e.g., computing histogram); also it won't try to open the camera when in the background + boolean allowZoom(); // if false, don't allow zoom functionality even if the device supports it - Preview.supportsZoom() will also return false; if true, allow zoom if the device supports it + 
boolean optimiseFocusForLatency(); // behaviour for taking photos with continuous focus mode: if true, optimise focus for latency (take photo asap); if false, optimise for quality (don't take photo until scene is focused) + + /** Return size of default display, e.g., Activity.getWindowManager().getDefaultDisplay().getSize(). + * @param display_size The returned display size. + * @param exclude_insets If the activity is running in edge-to-edge mode, then whether to exclude + * insets. If the activity is not running in edge-to-edge mode, then this should + * be ignored, and insets should always be excluded. + */ + void getDisplaySize(Point display_size, boolean exclude_insets); + + // for testing purposes: + boolean isTestAlwaysFocus(); // if true, pretend autofocus always successful + + // methods that transmit information/events (up to the Application whether to do anything or not) + void cameraSetup(); // called when the camera is (re-)set up - should update UI elements/parameters that depend on camera settings + void touchEvent(MotionEvent event); + void startingVideo(); // called just before video recording starts + void startedVideo(); // called just after video recording starts + void stoppingVideo(); // called just before video recording stops; note that if startingVideo() is called but then video recording fails to start, this method will still be called, but startedVideo() and stoppedVideo() won't be called + void stoppedVideo(final VideoMethod video_method, final Uri uri, final String filename); // called after video recording stopped (uri/filename will be null if video is corrupt or not created); will be called iff startedVideo() was called + void restartedVideo(final VideoMethod video_method, final Uri uri, final String filename); // called after a seamless restart (supported on Android 8+) has occurred - in this case stoppedVideo() is only called for the final video file; this method is instead called for all earlier video file segments + void 
deleteUnusedVideo(final VideoMethod video_method, final Uri uri, final String filename); // application should delete the requested video (which will correspond to a video file previously returned via the createOutputVideo*() methods), either because it is corrupt or unused + void onFailedStartPreview(); // called if failed to start camera preview + void onCameraError(); // called if the camera closes due to serious error. + void onPhotoError(); // callback for failing to take a photo + void onVideoInfo(int what, int extra); // callback for info when recording video (see MediaRecorder.OnInfoListener) + void onVideoError(int what, int extra); // callback for errors when recording video (see MediaRecorder.OnErrorListener) + void onVideoRecordStartError(VideoProfile profile); // callback for video recording failing to start + void onVideoRecordStopError(VideoProfile profile); // callback for video recording being corrupted + void onFailedReconnectError(); // failed to reconnect camera after stopping video recording + void onFailedCreateVideoFileError(); // callback if unable to create file for recording video + void hasPausedPreview(boolean paused); // called when the preview is paused or unpaused (due to getPausePreviewPref()) + void cameraInOperation(boolean in_operation, boolean is_video); // called when the camera starts/stops being operation (taking photos or recording video, including if preview is paused after taking a photo), use to disable GUI elements during camera operation + void turnFrontScreenFlashOn(); // called when front-screen "flash" required (for modes flash_frontscreen_auto, flash_frontscreen_on); the application should light up the screen, until cameraInOperation(false) is called + void cameraClosed(); + void timerBeep(long remaining_time); // n.b., called once per second on timer countdown - so application can beep, or do whatever it likes + + // methods that request actions + void multitouchZoom(int new_zoom); // indicates that the zoom has 
changed due to multitouch gesture on preview + void requestTakePhoto(); // requesting taking a photo (due to single/double tap, if either getTouchCapturePref(), getDoubleTapCapturePref() options are enabled) + // the set/clear*Pref() methods are called if Preview decides to override the requested pref (because Camera device doesn't support requested pref) (clear*Pref() is called if the feature isn't supported at all) + // the application can use this information to update its preferences + void setCameraIdPref(int cameraId, String cameraIdSPhysical); + void setFlashPref(String flash_value); + void setFocusPref(String focus_value, boolean is_video); + void setVideoPref(boolean is_video); + void setSceneModePref(String scene_mode); + void clearSceneModePref(); + void setColorEffectPref(String color_effect); + void clearColorEffectPref(); + void setWhiteBalancePref(String white_balance); + void clearWhiteBalancePref(); + void setWhiteBalanceTemperaturePref(int white_balance_temperature); + void setISOPref(String iso); + void clearISOPref(); + void setExposureCompensationPref(int exposure); + void clearExposureCompensationPref(); + void setCameraResolutionPref(int width, int height); + void setVideoQualityPref(String video_quality); + void setZoomPref(int zoom); + void requestCameraPermission(); // for Android 6+: called when trying to open camera, but CAMERA permission not available + @SuppressWarnings("SameReturnValue") + boolean needsStoragePermission(); // return true if the preview should call requestStoragePermission() if WRITE_EXTERNAL_STORAGE not available (i.e., if the application needs storage permission, e.g., to save photos) + void requestStoragePermission(); // for Android 6+: called when trying to open camera, but WRITE_EXTERNAL_STORAGE permission not available + void requestRecordAudioPermission(); // for Android 6+: called when switching to (or starting up in) video mode, but RECORD_AUDIO permission not available + // Camera2 only modes: + void 
setExposureTimePref(long exposure_time); + void clearExposureTimePref(); + void setFocusDistancePref(float focus_distance, boolean is_target_distance); + + // callbacks + void onDrawPreview(Canvas canvas); + boolean onPictureTaken(byte [] data, Date current_date); + boolean onBurstPictureTaken(List images, Date current_date); + boolean onRawPictureTaken(RawImage raw_image, Date current_date); + boolean onRawBurstPictureTaken(List raw_images, Date current_date); + void onCaptureStarted(); // called immediately before we start capturing the picture + void onPictureCompleted(); // called after all picture callbacks have been called and returned + void onExtensionProgress(int progress); // Reports percentage progress for vendor camera extensions. Note that not all devices support this being called. + void onContinuousFocusMove(boolean start); // called when focusing starts/stop in continuous picture mode (in photo mode only) +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/BasicApplicationInterface.java b/app/src/main/java/net/sourceforge/opencamera/preview/BasicApplicationInterface.java new file mode 100644 index 0000000..af26b45 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/BasicApplicationInterface.java @@ -0,0 +1,692 @@ +package net.sourceforge.opencamera.preview; + +import java.util.Date; +import java.util.List; + +import android.app.Activity; +import android.graphics.Canvas; +import android.location.Location; +import android.net.Uri; +import android.os.Build; +import android.util.Pair; +import android.view.MotionEvent; + +import androidx.annotation.RequiresApi; + +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.RawImage; + +/** A partial implementation of ApplicationInterface that provides "default" implementations. 
So + * sub-classing this is easier than implementing ApplicationInterface directly - you only have to + * provide the unimplemented methods to get started, and can later override + * BasicApplicationInterface's methods as required. + * Note there is no need for your subclass of BasicApplicationInterface to call "super" methods - + * these are just default implementations that should be overridden as required. + */ +public abstract class BasicApplicationInterface implements ApplicationInterface { + @Override + public Location getLocation() { + return null; + } + + @Override + public int getCameraIdPref() { + return 0; + } + + @Override + public String getCameraIdSPhysicalPref() { + return null; + } + + @Override + public String getFlashPref() { + return "flash_off"; + } + + @Override + public String getFocusPref(boolean is_video) { + return "focus_mode_continuous_picture"; + } + + @Override + public boolean isVideoPref() { + return false; + } + + @Override + public String getSceneModePref() { + return CameraController.SCENE_MODE_DEFAULT; + } + + @Override + public String getColorEffectPref() { + return CameraController.COLOR_EFFECT_DEFAULT; + } + + @Override + public String getWhiteBalancePref() { + return CameraController.WHITE_BALANCE_DEFAULT; + } + + @Override + public int getWhiteBalanceTemperaturePref() { + return 0; + } + + @Override + public String getAntiBandingPref() { + return CameraController.ANTIBANDING_DEFAULT; + } + + @Override + public String getEdgeModePref() { + return CameraController.EDGE_MODE_DEFAULT; + } + + @Override + public String getCameraNoiseReductionModePref() { + return CameraController.NOISE_REDUCTION_MODE_DEFAULT; + } + + @Override + public String getISOPref() { + return CameraController.ISO_DEFAULT; + } + + @Override + public int getExposureCompensationPref() { + return 0; + } + + @Override + public Pair getCameraResolutionPref(CameraResolutionConstraints constraints) { + return null; + } + + @Override + public int 
getImageQualityPref() { + return 90; + } + + @Override + public boolean getFaceDetectionPref() { + return false; + } + + @Override + public String getVideoQualityPref() { + return ""; + } + + @Override + public boolean getVideoStabilizationPref() { + return false; + } + + @Override + public boolean getForce4KPref() { + return false; + } + + @Override + public String getRecordVideoOutputFormatPref() { + return "preference_video_output_format_default"; + } + + @Override + public String getVideoBitratePref() { + return "default"; + } + + @Override + public String getVideoFPSPref() { + return "default"; + } + + @Override + public float getVideoCaptureRateFactor() { + return 1.0f; + } + + @Override + public CameraController.TonemapProfile getVideoTonemapProfile() { + return CameraController.TonemapProfile.TONEMAPPROFILE_OFF; + } + + @Override + public float getVideoLogProfileStrength() { + return 0; + } + + @Override + public float getVideoProfileGamma() { + return 0; + } + + @Override + public long getVideoMaxDurationPref() { + return 0; + } + + @Override + public int getVideoRestartTimesPref() { + return 0; + } + + @Override + public VideoMaxFileSize getVideoMaxFileSizePref() throws NoFreeStorageException { + VideoMaxFileSize video_max_filesize = new VideoMaxFileSize(); + video_max_filesize.max_filesize = 0; + video_max_filesize.auto_restart = true; + return video_max_filesize; + } + + @Override + public boolean getVideoFlashPref() { + return false; + } + + @Override + public boolean getVideoLowPowerCheckPref() { + return true; + } + + @Override + public String getPreviewSizePref() { + return "preference_preview_size_wysiwyg"; + } + + @Override + public String getLockOrientationPref() { + return "none"; + } + + @Override + public boolean getTouchCapturePref() { + return false; + } + + @Override + public boolean getDoubleTapCapturePref() { + return false; + } + + @Override + public boolean getPausePreviewPref() { + return false; + } + + @Override + public boolean 
getShowToastsPref() { + return true; + } + + @Override + public boolean getShutterSoundPref() { + return true; + } + + @Override + public boolean getStartupFocusPref() { + return true; + } + + @Override + public long getTimerPref() { + return 0; + } + + @Override + public String getRepeatPref() { + return "1"; + } + + @Override + public long getRepeatIntervalPref() { + return 0; + } + + @Override + public boolean getGeotaggingPref() { + return false; + } + + @Override + public boolean getRequireLocationPref() { + return false; + } + + @Override + public boolean getRecordAudioPref() { + return true; + } + + @Override + public String getRecordAudioChannelsPref() { + return "audio_default"; + } + + @Override + public String getRecordAudioSourcePref() { + return "audio_src_camcorder"; + } + + @Override + public int getZoomPref() { + return -1; + } + + @Override + public double getCalibratedLevelAngle() { + return 0; + } + + @Override + public boolean canTakeNewPhoto() { + return true; + } + + @Override + public boolean imageQueueWouldBlock(int n_raw, int n_jpegs) { + return false; + } + + @Override + public int getDisplayRotation(boolean prefer_later) { + Activity activity = (Activity)this.getContext(); + return activity.getWindowManager().getDefaultDisplay().getRotation(); + } + + @Override + public long getExposureTimePref() { + return CameraController.EXPOSURE_TIME_DEFAULT; + } + + @Override + public float getFocusDistancePref(boolean is_target_distance) { + return 0; + } + + @Override + public boolean isExpoBracketingPref() { + return false; + } + + @Override + public int getExpoBracketingNImagesPref() { + return 3; + } + + @Override + public double getExpoBracketingStopsPref() { + return 2.0; + } + + @Override + public int getFocusBracketingNImagesPref() { + return 3; + } + + @Override + public boolean getFocusBracketingAddInfinityPref() { + return false; + } + + @Override + public boolean isFocusBracketingPref() { + return false; + } + + @Override + public 
boolean isCameraBurstPref() { + return false; + } + + @Override + public int getBurstNImages() { + return 5; + } + + @Override + public boolean getBurstForNoiseReduction() { + return false; + } + + @Override + public NRModePref getNRModePref() { + return NRModePref.NRMODE_NORMAL; + } + + @Override + public boolean isCameraExtensionPref() { + return false; + } + + @Override + @RequiresApi(api = Build.VERSION_CODES.S) + public int getCameraExtensionPref() { + return 0; + } + + @Override + public float getAperturePref() { + return -1.0f; + } + + @Override + public boolean getJpegRPref() { + return false; + } + + @Override + public RawPref getRawPref() { + return RawPref.RAWPREF_JPEG_ONLY; + } + + @Override + public int getMaxRawImages() { + return 2; + } + + @Override + public boolean useCamera2DummyCaptureHack() { + return false; + } + + @Override + public boolean useCamera2FakeFlash() { + return false; + } + + @Override + public boolean useCamera2FastBurst() { + return true; + } + + @Override + public boolean usePhotoVideoRecording() { + return true; + } + + @Override + public boolean isPreviewInBackground() { + return false; + } + + @Override + public boolean allowZoom() { + return true; + } + + @Override + public boolean optimiseFocusForLatency() { + return true; + } + + @Override + public boolean isTestAlwaysFocus() { + return false; + } + + @Override + public void cameraSetup() { + + } + + @Override + public void touchEvent(MotionEvent event) { + + } + + @Override + public void startingVideo() { + + } + + @Override + public void startedVideo() { + + } + + @Override + public void stoppingVideo() { + + } + + @Override + public void stoppedVideo(VideoMethod video_method, Uri uri, String filename) { + + } + + @Override + public void restartedVideo(final VideoMethod video_method, final Uri uri, final String filename) { + } + + @Override + public void deleteUnusedVideo(final VideoMethod video_method, final Uri uri, final String filename) { + } + + @Override + public 
void onFailedStartPreview() { + + } + + @Override + public void onCameraError() { + + } + + @Override + public void onPhotoError() { + + } + + @Override + public void onVideoInfo(int what, int extra) { + + } + + @Override + public void onVideoError(int what, int extra) { + + } + + @Override + public void onVideoRecordStartError(VideoProfile profile) { + + } + + @Override + public void onVideoRecordStopError(VideoProfile profile) { + + } + + @Override + public void onFailedReconnectError() { + + } + + @Override + public void onFailedCreateVideoFileError() { + + } + + @Override + public void hasPausedPreview(boolean paused) { + + } + + @Override + public void cameraInOperation(boolean in_operation, boolean is_video) { + + } + + @Override + public void turnFrontScreenFlashOn() { + + } + + @Override + public void cameraClosed() { + + } + + @Override + public void timerBeep(long remaining_time) { + + } + + @Override + public void multitouchZoom(int new_zoom) { + + } + + @Override + public void requestTakePhoto() { + } + + @Override + public void setCameraIdPref(int cameraId, String cameraIdSPhysical) { + + } + + @Override + public void setFlashPref(String flash_value) { + + } + + @Override + public void setFocusPref(String focus_value, boolean is_video) { + + } + + @Override + public void setVideoPref(boolean is_video) { + + } + + @Override + public void setSceneModePref(String scene_mode) { + + } + + @Override + public void clearSceneModePref() { + + } + + @Override + public void setColorEffectPref(String color_effect) { + + } + + @Override + public void clearColorEffectPref() { + + } + + @Override + public void setWhiteBalancePref(String white_balance) { + + } + + @Override + public void clearWhiteBalancePref() { + + } + + @Override + public void setWhiteBalanceTemperaturePref(int white_balance_temperature) { + + } + + @Override + public void setISOPref(String iso) { + + } + + @Override + public void clearISOPref() { + + } + + @Override + public void 
setExposureCompensationPref(int exposure) { + + } + + @Override + public void clearExposureCompensationPref() { + + } + + @Override + public void setCameraResolutionPref(int width, int height) { + + } + + @Override + public void setVideoQualityPref(String video_quality) { + + } + + @Override + public void setZoomPref(int zoom) { + + } + + @Override + public void setExposureTimePref(long exposure_time) { + + } + + @Override + public void clearExposureTimePref() { + + } + + @Override + public void setFocusDistancePref(float focus_distance, boolean is_target_distance) { + + } + + @Override + public void onDrawPreview(Canvas canvas) { + + } + + @Override + public boolean onBurstPictureTaken(List images, Date current_date) { + return false; + } + + @Override + public boolean onRawPictureTaken(RawImage raw_image, Date current_date) { + return false; + } + + @Override + public boolean onRawBurstPictureTaken(List raw_images, Date current_date) { + return false; + } + + @Override + public void onCaptureStarted() { + + } + + @Override + public void onPictureCompleted() { + + } + + @Override + public void onExtensionProgress(int progress) { + } + + @Override + public void onContinuousFocusMove(boolean start) { + + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/CanvasView.java b/app/src/main/java/net/sourceforge/opencamera/preview/CanvasView.java new file mode 100644 index 0000000..048cc34 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/CanvasView.java @@ -0,0 +1,72 @@ +package net.sourceforge.opencamera.preview; + +import net.sourceforge.opencamera.MyDebug; +import android.content.Context; +import android.graphics.Canvas; +import android.os.Handler; +import android.util.Log; +import android.view.View; + +import androidx.annotation.NonNull; + +/** View for on top of the Preview - this just redirects to Preview.onDraw to do the + * work. 
Only used if using a MyTextureView (if using MySurfaceView, then that + * class can handle the onDraw()). TextureViews can't be used for both a + * camera preview, and used for drawing on. + */ +public class CanvasView extends View { + private static final String TAG = "CanvasView"; + + private final Preview preview; + private final int [] measure_spec = new int[2]; + private final Handler handler = new Handler(); + private final Runnable tick; + + CanvasView(Context context, final Preview preview) { + super(context); + this.preview = preview; + if( MyDebug.LOG ) { + Log.d(TAG, "new CanvasView"); + } + + // deprecated setting, but required on Android versions prior to 3.0 + //getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // deprecated + + tick = new Runnable() { + public void run() { + /*if( MyDebug.LOG ) + Log.d(TAG, "invalidate()");*/ + preview.test_ticker_called = true; + invalidate(); + handler.postDelayed(this, preview.getFrameRate()); + } + }; + } + + @Override + public void onDraw(@NonNull Canvas canvas) { + /*if( MyDebug.LOG ) + Log.d(TAG, "onDraw()");*/ + preview.draw(canvas); + } + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + if( MyDebug.LOG ) + Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec); + preview.getMeasureSpec(measure_spec, widthSpec, heightSpec); + super.onMeasure(measure_spec[0], measure_spec[1]); + } + + void onPause() { + if( MyDebug.LOG ) + Log.d(TAG, "onPause()"); + handler.removeCallbacks(tick); + } + + void onResume() { + if( MyDebug.LOG ) + Log.d(TAG, "onResume()"); + tick.run(); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/Preview.java b/app/src/main/java/net/sourceforge/opencamera/preview/Preview.java new file mode 100644 index 0000000..fb9067c --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/Preview.java @@ -0,0 +1,9300 @@ +package net.sourceforge.opencamera.preview; + +import net.sourceforge.opencamera.JavaImageFunctions; 
+import net.sourceforge.opencamera.JavaImageProcessing; +import net.sourceforge.opencamera.cameracontroller.RawImage; +//import net.sourceforge.opencamera.MainActivity; +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.R; +import net.sourceforge.opencamera.TakePhoto; +import net.sourceforge.opencamera.ToastBoxer; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.CameraController1; +import net.sourceforge.opencamera.cameracontroller.CameraController2; +import net.sourceforge.opencamera.cameracontroller.CameraControllerException; +import net.sourceforge.opencamera.cameracontroller.CameraControllerManager; +import net.sourceforge.opencamera.cameracontroller.CameraControllerManager1; +import net.sourceforge.opencamera.cameracontroller.CameraControllerManager2; +import net.sourceforge.opencamera.preview.ApplicationInterface.NoFreeStorageException; +import net.sourceforge.opencamera.preview.camerasurface.CameraSurface; +import net.sourceforge.opencamera.preview.camerasurface.MySurfaceView; +import net.sourceforge.opencamera.preview.camerasurface.MyTextureView; + +import java.io.File; +//import java.io.FileOutputStream; +import java.io.IOException; +//import java.io.OutputStream; +import java.lang.ref.WeakReference; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.Hashtable; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import android.Manifest; +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import 
android.content.pm.PackageManager; +import android.content.res.Configuration; +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Point; +import android.graphics.Rect; +import android.graphics.RectF; +import android.graphics.SurfaceTexture; +import android.hardware.SensorEvent; +import android.hardware.SensorManager; +import android.hardware.camera2.CameraExtensionCharacteristics; +import android.location.Location; +import android.media.CamcorderProfile; +import android.media.MediaRecorder; +import android.net.Uri; +import android.os.AsyncTask; +import android.os.BatteryManager; +import android.os.Build; +import android.os.Bundle; +//import android.os.Environment; +import android.os.Handler; +import android.os.ParcelFileDescriptor; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import android.util.Log; +import android.util.Pair; +import android.view.GestureDetector; +//import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.OrientationEventListener; +import android.view.ScaleGestureDetector; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.TextureView; +import android.view.View; +import android.view.ViewConfiguration; +import android.view.ViewGroup; +import android.view.ViewParent; +import android.view.WindowManager; +import android.view.View.MeasureSpec; +import android.widget.FrameLayout; +import android.widget.TextView; +import android.widget.Toast; + +/** This class was originally named due to encapsulating the camera preview, + * but in practice it's grown to more than this, and includes most of the + * operation of the camera. 
It exists at a higher level than CameraController + * (i.e., this isn't merely a low level wrapper to the camera API, but + * supports much of the Open Camera logic and functionality). Communication to + * the rest of the application is available through ApplicationInterface. + * We could probably do with decoupling this class into separate components! + * + * This class also keeps track of various camera parameters, obtained from the + * CameraController class. One decision is when certain parameters depend on + * others (e.g., some resolutions don't support burst; lots of things don't + * support vendor camera extensions). In general we shouldn't do that restriction + * at this level, as that can cause problems since at the Application level we + * may need to know what features are possible in any mode. E.g., if we said + * burst mode isn't supported because we're in a camera extension mode, the user + * wouldn't be able to switch to Fast Burst mode because the application thinks + * burst isn't available! And also for changing preferences in Settings, we + * typically want to show all available settings (e.g., showing RAW if it's + * available for the current camera, even if not available in the current mode). + * There are some exceptions where we need to restrict at the Preview level, e.g.: + * - Resolutions (for burst mode, camera extensions) - though the application + * can choose to obtain the full list by calling getSupportedPictureSizes() + * with check_supported==false. + * - Flash modes (for manual ISO or camera extensions). + * - Focus modes (for camera extensions). + * Similarly we shouldn't restrict available features at the CameraController + * class, except where this is unavoidable due to the Android camera API + * behaviour (e.g., for scene modes, it may be that some camera features are + * affected). 
+ */ +public class Preview implements SurfaceHolder.Callback, TextureView.SurfaceTextureListener { + private static final String TAG = "Preview"; + + private final boolean using_android_l; + + private final ApplicationInterface applicationInterface; + private final CameraSurface cameraSurface; + private CanvasView canvasView; + private boolean set_preview_size; + private int preview_w, preview_h; + private boolean set_textureview_size; + private int textureview_w, textureview_h; + + private boolean want_preview_bitmap; // whether application has requested we generate bitmap for the preview + private boolean use_preview_bitmap_small, use_preview_bitmap_full; // whether we want downsized and/or full preview bitmaps + private Bitmap preview_bitmap; // downsided bitmap from preview + private int preview_bitmap_full_w = -1, preview_bitmap_full_h = -1; // for full bitmaps, we generate copies on the fly (as these need to be saved for preshots feature) + private long last_preview_bitmap_time_ms; // time the last preview_bitmap was updated + private RefreshPreviewBitmapTask refreshPreviewBitmapTask; + + private boolean want_histogram; // whether to generate a histogram, requires want_preview_bitmap==true and use_preview_bitmap_small==true + public enum HistogramType { + HISTOGRAM_TYPE_RGB, + HISTOGRAM_TYPE_LUMINANCE, + HISTOGRAM_TYPE_VALUE, + HISTOGRAM_TYPE_INTENSITY, + HISTOGRAM_TYPE_LIGHTNESS + } + private HistogramType histogram_type = HistogramType.HISTOGRAM_TYPE_VALUE; + private int [] histogram; + private long last_histogram_time_ms; // time the last histogram was updated + + private boolean want_zebra_stripes; // whether to generate zebra stripes bitmap, requires want_preview_bitmap==true and use_preview_bitmap_small==true + private int zebra_stripes_threshold; // pixels with max rgb value equal to or greater than this threshold are marked with zebra stripes + private int zebra_stripes_color_foreground; + private int zebra_stripes_color_background; + private Bitmap 
zebra_stripes_bitmap_buffer; + private Bitmap zebra_stripes_bitmap; + + private boolean want_focus_peaking; // whether to generate focus peaking bitmap, requires want_preview_bitmap==true and use_preview_bitmap_small==true + private Bitmap focus_peaking_bitmap_buffer; + private Bitmap focus_peaking_bitmap_buffer_temp; + private Bitmap focus_peaking_bitmap; + + private boolean want_pre_shots; // whether to store pre-shots from preview bitmap, requires want_preview_bitmap==true and use_preview_bitmap_full==true + + private final Matrix camera_to_preview_matrix = new Matrix(); + private final Matrix preview_to_camera_matrix = new Matrix(); + private double preview_targetRatio; + + //private boolean ui_placement_right = true; + + private boolean app_is_paused = true; // whether activity is paused + private boolean is_paused = true; // whether Preview.onPause() is called - note this could include the application pausing the preview, even if app_is_paused==false + private boolean has_surface; + private boolean has_aspect_ratio; + private double aspect_ratio; + private final CameraControllerManager camera_controller_manager; + private CameraController camera_controller; + private final Map camera_features_caches = new Hashtable<>(); // cache for CameraController2 + enum CameraOpenState { + CAMERAOPENSTATE_CLOSED, // have yet to attempt to open the camera (either at all, or since the camera was closed) + CAMERAOPENSTATE_OPENING, // the camera is currently being opened (on a background thread) + CAMERAOPENSTATE_OPENED, // either the camera is open (if camera_controller!=null) or we failed to open the camera (if camera_controller==null) + CAMERAOPENSTATE_CLOSING // the camera is currently being closed (on a background thread) + } + private CameraOpenState camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED; + private AsyncTask open_camera_task; // background task used for opening camera + private CloseCameraTask close_camera_task; // background task used for closing 
camera + private boolean has_permissions = true; // whether we have permissions necessary to operate the camera (camera, storage); assume true until we've been denied one of them + private boolean is_video; + private volatile MediaRecorder video_recorder; // must be volatile for test project reading the state + private volatile boolean video_start_time_set; // must be volatile for test project reading the state + private long video_start_time; // system time when the video recording was started, or last resumed if it was paused + private long video_accumulated_time; // this time should be added to (System.currentTimeMillis() - video_start_time) to find the true video duration, that takes into account pausing/resuming, as well as any auto-restarts from max filesize + private long video_time_last_maxfilesize_restart; // when the video last restarted due to maxfilesize (or otherwise 0) - note this is time in ms relative to the recorded video, and not system time + private boolean video_recorder_is_paused; // whether video_recorder is running but has paused + private boolean video_restart_on_max_filesize; + private static final long min_safe_restart_video_time = 1000; // if the remaining max time after restart is less than this, don't restart + /** Stores the file (or similar) to record a video. + * Important to call close() when the video recording is finished, to free up any resources + * (e.g., supplied ParcelFileDescriptor). 
+ */ + private static class VideoFileInfo { + private final ApplicationInterface.VideoMethod video_method; + private final Uri video_uri; // for VideoMethod.SAF, VideoMethod.URI or VideoMethod.MEDIASTORE + private final String video_filename; // for VideoMethod.FILE + private final ParcelFileDescriptor video_pfd_saf; // for VideoMethod.SAF, VideoMethod.URI or VideoMethod.MEDIASTORE + + VideoFileInfo() { + this.video_method = ApplicationInterface.VideoMethod.FILE; + this.video_uri = null; + this.video_filename = null; + this.video_pfd_saf = null; + } + VideoFileInfo(ApplicationInterface.VideoMethod video_method, Uri video_uri, String video_filename, ParcelFileDescriptor video_pfd_saf) { + this.video_method = video_method; + this.video_uri = video_uri; + this.video_filename = video_filename; + this.video_pfd_saf = video_pfd_saf; + } + + void close() { + if( this.video_pfd_saf != null ) { + try { + this.video_pfd_saf.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close video_pfd_saf", e); + } + } + } + } + private VideoFileInfo videoFileInfo = new VideoFileInfo(); + private VideoFileInfo nextVideoFileInfo; // used for Android 8+ to handle seamless restart (see MediaRecorder.setNextOutputFile()) + + private static final int PHASE_NORMAL = 0; + private static final int PHASE_TIMER = 1; + private static final int PHASE_TAKING_PHOTO = 2; + private static final int PHASE_PREVIEW_PAUSED = 3; // the paused state after taking a photo + private volatile int phase = PHASE_NORMAL; // must be volatile for test project reading the state + private final Timer takePictureTimer = new Timer(); + private TimerTask takePictureTimerTask; + private final Timer beepTimer = new Timer(); + private TimerTask beepTimerTask; + private final Timer flashVideoTimer = new Timer(); + private TimerTask flashVideoTimerTask; + private final IntentFilter battery_ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); + private final Timer batteryCheckVideoTimer = new 
Timer(); + private TimerTask batteryCheckVideoTimerTask; + private long take_photo_time; + private int remaining_repeat_photos; + private int remaining_restart_video; + + //private boolean is_preview_started; + private static final int PREVIEW_NOT_STARTED = 0; + private static final int PREVIEW_IS_STARTING = 1; + private static final int PREVIEW_STARTED = 2; + private int preview_started_state = PREVIEW_NOT_STARTED; // state of the camera preview + + private OrientationEventListener orientationEventListener; + private int current_orientation; // orientation received by onOrientationChanged + private int current_rotation; // orientation relative to camera's orientation (used for parameters.setRotation()) + private boolean has_level_angle; + private double natural_level_angle; // "level" angle of device in degrees, before applying any calibration and without accounting for screen orientation + private double level_angle; // "level" angle of device in degrees, including calibration + private double orig_level_angle; // "level" angle of device in degrees, including calibration, but without accounting for screen orientation + private boolean has_pitch_angle; + private double pitch_angle; // pitch angle of device in degrees + + // if applicationInterface.allowZoom() returns false, then has_zoom will be false, but camera_controller_supports_zoom + // supports whether the camera controller supported zoom + // similarly for camera_controller_max_zoom_factor, camera_controller_zoom_ratios - needed for initZoom() + private boolean camera_controller_supports_zoom; + private boolean has_zoom; + private int camera_controller_max_zoom_factor; + private int max_zoom_factor; + private List camera_controller_zoom_ratios; + private List zoom_ratios; + private final GestureDetector gestureDetector; + private final ScaleGestureDetector scaleGestureDetector; + private float minimum_focus_distance; + private boolean touch_was_multitouch; + private float touch_orig_x; + private float 
touch_orig_y; + + private List supported_flash_values; // our "values" format + private int current_flash_index = -1; // this is an index into the supported_flash_values array, or -1 if no flash modes available + + private List supported_focus_values; // our "values" format + private int current_focus_index = -1; // this is an index into the supported_focus_values array, or -1 if no focus modes available + private int max_num_focus_areas; + private boolean continuous_focus_move_is_started; + + private boolean is_exposure_lock_supported; + private boolean is_exposure_locked; + + private boolean is_white_balance_lock_supported; + private boolean is_white_balance_locked; + + private List color_effects; + private List scene_modes; + private List white_balances; + private List antibanding; + private List edge_modes; + private List noise_reduction_modes; // n.b., this is for the Camera2 API setting, not for Open Camera's Noise Reduction photo mode + private List isos; + private boolean supports_white_balance_temperature; + private int min_temperature; + private int max_temperature; + private boolean supports_iso_range; + private int min_iso; + private int max_iso; + private boolean supports_exposure_time; + private long min_exposure_time; + private long max_exposure_time; + private List exposures; + private int min_exposure; + private int max_exposure; + private float exposure_step; + private boolean supports_expo_bracketing; + private int max_expo_bracketing_n_images; + private boolean supports_focus_bracketing; + private boolean supports_burst; + private boolean supports_jpeg_r; + private boolean supports_raw; + private float view_angle_x; + private float view_angle_y; + private Set physical_camera_ids; // if non-null, this camera is part of a logical camera that exposes these physical camera IDs + + private List supported_preview_sizes; + + private List photo_sizes; + private ApplicationInterface.CameraResolutionConstraints photo_size_constraints; + private int 
current_size_index = -1; // this is an index into the sizes array, or -1 if sizes not yet set + + public List supported_extensions; // if non-null, list of supported camera vendor extensions, see https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics + public List supported_extensions_zoom; // if non-null, list of camera vendor extensions that support zoom + + private boolean supports_video; + private boolean has_capture_rate_factor; // whether we have a capture rate for faster (timelapse) or slow motion + private float capture_rate_factor = 1.0f; // should be 1.0f if has_capture_rate_factor is false; set lower than 1 for slow motion, higher than 1 for timelapse + private boolean video_high_speed; // whether the current video mode requires high speed frame rate (note this may still be true even if is_video==false, so potentially we could switch photo/video modes without setting up the flag) + private boolean supports_video_high_speed; + private final VideoQualityHandler video_quality_handler = new VideoQualityHandler(); + + private Toast last_toast; + private long last_toast_time_ms; + private final ToastBoxer focus_flash_toast = new ToastBoxer(); + private final ToastBoxer take_photo_toast = new ToastBoxer(); + private final ToastBoxer pause_video_toast = new ToastBoxer(); + + private int ui_rotation; + + private boolean supports_face_detection; + private boolean using_face_detection; + private CameraController.Face [] faces_detected; + private final RectF face_rect = new RectF(); + private boolean supports_optical_stabilization; + private boolean supports_video_stabilization; + private boolean supports_photo_video_recording; + private boolean can_disable_shutter_sound; + private int tonemap_max_curve_points; + private boolean supports_tonemap_curve; + private float [] supported_apertures; + private boolean has_focus_area; + private long focus_area_time = -1; // time when has_focus_area last set to true + private float 
focus_camera_x; + private float focus_camera_y; + private long focus_complete_time = -1; + private long focus_started_time = -1; + private int focus_success = FOCUS_DONE; + private static final int FOCUS_WAITING = 0; + private static final int FOCUS_SUCCESS = 1; + private static final int FOCUS_FAILED = 2; + private static final int FOCUS_DONE = 3; + private String set_flash_value_after_autofocus = ""; + private boolean take_photo_after_autofocus; // set to take a photo when the in-progress autofocus has completed; if setting, remember to call camera_controller.setCaptureFollowAutofocusHint() + private boolean successfully_focused; + private long successfully_focused_time = -1; + + // accelerometer and geomagnetic sensor info + private static final float sensor_alpha = 0.8f; // for filter + private boolean has_gravity; + private final float [] gravity = new float[3]; + private boolean has_geomagnetic; + private final float [] geomagnetic = new float[3]; + private final float [] deviceRotation = new float[9]; + private final float [] cameraRotation = new float[9]; + private final float [] deviceInclination = new float[9]; + private boolean has_geo_direction; + private final float [] geo_direction = new float[3]; // geo direction in radians + private final float [] new_geo_direction = new float[3]; + + private final DecimalFormat decimal_format_1dp = new DecimalFormat("#.#"); + + // use use '0' instead of '#' to display e.g. 1.20 instead of 1.2, so that text lengths are consistent (e.g., for the + // toasts shown when changing sliders for manual focus distance or exposure compensation). + private final DecimalFormat decimal_format_2dp_force0 = new DecimalFormat("0.00"); + + /* If the user touches to focus in continuous mode, and in photo mode, we switch the camera_controller to autofocus mode. + * autofocus_in_continuous_mode is set to true when this happens; the runnable reset_continuous_focus_runnable + * switches back to continuous mode. 
+ */ + private final Handler reset_continuous_focus_handler = new Handler(); + private Runnable reset_continuous_focus_runnable; + private boolean autofocus_in_continuous_mode; + private boolean focus_set_for_target_distance; // if true, then the focus has been set to manual focus distance for the target (for focus bracketing) + private long focus_set_for_target_distance_ms; // time when focus_set_for_target_distance last changed + + enum FaceLocation { + FACELOCATION_UNSET, + FACELOCATION_UNKNOWN, + FACELOCATION_LEFT, + FACELOCATION_RIGHT, + FACELOCATION_TOP, + FACELOCATION_BOTTOM, + FACELOCATION_CENTRE + } + + // for testing; must be volatile for test project reading the state + private boolean is_test; // whether called from OpenCamera.test testing + private boolean is_test_junit4; + public volatile int count_cameraStartPreview; + public volatile int count_cameraAutoFocus; + public volatile int count_cameraTakePicture; + public volatile int count_cameraContinuousFocusMoving; + public volatile boolean test_fail_open_camera; + public volatile boolean test_video_failure; + public volatile boolean test_video_ioexception; + public volatile boolean test_video_cameracontrollerexception; + public volatile boolean test_ticker_called; // set from MySurfaceView or CanvasView + public volatile boolean test_called_next_output_file; + public volatile boolean test_started_next_output_file; + public volatile boolean test_runtime_on_video_stop; // force throwing a RuntimeException when stopping video (this usually happens naturally when stopping video too soon) + public volatile boolean test_burst_resolution; + + public Preview(ApplicationInterface applicationInterface, ViewGroup parent) { + if( MyDebug.LOG ) { + Log.d(TAG, "new Preview"); + } + + this.applicationInterface = applicationInterface; + + Activity activity = (Activity)this.getContext(); + if( activity.getIntent() != null && activity.getIntent().getExtras() != null ) { + // whether called from testing + is_test = 
activity.getIntent().getExtras().getBoolean("test_project"); + is_test_junit4 = activity.getIntent().getExtras().getBoolean("test_project_junit4"); + } + if( MyDebug.LOG ) { + Log.d(TAG, "is_test: " + is_test); + Log.d(TAG, "is_test_junit4: " + is_test_junit4); + } + + this.using_android_l = applicationInterface.useCamera2(); + if( MyDebug.LOG ) { + Log.d(TAG, "using_android_l?: " + using_android_l); + } + + boolean using_texture_view = false; + if( using_android_l ) { + // use a TextureView for Android L - had bugs with SurfaceView not resizing properly on Nexus 7; and good to use a TextureView anyway + // ideally we'd use a TextureView for older camera API too, but sticking with SurfaceView to avoid risk of breaking behaviour + using_texture_view = true; + } + + if( using_texture_view ) { + this.cameraSurface = new MyTextureView(getContext(), this); + // a TextureView can't be used both as a camera preview, and used for drawing on, so we use a separate CanvasView + this.canvasView = new CanvasView(getContext(), this); + camera_controller_manager = new CameraControllerManager2(getContext()); + } + else { + this.cameraSurface = new MySurfaceView(getContext(), this); + camera_controller_manager = new CameraControllerManager1(); + } + /*{ + FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.WRAP_CONTENT, FrameLayout.LayoutParams.WRAP_CONTENT); + layoutParams.gravity = Gravity.CENTER; + cameraSurface.getView().setLayoutParams(layoutParams); + }*/ + + gestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener()); + gestureDetector.setOnDoubleTapListener(new DoubleTapListener()); + scaleGestureDetector = new ScaleGestureDetector(getContext(), new ScaleListener()); + + parent.addView(cameraSurface.getView()); + if( canvasView != null ) { + parent.addView(canvasView); + } + } + + /*private void previewToCamera(float [] coords) { + float alpha = coords[0] / (float)this.getWidth(); + float beta 
    = coords[1] / (float)this.getHeight();
        coords[0] = 2000.0f * alpha - 1000.0f;
        coords[1] = 2000.0f * beta - 1000.0f;
    }*/

    /*private void cameraToPreview(float [] coords) {
        float alpha = (coords[0] + 1000.0f) / 2000.0f;
        float beta = (coords[1] + 1000.0f) / 2000.0f;
        coords[0] = alpha * (float)this.getWidth();
        coords[1] = beta * (float)this.getHeight();
    }*/

    /** Convenience accessor for Resources, obtained via the camera surface's view. */
    private Resources getResources() {
        return cameraSurface.getView().getResources();
    }

    /** Returns the view being used for the camera preview (SurfaceView or TextureView, depending on camera API). */
    public View getView() {
        return cameraSurface.getView();
    }

    // If this code is changed, important to test that face detection and touch to focus still works as expected, for front and back
    // cameras, for old and new API, including with zoom. Also test with MainActivity.setWindowFlagsForCamera() setting orientation as SCREEN_ORIENTATION_REVERSE_LANDSCAPE,
    // and/or set "Rotate preview" option to 180 degrees.
    /** Computes camera_to_preview_matrix, mapping camera driver coordinates
     *  ((-1000,-1000)..(1000,1000)) to preview view coordinates ((0,0)..(width,height)),
     *  accounting for front-camera mirroring and display rotation.
     */
    private void calculateCameraToPreviewMatrix() {
        if( MyDebug.LOG )
            Log.d(TAG, "calculateCameraToPreviewMatrix");
        if( camera_controller == null )
            return;
        camera_to_preview_matrix.reset();
        if( !using_android_l ) {
            // see http://developer.android.com/reference/android/hardware/Camera.Face.html#rect
            // Need mirror for front camera
            boolean mirror = (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT);
            camera_to_preview_matrix.setScale(mirror ? -1 : 1, 1);
            int display_orientation = camera_controller.getDisplayOrientation();
            if( MyDebug.LOG ) {
                Log.d(TAG, "orientation of display relative to camera orientation: " + display_orientation);
            }
            camera_to_preview_matrix.postRotate(display_orientation);
        }
        else {
            // Unfortunately the transformation for Android L API isn't documented, but this seems to work for Nexus 6.
            // This is the equivalent code for android.hardware.Camera.setDisplayOrientation, but we don't actually use setDisplayOrientation()
            // for CameraController2, except testing on Nexus 6 shows that we shouldn't change "result" for front facing camera.
            // note: mirroring is applied on the y axis here (unlike the old-API branch above)
            boolean mirror = (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT);
            camera_to_preview_matrix.setScale(1, mirror ? -1 : 1);
            int degrees = getDisplayRotationDegrees(false);
            int result = (camera_controller.getCameraOrientation() - degrees + 360) % 360;
            if( MyDebug.LOG ) {
                Log.d(TAG, "orientation of display relative to natural orientation: " + degrees);
                Log.d(TAG, "orientation of display relative to camera orientation: " + result);
            }
            camera_to_preview_matrix.postRotate(result);
        }
        // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
        // UI coordinates range from (0, 0) to (width, height).
        camera_to_preview_matrix.postScale(cameraSurface.getView().getWidth() / 2000f, cameraSurface.getView().getHeight() / 2000f);
        camera_to_preview_matrix.postTranslate(cameraSurface.getView().getWidth() / 2f, cameraSurface.getView().getHeight() / 2f);
    }

    /** Computes preview_to_camera_matrix as the inverse of camera_to_preview_matrix
     *  (used to map touch coordinates back into camera driver coordinates).
     */
    private void calculatePreviewToCameraMatrix() {
        if( camera_controller == null )
            return;
        calculateCameraToPreviewMatrix();
        if( !camera_to_preview_matrix.invert(preview_to_camera_matrix) ) {
            // shouldn't happen in practice for a scale+rotate+translate matrix; logged for diagnosis
            if( MyDebug.LOG )
                Log.d(TAG, "calculatePreviewToCameraMatrix failed to invert matrix!?");
        }
    }

    /** Recomputes and returns the camera-to-preview transformation matrix. */
    private Matrix getCameraToPreviewMatrix() {
        calculateCameraToPreviewMatrix();
        return camera_to_preview_matrix;
    }

    /*Matrix getPreviewToCameraMatrix() {
        calculatePreviewToCameraMatrix();
        return preview_to_camera_matrix;
    }*/

    /** Return a focus area from supplied point. Supplied coordinates should be in camera
     * coordinates.
+ */ + private ArrayList getAreas(float focus_x, float focus_y) { + int focus_size = 50; + if( MyDebug.LOG ) { + Log.d(TAG, "focus x, y: " + focus_x + ", " + focus_y); + } + Rect rect = new Rect(); + rect.left = (int)focus_x - focus_size; + rect.right = (int)focus_x + focus_size; + rect.top = (int)focus_y - focus_size; + rect.bottom = (int)focus_y + focus_size; + if( rect.left < -1000 ) { + rect.left = -1000; + rect.right = rect.left + 2*focus_size; + } + else if( rect.right > 1000 ) { + rect.right = 1000; + rect.left = rect.right - 2*focus_size; + } + if( rect.top < -1000 ) { + rect.top = -1000; + rect.bottom = rect.top + 2*focus_size; + } + else if( rect.bottom > 1000 ) { + rect.bottom = 1000; + rect.top = rect.bottom - 2*focus_size; + } + + ArrayList areas = new ArrayList<>(); + areas.add(new CameraController.Area(rect, 1000)); + return areas; + } + + @SuppressWarnings("SameReturnValue") + public boolean touchEvent(MotionEvent event) { + if( MyDebug.LOG ) + Log.d(TAG, "touch event at : " + event.getX() + " , " + event.getY() + " at time " + event.getEventTime()); + + // doesn't seem a bad idea to clear fake toasts (touching screen gets rid of standard toasts on Android 10+ at least) + this.clearActiveFakeToast(); + + //boolean was_paused = !this.is_preview_started; + boolean was_paused = this.preview_started_state != PREVIEW_STARTED; + if( MyDebug.LOG ) + Log.d(TAG, "was_paused: " + was_paused); + + if( gestureDetector.onTouchEvent(event) ) { + if( MyDebug.LOG ) + Log.d(TAG, "touch event handled by gestureDetector"); + return true; + } + scaleGestureDetector.onTouchEvent(event); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "received touch event, but camera not available"); + return true; + } + applicationInterface.touchEvent(event); + /*if( MyDebug.LOG ) { + Log.d(TAG, "touch event: " + event.getAction()); + }*/ + if( event.getPointerCount() != 1 ) { + //multitouch_time = System.currentTimeMillis(); + touch_was_multitouch = true; + return 
true; + } + if( event.getAction() != MotionEvent.ACTION_UP ) { + if( event.getAction() == MotionEvent.ACTION_DOWN && event.getPointerCount() == 1 ) { + touch_was_multitouch = false; + if( event.getAction() == MotionEvent.ACTION_DOWN ) { + touch_orig_x = event.getX(); + touch_orig_y = event.getY(); + if( MyDebug.LOG ) + Log.d(TAG, "touch down at " + touch_orig_x + " , " + touch_orig_y); + } + } + return true; + } + // now only have to handle MotionEvent.ACTION_UP from this point onwards + + if( touch_was_multitouch ) { + return true; + } + + // ignore swipes + { + float x = event.getX(); + float y = event.getY(); + float diff_x = x - touch_orig_x; + float diff_y = y - touch_orig_y; + float dist2 = diff_x*diff_x + diff_y*diff_y; + float scale = getResources().getDisplayMetrics().density; + float tol = 31 * scale + 0.5f; // convert dps to pixels (about 0.5cm) + if( MyDebug.LOG ) { + Log.d(TAG, "touched from " + touch_orig_x + " , " + touch_orig_y + " to " + x + " , " + y); + Log.d(TAG, "dist: " + Math.sqrt(dist2)); + Log.d(TAG, "tol: " + tol); + } + if( dist2 > tol*tol ) { + if( MyDebug.LOG ) + Log.d(TAG, "touch was a swipe"); + return true; + } + } + + if( takePhotoOnDoubleTap() ) { + // need to wait until onSingleTapConfirmed() before calling handleSingleTouch(), e.g., don't + // want to do touch-to-focus if this is part of a double tap + return true; + } + + return handleSingleTouch(event, was_paused); + } + + private boolean handleSingleTouch(MotionEvent event, boolean was_paused) { + if( MyDebug.LOG ) + Log.d(TAG, "handleSingleTouch"); + + if( !this.is_video && this.isTakingPhotoOrOnTimer() ) { + // if video, okay to refocus when recording + return true; + } + + // note, we always try to force start the preview (in case is_preview_paused has become false) + // except if recording video (firstly, the preview should be running; secondly, we don't want to reset the phase!) 
+ if( !this.is_video ) { + startCameraPreview(true, null); + } + + // whether to clear focus area instead of setting new one + // we don't rely purely on isFocusWaiting(), as sometimes the focus can be really quick + if( MyDebug.LOG ) + Log.d(TAG, "focus_started_time: " + focus_started_time); + boolean clear_focus_areas = has_focus_area && focus_area_time != -1 && (System.currentTimeMillis() - focus_area_time) < ViewConfiguration.getDoubleTapTimeout(); + cancelAutoFocus(); + + boolean touch_capture = applicationInterface.getTouchCapturePref(); + + // don't set focus areas on touch if the user is touching to unpause! + // similarly if doing single touch to capture (we go straight to taking a photo) + // and not supported for camera extensions + if( camera_controller != null && !this.using_face_detection && !was_paused && !touch_capture && !camera_controller.isCameraExtension() ) { + if( clear_focus_areas ) { + // double tap to clear focus areas + // also if we were in autofocus_in_continuous_mode mode, reset back to continuous mode + if( MyDebug.LOG ) + Log.d(TAG, "remove focus areas due to touch"); + clearFocusAreas(); + continuousFocusReset(); + } + else { + this.has_focus_area = false; + this.focus_area_time = -1; + + if( MyDebug.LOG ) { + Log.d(TAG, "x, y: " + event.getX() + ", " + event.getY()); + } + float [] coords = {event.getX(), event.getY()}; + calculatePreviewToCameraMatrix(); + preview_to_camera_matrix.mapPoints(coords); + float focus_x = coords[0]; + float focus_y = coords[1]; + ArrayList areas = getAreas(focus_x, focus_y); + + if( camera_controller.setFocusAndMeteringArea(areas) ) { + if( MyDebug.LOG ) + Log.d(TAG, "set focus (and metering?) 
area"); + this.has_focus_area = true; + this.focus_area_time = System.currentTimeMillis(); + this.focus_camera_x = focus_x; + this.focus_camera_y = focus_y; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "didn't set focus area in this mode, may have set metering"); + // don't set has_focus_area in this mode + } + } + } + + // don't take a photo on touch if the user is touching to unpause! + if( !was_paused && touch_capture ) { + if( MyDebug.LOG ) + Log.d(TAG, "touch to capture"); + // Interpret as if user had clicked take photo/video button, except that we set the focus/metering areas. + // We go via ApplicationInterface instead of going direct to Preview.takePicturePressed(), so that + // the application can handle same as if user had pressed shutter button (needed so that this works + // correctly in Panorama mode). + applicationInterface.requestTakePhoto(); + return true; + } + + // don't auto focus on touch if the user is touching to unpause! + if( !was_paused ) { + // if clear_focus_areas==true, don't want to reenter autofocus_in_continuous_mode mode + tryAutoFocus(false, !clear_focus_areas); + } + return true; + } + + //@SuppressLint("ClickableViewAccessibility") @Override + + // When pinch zooming, we'd normally have the problem that zooming is too fast, because we can + // only zoom to the limited set of values in the zoom_ratios array. So when pinch zooming, we + // keep track of the fractional scaled zoom. + private boolean has_smooth_zoom = false; + private float smooth_zoom = 1.0f; + + /** Returns true if the user is currently pinch zooming, and the Preview has already handled setting + * the zoom via Preview.zoomTo(). + */ + public boolean hasSmoothZoom() { + return this.has_smooth_zoom; + } + + /** Handle multitouch zoom. 
     */
    private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
        // zoom factor (index into zoom_ratios) at the moment the pinch gesture started;
        // used by onScaleEnd() to decide whether to snap to 1x
        private boolean has_multitouch_start_zoom_factor = false;
        private int multitouch_start_zoom_factor = 0;

        @Override
        public boolean onScale(@NonNull ScaleGestureDetector detector) {
            if( Preview.this.camera_controller != null && Preview.this.has_zoom ) {
                float scale_factor = detector.getScaleFactor();
                if( MyDebug.LOG )
                    Log.d(TAG, "onScale: " + scale_factor);
                // make pinch zoom more sensitive:
                if( touch_was_multitouch )
                    scale_factor = 1.0f + 2.0f*(scale_factor - 1.0f);
                Preview.this.scaleZoom(scale_factor);
            }
            return true;
        }

        @Override
        public boolean onScaleBegin(@NonNull ScaleGestureDetector detector) {
            if( has_zoom && camera_controller != null ) {
                // remember the starting zoom, and initialise the fractional smooth zoom tracking
                has_multitouch_start_zoom_factor = true;
                multitouch_start_zoom_factor = camera_controller.getZoom();
                has_smooth_zoom = true;
                smooth_zoom = zoom_ratios.get(multitouch_start_zoom_factor)/100.0f;
            }
            else {
                has_multitouch_start_zoom_factor = false;
                multitouch_start_zoom_factor = 0;
                has_smooth_zoom = false;
                smooth_zoom = 1.0f;
            }
            return true;
        }

        @Override
        public void onScaleEnd(@NonNull ScaleGestureDetector detector) {
            if( MyDebug.LOG )
                Log.d(TAG, "onScaleEnd");
            if( has_multitouch_start_zoom_factor && has_zoom && camera_controller != null && zoom_ratios.get(0) < 100 ) {
                // when the minimum zoom is less than 1x, we should support snapping to 1x, so it's easy for the user to
                // switch back to 1x zoom when using pinch zoom
                int start_zoom = zoom_ratios.get(multitouch_start_zoom_factor);
                final int end_zoom_factor = camera_controller.getZoom();
                int end_zoom = zoom_ratios.get(end_zoom_factor);
                if( MyDebug.LOG ) {
                    Log.d(TAG, "start_zoom: " + start_zoom);
                    Log.d(TAG, "end_zoom : " + end_zoom);
                }
                // zoom ratios are stored as percentages, so 100 == 1x; only snap when the gesture
                // ended within [0.9x, 1.1x] and did not start or end exactly on 1x
                if( end_zoom >= 90 && end_zoom <= 110 && start_zoom != 100 && end_zoom != 100 ) {
                    int start_diff = start_zoom - 100;
                    int end_diff = end_zoom - 100;
                    if( Math.signum(start_diff) == Math.signum(end_diff) && Math.abs(end_diff) >= Math.abs(start_diff) ) {
                        // we only want to snap when moving towards 1x, or have crossed over 1x
                    }
                    else {
                        if( MyDebug.LOG )
                            Log.d(TAG, "snapped pinch zoom to 1x zoom");
                        int snapped_zoom = find1xZoom();
                        zoomTo(snapped_zoom, false);
                    }
                }
            }
            // reset gesture state so smooth zoom tracking stops
            has_multitouch_start_zoom_factor = false;
            multitouch_start_zoom_factor = 0;
            has_smooth_zoom = false;
            smooth_zoom = 1.0f;
        }
    }

    /** Returns whether we will take a photo on a double tap.
     */
    private boolean takePhotoOnDoubleTap() {
        return applicationInterface.getDoubleTapCapturePref();
    }

    /** Called on a confirmed double tap; takes a photo if double-tap-to-capture is enabled.
     *  Returns true if the event was consumed (which also disables the double-tap-drag zoom gesture).
     */
    @SuppressWarnings("SameReturnValue")
    public boolean onDoubleTap() {
        if( MyDebug.LOG )
            Log.d(TAG, "onDoubleTap()");
        if( takePhotoOnDoubleTap() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "double-tap to capture");
            // Interpret as if user had clicked take photo/video button.
            // We go via ApplicationInterface instead of going direct to Preview.takePicturePressed(), so that
            // the application can handle same as if user had pressed shutter button (needed so that this works
            // correctly in Panorama mode).
            applicationInterface.requestTakePhoto();
            return true;
        }
        if( applicationInterface.getTouchCapturePref() ) {
            // return true to disable double-tap-drag zoom gesture, as we don't want this when using single tap to capture either
            return true;
        }
        return false; // important, so that double-tap-drag zoom gesture works
    }

    private class DoubleTapListener extends GestureDetector.SimpleOnGestureListener {
        @Override
        public boolean onSingleTapConfirmed(@NonNull MotionEvent e) {
            if( MyDebug.LOG )
                Log.d(TAG, "onSingleTapConfirmed");
            // If we're taking a photo on double tap, then for single taps we need to wait until these are confirmed
            // otherwise we handle via Preview.touchEvent().
            // Arguably we could handle everything via onSingleTapConfirmed(), but want to avoid
            // unexpected changes of behaviour - plus it would mean a slight delay for touch to
            // focus (since onSingleTapConfirmed obviously has to wait to be sure this isn't a
            // double tap).
            if( takePhotoOnDoubleTap() ) {
                // now safe to handle the single touch
                //boolean was_paused = !is_preview_started;
                boolean was_paused = preview_started_state != PREVIEW_STARTED;
                if( MyDebug.LOG )
                    Log.d(TAG, "was_paused: " + was_paused);
                return handleSingleTouch(e, was_paused);
            }
            return false;
        }

        @Override
        public boolean onDoubleTap(@NonNull MotionEvent e) {
            if( MyDebug.LOG )
                Log.d(TAG, "onDoubleTap");
            return Preview.this.onDoubleTap();
        }
    }

    /** Clears any focus and metering areas previously set on the camera, and resets the
     *  associated focus state.
     */
    public void clearFocusAreas() {
        if( MyDebug.LOG )
            Log.d(TAG, "clearFocusAreas()");
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
            return;
        }
        // don't cancelAutoFocus() here, otherwise we get sluggish zoom behaviour on Camera2 API
        if( !camera_controller.isCameraExtension() ) {
            // if using camera extensions, we could never have set focus and metering in the first place
            camera_controller.clearFocusAndMetering();
        }
        has_focus_area = false;
        focus_area_time = -1;
        focus_success = FOCUS_DONE;
        successfully_focused = false;
    }

    /** Computes measure specs for the preview view so that it matches the camera aspect ratio.
     *  @param spec       output array; spec[0]/spec[1] receive the width/height MeasureSpecs
     *  @param widthSpec  the incoming width MeasureSpec
     *  @param heightSpec the incoming height MeasureSpec
     */
    public void getMeasureSpec(int [] spec, int widthSpec, int heightSpec) {
        if( MyDebug.LOG )
            Log.d(TAG, "getMeasureSpec");
        if( !this.hasAspectRatio() ) {
            // no aspect ratio known yet - pass the specs through unchanged
            if( MyDebug.LOG )
                Log.d(TAG, "doesn't have aspect ratio");
            spec[0] = widthSpec;
            spec[1] = heightSpec;
            return;
        }
        double aspect_ratio = this.getAspectRatio();

        int previewWidth = MeasureSpec.getSize(widthSpec);
        int previewHeight = MeasureSpec.getSize(heightSpec);
        if( MyDebug.LOG ) {
            Log.d(TAG, "previewWidth: " + previewWidth);
            Log.d(TAG, "previewHeight: " + previewHeight);
        }

        // Get the padding of the border background.
        int hPadding = cameraSurface.getView().getPaddingLeft() + cameraSurface.getView().getPaddingRight();
        int vPadding = cameraSurface.getView().getPaddingTop() + cameraSurface.getView().getPaddingBottom();

        // Resize the preview frame with correct aspect ratio.
        previewWidth -= hPadding;
        previewHeight -= vPadding;

        int result;
        if( camera_controller != null ) {
            // We shouldn't assume that previewWidth > previewHeight means the device is in landscape
            // orientation - this isn't necessarily true for split-screen or multi-window mode.
            // Important to use prefer_later==true, as in split-screen or multi-window mode, we don't always get a call
            // to MainActivity.onConfigurationChanged() when device orientation changes, so have no way to know to
            // reset the cached rotation. But we want the latest rotation value here anyway.
            int degrees = getDisplayRotationDegrees(true);
            result = (camera_controller.getCameraOrientation() - degrees + 360) % 360;
            if( MyDebug.LOG ) {
                Log.d(TAG, "orientation of display relative to natural orientation: " + degrees);
                Log.d(TAG, "orientation of display relative to camera orientation: " + result);
            }
        }
        else {
            // fall back to guessing via the window dimensions
            result = (previewWidth > previewHeight) ? 0 : 90;
        }
        if( MyDebug.LOG )
            Log.d(TAG, "aspect_ratio: " + aspect_ratio);
        if( result % 180 != 0 ) {
            // Usually this means the device is in portrait mode, and hence e.g. an aspect ratio of
            // 4:3 should give an on-screen preview of 3:4 (since the device is rotated 90 degrees
            // compared to the natural camera orientation).
            // It's important to use this code instead of checking if the display is in portrait
            // (or that previewWidth < previewHeight), for split-screen or multi-window displays.
            // E.g., if the device orientation is in portrait, it might still be that Open Camera
            // is running in landscape with previewWidth > previewHeight, because of running in
            // split-screen mode, or more generally in multi-window mode where the window is resized
            // to landscape orientation.
            // See https://developer.android.com/training/camera2/camera-preview#relative_rotation .
            aspect_ratio = 1.0f / aspect_ratio;
            if( MyDebug.LOG )
                Log.d(TAG, "aspect_ratio rotated to: " + aspect_ratio);
        }

        // shrink whichever dimension is too large to fit the aspect ratio
        if( previewWidth > previewHeight * aspect_ratio ) {
            previewWidth = (int) ((double) previewHeight * aspect_ratio);
        }
        else {
            previewHeight = (int) ((double) previewWidth / aspect_ratio);
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "previewWidth is now: " + previewWidth);
            Log.d(TAG, "previewHeight is now: " + previewHeight);
        }

        // Add the padding of the border.
        previewWidth += hPadding;
        previewHeight += vPadding;

        spec[0] = MeasureSpec.makeMeasureSpec(previewWidth, MeasureSpec.EXACTLY);
        spec[1] = MeasureSpec.makeMeasureSpec(previewHeight, MeasureSpec.EXACTLY);
        if( MyDebug.LOG )
            Log.d(TAG, "return: " + spec[0] + " x " + spec[1]);
    }

    /** Common handler for the preview surface becoming available (SurfaceView or TextureView). */
    private void mySurfaceCreated() {
        if( MyDebug.LOG )
            Log.d(TAG, "mySurfaceCreated");
        this.has_surface = true;
        this.openCamera();
    }

    /** Common handler for the preview surface being destroyed; closes the camera. */
    private void mySurfaceDestroyed() {
        if( MyDebug.LOG )
            Log.d(TAG, "mySurfaceDestroyed");
        this.has_surface = false;
        this.closeCamera(false, null);
    }

    /** Common handler for the preview surface changing size. */
    private void mySurfaceChanged() {
        // surface size is now changed to match the aspect ratio of camera preview - so we shouldn't change the preview to match the surface size, so no need to restart preview here
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
            //noinspection UnnecessaryReturnStatement
            return;
        }
    }

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        if( MyDebug.LOG )
            Log.d(TAG, "surfaceCreated()");
        // The Surface has
        // been created, acquire the camera and tell it where
        // to draw.
        mySurfaceCreated();
        cameraSurface.getView().setWillNotDraw(false); // see http://stackoverflow.com/questions/2687015/extended-surfaceviews-ondraw-method-never-called
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
        if( MyDebug.LOG )
            Log.d(TAG, "surfaceDestroyed()");
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        mySurfaceDestroyed();
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int w, int h) {
        if( MyDebug.LOG )
            Log.d(TAG, "surfaceChanged " + w + ", " + h);
        if( holder.getSurface() == null ) {
            // preview surface does not exist
            return;
        }
        mySurfaceChanged();
    }

    @Override
    public void onSurfaceTextureAvailable(@NonNull SurfaceTexture arg0, int width, int height) {
        if( MyDebug.LOG )
            Log.d(TAG, "onSurfaceTextureAvailable()");
        // record the texture view dimensions before opening the camera
        this.set_textureview_size = true;
        this.textureview_w = width;
        this.textureview_h = height;
        mySurfaceCreated();
    }

    @Override
    public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture arg0) {
        if( MyDebug.LOG )
            Log.d(TAG, "onSurfaceTextureDestroyed()");
        this.set_textureview_size = false;
        this.textureview_w = 0;
        this.textureview_h = 0;
        mySurfaceDestroyed();
        return true;
    }

    @Override
    public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture texture, int width, int height) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onSurfaceTextureSizeChanged " + width + ", " + height);
            //Log.d(TAG, "surface texture is now: " + ((TextureView)cameraSurface).getSurfaceTexture());
        }

        if( camera_controller != null ) {
            camera_controller.test_texture_view_buffer_w = width;
            camera_controller.test_texture_view_buffer_h = height;

            if( set_preview_size && (width != preview_w || height != preview_h) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "updatePreviewTexture");
                // Needed to fix problem if Open Camera is already running, and the aspect ratio changes (e.g.,
                // change of resolution, or switching between photo and video mode). When starting up in a "default",
                // aspect ratio, the camera is opened via onSurfaceTextureAvailable(), and although we then call setAspectRatio(),
                // there are no calls to onSurfaceTextureSizeChanged(). But when already running, or if
                // an aspect ratio change for the view is required, changing the aspect ratio causes a call to
                // onSurfaceTextureSizeChanged(), which results in the texture view's surface texture's buffer size being reset!
                // (This can be seen in the source code of TextureView: onSizeChanged() calls setDefaultBufferSize() before
                // calling onSurfaceTextureSizeChanged()!) So we need to call setDefaultBufferSize() again to reset
                // to the desired preview buffer size that we already chose!
                camera_controller.updatePreviewTexture();
            }
        }

        this.set_textureview_size = true;
        this.textureview_w = width;
        this.textureview_h = height;
        mySurfaceChanged();
        configureTransform();
        recreatePreviewBitmap();
    }

    @Override
    public void onSurfaceTextureUpdated(@NonNull SurfaceTexture arg0) {
        refreshPreviewBitmap();
    }

    /** Applies the transform matrix to the TextureView so the preview displays correctly
     *  for the current display rotation and preview buffer size.
     */
    private void configureTransform() {
        if( MyDebug.LOG )
            Log.d(TAG, "configureTransform");
        if( camera_controller == null || !this.set_preview_size || !this.set_textureview_size ) {
            if( MyDebug.LOG )
                Log.d(TAG, "nothing to do");
            return;
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "textureview size: " + textureview_w + ", " + textureview_h);
            Log.d(TAG, "preview size: " + preview_w + ", " + preview_h);
        }
        // Important to use prefer_later==true, as in split-screen or multi-window mode, we don't always get a call
        // to MainActivity.onConfigurationChanged() when device orientation changes, so have no way to know to
        // reset the cached rotation.
        // But we want the latest rotation value here anyway.
        int rotation = applicationInterface.getDisplayRotation(true);
        if( MyDebug.LOG )
            Log.d(TAG, "configureTransform rotation: " + rotation);
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, this.textureview_w, this.textureview_h);
        // note, the buffer rect uses swapped dimensions (preview_h x preview_w)
        RectF bufferRect = new RectF(0, 0, this.preview_h, this.preview_w);
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if( rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270 ) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) textureview_h / preview_h,
                    (float) textureview_w / preview_w);
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        }
        else if( rotation == Surface.ROTATION_180 ) {
            matrix.postRotate(180, centerX, centerY);
        }
        cameraSurface.setTransform(matrix);
    }

    /** Stops video recording (if recording), handling cleanup of timers and recovery from
     *  MediaRecorder.stop() failures (which indicate a corrupt file to be deleted).
     *  @param from_restart true if this stop is part of a video restart (max filesize/duration),
     *                      in which case remaining_restart_video is preserved
     */
    public void stopVideo(boolean from_restart) {
        if( MyDebug.LOG )
            Log.d(TAG, "stopVideo()");
        if( video_recorder == null ) {
            // no need to do anything if not recording
            // (important to exit, otherwise we'll momentarily switch the take photo icon to video mode in MyApplicationInterface.stoppingVideo() when opening the settings in landscape mode
            if( MyDebug.LOG )
                Log.d(TAG, "video wasn't recording anyway");
            return;
        }
        applicationInterface.stoppingVideo();
        if( flashVideoTimerTask != null ) {
            flashVideoTimerTask.cancel();
            flashVideoTimerTask = null;
        }
        if( batteryCheckVideoTimerTask != null ) {
            batteryCheckVideoTimerTask.cancel();
            batteryCheckVideoTimerTask = null;
        }
        if( !from_restart ) {
            remaining_restart_video = 0;
        }
        if( video_recorder != null ) { // check again, just to be safe
            if( MyDebug.LOG )
                Log.d(TAG, "stop video recording");
            //this.phase = PHASE_NORMAL;
            video_recorder.setOnErrorListener(null);
            video_recorder.setOnInfoListener(null);

            try {
                if( using_android_l && video_high_speed ) {
                    // Needed to fix problems with 0.125x and 0.25x slow motion on Pixel 6 Pro - otherwise although
                    // the video is recorded, we are unable to restart the preview after stopping video.
                    // Beware of enabling this for non-high-speed - would need careful testing to ensure this doesn't cause unstable
                    // behaviour.
                    if( MyDebug.LOG )
                        Log.d(TAG, "about to call stopRepeating()");
                    camera_controller.stopRepeating();
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "about to call video_recorder.stop()");
                if( test_runtime_on_video_stop )
                    throw new RuntimeException();
                video_recorder.stop();
                if( MyDebug.LOG )
                    Log.d(TAG, "done video_recorder.stop()");
            }
            catch(RuntimeException e) {
                // stop() can throw a RuntimeException if stop is called too soon after start - this indicates the video file is corrupt, and should be deleted
                if( MyDebug.LOG )
                    Log.d(TAG, "runtime exception when stopping video");
                videoFileInfo.close();
                applicationInterface.deleteUnusedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename);

                videoFileInfo = new VideoFileInfo();
                if( nextVideoFileInfo != null )
                    nextVideoFileInfo.close();
                nextVideoFileInfo = null;
                // if video recording is stopped quickly after starting, it's normal that we might not have saved a valid file, so no need to display a message
                if( !video_start_time_set || System.currentTimeMillis() - video_start_time > 2000 ) {
                    VideoProfile profile = getVideoProfile();
                    applicationInterface.onVideoRecordStopError(profile);
                }
            }
            videoRecordingStopped();
        }
    }

    /** Releases the MediaRecorder and notifies the application that recording has stopped. */
    private void videoRecordingStopped() {
        if( MyDebug.LOG )
            Log.d(TAG, "reset video_recorder");
        video_recorder.reset();
        if( MyDebug.LOG )
            Log.d(TAG, "release video_recorder");
        video_recorder.release();
        video_recorder = null;
        video_recorder_is_paused = false;
        applicationInterface.cameraInOperation(false, true);
        reconnectCamera(false); // n.b., if something went wrong with video, then we reopen the camera - which may fail (or simply not reopen, e.g., if app is now paused)
        videoFileInfo.close();
        applicationInterface.stoppedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename);
        if( nextVideoFileInfo != null ) {
            // if nextVideoFileInfo is not-null, it means we received MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING but not
            // MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, so it is the application responsibility to create the zero-size
            // video file that will have been created
            if( MyDebug.LOG )
                Log.d(TAG, "delete unused next video file");
            nextVideoFileInfo.close();
            applicationInterface.deleteUnusedVideo(nextVideoFileInfo.video_method, nextVideoFileInfo.video_uri, nextVideoFileInfo.video_filename);
        }
        videoFileInfo = new VideoFileInfo();
        nextVideoFileInfo = null;
    }

    /** Convenience accessor for the application Context, via the ApplicationInterface. */
    private Context getContext() {
        return applicationInterface.getContext();
    }

    /** Restart video - either due to hitting maximum filesize (for pre-Android 8 when not able to restart seamlessly), or maximum duration.
     * @param due_to_max_filesize true if restarting because the maximum filesize was reached;
     *                            false if restarting due to the repeat ("max duration") option
     */
    private void restartVideo(boolean due_to_max_filesize) {
        if( MyDebug.LOG )
            Log.d(TAG, "restartVideo()");
        if( video_recorder != null ) {
            if( due_to_max_filesize ) {
                // keep accumulating recorded time across filesize-based restarts, so a max
                // duration preference still applies to the total
                long last_time = System.currentTimeMillis() - video_start_time;
                video_accumulated_time += last_time;
                if( MyDebug.LOG ) {
                    Log.d(TAG, "last_time: " + last_time);
                    Log.d(TAG, "video_accumulated_time is now: " + video_accumulated_time);
                }
            }
            else {
                video_accumulated_time = 0;
            }
            stopVideo(true); // this will also stop the timertask

            // handle restart
            if( MyDebug.LOG ) {
                if( due_to_max_filesize )
                    Log.d(TAG, "restarting due to maximum filesize");
                else
                    Log.d(TAG, "remaining_restart_video is: " + remaining_restart_video);
            }
            if( due_to_max_filesize ) {
                long video_max_duration = applicationInterface.getVideoMaxDurationPref();
                if( video_max_duration > 0 ) {
                    video_max_duration -= video_accumulated_time;
                    if( video_max_duration < min_safe_restart_video_time ) {
                        // if there's less than 1s to go, ignore it - don't want to risk the resultant video being corrupt or throwing error, due to stopping too soon
                        // so instead just pretend we hit the max duration instead
                        if( MyDebug.LOG )
                            Log.d(TAG, "hit max filesize, but max time duration is also set, with remaining time less than 1s: " + video_max_duration);
                        due_to_max_filesize = false;
                    }
                }
            }
            if( due_to_max_filesize || remaining_restart_video > 0 ) {
                if( is_video ) {
                    String toast = null;
                    if( !due_to_max_filesize )
                        toast = remaining_restart_video + " " + getContext().getResources().getString(R.string.repeats_to_go);
                    takePicture(due_to_max_filesize, false, false);
                    if( !due_to_max_filesize ) {
                        showToast(null, toast, true); // show the toast afterwards, as we're hogging the UI thread here, and media recorder takes time to start up
                        // must decrement after calling takePicture(), so that takePicture() doesn't reset the value of remaining_restart_video
                        remaining_restart_video--;
                    }
                }
else { + remaining_restart_video = 0; + } + } + } + } + + private void reconnectCamera(boolean quiet) { + if( MyDebug.LOG ) + Log.d(TAG, "reconnectCamera()"); + if( camera_controller != null ) { // just to be safe + try { + camera_controller.reconnect(); + this.setPreviewPaused(false); + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "failed to reconnect to camera", e); + applicationInterface.onFailedReconnectError(); + closeCamera(false, null); + } + try { + tryAutoFocus(false, false); + } + catch(RuntimeException e) { + MyDebug.logStackTrace(TAG, "tryAutoFocus() threw exception", e); + // this happens on Nexus 7 if trying to record video at bitrate 50Mbits or higher - it's fair enough that it fails, but we need to recover without a crash! + // not safe to call closeCamera, as any call to getParameters may cause a RuntimeException + // update: can no longer reproduce failures on Nexus 7?! + //this.is_preview_started = false; + this.preview_started_state = PREVIEW_NOT_STARTED; + if( !quiet ) { + VideoProfile profile = getVideoProfile(); + applicationInterface.onVideoRecordStopError(profile); + } + camera_controller.release(); + camera_controller = null; + camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED; + openCamera(); + } + } + } + + private interface CloseCameraCallback { + void onClosed(); + } + + private class CloseCameraTask extends AsyncTask { + private static final String TAG = "CloseCameraTask"; + + boolean reopen; // if set to true, reopen the camera once closed + + final CameraController camera_controller_local; + final CloseCameraCallback closeCameraCallback; + + CloseCameraTask(CameraController camera_controller_local, CloseCameraCallback closeCameraCallback) { + this.camera_controller_local = camera_controller_local; + this.closeCameraCallback = closeCameraCallback; + } + + @Override + protected Void doInBackground(Void... 
voids) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "doInBackground, async task: " + this); + debug_time = System.currentTimeMillis(); + } + camera_controller_local.stopPreview(); + if( MyDebug.LOG ) { + Log.d(TAG, "time to stop preview: " + (System.currentTimeMillis() - debug_time)); + } + camera_controller_local.release(); + if( MyDebug.LOG ) { + Log.d(TAG, "time to release camera controller: " + (System.currentTimeMillis() - debug_time)); + } + return null; + } + + /** The system calls this to perform work in the UI thread and delivers + * the result from doInBackground() */ + protected void onPostExecute(Void result) { + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute, async task: " + this); + camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED; + close_camera_task = null; // just to be safe + if( closeCameraCallback != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute, calling closeCameraCallback.onClosed"); + closeCameraCallback.onClosed(); + } + if( reopen ) { + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute, reopen camera"); + openCamera(); + } + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute done, async task: " + this); + } + } + + /** Closes the camera. + * @param async Whether to close the camera on a background thread. + * @param closeCameraCallback If async is true, closeCameraCallback.onClosed() will be called, + * from the UI thread, once the camera is closed. If async is false, + * this field is ignored. + */ + private void closeCamera(boolean async, final CloseCameraCallback closeCameraCallback) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "closeCamera()"); + Log.d(TAG, "async: " + async); + debug_time = System.currentTimeMillis(); + } + removePendingContinuousFocusReset(); + ring_buffer.flush(); // so we flush e.g. 
when switching cameras + has_focus_area = false; + focus_area_time = -1; + focus_success = FOCUS_DONE; + focus_started_time = -1; + synchronized( this ) { + // synchronise for consistency (keep FindBugs happy) + take_photo_after_autofocus = false; + // no need to call camera_controller.setCaptureFollowAutofocusHint() as we're closing the camera + } + set_flash_value_after_autofocus = ""; + successfully_focused = false; + preview_targetRatio = 0.0; + // n.b., don't reset has_set_location, as we can remember the location when switching camera + if( continuous_focus_move_is_started ) { + continuous_focus_move_is_started = false; + applicationInterface.onContinuousFocusMove(false); + } + if( zoom_transition_runnable != null ) { + // cancel an existing runnable + zoom_transition_handler.removeCallbacks(zoom_transition_runnable); + zoom_transition_runnable = null; + } + applicationInterface.cameraClosed(); + cancelTimer(); + cancelRepeat(); + if( camera_controller != null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "close camera_controller"); + } + if( video_recorder != null ) { + stopVideo(false); + } + // make sure we're into continuous video mode for closing + // workaround for bug on Samsung Galaxy S5 with UHD, where if the user switches to another (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the video is corrupted + // so to be safe, we always reset to continuous video mode + this.updateFocusForVideo(); + // need to check for camera being non-null again - if an error occurred stopping the video, we will have closed the camera, and may not be able to reopen + if( camera_controller != null ) { + //camera.setPreviewCallback(null); + if( MyDebug.LOG ) { + Log.d(TAG, "closeCamera: about to pause preview: " + (System.currentTimeMillis() - debug_time)); + } + pausePreview(false); + // we set camera_controller to null before starting background thread, so that other callers won't try + // to use it + final 
CameraController camera_controller_local = camera_controller; + camera_controller = null; + preview_started_state = PREVIEW_NOT_STARTED; + if( async ) { + if( MyDebug.LOG ) + Log.d(TAG, "close camera on background async"); + camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSING; + close_camera_task = new CloseCameraTask(camera_controller_local, closeCameraCallback); + close_camera_task.execute(); + } + else { + if( MyDebug.LOG ) { + Log.d(TAG, "closeCamera: about to release camera controller: " + (System.currentTimeMillis() - debug_time)); + } + camera_controller_local.stopPreview(); + if( MyDebug.LOG ) { + Log.d(TAG, "time to stop preview: " + (System.currentTimeMillis() - debug_time)); + } + camera_controller_local.release(); + camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED; + } + } + } + else { + if( MyDebug.LOG ) { + Log.d(TAG, "camera_controller isn't open"); + } + if( closeCameraCallback != null ) { + // still need to call the callback though! (otherwise if camera fails to open, switch camera button won't work!) 
+ if( MyDebug.LOG ) + Log.d(TAG, "calling closeCameraCallback.onClosed"); + closeCameraCallback.onClosed(); + } + } + + if( orientationEventListener != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "free orientationEventListener"); + orientationEventListener.disable(); + orientationEventListener = null; + } + if( MyDebug.LOG ) { + Log.d(TAG, "closeCamera: total time: " + (System.currentTimeMillis() - debug_time)); + } + } + + public void cancelTimer() { + if( MyDebug.LOG ) + Log.d(TAG, "cancelTimer()"); + if( this.isOnTimer() ) { + takePictureTimerTask.cancel(); + takePictureTimerTask = null; + if( beepTimerTask != null ) { + beepTimerTask.cancel(); + beepTimerTask = null; + } + this.phase = PHASE_NORMAL; + if( MyDebug.LOG ) + Log.d(TAG, "cancelled camera timer"); + } + } + + public void cancelRepeat() { + if( MyDebug.LOG ) + Log.d(TAG, "cancelRepeat()"); + remaining_repeat_photos = 0; + } + + /** + * @param stop_preview Whether to call camera_controller.stopPreview(). Normally this should be + * true, but can be set to false if the callers is going to handle calling + * that (e.g., on a background thread). 
+ */ + public void pausePreview(boolean stop_preview) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "pausePreview()"); + debug_time = System.currentTimeMillis(); + } + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return; + } + // make sure we're into continuous video mode + // workaround for bug on Samsung Galaxy S5 with UHD, where if the user switches to another (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the video is corrupted + // so to be safe, we always reset to continuous video mode + // although I've now fixed this at the level where we close the settings, I've put this guard here, just in case the problem occurs from elsewhere + this.updateFocusForVideo(); + this.setPreviewPaused(false); + if( stop_preview ) { + if( MyDebug.LOG ) { + Log.d(TAG, "pausePreview: about to stop preview: " + (System.currentTimeMillis() - debug_time)); + } + camera_controller.stopPreview(); + if( MyDebug.LOG ) { + Log.d(TAG, "pausePreview: time to stop preview: " + (System.currentTimeMillis() - debug_time)); + } + } + this.phase = PHASE_NORMAL; + //this.is_preview_started = false; + this.preview_started_state = PREVIEW_NOT_STARTED; + if( MyDebug.LOG ) { + Log.d(TAG, "pausePreview: about to call cameraInOperation: " + (System.currentTimeMillis() - debug_time)); + } + /*applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true);*/ + if( MyDebug.LOG ) { + Log.d(TAG, "pausePreview: total time: " + (System.currentTimeMillis() - debug_time)); + } + } + + //private int debug_count_opencamera = 0; // see usage below + + /** Try to open the camera. Should only be called if camera_controller==null. + * The camera will be opened on a background thread, so won't be available upon + * exit of this function. + * If camera_open_state is already CAMERAOPENSTATE_OPENING, this method does nothing. 
+ */ + private void openCamera() { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera()"); + debug_time = System.currentTimeMillis(); + } + if( applicationInterface.isPreviewInBackground() ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't open camera as preview in background"); + // note, even if the application never tries to reopen the camera in the background, we still need this check to avoid the camera + // opening from mySurfaceCreated() + // for example, this is needed when the application is recreated when settings are open (a new Preview and surface is created, but + // we don't want the camera to be opened) - to test this, go to settings then turn screen off and on (and unlock) + return; + } + else if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENING ) { + if( MyDebug.LOG ) + Log.d(TAG, "already opening camera in background thread"); + return; + } + else if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_CLOSING ) { + Log.d(TAG, "tried to open camera while camera is still closing in background thread"); + return; + } + // need to init everything now, in case we don't open the camera (but these may already be initialised from an earlier call - e.g., if we are now switching to another camera) + // n.b., don't reset has_set_location, as we can remember the location when switching camera + //is_preview_started = false; // theoretically should be false anyway, but I had one RuntimeException from surfaceCreated()->openCamera()->setupCamera()->setPreviewSize() because is_preview_started was true, even though the preview couldn't have been started + this.preview_started_state = PREVIEW_NOT_STARTED; // theoretically should be PREVIEW_NOT_STARTED anyway, but I had one RuntimeException from surfaceCreated()->openCamera()->setupCamera()->setPreviewSize() because is_preview_started was true, even though the preview couldn't have been started + + set_preview_size = false; + preview_w = 0; + preview_h = 0; + has_focus_area = false; + 
focus_area_time = -1; + focus_success = FOCUS_DONE; + focus_started_time = -1; + synchronized( this ) { + // synchronise for consistency (keep FindBugs happy) + take_photo_after_autofocus = false; + // no need to call camera_controller.setCaptureFollowAutofocusHint() as we're opening the camera + } + set_flash_value_after_autofocus = ""; + successfully_focused = false; + preview_targetRatio = 0.0; + scene_modes = null; + camera_controller_supports_zoom = false; + has_zoom = false; + max_zoom_factor = 0; + camera_controller_max_zoom_factor = 0; + zoom_ratios = null; + camera_controller_zoom_ratios = null; + minimum_focus_distance = 0.0f; + faces_detected = null; + supports_face_detection = false; + using_face_detection = false; + supports_optical_stabilization = false; + supports_video_stabilization = false; + supports_photo_video_recording = false; + can_disable_shutter_sound = false; + tonemap_max_curve_points = 0; + supports_tonemap_curve = false; + color_effects = null; + white_balances = null; + antibanding = null; + edge_modes = null; + noise_reduction_modes = null; + isos = null; + supports_white_balance_temperature = false; + min_temperature = 0; + max_temperature = 0; + supports_iso_range = false; + min_iso = 0; + max_iso = 0; + supports_exposure_time = false; + min_exposure_time = 0L; + max_exposure_time = 0L; + exposures = null; + min_exposure = 0; + max_exposure = 0; + exposure_step = 0.0f; + supports_expo_bracketing = false; + max_expo_bracketing_n_images = 0; + supports_focus_bracketing = false; + supports_burst = false; + supports_jpeg_r = false; + supports_raw = false; + view_angle_x = 55.0f; // set a sensible default + view_angle_y = 43.0f; // set a sensible default + photo_sizes = null; + current_size_index = -1; + photo_size_constraints = null; + has_capture_rate_factor = false; + capture_rate_factor = 1.0f; + video_high_speed = false; + supports_video = true; + supports_video_high_speed = false; + video_quality_handler.resetCurrentQuality(); + 
supported_flash_values = null; + current_flash_index = -1; + supported_focus_values = null; + current_focus_index = -1; + max_num_focus_areas = 0; + applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true); + if( !this.has_surface ) { + if( MyDebug.LOG ) { + Log.d(TAG, "preview surface not yet available"); + } + return; + } + if( this.is_paused ) { + if( MyDebug.LOG ) { + Log.d(TAG, "don't open camera as paused"); + } + return; + } + + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) { + // we restrict the checks to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener() + if( MyDebug.LOG ) + Log.d(TAG, "check for permissions"); + if( ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera permission not available"); + has_permissions = false; + applicationInterface.requestCameraPermission(); + // return for now - the application should try to reopen the camera if permission is granted + return; + } + if( applicationInterface.needsStoragePermission() && ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "storage permission not available"); + has_permissions = false; + applicationInterface.requestStoragePermission(); + // return for now - the application should try to reopen the camera if permission is granted + return; + } + if( MyDebug.LOG ) + Log.d(TAG, "permissions available"); + } + // set in case this was previously set to false + has_permissions = true; + + /*{ + // debug + if( debug_count_opencamera++ == 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "debug: don't open camera yet"); + return; + } + }*/ + + camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENING; + int cameraId = applicationInterface.getCameraIdPref(); + String cameraIdSPhysical = 
applicationInterface.getCameraIdSPhysicalPref(); + if( cameraId < 0 || cameraId >= camera_controller_manager.getNumberOfCameras() ) { + if( MyDebug.LOG ) + Log.d(TAG, "invalid cameraId: " + cameraId); + cameraId = 0; + cameraIdSPhysical = null; + applicationInterface.setCameraIdPref(cameraId, cameraIdSPhysical); + } + + if( !using_android_l && cameraIdSPhysical != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "physical camera not supported for old camera API: " + cameraIdSPhysical); + cameraIdSPhysical = null; + applicationInterface.setCameraIdPref(cameraId, cameraIdSPhysical); + } + + //final boolean use_background_thread = false; + //final boolean use_background_thread = true; + final boolean use_background_thread = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M; + /* Opening camera on background thread is important so that we don't block the UI thread: + * - For old Camera API, this is recommended behaviour by Google for Camera.open(). + - For Camera2, the manager.openCamera() call is asynchronous, but CameraController2 + waits for it to open, so it's still important that we run that in a background thread. + * In theory this works for all Android versions, but this caused problems of Galaxy Nexus + * with tests testTakePhotoAutoLevel(), testTakePhotoAutoLevelAngles() (various camera + * errors/exceptions, failing to taking photos). Since this is a significant change, this is + * for now limited to modern devices. + * Initially this was Android 7, but for 1.44, I enabled for Android 6. + */ + if( use_background_thread ) { + final int cameraId_f = cameraId; + final String cameraIdSPhysical_f = cameraIdSPhysical; + + open_camera_task = new AsyncTask() { + private static final String TAG = "Preview/openCamera"; + + @Override + protected CameraController doInBackground(Void... 
voids) { + if( MyDebug.LOG ) + Log.d(TAG, "doInBackground, async task: " + this); + return openCameraCore(cameraId_f, cameraIdSPhysical_f); + } + + /** The system calls this to perform work in the UI thread and delivers + * the result from doInBackground() */ + protected void onPostExecute(CameraController camera_controller) { + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute, async task: " + this); + // see note in openCameraCore() for why we set camera_controller here + Preview.this.camera_controller = camera_controller; + cameraOpened(); + // set camera_open_state after cameraOpened, just in case a non-UI thread is listening for this - also + // important for test code waitUntilCameraOpened(), as test code runs on a different thread + camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED; + open_camera_task = null; // just to be safe + if( MyDebug.LOG ) + Log.d(TAG, "onPostExecute done, async task: " + this); + } + + protected void onCancelled(CameraController camera_controller) { + if( MyDebug.LOG ) { + Log.d(TAG, "onCancelled, async task: " + this); + Log.d(TAG, "camera_controller: " + camera_controller); + } + // this typically means the application has paused whilst we were opening camera in background - so should just + // dispose of the camera controller + if( camera_controller != null ) { + // this is the local camera_controller, not Preview.this.camera_controller! 
+ camera_controller.release(); + } + camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED; // n.b., still set OPENED state - important for test thread to know that this callback is complete + open_camera_task = null; // just to be safe + if( MyDebug.LOG ) + Log.d(TAG, "onCancelled done, async task: " + this); + } + }.execute(); + } + else { + this.camera_controller = openCameraCore(cameraId, cameraIdSPhysical); + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: time after opening camera: " + (System.currentTimeMillis() - debug_time)); + } + + cameraOpened(); + camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED; + } + + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: total time to open camera: " + (System.currentTimeMillis() - debug_time)); + } + } + + /** Open the camera - this should be called from background thread, to avoid hogging the UI thread. + */ + private CameraController openCameraCore(int cameraId, String cameraIdSPhysical) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "openCameraCore()"); + debug_time = System.currentTimeMillis(); + } + // We pass a camera controller back to the UI thread rather than assigning to camera_controller here, because: + // * If we set camera_controller directly, we'd need to synchronize, otherwise risk of memory barrier issues + // * Risk of race conditions if UI thread accesses camera_controller before we have called cameraOpened(). 
+ CameraController camera_controller_local; + try { + if( MyDebug.LOG ) { + Log.d(TAG, "try to open camera: " + cameraId); + Log.d(TAG, "openCamera: time before opening camera: " + (System.currentTimeMillis() - debug_time)); + } + if( test_fail_open_camera ) { + if( MyDebug.LOG ) + Log.d(TAG, "test failing to open camera"); + throw new CameraControllerException(); + } + CameraController.ErrorCallback cameraErrorCallback = new CameraController.ErrorCallback() { + public void onError() { + if( MyDebug.LOG ) + Log.e(TAG, "error from CameraController: camera device failed"); + if( camera_controller != null ) { + if( MyDebug.LOG ) + Log.e(TAG, "set camera_controller to null"); + camera_controller = null; + camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED; + preview_started_state = PREVIEW_NOT_STARTED; + applicationInterface.onCameraError(); + } + } + }; + if( using_android_l ) { + CameraController.ErrorCallback previewErrorCallback = new CameraController.ErrorCallback() { + public void onError() { + if( MyDebug.LOG ) + Log.e(TAG, "error from CameraController: preview failed to start"); + applicationInterface.onFailedStartPreview(); + } + }; + camera_controller_local = new CameraController2(Preview.this.getContext(), cameraId, cameraIdSPhysical, camera_features_caches, previewErrorCallback, cameraErrorCallback); + if( applicationInterface.useCamera2FakeFlash() ) { + camera_controller_local.setUseCamera2FakeFlash(true); + } + } + else + camera_controller_local = new CameraController1(cameraId, cameraErrorCallback); + //throw new CameraControllerException(); // uncomment to test camera not opening + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "failed to open camera", e); + camera_controller_local = null; + } + + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: total time for openCameraCore: " + (System.currentTimeMillis() - debug_time)); + } + return camera_controller_local; + } + + /** Called from UI thread after openCameraCore() completes 
on the background thread. + */ + private void cameraOpened() { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "cameraOpened()"); + debug_time = System.currentTimeMillis(); + } + if( camera_controller != null ) { + Activity activity = (Activity)Preview.this.getContext(); + /*if( MyDebug.LOG ) + Log.d(TAG, "intent: " + activity.getIntent()); + boolean take_photo = false; + if( activity.getIntent() != null && activity.getIntent().getExtras() != null ) { + take_photo = activity.getIntent().getExtras().getBoolean(TakePhoto.TAKE_PHOTO); + activity.getIntent().removeExtra(TakePhoto.TAKE_PHOTO); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "no intent data"); + }*/ + boolean take_photo = TakePhoto.TAKE_PHOTO; + if( take_photo ) + TakePhoto.TAKE_PHOTO = false; + if( MyDebug.LOG ) + Log.d(TAG, "take_photo?: " + take_photo); + + setCameraDisplayOrientation(); + if( orientationEventListener == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "create orientationEventListener"); + orientationEventListener = new OrientationEventListener(activity) { + @Override + public void onOrientationChanged(int orientation) { + Preview.this.onOrientationChanged(orientation); + } + }; + orientationEventListener.enable(); + } + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: time after setting orientation: " + (System.currentTimeMillis() - debug_time)); + } + + if( MyDebug.LOG ) + Log.d(TAG, "call setPreviewDisplay"); + cameraSurface.setPreviewDisplay(camera_controller); + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: time after setting preview display: " + (System.currentTimeMillis() - debug_time)); + } + + //final boolean wait_until_started = true; + final boolean wait_until_started = !( using_android_l && Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ); + //final boolean wait_until_started = false; // test + if( MyDebug.LOG ) + Log.d(TAG, "wait_until_started: " + wait_until_started); + setupCamera(take_photo, wait_until_started, new Runnable() { + @Override + public void 
run() { + if( MyDebug.LOG ) + Log.d(TAG, "cameraOpened: runnable for starting camera preview"); + if( using_android_l ) { + configureTransform(); + } + } + }); + } + + if( MyDebug.LOG ) { + Log.d(TAG, "openCamera: total time for cameraOpened: " + (System.currentTimeMillis() - debug_time)); + } + } + + + /** Try to reopen the camera, if not currently open (e.g., permission wasn't granted, but now it is). + * The camera will be opened on a background thread, so won't be available upon + * exit of this function. + * If camera_open_state is already CAMERAOPENSTATE_OPENING, or the camera is already open, + * this method does nothing. + */ + public void retryOpenCamera() { + if( MyDebug.LOG ) + Log.d(TAG, "retryOpenCamera()"); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "try to reopen camera"); + this.openCamera(); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "camera already open"); + } + } + + /** Closes and reopens the camera. + * The camera will be closed and opened on a background thread, so won't be available upon + * exit of this function. + */ + public void reopenCamera() { + if( MyDebug.LOG ) + Log.d(TAG, "reopenCamera()"); + //this.closeCamera(false, null); + //this.openCamera(); + closeCamera(true, new CloseCameraCallback() { + @Override + public void onClosed() { + if( MyDebug.LOG ) + Log.d(TAG, "CloseCameraCallback.onClosed"); + openCamera(); + } + }); + } + + /** Returns false if we failed to open the camera because camera or storage permission wasn't available. + */ + public boolean hasPermissions() { + return has_permissions; + } + + /** Returns true iff the camera is currently being opened on background thread (openCamera() called, but + * camera not yet available). + */ + public boolean isOpeningCamera() { + return camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENING; + } + + /** Returns true iff we've tried to open the camera (whether or not it was successful). 
+ */ + public boolean openCameraAttempted() { + return camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENED; + } + + /** Returns true iff we've tried to open the camera, and were unable to do so. + */ + public boolean openCameraFailed() { + return camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENED && camera_controller == null; + } + + /** Should only be called after camera first opened, or after preview is paused. + * Important to call this when switching between photo and video mode, as ApplicationInterface + * preferences/parameters may be different (since we can support taking photos in video snapshot + * mode, but this may have different parameters). + * @param take_photo take_photo is true if we have been called from the TakePhoto widget + * (which means we'll take a photo immediately after startup). + * @param wait_until_started For CameraController.startPreview(). + * @param preview_opened For CameraController.startPreview(). + */ + public void setupCamera(boolean take_photo, boolean wait_until_started, Runnable preview_opened) { + if( MyDebug.LOG ) + Log.d(TAG, "setupCamera()"); + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return; + } + boolean do_startup_focus = !take_photo && applicationInterface.getStartupFocusPref(); + if( MyDebug.LOG ) { + Log.d(TAG, "take_photo? " + take_photo); + Log.d(TAG, "do_startup_focus? 
" + do_startup_focus); + } + this.focus_set_for_target_distance = false; // reset + this.focus_set_for_target_distance_ms = System.currentTimeMillis(); + // make sure we're into continuous video mode for reopening + // workaround for bug on Samsung Galaxy S5 with UHD, where if the user switches to another (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the video is corrupted + // so to be safe, we always reset to continuous video mode + // although I've now fixed this at the level where we close the settings, I've put this guard here, just in case the problem occurs from elsewhere + // we'll switch to the user-requested focus by calling setFocusPref() from setupCameraParameters() below + this.updateFocusForVideo(); + + try { + initCameraParameters(); + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "exception from initCameraParameters", e); + applicationInterface.onCameraError(); + closeCamera(false, null); + return; + } + + // now switch to video if saved + boolean saved_is_video = applicationInterface.isVideoPref(); + if( MyDebug.LOG ) { + Log.d(TAG, "saved_is_video: " + saved_is_video); + } + if( saved_is_video && !supports_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "but video not supported"); + saved_is_video = false; + } + // must switch video before setupCameraParameters(), and starting preview + if( saved_is_video != this.is_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "switch video mode as not in correct mode"); + this.switchVideo(true, false); + } + + // seems sensible to set extension mode (or not) first + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && this.supported_extensions != null && applicationInterface.isCameraExtensionPref() ) { + int extension = applicationInterface.getCameraExtensionPref(); + if( this.supported_extensions.contains(extension) ) { + camera_controller.setCameraExtension(true, extension); + + // also filter unsupported flash modes + if( 
supported_flash_values != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "restrict flash modes for extension session"); + List new_supported_flash_values = new ArrayList<>(); + for(String supported_flash_value : supported_flash_values) { + switch( supported_flash_value ) { + case "flash_off": + case "flash_frontscreen_torch": + new_supported_flash_values.add(supported_flash_value); + break; + } + } + supported_flash_values = new_supported_flash_values; + } + + // also disallow focus modes + if( supported_focus_values != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "restrict focus modes for extension session"); + supported_focus_values = null; + } + + // and disable ae and awb lock (as normally we don't set this when stopping/starting preview) + camera_controller.setAutoExposureLock(false); + camera_controller.setAutoWhiteBalanceLock(false); + } + else { + camera_controller.setCameraExtension(false, 0); + } + } + else { + camera_controller.setCameraExtension(false, 0); + } + + setupCameraParameters(); + + updateFlashForVideo(); + if( take_photo ) { + if( this.is_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "switch to video for take_photo widget"); + this.switchVideo(true, true); + } + } + + // must be done after switching to video mode (so is_video is set correctly) + if( MyDebug.LOG ) + Log.d(TAG, "is_video?: " + is_video); + if( this.is_video ) { + CameraController.TonemapProfile tonemap_profile = CameraController.TonemapProfile.TONEMAPPROFILE_OFF; + if( supports_tonemap_curve ) { + tonemap_profile = applicationInterface.getVideoTonemapProfile(); + + } + float video_log_profile_strength = (tonemap_profile == CameraController.TonemapProfile.TONEMAPPROFILE_LOG) ? applicationInterface.getVideoLogProfileStrength() : 0.0f; + float video_gamma = (tonemap_profile == CameraController.TonemapProfile.TONEMAPPROFILE_GAMMA) ? 
applicationInterface.getVideoProfileGamma() : 0.0f; + if( MyDebug.LOG ) { + Log.d(TAG, "tonemap_profile: " + tonemap_profile); + Log.d(TAG, "video_log_profile_strength: " + video_log_profile_strength); + Log.d(TAG, "video_gamma: " + video_gamma); + } + camera_controller.setTonemapProfile(tonemap_profile, video_log_profile_strength, video_gamma); + } + + // Setup for high speed - must be done after setupCameraParameters() and switching to video mode, but before setPreviewSize() and startCameraPreview(). + // In theory it shouldn't matter if we call setVideoHighSpeed(true) if is_video==false, as it should only have an effect + // when recording video; but don't set high speed mode in photo mode just to be safe. + camera_controller.setVideoHighSpeed(is_video && video_high_speed); + + if( do_startup_focus && using_android_l && camera_controller.supportsAutoFocus() ) { + // need to switch flash off for autofocus - and for Android L, need to do this before starting preview (otherwise it won't work in time); for old camera API, need to do this after starting preview! + set_flash_value_after_autofocus = ""; + String old_flash_value = camera_controller.getFlashValue(); + // getFlashValue() may return "" if flash not supported! 
+ // also set flash_torch - otherwise we get bug where torch doesn't turn on when starting up in video mode (and it's not like we want to turn torch off for startup focus, anyway) + if( !old_flash_value.isEmpty() && !old_flash_value.equals("flash_off") && !old_flash_value.equals("flash_torch") ) { + set_flash_value_after_autofocus = old_flash_value; + camera_controller.setFlashValue("flash_off"); + } + if( MyDebug.LOG ) + Log.d(TAG, "set_flash_value_after_autofocus is now: " + set_flash_value_after_autofocus); + } + + boolean is_extension = camera_controller.isCameraExtension(); + if( this.supports_jpeg_r && !is_extension && applicationInterface.getJpegRPref() ) { + camera_controller.setJpegR(true); + } + else { + camera_controller.setJpegR(false); + } + + if( this.supports_raw && applicationInterface.getRawPref() != ApplicationInterface.RawPref.RAWPREF_JPEG_ONLY ) { + camera_controller.setRaw(true, applicationInterface.getMaxRawImages()); + } + else { + camera_controller.setRaw(false, 0); + } + + setupBurstMode(); + + { + boolean is_burst = camera_controller.isCaptureFastBurst(); + int extension = is_extension ? camera_controller.getCameraExtension() : -1; + if( is_burst || is_extension ) { + if( MyDebug.LOG ) { + if( is_burst ) + Log.d(TAG, "check photo resolution supports burst"); + if( is_extension ) + Log.d(TAG, "check photo resolution supports extension: " + extension); + } + CameraController.Size current_size = getCurrentPictureSize(); + if( current_size != null ) { + if( MyDebug.LOG ) { + Log.d(TAG, "current_size: " + current_size.width + " x " + current_size.height + " supports_burst? 
" + current_size.supports_burst); + } + if( !current_size.supportsRequirements(is_burst, is_extension, extension) ) { + if( MyDebug.LOG ) + Log.d(TAG, "current picture size doesn't support required burst and/or extension"); + // set to next largest that supports what we need + CameraController.Size new_size = null; + for(int i=0;i new_size.width*new_size.height ) { + current_size_index = i; + new_size = size; + } + } + } + if( new_size == null ) { + Log.e(TAG, "can't find supporting picture size smaller than the current picture size"); + // just find largest that supports requirements + for(int i=0;i new_size.width*new_size.height ) { + current_size_index = i; + new_size = size; + } + } + } + if( new_size == null ) { + Log.e(TAG, "can't find supporting picture size"); + } + } + // if we set a new size, we don't save this to applicationinterface (so that if user switches to a burst mode or extension mode and back + // when the original resolution doesn't support burst/extension we revert to the original resolution) + } + } + } + } + + // Must set preview size before starting camera preview + // and must do it after setting photo vs video mode + // and after setting what camera extension we're using (if any) + setPreviewSize(); // need to call this when we switch cameras, not just when we run for the first time + if( MyDebug.LOG ) { + Log.d(TAG, "setupCamera: time after setting preview size: " + (System.currentTimeMillis() - debug_time)); + } + + final long debug_time_f = debug_time; + // Must call startCameraPreview after checking if face detection is present - probably best to call it after setting all parameters that we want + startCameraPreview(wait_until_started, new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "setupCamera: runnable for starting camera preview"); + + // must be done after setting parameters, as this function may set parameters + // also needs to be done after starting preview for some devices (e.g., Nexus 7) + 
if( has_zoom ) { + int zoom_pref = applicationInterface.getZoomPref(); + if( zoom_pref == -1 ) { + zoom_pref = find1xZoom(); + } + zoomTo(zoom_pref, false); + if( MyDebug.LOG ) { + Log.d(TAG, "setupCamera: total time after zoomTo: " + (System.currentTimeMillis() - debug_time_f)); + } + } + else if( camera_controller_supports_zoom && !has_zoom ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera supports zoom but application disabled zoom, so reset zoom to default"); + // if the application switches zoom off via ApplicationInterface.allowZoom(), we need to support + // resetting the zoom (in case the application called setupCamera() rather than reopening the camera). + camera_controller.resetZoom(); + } + + /*if( take_photo ) { + if( this.is_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "switch to video for take_photo widget"); + this.switchVideo(false); // set during_startup to false, as we now need to reset the preview + } + }*/ + + applicationInterface.cameraSetup(); // must call this after the above take_photo code for calling switchVideo + if( MyDebug.LOG ) { + Log.d(TAG, "setupCamera: total time after cameraSetup: " + (System.currentTimeMillis() - debug_time_f)); + } + + if( take_photo ) { + // take photo after a delay - otherwise we sometimes get a black image?! + // also need a longer delay for continuous picture focus, to allow a chance to focus - 1000ms seems to work okay for Nexus 6, put 1500ms to be safe + String focus_value = getCurrentFocusValue(); + final int delay = ( focus_value != null && focus_value.equals("focus_mode_continuous_picture") ) ? 
1500 : 500; + if( MyDebug.LOG ) + Log.d(TAG, "delay for take photo: " + delay); + final Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "do automatic take picture"); + takePicture(false, false, false); + } + }, delay); + } + + if( do_startup_focus ) { + final Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "do startup autofocus"); + tryAutoFocus(true, false); // so we get the autofocus when starting up - we do this on a delay, as calling it immediately means the autofocus doesn't seem to work properly sometimes (at least on Galaxy Nexus) + } + }, 500); + } + + if( preview_opened != null ) { + preview_opened.run(); + } + } + }); + if( MyDebug.LOG ) { + Log.d(TAG, "setupCamera: time after starting camera preview: " + (System.currentTimeMillis() - debug_time)); + } + + if( MyDebug.LOG ) { + Log.d(TAG, "setupCamera: total time after setupCamera: " + (System.currentTimeMillis() - debug_time)); + } + } + + private int find1xZoom() { + for(int i=0;i current_size.width*current_size.height ) { + current_size = size; + } + } + if( current_size != null ) + current_size.supports_burst = false; + } + supported_flash_values = camera_features.supported_flash_values; + supported_focus_values = camera_features.supported_focus_values; + this.max_num_focus_areas = camera_features.max_num_focus_areas; + this.is_exposure_lock_supported = camera_features.is_exposure_lock_supported; + this.is_white_balance_lock_supported = camera_features.is_white_balance_lock_supported; + this.supports_optical_stabilization = camera_features.is_optical_stabilization_supported; + this.supports_video_stabilization = camera_features.is_video_stabilization_supported; + this.supports_photo_video_recording = camera_features.is_photo_video_recording_supported; + this.can_disable_shutter_sound = 
camera_features.can_disable_shutter_sound; + this.tonemap_max_curve_points = camera_features.tonemap_max_curve_points; + this.supports_tonemap_curve = camera_features.supports_tonemap_curve; + this.supported_apertures = camera_features.apertures; + this.supports_white_balance_temperature = camera_features.supports_white_balance_temperature; + this.min_temperature = camera_features.min_temperature; + this.max_temperature = camera_features.max_temperature; + this.supports_iso_range = camera_features.supports_iso_range; + this.min_iso = camera_features.min_iso; + this.max_iso = camera_features.max_iso; + this.supports_exposure_time = camera_features.supports_exposure_time; + this.min_exposure_time = camera_features.min_exposure_time; + this.max_exposure_time = camera_features.max_exposure_time; + this.min_exposure = camera_features.min_exposure; + this.max_exposure = camera_features.max_exposure; + this.exposure_step = camera_features.exposure_step; + this.supports_expo_bracketing = camera_features.supports_expo_bracketing; + this.max_expo_bracketing_n_images = camera_features.max_expo_bracketing_n_images; + this.supports_focus_bracketing = camera_features.supports_focus_bracketing; + this.supports_burst = camera_features.supports_burst; + this.supports_jpeg_r = camera_features.supports_jpeg_r; + this.supports_raw = camera_features.supports_raw; + this.view_angle_x = camera_features.view_angle_x; + this.view_angle_y = camera_features.view_angle_y; + this.supports_video_high_speed = camera_features.video_sizes_high_speed != null && !camera_features.video_sizes_high_speed.isEmpty(); + this.video_quality_handler.setVideoSizes(camera_features.video_sizes); + this.video_quality_handler.setVideoSizesHighSpeed(camera_features.video_sizes_high_speed); + this.supported_preview_sizes = camera_features.preview_sizes; + this.supported_extensions = camera_features.supported_extensions; + this.supported_extensions_zoom = camera_features.supported_extensions_zoom; + 
this.physical_camera_ids = camera_features.physical_camera_ids; + + // need to do zoom last, as applicationInterface.allowZoom() may depend on the supported + // camera features (e.g., zoom not necessarily supported with camera extensions, so we need to have first + // stored supported_extensions - otherwise starting up in an extension photo mode will still + // show zoom controls even if zoom not supported) + this.camera_controller_supports_zoom = camera_features.is_zoom_supported; + this.camera_controller_max_zoom_factor = camera_features.max_zoom; + this.camera_controller_zoom_ratios = camera_features.zoom_ratios; + initZoom(); + } + } + + private void setupCameraParameters() { + if( MyDebug.LOG ) + Log.d(TAG, "setupCameraParameters()"); + long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up face detection"); + // get face detection supported + this.faces_detected = null; + if( this.supports_face_detection ) { + this.using_face_detection = applicationInterface.getFaceDetectionPref(); + } + else { + this.using_face_detection = false; + } + if( MyDebug.LOG ) { + Log.d(TAG, "supports_face_detection?: " + supports_face_detection); + Log.d(TAG, "using_face_detection?: " + using_face_detection); + } + if( this.using_face_detection ) { + class MyFaceDetectionListener implements CameraController.FaceDetectionListener { + private final Handler handler = new Handler(); + private int last_n_faces = -1; + private FaceLocation last_face_location = FaceLocation.FACELOCATION_UNSET; + + /** Note, at least for Camera2 API, onFaceDetection() isn't called on UI thread. 
+ */ + @Override + public void onFaceDetection(final CameraController.Face[] faces) { + if( MyDebug.LOG ) + Log.d(TAG, "onFaceDetection: " + faces.length + " : " + Arrays.toString(faces)); + if( camera_controller == null ) { + // can get a crash in some cases when switching camera when face detection is on (at least for Camera2) + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + faces_detected = null; + } + }); + return; + } + // don't assign to faces_detected yet, as that has to be done on the UI thread + + // We don't synchronize on faces_detected, as the array may be passed to other + // classes via getFacesDetected(). Although that function could copy instead, + // that would mean an allocation in every frame in DrawPreview. + // Easier to just do the assignment on the UI thread. + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + reportFaces(faces); + + if( faces_detected == null || faces_detected.length != faces.length ) { + // avoid unnecessary reallocations + if( MyDebug.LOG ) + Log.d(TAG, "allocate new faces_detected"); + faces_detected = new CameraController.Face[faces.length]; + } + System.arraycopy(faces, 0, faces_detected, 0, faces.length); + } + }); + } + + /** Accessibility: report number of faces for talkback etc. 
+ */ + private void reportFaces(CameraController.Face[] local_faces) { + { + int n_faces = local_faces.length; + FaceLocation face_location = FaceLocation.FACELOCATION_UNKNOWN; + if( n_faces > 0 ) { + // set face_location + float avg_x = 0, avg_y = 0; + final float bdry_frac_c = 0.35f; + boolean all_centre = true; + final Matrix matrix = getCameraToPreviewMatrix(); + for(CameraController.Face face : local_faces) { + //float face_x = face.rect.centerX(); + //float face_y = face.rect.centerY(); + // convert to screen space coordinates + face_rect.set(face.rect); + matrix.mapRect(face_rect); + float face_x = face_rect.centerX(); + float face_y = face_rect.centerY(); + + face_x /= (float)cameraSurface.getView().getWidth(); + face_y /= (float)cameraSurface.getView().getHeight(); + if( all_centre ) { + if( face_x < bdry_frac_c || face_x > 1.0f-bdry_frac_c || face_y < bdry_frac_c || face_y > 1.0f-bdry_frac_c ) + all_centre = false; + } + avg_x += face_x; + avg_y += face_y; + } + avg_x /= n_faces; + avg_y /= n_faces; + if( MyDebug.LOG ) { + Log.d(TAG, " avg_x: " + avg_x); + Log.d(TAG, " avg_y: " + avg_y); + Log.d(TAG, " ui_rotation: " + ui_rotation); + } + if( all_centre ) { + face_location = FaceLocation.FACELOCATION_CENTRE; + } + else { + switch( ui_rotation ) { + case 0: + break; + case 90: { + float temp = avg_x; + //noinspection SuspiciousNameCombination + avg_x = avg_y; + avg_y = 1.0f-temp; + break; + } + case 180: + avg_x = 1.0f-avg_x; + avg_y = 1.0f-avg_y; + break; + case 270: { + float temp = avg_x; + avg_x = 1.0f-avg_y; + avg_y = temp; + break; + } + } + if( MyDebug.LOG ) { + Log.d(TAG, " avg_x: " + avg_x); + Log.d(TAG, " avg_y: " + avg_y); + } + if( avg_x < bdry_frac_c ) + face_location = FaceLocation.FACELOCATION_LEFT; + else if( avg_x > 1.0f-bdry_frac_c ) + face_location = FaceLocation.FACELOCATION_RIGHT; + else if( avg_y < bdry_frac_c ) + face_location = FaceLocation.FACELOCATION_TOP; + else if( avg_y > 1.0f-bdry_frac_c ) + face_location = 
FaceLocation.FACELOCATION_BOTTOM; + } + } + + if( n_faces != last_n_faces || face_location != last_face_location ) { + if( n_faces == 0 && last_n_faces == -1 ) { + // only say 0 faces detected if previously the number was non-zero + } + else { + String string = n_faces + " " + getContext().getResources().getString(n_faces==1 ? R.string.face_detected : R.string.faces_detected); + if( n_faces > 0 && face_location != FaceLocation.FACELOCATION_UNKNOWN ) { + switch( face_location ) { + case FACELOCATION_CENTRE: + string += " " + getContext().getResources().getString(R.string.centre_of_screen); + break; + case FACELOCATION_LEFT: + string += " " + getContext().getResources().getString(R.string.left_of_screen); + break; + case FACELOCATION_RIGHT: + string += " " + getContext().getResources().getString(R.string.right_of_screen); + break; + case FACELOCATION_TOP: + string += " " + getContext().getResources().getString(R.string.top_of_screen); + break; + case FACELOCATION_BOTTOM: + string += " " + getContext().getResources().getString(R.string.bottom_of_screen); + break; + } + } + final String string_f = string; + if( MyDebug.LOG ) + Log.d(TAG, string); + // to avoid having a big queue of saying "one face detected, two faces detected" etc, we only report + // after a delay, cancelling any that were previously queued + handler.removeCallbacksAndMessages(null); + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "announceForAccessibility: " + string_f); + Preview.this.getView().announceForAccessibility(string_f); + } + }, 500); + } + + last_n_faces = n_faces; + last_face_location = face_location; + } + } + } + } + camera_controller.setFaceDetectionListener(new MyFaceDetectionListener()); + } + else { + camera_controller.setFaceDetectionListener(null); + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after setting face detection: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( 
MyDebug.LOG ) { + Log.d(TAG, "set up video stabilization"); + Log.d(TAG, "is_video?: " + is_video); + } + if( this.supports_video_stabilization ) { + boolean using_video_stabilization = is_video && applicationInterface.getVideoStabilizationPref(); + if( MyDebug.LOG ) + Log.d(TAG, "using_video_stabilization?: " + using_video_stabilization); + camera_controller.setVideoStabilization(using_video_stabilization); + } + if( MyDebug.LOG ) + Log.d(TAG, "supports_video_stabilization?: " + supports_video_stabilization); + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after video stabilization: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up color effect"); + String value = applicationInterface.getColorEffectPref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved color effect: " + value); + + CameraController.SupportedValues supported_values = camera_controller.setColorEffect(value); + if( supported_values != null ) { + color_effects = supported_values.values; + // now save, so it's available for PreferenceActivity + applicationInterface.setColorEffectPref(supported_values.selected_value); + } + else { + // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs) + applicationInterface.clearColorEffectPref(); + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after color effect: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up white balance"); + String value = applicationInterface.getWhiteBalancePref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved white balance: " + value); + + CameraController.SupportedValues supported_values = camera_controller.setWhiteBalance(value); + if( supported_values != null ) { + white_balances = supported_values.values; + // now save, so it's available for PreferenceActivity + applicationInterface.setWhiteBalancePref(supported_values.selected_value); + + if( 
supported_values.selected_value.equals("manual") && this.supports_white_balance_temperature ) { + int temperature = applicationInterface.getWhiteBalanceTemperaturePref(); + camera_controller.setWhiteBalanceTemperature(temperature); + if( MyDebug.LOG ) + Log.d(TAG, "saved white balance: " + value); + } + } + else { + // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs) + applicationInterface.clearWhiteBalancePref(); + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after white balance: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up antibanding"); + String value = applicationInterface.getAntiBandingPref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved antibanding: " + value); + + CameraController.SupportedValues supported_values = camera_controller.setAntiBanding(value); + // for anti-banding, if the stored preference wasn't supported, we stick with the device default - but don't + // write it back to the user preference + if( supported_values != null ) { + antibanding = supported_values.values; + } + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up edge_mode"); + String value = applicationInterface.getEdgeModePref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved edge_mode: " + value); + + CameraController.SupportedValues supported_values = camera_controller.setEdgeMode(value); + // for edge mode, if the stored preference wasn't supported, we stick with the device default - but don't + // write it back to the user preference + if( supported_values != null ) { + edge_modes = supported_values.values; + } + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up noise_reduction_mode"); + String value = applicationInterface.getCameraNoiseReductionModePref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved noise_reduction_mode: " + value); + + CameraController.SupportedValues supported_values = camera_controller.setNoiseReductionMode(value); + // for noise reduction 
mode, if the stored preference wasn't supported, we stick with the device default - but don't + // write it back to the user preference + if( supported_values != null ) { + noise_reduction_modes = supported_values.values; + } + } + + // must be done before setting flash modes, as we may remove flash modes if in manual mode (update: we now support flash for manual ISO anyway) + if( MyDebug.LOG ) + Log.d(TAG, "set up iso"); + String value = applicationInterface.getISOPref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved iso: " + value); + boolean is_manual_iso = false; + boolean is_extension = camera_controller.isCameraExtension(); + if( is_extension ) { + // manual ISO not supported for camera extensions + camera_controller.setManualISO(false, 0); + } + else if( supports_iso_range ) { + // in this mode, we can set any ISO value from min to max + this.isos = null; // if supports_iso_range==true, caller shouldn't be using getSupportedISOs() + + // now set the desired ISO mode/value + if( value.equals(CameraController.ISO_DEFAULT) ) { + if( MyDebug.LOG ) + Log.d(TAG, "setting auto iso"); + camera_controller.setManualISO(false, 0); + } + else { + int iso = parseManualISOValue(value); + if( iso >= 0 ) { + is_manual_iso = true; + if( MyDebug.LOG ) + Log.d(TAG, "iso: " + iso); + camera_controller.setManualISO(true, iso); + } + else { + // failed to parse + camera_controller.setManualISO(false, 0); + value = CameraController.ISO_DEFAULT; // so we switch the preferences back to auto mode, rather than the invalid value + } + + // now save, so it's available for PreferenceActivity + applicationInterface.setISOPref(value); + } + } + else { + // in this mode, any support for ISO is only the specific ISOs offered by the CameraController + CameraController.SupportedValues supported_values = camera_controller.setISO(value); + if( supported_values != null ) { + isos = supported_values.values; + if( !supported_values.selected_value.equals(CameraController.ISO_DEFAULT) ) { + if( MyDebug.LOG 
) + Log.d(TAG, "has manual iso"); + is_manual_iso = true; + } + // now save, so it's available for PreferenceActivity + applicationInterface.setISOPref(supported_values.selected_value); + + } + else { + // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs) + applicationInterface.clearISOPref(); + } + } + + if( is_manual_iso ) { + if( supports_exposure_time ) { + long exposure_time_value = applicationInterface.getExposureTimePref(); + if( MyDebug.LOG ) + Log.d(TAG, "saved exposure_time: " + exposure_time_value); + if( exposure_time_value < getMinimumExposureTime() ) + exposure_time_value = getMinimumExposureTime(); + else if( exposure_time_value > getMaximumExposureTime() ) + exposure_time_value = getMaximumExposureTime(); + camera_controller.setExposureTime(exposure_time_value); + // now save + applicationInterface.setExposureTimePref(exposure_time_value); + } + else { + // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs) + applicationInterface.clearExposureTimePref(); + } + + if( supported_flash_values != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "restrict flash modes for manual mode"); + List new_supported_flash_values = new ArrayList<>(); + for(String supported_flash_value : supported_flash_values) { + switch( supported_flash_value ) { + case "flash_off": + case "flash_on": + case "flash_torch": + case "flash_frontscreen_on": + case "flash_frontscreen_torch": + new_supported_flash_values.add(supported_flash_value); + break; + } + } + supported_flash_values = new_supported_flash_values; + /* + // flash modes not supported when using Camera2 and manual ISO + // (it's unclear flash is useful - ideally we'd at least offer torch, but ISO seems to reset to 100 when flash/torch is on!) 
+ supported_flash_values = null; + if( MyDebug.LOG ) + Log.d(TAG, "flash not supported in Camera2 manual mode"); + */ + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after manual iso: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( MyDebug.LOG ) { + Log.d(TAG, "set up exposure compensation"); + Log.d(TAG, "min_exposure: " + min_exposure); + Log.d(TAG, "max_exposure: " + max_exposure); + } + // get min/max exposure + exposures = null; + if( min_exposure != 0 || max_exposure != 0 ) { + exposures = new ArrayList<>(); + for(int i=min_exposure;i<=max_exposure;i++) { + exposures.add(String.valueOf(i)); + } + // if in manual ISO mode, we still want to get the valid exposure compensations, but shouldn't set exposure compensation + if( !is_manual_iso ) { + int exposure = applicationInterface.getExposureCompensationPref(); + if( exposure < min_exposure || exposure > max_exposure ) { + exposure = 0; + if( MyDebug.LOG ) + Log.d(TAG, "saved exposure not supported, reset to 0"); + if( exposure < min_exposure || exposure > max_exposure ) { + if( MyDebug.LOG ) + Log.d(TAG, "zero isn't an allowed exposure?! 
reset to min " + min_exposure); + exposure = min_exposure; + } + } + camera_controller.setExposureCompensation(exposure); + // now save, so it's available for PreferenceActivity + applicationInterface.setExposureCompensationPref(exposure); + } + } + else { + // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs) + applicationInterface.clearExposureCompensationPref(); + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after exposures: " + (System.currentTimeMillis() - debug_time)); + } + + if( supported_apertures != null ) { + // set up aperture + float aperture = applicationInterface.getAperturePref(); + if( aperture > 0.0f ) { + // check supported + for(float this_aperture : supported_apertures) { + if( this_aperture == aperture ) { + camera_controller.setAperture(aperture); + } + } + // else don't set any aperture (leave as the device default) + } + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up picture sizes"); + if( MyDebug.LOG ) { + for(int i=0;i resolution = applicationInterface.getCameraResolutionPref(photo_size_constraints); + if( resolution != null ) { + int resolution_w = resolution.first; + int resolution_h = resolution.second; + // now find size in valid list + for(int i=0;i current_size.width*current_size.height ) { + current_size_index = i; + current_size = size; + } + } + } + { + CameraController.Size current_size = getCurrentPictureSize(); + if( current_size != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "Current size index " + current_size_index + ": " + current_size.width + ", " + current_size.height); + + // now save, so it's available for PreferenceActivity + applicationInterface.setCameraResolutionPref(current_size.width, current_size.height); + + // check against constraints + // we intentionally do this after calling applicationInterface.setCameraResolutionPref() (as the constraints are + // used to just temporarily change resolution, e.g., if a maximum resolution 
has been enforced for HDR or NR photo + // mode, but we don't want to update the saved resolution preference in such cases + if( !photo_size_constraints.satisfies(current_size) ) { + if( MyDebug.LOG ) + Log.d(TAG, "current size index fail to satisfy constraints"); + CameraController.Size new_size = null; + // find the largest size that satisfies the constraint + for(int i=0;i new_size.width*new_size.height ) { + current_size_index = i; + new_size = size; + } + } + } + if( new_size == null ) { + Log.e(TAG, "can't find picture size that satisfies the constraints!"); + // so just choose the smallest + for(int i=0;i 1.0e-5f; + if( MyDebug.LOG ) { + Log.d(TAG, "has_capture_rate_factor: " + has_capture_rate_factor); + Log.d(TAG, "capture_rate_factor: " + capture_rate_factor); + } + + // set up high speed frame rates + // should be done after checking the requested video size is available, and after reading the requested capture rate + video_high_speed = false; + if( this.supports_video_high_speed ) { + VideoProfile profile = getVideoProfile(); + int capture_rate = (int)(profile.videoCaptureRate+1.0e-5f); + // We round to an int (a) to avoid risk of numerical wobble when comparing to the integer supported fps ranges, and (b) due to the + // "Nokia 8" hack in getVideoProfile(). 
+ // Note that when using timelapse (capture_rate_factor > 1.0), it may be that the capture rate is genuinely fractional, although these + // should always be non-high-speed, and this code is just for high speed cases, and if so for determining if the video resolution supports high speed + if( MyDebug.LOG ) + Log.d(TAG, "check if we need high speed video for " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps capture rate " + capture_rate); + CameraController.Size best_video_size = video_quality_handler.findVideoSizeForFrameRate(profile.videoFrameWidth, profile.videoFrameHeight, capture_rate, false); + // n.b., we should pass videoCaptureRate (capture_rate) and not videoFrameRate (as for slow motion, it's videoCaptureRate that will be high, not videoFrameRate) + + if( best_video_size == null && fpsIsHighSpeed(String.valueOf(capture_rate)) && video_quality_handler.getSupportedVideoSizesHighSpeed() != null ) { + Log.e(TAG, "can't find match for capture rate: " + capture_rate + " and video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoFrameRate); + // If fpsIsHighSpeed() returns true for capture_rate, then it means an fps is one that isn't + // supported by any standard video sizes, but it is supported by a high speed video size. If + // best_video_size==null, then we must have an incompatible size for this fps. + // So try falling back to one of the supported high speed resolutions. 
+ CameraController.Size requested_size = video_quality_handler.getMaxSupportedVideoSizeHighSpeed(); + profile.videoFrameWidth = requested_size.width; + profile.videoFrameHeight = requested_size.height; + // now try again + best_video_size = CameraController.CameraFeatures.findSize(video_quality_handler.getSupportedVideoSizesHighSpeed(), requested_size, capture_rate, false); + if( best_video_size != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "fall back to a supported video size for high speed fps"); + // need to write back to the application + // so find the corresponding quality value + video_quality_handler.setCurrentVideoQualityIndex(-1); + for(int i=0;i 1 ) { + + String flash_value = applicationInterface.getFlashPref(); + if( !flash_value.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "found existing flash_value: " + flash_value); + if( !updateFlash(flash_value, false) ) { // don't need to save, as this is the value that's already saved + if( MyDebug.LOG ) + Log.d(TAG, "flash value no longer supported!"); + // if in manual ISO mode, we'll have restricted the available flash modes - so although we want to + // communicate this to the application, we don't want to save the new value we've chosen (otherwise + // if user goes to manual ISO and back, we might switch saved flash say from auto to off) + // similarly for camera extension modes, and specific physical cameras + updateFlash(0, false); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "found no existing flash_value"); + // whilst devices with flash should support flash_auto, we'll also be in this codepath for front cameras with + // no flash, as instead the available options will be flash_off, flash_frontscreen_auto, flash_frontscreen_on + // see testTakePhotoFrontCameraScreenFlash + /*if( supported_flash_values.contains("flash_auto") ) + updateFlash("flash_auto", true); + else + updateFlash("flash_off", true);*/ + // update, we now default to flash off - flash is increasingly less useful on modern cameras, 
+ // plus reduces problems from risk of buggy flash on Camera2 API... + updateFlash("flash_off", true); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "flash not supported"); + supported_flash_values = null; + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "setupCameraParameters: time after setting up flash: " + (System.currentTimeMillis() - debug_time)); + } + + { + if( MyDebug.LOG ) + Log.d(TAG, "set up focus"); + current_focus_index = -1; + if( supported_focus_values != null && supported_focus_values.size() > 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "focus values: " + supported_focus_values); + + setFocusPref(true); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "focus not supported"); + supported_focus_values = null; + } + /*supported_focus_values = new ArrayList<>(); + supported_focus_values.add("focus_mode_auto"); + supported_focus_values.add("focus_mode_infinity"); + supported_focus_values.add("focus_mode_macro"); + supported_focus_values.add("focus_mode_locked"); + supported_focus_values.add("focus_mode_manual2"); + supported_focus_values.add("focus_mode_fixed"); + supported_focus_values.add("focus_mode_edof"); + supported_focus_values.add("focus_mode_continuous_video");*/ + /*View focusModeButton = (View) activity.findViewById(R.id.focus_mode); + focusModeButton.setVisibility(supported_focus_values != null && !immersive_mode ? 
View.VISIBLE : View.GONE);*/
        }

        // Restore the saved manual focus distance, clamped to the valid range
        // [0, minimum_focus_distance], apply it, and write the clamped value back to the pref.
        {
            float focus_distance_value = applicationInterface.getFocusDistancePref(false);
            if( MyDebug.LOG )
                Log.d(TAG, "saved focus_distance: " + focus_distance_value);
            if( focus_distance_value < 0.0f )
                focus_distance_value = 0.0f;
            else if( focus_distance_value > minimum_focus_distance )
                focus_distance_value = minimum_focus_distance;
            camera_controller.setFocusDistance(focus_distance_value);
            camera_controller.setFocusBracketingSourceDistance(focus_distance_value);
            // now save
            applicationInterface.setFocusDistancePref(focus_distance_value, false);
        }
        // Same clamping for the focus bracketing target distance (the "true" pref).
        {
            float focus_distance_value = applicationInterface.getFocusDistancePref(true);
            if( MyDebug.LOG )
                Log.d(TAG, "saved focus_bracketing_target_distance: " + focus_distance_value);
            if( focus_distance_value < 0.0f )
                focus_distance_value = 0.0f;
            else if( focus_distance_value > minimum_focus_distance )
                focus_distance_value = minimum_focus_distance;
            camera_controller.setFocusBracketingTargetDistance(focus_distance_value);
            // now save
            applicationInterface.setFocusDistancePref(focus_distance_value, true);
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "setupCameraParameters: time after setting up focus: " + (System.currentTimeMillis() - debug_time));
        }

        {
            if( MyDebug.LOG )
                Log.d(TAG, "set up exposure lock");
            // exposure lock should always default to false, as doesn't make sense to save it - we can't really preserve a "lock" after the camera is reopened
            // also note that it isn't safe to lock the exposure before starting the preview
            is_exposure_locked = false;
        }

        {
            if( MyDebug.LOG )
                Log.d(TAG, "set up white balance lock");
            // same reasoning as exposure lock
            is_white_balance_locked = false;
        }

        if( MyDebug.LOG ) {
            Log.d(TAG, "setupCameraParameters: total time for setting up camera parameters: " + (System.currentTimeMillis() - debug_time));
        }
    }

    /** Chooses and applies the picture size and the optimal preview size for the current
     *  photo/video mode. Must not be called while the preview is running.
     */
    private void setPreviewSize() {
        if( MyDebug.LOG )
            Log.d(TAG, "setPreviewSize()");
        // also now sets picture size
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
            return;
        }
        //if( is_preview_started ) {
        if( preview_started_state != PREVIEW_NOT_STARTED ) {
            Log.e(TAG, "setPreviewSize() shouldn't be called when preview is running");
            //throw new RuntimeException(); // throw as RuntimeException, as this is a programming error
            // Bizarrely I have seen the above crash reported from Google Play devices, but inspection of the code leaves it unclear
            // why this can happen. So have disabled the exception since this evidently can happen.
            return;
        }
        if( !using_android_l ) {
            // don't do for Android L, else this means we get flash on startup autofocus if flash is on
            this.cancelAutoFocus();
        }
        // first set picture size (for photo mode, must be done now so we can set the picture size from this; for video, doesn't really matter when we set it)
        CameraController.Size new_size;
        if( this.is_video ) {
            // see comments for getOptimalVideoPictureSize()
            VideoProfile profile = getVideoProfile();
            if( MyDebug.LOG )
                Log.d(TAG, "video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight);
            if( video_high_speed ) {
                // It's unclear it matters what size we set here given that high speed is only for Camera2 API, and that
                // take photo whilst recording video isn't supported for high speed video - so for Camera2 API, setting
                // picture size should have no effect. But set to a sensible value just in case.
                new_size = new CameraController.Size(profile.videoFrameWidth, profile.videoFrameHeight);
            }
            else {
                double targetRatio = ((double) profile.videoFrameWidth) / (double) profile.videoFrameHeight;
                new_size = getOptimalVideoPictureSize(photo_sizes, targetRatio);
            }
        }
        else {
            new_size = getCurrentPictureSize();
        }
        if( new_size != null ) {
            camera_controller.setPictureSize(new_size.width, new_size.height);
        }
        // set optimal preview size
        if( supported_preview_sizes != null && !supported_preview_sizes.isEmpty() ) {
            CameraController.Size best_size = getOptimalPreviewSize(supported_preview_sizes);
            camera_controller.setPreviewSize(best_size.width, best_size.height);
            this.set_preview_size = true;
            this.preview_w = best_size.width;
            this.preview_h = best_size.height;
            this.setAspectRatio( ((double)best_size.width) / (double)best_size.height );
        }
    }

    /** Sorts the supported video sizes (no-op if the camera isn't open). */
    private void initialiseVideoSizes() {
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
            return;
        }
        this.video_quality_handler.sortVideoSizes();
    }

    /** Queries which CamcorderProfile qualities the current camera supports (with their
     *  dimensions) and hands them to the video quality handler.
     */
    private void initialiseVideoQuality() {
        int cameraId = camera_controller.getCameraId();
        List profiles = new ArrayList<>();
        List dimensions = new ArrayList<>();
        if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_HIGH) ) {
            CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
            profiles.add(CamcorderProfile.QUALITY_HIGH);
            dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight));
        }
        {
            if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_2160P) ) {
                CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_2160P);
                profiles.add(CamcorderProfile.QUALITY_2160P);
                dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight));
            }
        }
        if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P) ) {
CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_1080P); + profiles.add(CamcorderProfile.QUALITY_1080P); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P) ) { + CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_720P); + profiles.add(CamcorderProfile.QUALITY_720P); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P) ) { + CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_480P); + profiles.add(CamcorderProfile.QUALITY_480P); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_CIF) ) { + CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_CIF); + profiles.add(CamcorderProfile.QUALITY_CIF); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA) ) { + CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QVGA); + profiles.add(CamcorderProfile.QUALITY_QVGA); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QCIF) ) { + CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QCIF); + profiles.add(CamcorderProfile.QUALITY_QCIF); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + if( CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_LOW) ) { + CamcorderProfile profile = 
CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_LOW); + profiles.add(CamcorderProfile.QUALITY_LOW); + dimensions.add(new VideoQualityHandler.Dimension2D(profile.videoFrameWidth, profile.videoFrameHeight)); + } + this.video_quality_handler.initialiseVideoQualityFromProfiles(profiles, dimensions); + } + + /** Gets a CamcorderProfile associated with the supplied quality, for non-slow motion modes. Note + * that the supplied quality doesn't have to match whatever the current video mode is (or indeed, + * this might be called even in slow motion mode), since we use this for things like setting up + * available preferences. + */ + public CamcorderProfile getCamcorderProfile(String quality) { + if( MyDebug.LOG ) + Log.d(TAG, "getCamcorderProfile(): " + quality); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return CamcorderProfile.get(0, CamcorderProfile.QUALITY_HIGH); + } + int cameraId = camera_controller.getCameraId(); + CamcorderProfile camcorder_profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH); // default + try { + String profile_string = quality; + int index = profile_string.indexOf('_'); + if( index != -1 ) { + profile_string = quality.substring(0, index); + if( MyDebug.LOG ) + Log.d(TAG, " profile_string: " + profile_string); + } + int profile = Integer.parseInt(profile_string); + camcorder_profile = CamcorderProfile.get(cameraId, profile); + if( index != -1 && index+1 < quality.length() ) { + String override_string = quality.substring(index+1); + if( MyDebug.LOG ) + Log.d(TAG, " override_string: " + override_string); + if( override_string.charAt(0) == 'r' && override_string.length() >= 4 ) { + index = override_string.indexOf('x'); + if( index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "override_string invalid format, can't find x"); + } + else { + String resolution_w_s = override_string.substring(1, index); // skip first 'r' + String resolution_h_s = override_string.substring(index+1); 
                        if( MyDebug.LOG ) {
                            Log.d(TAG, "resolution_w_s: " + resolution_w_s);
                            Log.d(TAG, "resolution_h_s: " + resolution_h_s);
                        }
                        // copy to local variable first, so that if we fail to parse height, we don't set the width either
                        int resolution_w = Integer.parseInt(resolution_w_s);
                        int resolution_h = Integer.parseInt(resolution_h_s);
                        camcorder_profile.videoFrameWidth = resolution_w;
                        camcorder_profile.videoFrameHeight = resolution_h;
                    }
                }
                else {
                    if( MyDebug.LOG )
                        Log.d(TAG, "unknown override_string initial code, or otherwise invalid format");
                }
            }
        }
        catch(NumberFormatException e) {
            // fall through and return the QUALITY_HIGH default set above
            MyDebug.logStackTrace(TAG, "failed to parse video quality: " + quality, e);
        }
        return camcorder_profile;
    }

    /** Returns a profile describing the currently selected video quality. The returned VideoProfile
     *  will usually encapsulate a CamcorderProfile (VideoProfile.getCamcorderProfile() will return
     *  non-null), but not always (e.g., for slow motion mode).
     */
    public VideoProfile getVideoProfile() {
        VideoProfile video_profile;

        // 4K UHD video is not yet supported by Android API (at least testing on Samsung S5 and Note 3, they do not return it via getSupportedVideoSizes(), nor via a CamcorderProfile (either QUALITY_HIGH, or anything else)
        // but it does work if we explicitly set the resolution (at least tested on an S5)
        if( camera_controller == null ) {
            video_profile = new VideoProfile();
            Log.e(TAG, "camera not opened! returning default video profile for QUALITY_HIGH");
            return video_profile;
        }
        /*if( video_high_speed ) {
            // return a video profile for a high speed frame rate - note that if we have a capture rate factor of say 0.25x,
            // the actual fps and bitrate of the resultant video would also be scaled by a factor of 0.25x
            //return new VideoProfile(MediaRecorder.AudioEncoder.AAC, MediaRecorder.OutputFormat.WEBM, 20000000,
            //    MediaRecorder.VideoEncoder.VP8, this.video_high_speed_size.height, 120,
            //    this.video_high_speed_size.width);
            return new VideoProfile(MediaRecorder.AudioEncoder.AAC, MediaRecorder.OutputFormat.MPEG_4, 4*14000000,
                MediaRecorder.VideoEncoder.H264, this.video_high_speed_size.height, 120,
                this.video_high_speed_size.width);
        }*/

        // Get user settings
        boolean record_audio = applicationInterface.getRecordAudioPref();
        String channels_value = applicationInterface.getRecordAudioChannelsPref();
        String fps_value = applicationInterface.getVideoFPSPref();
        String bitrate_value = applicationInterface.getVideoBitratePref();
        boolean force4k = applicationInterface.getForce4KPref();
        // Use CamcorderProfile just to get the current sizes and defaults.
        {
            CamcorderProfile cam_profile;
            int cameraId = camera_controller.getCameraId();

            // video_high_speed should only be for Camera2, where we don't support force4k option, but
            // put the check here just in case - don't want to be forcing 4K resolution if high speed
            // frame rate!
            if( force4k && !video_high_speed ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "force 4K UHD video");
                cam_profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
                cam_profile.videoFrameWidth = 3840;
                cam_profile.videoFrameHeight = 2160;
                cam_profile.videoBitRate = (int)(cam_profile.videoBitRate*2.8); // need a higher bitrate for the better quality - this is roughly based on the bitrate used by an S5's native camera app at 4K (47.6 Mbps, compared to 16.9 Mbps which is what's returned by the QUALITY_HIGH profile)
            }
            else if( this.video_quality_handler.getCurrentVideoQualityIndex() != -1 ) {
                cam_profile = getCamcorderProfile(this.video_quality_handler.getCurrentVideoQuality());
            }
            else {
                cam_profile = null;
            }
            video_profile = cam_profile != null ? new VideoProfile(cam_profile) : new VideoProfile();
        }

        //video_profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
        //video_profile.videoCodec = MediaRecorder.VideoEncoder.H264;

        // Frame rate: either validate the profile's default rate against the chosen resolution,
        // or apply the user's explicit fps preference.
        if( fps_value.equals("default") ) {
            if( supports_video_high_speed && video_profile.videoFrameWidth != 0 && video_profile.videoFrameHeight != 0 ) {
                // Check videoFrameRate is actually supported by requested video resolution.
                // We need this as sometimes the CamcorderProfile we use may store a frame rate not actually
                // supported for the resolution (e.g., on Pixel 6 Pro, 1920x1080 and 3840x2160 support 60fps,
                // and the CamcorderProfiles set 60fps, but the intermediate resolutions such as 1920x1440 only
                // support 30fps).
                // Limited to supports_video_high_speed - at the least, we don't want this code for old camera API where
                // supported frame rates aren't available.
                // N.B., we should pass videoCaptureRate and not videoFrameRate (as for slow motion, it's videoCaptureRate
                // that will be high, not videoFrameRate).
                CameraController.Size best_video_size = video_quality_handler.findVideoSizeForFrameRate(video_profile.videoFrameWidth, video_profile.videoFrameHeight, video_profile.videoCaptureRate, true);
                if( best_video_size != null && !best_video_size.supportsFrameRate(video_profile.videoCaptureRate) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "video resolution " + video_profile.videoFrameWidth + " x " + video_profile.videoFrameHeight + " doesn't support requested fps " + video_profile.videoFrameRate);
                    int closest_fps = best_video_size.closestFrameRate(video_profile.videoFrameRate);
                    if( MyDebug.LOG )
                        Log.d(TAG, " instead choose valid fps: " + closest_fps);
                    if( closest_fps != -1 ) { // just in case?
                        video_profile.videoFrameRate = closest_fps;
                        video_profile.videoCaptureRate = closest_fps;
                    }
                }
            }
        }
        else {
            try {
                int fps = Integer.parseInt(fps_value);
                if( MyDebug.LOG )
                    Log.d(TAG, "fps: " + fps);
                video_profile.videoFrameRate = fps;
                video_profile.videoCaptureRate = fps;
            }
            catch(NumberFormatException exception) {
                // keep the profile's default frame rate
                if( MyDebug.LOG )
                    Log.d(TAG, "fps invalid format, can't parse to int: " + fps_value);
            }
        }

        if( !bitrate_value.equals("default") ) {
            try {
                int bitrate = Integer.parseInt(bitrate_value);
                if( MyDebug.LOG )
                    Log.d(TAG, "bitrate: " + bitrate);
                video_profile.videoBitRate = bitrate;
            }
            catch(NumberFormatException exception) {
                // keep the profile's default bitrate
                if( MyDebug.LOG )
                    Log.d(TAG, "bitrate invalid format, can't parse to int: " + bitrate_value);
            }
        }
        // enforce a floor on the bitrate for high speed video
        final int min_high_speed_bitrate_c = 4*14000000;
        if( video_high_speed && video_profile.videoBitRate < min_high_speed_bitrate_c ) {
            video_profile.videoBitRate = min_high_speed_bitrate_c;
            if( MyDebug.LOG )
                Log.d(TAG, "set minimum bitrate for high speed: " + video_profile.videoBitRate);
        }

        // Slow motion / timelapse: adjust frame rate or capture rate depending on whether the
        // capture rate factor is below or above 1.
        if( has_capture_rate_factor ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set video profile frame rate for slow motion or timelapse, capture rate: " + capture_rate_factor);
            if( capture_rate_factor < 1.0 ) {
                // capture rate remains the same, and we adjust the frame rate of video
                video_profile.videoFrameRate = (int)(video_profile.videoFrameRate * capture_rate_factor + 0.5f);
                video_profile.videoBitRate = (int)(video_profile.videoBitRate * capture_rate_factor + 0.5f);
                if( MyDebug.LOG )
                    Log.d(TAG, "scaled frame rate to: " + video_profile.videoFrameRate);
                if( Math.abs(capture_rate_factor - 0.5f) < 1.0e-5f ) {
                    // hack - on Nokia 8 at least, capture_rate_factor of 0.5x still gives a normal speed video, but a
                    // workaround is to increase the capture rate - even increasing by just 1.0e-5 works
                    // unclear if this is needed in general, or is a Nokia specific bug
                    video_profile.videoCaptureRate += 1.0e-3;
                    if( MyDebug.LOG )
                        Log.d(TAG, "fudged videoCaptureRate to: " + video_profile.videoCaptureRate);
                }
            }
            else if( capture_rate_factor > 1.0 ) {
                // resultant framerate remains the same, instead adjust the capture rate
                video_profile.videoCaptureRate = video_profile.videoCaptureRate / (double)capture_rate_factor;
                if( MyDebug.LOG )
                    Log.d(TAG, "scaled capture rate to: " + video_profile.videoCaptureRate);
                if( Math.abs(capture_rate_factor - 2.0f) < 1.0e-5f ) {
                    // hack - similar idea to the hack above for 2x slow motion
                    // again, even decreasing by 1.0e-5 works
                    // again, unclear if this is needed in general, or is a Nokia specific bug
                    video_profile.videoCaptureRate -= 1.0e-3f;
                    if( MyDebug.LOG )
                        Log.d(TAG, "fudged videoCaptureRate to: " + video_profile.videoCaptureRate);
                }
            }
            // audio not recorded with slow motion or timelapse video
            record_audio = false;
        }

        // we repeat the Build.VERSION check to avoid Android Lint warning; also needs to be an "if" statement rather than using the
        // "?" operator, otherwise we still get the Android Lint warning
        if( using_android_l ) {
            video_profile.videoSource = MediaRecorder.VideoSource.SURFACE;
        }
        else {
            video_profile.videoSource = MediaRecorder.VideoSource.CAMERA;
        }

        // Done with video

        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
                && record_audio
                && ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ) {
            // needed for Android 6, in case users deny storage permission, otherwise we'll crash
            // see https://developer.android.com/training/permissions/requesting.html
            // we request permission when switching to video mode - if it wasn't granted, here we just switch it off
            // we restrict check to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener()
            if( MyDebug.LOG )
                Log.e(TAG, "don't have RECORD_AUDIO permission");
            // don't show a toast here, otherwise we'll keep showing toasts whenever getVideoProfile() is called; we only
            // should show a toast when user starts recording video; so we indicate this via the no_audio_permission flag
            record_audio = false;
            video_profile.no_audio_permission = true;
        }

        video_profile.record_audio = record_audio;
        if( record_audio ) {
            // map the audio source preference string onto a MediaRecorder.AudioSource constant
            String pref_audio_src = applicationInterface.getRecordAudioSourcePref();
            if( MyDebug.LOG )
                Log.d(TAG, "pref_audio_src: " + pref_audio_src);
            switch(pref_audio_src) {
                case "audio_src_mic":
                    video_profile.audioSource = MediaRecorder.AudioSource.MIC;
                    break;
                case "audio_src_default":
                    video_profile.audioSource = MediaRecorder.AudioSource.DEFAULT;
                    break;
                case "audio_src_voice_communication":
                    video_profile.audioSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
                    break;
                case "audio_src_voice_recognition":
                    video_profile.audioSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
                    break;
                case "audio_src_unprocessed":
                    if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) {
                        video_profile.audioSource = MediaRecorder.AudioSource.UNPROCESSED;
                    }
                    else {
                        Log.e(TAG, "audio_src_voice_unprocessed requires Android 7");
                        video_profile.audioSource = MediaRecorder.AudioSource.CAMCORDER;
                    }
                    break;
                case "audio_src_camcorder":
                default:
                    video_profile.audioSource = MediaRecorder.AudioSource.CAMCORDER;
                    break;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "audio_source: " + video_profile.audioSource);

            if( MyDebug.LOG )
                Log.d(TAG, "pref_audio_channels: " + channels_value);
            if( channels_value.equals("audio_mono") ) {
                video_profile.audioChannels = 1;
            }
            else if( channels_value.equals("audio_stereo") ) {
                video_profile.audioChannels = 2;
            }
            // else keep with the value already stored in VideoProfile (set from the CamcorderProfile)
        }

        // map the output format preference onto container/codec settings
        String pref_video_output_format = applicationInterface.getRecordVideoOutputFormatPref();
        if( MyDebug.LOG )
            Log.d(TAG, "pref_video_output_format: " + pref_video_output_format);
        switch( pref_video_output_format ) {
            case "preference_video_output_format_default":
                // n.b., although there is MediaRecorder.OutputFormat.DEFAULT, we don't explicitly set that - rather stick with what is default in the CamcorderProfile
                break;
            case "preference_video_output_format_mpeg4_h264":
                video_profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
                video_profile.videoCodec = MediaRecorder.VideoEncoder.H264;
                video_profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
                break;
            case "preference_video_output_format_mpeg4_hevc":
                if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) {
                    video_profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
                    video_profile.videoCodec = MediaRecorder.VideoEncoder.HEVC;
                    video_profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
                }
                // else treat as default
                break;
            case "preference_video_output_format_3gpp":
                video_profile.fileFormat = MediaRecorder.OutputFormat.THREE_GPP;
                video_profile.fileExtension = "3gp";
                // leave others at default
                break;
            case
"preference_video_output_format_webm": + { + // n.b., audio isn't recorded on any device I've tested with WEBM, seems this may + // not be supported yet, see: + // https://developer.android.com/guide/topics/media/media-formats#audio-formats + // https://stackoverflow.com/questions/42857584/recording-webm-with-android-mediarecorder + video_profile.fileFormat = MediaRecorder.OutputFormat.WEBM; + video_profile.videoCodec = MediaRecorder.VideoEncoder.VP8; + video_profile.audioCodec = MediaRecorder.AudioEncoder.VORBIS; + video_profile.fileExtension = "webm"; + } + // else treat as default + break; + default: + // treat as default + Log.e(TAG, "unknown pref_video_output_format: " + pref_video_output_format); + break; + } + + if( MyDebug.LOG ) + Log.d(TAG, "returning video_profile: " + video_profile); + return video_profile; + } + + private static String formatFloatToString(final float f) { + final int i=(int)f; + if( f == i ) + return Integer.toString(i); + return String.format(Locale.getDefault(), "%.2f", f); + } + + private static int greatestCommonFactor(int a, int b) { + while( b > 0 ) { + int temp = b; + b = a % b; + a = temp; + } + return a; + } + + private static String getAspectRatio(int width, int height) { + int gcf = greatestCommonFactor(width, height); + if( gcf > 0 ) { + // had a Google Play crash due to gcf being 0!? Implies width must be zero + width /= gcf; + height /= gcf; + } + return width + ":" + height; + } + + public static String getMPString(int width, int height) { + float mp = (width*height)/1000000.0f; + return formatFloatToString(mp) + "MP"; + } + + private static String getBurstString(Resources resources, boolean supports_burst) { + // should return empty string if supports_burst==true, as this is also used for video resolution strings + return supports_burst ? 
"" : ", " + resources.getString(R.string.no_burst); + } + + public static String getAspectRatioMPString(Resources resources, int width, int height, boolean supports_burst) { + return "(" + getAspectRatio(width, height) + ", " + getMPString(width, height) + getBurstString(resources, supports_burst) + ")"; + } + + private String getCamcorderProfileDescriptionType(CamcorderProfile profile) { + String type = ""; + // keep strings short, as displayed on the PopupView + if( profile.videoFrameWidth == 3840 && profile.videoFrameHeight == 2160 ) { + type = "4K"; + } + else if( profile.videoFrameWidth == 1920 && profile.videoFrameHeight == 1080 ) { + type = "FullHD"; + } + else if( profile.videoFrameWidth == 1280 && profile.videoFrameHeight == 720 ) { + type = "HD"; + } + else if( profile.videoFrameWidth == 720 && profile.videoFrameHeight == 480 ) { + type = "SD"; + } + else if( profile.videoFrameWidth == 640 && profile.videoFrameHeight == 480 ) { + type = "VGA"; + } + else if( profile.videoFrameWidth == 352 && profile.videoFrameHeight == 288 ) { + type = "CIF"; + } + else if( profile.videoFrameWidth == 320 && profile.videoFrameHeight == 240 ) { + type = "QVGA"; + } + else if( profile.videoFrameWidth == 176 && profile.videoFrameHeight == 144 ) { + type = "QCIF"; + } + return type; + } + + public String getCamcorderProfileDescriptionShort(String quality) { + if( camera_controller == null ) + return ""; + CamcorderProfile profile = getCamcorderProfile(quality); + String type = getCamcorderProfileDescriptionType(profile); + String space = type.isEmpty() ? "" : " "; + return profile.videoFrameWidth + "x" + profile.videoFrameHeight + space + type; + } + + public String getCamcorderProfileDescription(String quality) { + if( camera_controller == null ) + return ""; + CamcorderProfile profile = getCamcorderProfile(quality); + String type = getCamcorderProfileDescriptionType(profile); + String space = type.isEmpty() ? 
"" : " "; + return type + space + profile.videoFrameWidth + "x" + profile.videoFrameHeight + " " + getAspectRatioMPString(getResources(), profile.videoFrameWidth, profile.videoFrameHeight, true); + } + + public double getTargetRatio() { + return preview_targetRatio; + } + + private double calculateTargetRatioForPreview(Point display_size) { + double targetRatio; + String preview_size = applicationInterface.getPreviewSizePref(); + // should always use wysiwig for video mode, otherwise we get incorrect aspect ratio shown when recording video (at least on Galaxy Nexus, e.g., at 640x480) + // also not using wysiwyg mode with video caused corruption on Samsung cameras (tested with Samsung S3, Android 4.3, front camera, infinity focus) + if( preview_size.equals("preference_preview_size_wysiwyg") || this.is_video ) { + if( this.is_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "set preview aspect ratio from video size (wysiwyg)"); + VideoProfile profile = getVideoProfile(); + if( MyDebug.LOG ) + Log.d(TAG, "video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight); + targetRatio = ((double)profile.videoFrameWidth) / (double)profile.videoFrameHeight; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "set preview aspect ratio from photo size (wysiwyg)"); + CameraController.Size picture_size = camera_controller.getPictureSize(); + if( MyDebug.LOG ) + Log.d(TAG, "picture_size: " + picture_size.width + " x " + picture_size.height); + targetRatio = ((double)picture_size.width) / (double)picture_size.height; + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "set preview aspect ratio from display size"); + // base target ratio from display size - means preview will fill the device's display as much as possible + // but if the preview's aspect ratio differs from the actual photo/video size, the preview will show a cropped version of what is actually taken + targetRatio = ((double)display_size.x) / (double)display_size.y; + } + this.preview_targetRatio = targetRatio; + 
        if( MyDebug.LOG )
            Log.d(TAG, "targetRatio: " + targetRatio);
        return targetRatio;
    }

    /** Returns the size in sizes that is the closest aspect ratio match to targetRatio, but (if max_size is non-null) is not
     *  larger than max_size (in either width or height).
     */
    private static CameraController.Size getClosestSize(List sizes, double targetRatio, CameraController.Size max_size) {
        if( MyDebug.LOG )
            Log.d(TAG, "getClosestSize()");
        CameraController.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;
        for(CameraController.Size size : sizes) {
            double ratio = (double)size.width / size.height;
            if( max_size != null ) {
                // skip sizes exceeding the cap in either dimension
                if( size.width > max_size.width || size.height > max_size.height )
                    continue;
            }
            if( Math.abs(ratio - targetRatio) < minDiff ) {
                optimalSize = size;
                minDiff = Math.abs(ratio - targetRatio);
            }
        }
        return optimalSize;
    }

    /** Chooses the preview size: the supported size whose aspect ratio matches the target (within
     *  tolerance) and whose height is closest to the display height; falls back to the closest
     *  aspect ratio match if none qualifies. Returns null if sizes is null.
     */
    public CameraController.Size getOptimalPreviewSize(List sizes) {
        if( MyDebug.LOG )
            Log.d(TAG, "getOptimalPreviewSize()");
        final double ASPECT_TOLERANCE = 0.05;
        if( sizes == null )
            return null;
        if( is_video && video_high_speed ) {
            VideoProfile profile = getVideoProfile();
            if( MyDebug.LOG )
                Log.d(TAG, "video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight);
            // preview size must match video resolution for high speed, see doc for CameraDevice.createConstrainedHighSpeedCaptureSession()
            return new CameraController.Size(profile.videoFrameWidth, profile.videoFrameHeight);
        }
        CameraController.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;
        Point display_size = new Point();
        {
            applicationInterface.getDisplaySize(display_size, false); // don't exclude insets, as preview runs under insets in edge-to-edge mode
            // getSize() is adjusted based on the current rotation, so should already be landscape format, but:
            // (a) it would be good to not assume Open Camera runs in landscape mode (if we ever ran in portrait mode,
            // we'd still want display_size.x > display_size.y as preview resolutions also have width > height,
            // (b) on some devices (e.g., Nokia 8), when coming back from the Settings when device is held in Preview,
            // display size is returned in portrait format! (To reproduce, enable "Maximise preview size"; or if that's
            // already enabled, change the setting off and on.)
            if( MyDebug.LOG )
                Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y);
            if( display_size.x < display_size.y ) {
                //noinspection SuspiciousNameCombination
                display_size.set(display_size.y, display_size.x);
                if( MyDebug.LOG )
                    Log.d(TAG, "swapped display_size to: " + display_size.x + " x " + display_size.y);
            }
        }
        double targetRatio = calculateTargetRatioForPreview(display_size);
        int targetHeight = Math.min(display_size.y, display_size.x);
        if( targetHeight <= 0 ) {
            targetHeight = display_size.y;
        }
        // Try to find the size which matches the aspect ratio, and is closest match to display height
        for(CameraController.Size size : sizes) {
            if( MyDebug.LOG )
                Log.d(TAG, " supported preview size: " + size.width + ", " + size.height);
            if( camera_controller.isCameraExtension() ) {
                // when a camera extension is active, only sizes supporting that extension are usable
                int extension = camera_controller.getCameraExtension();
                if( !size.supportsExtension(extension) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, " not supported by current extension: " + extension);
                    continue;
                }
            }
            double ratio = (double)size.width / size.height;
            if( Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE )
                continue;
            if( Math.abs(size.height - targetHeight) < minDiff ) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
        if( optimalSize == null ) {
            // can't find match for aspect ratio, so find closest one
            if( MyDebug.LOG )
                Log.d(TAG, "no preview size matches the aspect ratio");
            optimalSize = getClosestSize(sizes, targetRatio, null);
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "chose optimalSize: " + optimalSize.width + " x " + optimalSize.height);
            Log.d(TAG, "optimalSize ratio: " + ((double)optimalSize.width / optimalSize.height));
        }
        return optimalSize;
    }

    /** Convenience overload that caps the result to the maximum supported video size. */
    public CameraController.Size getOptimalVideoPictureSize(List sizes, double targetRatio) {
        if( MyDebug.LOG )
            Log.d(TAG, "getOptimalVideoPictureSize()");
        CameraController.Size max_video_size = video_quality_handler.getMaxSupportedVideoSize();
        return getOptimalVideoPictureSize(sizes, targetRatio, max_video_size);
    }

    /** Returns a picture size to set during video mode.
     * In theory, the picture size shouldn't matter in video mode, but the stock Android camera sets a picture size
     * which is the largest that matches the video's aspect ratio.
     * This seems necessary to work around an aspect ratio bug introduced in Android 4.4.3 (on Nexus 7 at least): http://code.google.com/p/android/issues/detail?id=70830
     * which results in distorted aspect ratio on preview and recorded video!
     * Setting the picture size in video mode is also needed for taking photos when recording video. We need to make sure we
     * set photo resolutions that are supported by Android when recording video. For old camera API, this doesn't matter so much
     * (if we request too high, it'll automatically reduce the photo resolution), but still good to match the aspect ratio. For
     * Camera2 API, see notes at "https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession(java.util.List, android.hardware.camera2.CameraCaptureSession.StateCallback, android.os.Handler)" .
+ */ + public static CameraController.Size getOptimalVideoPictureSize(List sizes, double targetRatio, CameraController.Size max_video_size) { + if( MyDebug.LOG ) + Log.d(TAG, "getOptimalVideoPictureSize()"); + final double ASPECT_TOLERANCE = 0.05; + if( sizes == null ) + return null; + if( MyDebug.LOG ) + Log.d(TAG, "max_video_size: " + max_video_size.width + ", " + max_video_size.height); + CameraController.Size optimalSize = null; + // Try to find largest size that matches aspect ratio. + // But don't choose a size that's larger than the max video size (as this isn't supported for taking photos when + // recording video for devices with LIMITED support in Camera2 mode). + // In theory, for devices FULL Camera2 support, if the current video resolution is smaller than the max preview resolution, + // we should be able to support larger photo resolutions, but this is left to future. + for(CameraController.Size size : sizes) { + if( MyDebug.LOG ) + Log.d(TAG, " supported preview size: " + size.width + ", " + size.height); + double ratio = (double)size.width / size.height; + if( Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE ) + continue; + if( size.width > max_video_size.width || size.height > max_video_size.height ) + continue; + if( optimalSize == null || size.width > optimalSize.width ) { + optimalSize = size; + } + } + if( optimalSize == null ) { + // can't find match for aspect ratio, so find closest one + if( MyDebug.LOG ) + Log.d(TAG, "no picture size matches the aspect ratio"); + optimalSize = getClosestSize(sizes, targetRatio, max_video_size); + } + if( MyDebug.LOG ) { + Log.d(TAG, "chose optimalSize: " + optimalSize.width + " x " + optimalSize.height); + Log.d(TAG, "optimalSize ratio: " + ((double)optimalSize.width / optimalSize.height)); + } + return optimalSize; + } + + private void setAspectRatio(double ratio) { + if( ratio <= 0.0 ) + throw new IllegalArgumentException(); + + has_aspect_ratio = true; + if( aspect_ratio != ratio ) { + aspect_ratio = 
ratio; + if( MyDebug.LOG ) + Log.d(TAG, "new aspect ratio: " + aspect_ratio); + cameraSurface.getView().requestLayout(); + if( canvasView != null ) { + canvasView.requestLayout(); + } + } + } + + private boolean hasAspectRatio() { + return has_aspect_ratio; + } + + private double getAspectRatio() { + return aspect_ratio; + } + + /** Returns the rotation in degrees of the display relative to the natural device orientation. + */ + public int getDisplayRotationDegrees(boolean prefer_later) { + int rotation = applicationInterface.getDisplayRotation(prefer_later); + int degrees = 0; + switch (rotation) { + case Surface.ROTATION_0: degrees = 0; break; + case Surface.ROTATION_90: degrees = 90; break; + case Surface.ROTATION_180: degrees = 180; break; + case Surface.ROTATION_270: degrees = 270; break; + default: + break; + } + if( MyDebug.LOG ) + Log.d(TAG, " degrees = " + degrees); + return degrees; + } + + // note, if orientation is locked to landscape this is only called when setting up the activity, and will always have the same orientation + public void setCameraDisplayOrientation() { + if( MyDebug.LOG ) + Log.d(TAG, "setCameraDisplayOrientation()"); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return; + } + if( using_android_l ) { + // need to configure the textureview + configureTransform(); + } + else { + int degrees = getDisplayRotationDegrees(true); + if( MyDebug.LOG ) + Log.d(TAG, " degrees = " + degrees); + // note the code to make the rotation relative to the camera sensor is done in camera_controller.setDisplayOrientation() + camera_controller.setDisplayOrientation(degrees); + } + } + + // for taking photos - see http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setRotation(int) + private void onOrientationChanged(int orientation) { + /*if( MyDebug.LOG ) { + Log.d(TAG, "onOrientationChanged()"); + Log.d(TAG, "orientation: " + orientation); + }*/ + if( orientation == 
OrientationEventListener.ORIENTATION_UNKNOWN ) + return; + if( camera_controller == null ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!");*/ + return; + } + orientation = (orientation + 45) / 90 * 90; + this.current_orientation = orientation % 360; + int new_rotation; + int camera_orientation = camera_controller.getCameraOrientation(); + if( (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT) ) { + new_rotation = (camera_orientation - orientation + 360) % 360; + } + else { + new_rotation = (camera_orientation + orientation) % 360; + } + if( new_rotation != current_rotation ) { + if( MyDebug.LOG ) { + Log.d(TAG, " current_orientation is " + current_orientation); + Log.d(TAG, " info orientation is " + camera_orientation); + Log.d(TAG, " set Camera rotation from " + current_rotation + " to " + new_rotation); + } + this.current_rotation = new_rotation; + } + } + + private int getDeviceDefaultOrientation() { + WindowManager windowManager = (WindowManager)this.getContext().getSystemService(Context.WINDOW_SERVICE); + Configuration config = getResources().getConfiguration(); + int rotation = windowManager.getDefaultDisplay().getRotation(); + if( ( (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) && + config.orientation == Configuration.ORIENTATION_LANDSCAPE ) + || ( (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) && + config.orientation == Configuration.ORIENTATION_PORTRAIT ) ) { + return Configuration.ORIENTATION_LANDSCAPE; + } + else { + return Configuration.ORIENTATION_PORTRAIT; + } + } + + /* Returns the rotation (in degrees) to use for images/videos, taking the preference_lock_orientation into account. 
+ */ + private int getImageVideoRotation() { + if( MyDebug.LOG ) + Log.d(TAG, "getImageVideoRotation() from current_rotation " + current_rotation); + String lock_orientation = applicationInterface.getLockOrientationPref(); + if( lock_orientation.equals("landscape") ) { + int camera_orientation = camera_controller.getCameraOrientation(); + int device_orientation = getDeviceDefaultOrientation(); + int result; + if( device_orientation == Configuration.ORIENTATION_PORTRAIT ) { + // should be equivalent to onOrientationChanged(270) + if( (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT) ) { + result = (camera_orientation + 90) % 360; + } + else { + result = (camera_orientation + 270) % 360; + } + } + else { + // should be equivalent to onOrientationChanged(0) + result = camera_orientation; + } + if( MyDebug.LOG ) + Log.d(TAG, "getImageVideoRotation() lock to landscape, returns " + result); + return result; + } + else if( lock_orientation.equals("portrait") ) { + int camera_orientation = camera_controller.getCameraOrientation(); + int result; + int device_orientation = getDeviceDefaultOrientation(); + if( device_orientation == Configuration.ORIENTATION_PORTRAIT ) { + // should be equivalent to onOrientationChanged(0) + result = camera_orientation; + } + else { + // should be equivalent to onOrientationChanged(90) + if( (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT) ) { + result = (camera_orientation + 270) % 360; + } + else { + result = (camera_orientation + 90) % 360; + } + } + if( MyDebug.LOG ) + Log.d(TAG, "getImageVideoRotation() lock to portrait, returns " + result); + return result; + } + if( MyDebug.LOG ) + Log.d(TAG, "getImageVideoRotation() returns current_rotation " + current_rotation); + return this.current_rotation; + } + + public void draw(Canvas canvas) { + /*if( MyDebug.LOG ) + Log.d(TAG, "draw()");*/ + if( this.is_paused ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "draw(): paused");*/ + return; + } + /*if( true ) 
// test + return;*/ + /*if( MyDebug.LOG ) + Log.d(TAG, "ui_rotation: " + ui_rotation);*/ + /*if( MyDebug.LOG ) + Log.d(TAG, "canvas size " + canvas.getWidth() + " x " + canvas.getHeight());*/ + /*if( MyDebug.LOG ) + Log.d(TAG, "surface frame " + mHolder.getSurfaceFrame().width() + ", " + mHolder.getSurfaceFrame().height());*/ + + if( this.focus_success != FOCUS_DONE ) { + if( focus_complete_time != -1 && System.currentTimeMillis() > focus_complete_time + 1000 ) { + focus_success = FOCUS_DONE; + } + } + applicationInterface.onDrawPreview(canvas); + } + + public int getScaledZoomFactor(float scale_factor) { + if( MyDebug.LOG ) + Log.d(TAG, "getScaledZoomFactor() " + scale_factor); + + int new_zoom_factor = 0; + if( this.camera_controller != null && this.has_zoom ) { + final int zoom_factor = camera_controller.getZoom(); + float zoom_ratio; + if( has_smooth_zoom ) { + zoom_ratio = smooth_zoom; + if( MyDebug.LOG ) + Log.d(TAG, " use smooth_zoom: " + smooth_zoom + " instead of: " + this.zoom_ratios.get(zoom_factor)/100.0f); + } + else { + zoom_ratio = this.zoom_ratios.get(zoom_factor)/100.0f; + } + zoom_ratio *= scale_factor; + if( MyDebug.LOG ) + Log.d(TAG, " zoom_ratio: " + zoom_ratio); + + new_zoom_factor = zoom_factor; + if( zoom_ratio <= zoom_ratios.get(0)/100.0f ) { + new_zoom_factor = 0; + if( has_smooth_zoom ) + smooth_zoom = zoom_ratios.get(0)/100.0f; + } + else if( zoom_ratio >= zoom_ratios.get(max_zoom_factor)/100.0f ) { + new_zoom_factor = max_zoom_factor; + if( has_smooth_zoom ) + smooth_zoom = zoom_ratios.get(max_zoom_factor)/100.0f; + } + else if( has_smooth_zoom ) { + // Find the closest zoom level by rounding to nearest. + // Important to have same behaviour whether zooming in or out, otherwise problem when touching with two fingers and not + // moving - we'll get very small scale factors alternately between zooming in and out. 
+ // The only reason we have separate codepath for zooming in or out is for performance (since we know to only look at + // higher or lower zoom ratios). + float dist = Math.abs(zoom_ratio - zoom_ratios.get(zoom_factor)/100.0f); + if( MyDebug.LOG ) + Log.d(TAG, " current dist: " + dist); + + if( scale_factor > 1.0f ) { + // zooming in + for(int i=zoom_factor+1;i dist+1.0e-5f ) { + break; + } + } + } + else { + // zooming out + for(int i=zoom_factor-1;i>=0;i--) { + float this_dist = Math.abs(zoom_ratio - zoom_ratios.get(i)/100.0f); + if( this_dist < dist ) { + new_zoom_factor = i; + dist = this_dist; + if( MyDebug.LOG ) + Log.d(TAG, "zoom out, found new zoom by comparing " + zoom_ratios.get(i)/100.0f + " to " + zoom_ratio + " , dist " + dist); + } + else if( this_dist > dist+1.0e-5f ) { + break; + } + } + } + + smooth_zoom = zoom_ratio; + } + else { + // find the closest zoom level + // unclear if we need this code anymore (smooth_zoom should always be true?) + + if( scale_factor > 1.0f ) { + // zooming in + for(int i=zoom_factor;i= zoom_ratio ) { + if( MyDebug.LOG ) + Log.d(TAG, "zoom in, found new zoom by comparing " + zoom_ratios.get(i)/100.0f + " >= " + zoom_ratio); + new_zoom_factor = i; + break; + } + } + } + else { + // zooming out + for(int i=zoom_factor;i>=0;i--) { + if( zoom_ratios.get(i)/100.0f <= zoom_ratio ) { + if( MyDebug.LOG ) + Log.d(TAG, "zoom out, found new zoom by comparing " + zoom_ratios.get(i)/100.0f + " <= " + zoom_ratio); + new_zoom_factor = i; + break; + } + } + } + } + if( MyDebug.LOG ) { + Log.d(TAG, "zoom_ratio is now " + zoom_ratio); + Log.d(TAG, " old zoom_factor " + zoom_factor + " ratio " + zoom_ratios.get(zoom_factor)/100.0f); + Log.d(TAG, " chosen new zoom_factor " + new_zoom_factor + " ratio " + zoom_ratios.get(new_zoom_factor)/100.0f); + } + } + + return new_zoom_factor; + } + + public void scaleZoom(float scale_factor) { + if( MyDebug.LOG ) + Log.d(TAG, "scaleZoom() " + scale_factor); + if( this.camera_controller != null && 
this.has_zoom ) { + int new_zoom_factor = getScaledZoomFactor(scale_factor); + if( has_smooth_zoom ) + zoomTo(new_zoom_factor, true); + // else don't call zoomTo; this should be called indirectly by applicationInterface.multitouchZoom() + applicationInterface.multitouchZoom(new_zoom_factor); + } + } + + private final Handler zoom_transition_handler = new Handler(); + private Runnable zoom_transition_runnable; + + private void zoomTo(int new_zoom_factor, boolean allow_smooth_zoom) { + zoomTo(new_zoom_factor, allow_smooth_zoom, false); + } + + /** Zooms to the supplied index (within the zoom_ratios array). + * @param new_zoom_factor The index to zoom to. + * @param allow_smooth_zoom Whether zooming as part of pinch zooming. + * @param allow_zoom_transition If true, then change zoom gradually towards the requested zoom, + * rather than zooming immediately to the requested zoom. Only + * supported if allow_smooth_zoom==false. + */ + public void zoomTo(int new_zoom_factor, boolean allow_smooth_zoom, boolean allow_zoom_transition) { + if( MyDebug.LOG ) + Log.d(TAG, "ZoomTo(): " + new_zoom_factor); + if( new_zoom_factor < 0 ) + new_zoom_factor = 0; + else if( new_zoom_factor > max_zoom_factor ) + new_zoom_factor = max_zoom_factor; + if( zoom_transition_runnable != null ) { + // cancel an existing runnable + zoom_transition_handler.removeCallbacks(zoom_transition_runnable); + zoom_transition_runnable = null; + } + // problem where we crashed due to calling this function with null camera should be fixed now, but check again just to be safe + if( camera_controller != null ) { + if( this.has_zoom ) { + // don't cancelAutoFocus() here, otherwise we get sluggish zoom behaviour on Camera2 API + allow_zoom_transition = allow_zoom_transition && using_android_l; // only for Camera2 + allow_zoom_transition = allow_zoom_transition && !allow_smooth_zoom; // only if not smooth zooming + if( allow_zoom_transition && Math.abs(camera_controller.getZoom() - new_zoom_factor) < 6 ) { + // 
don't bother with transition if only changing a small amount + allow_zoom_transition = false; + } + if( allow_zoom_transition ) { + final int start_zoom_value = camera_controller.getZoom(); + final int target_zoom_value = new_zoom_factor; + //final float start_zoom = zoom_ratios.get(start_zoom_value)/100.0f; + final long start_time = System.currentTimeMillis(); + final long delay = 16; + + zoom_transition_runnable = new Runnable() { + public void run() { + // check just in case camera is closed or changed to a state where has_zoom==false, + // without cancelling the zoom_transition_runnable + if( camera_controller == null || !has_zoom ) { + return; + } + int this_zoom_value; + long time = System.currentTimeMillis() - start_time; + time += delay; // so we have a quicker transition + final long duration = 200; + if( time >= duration ) { + this_zoom_value = target_zoom_value; + } + else { + float alpha = time / (float)duration; + alpha = Math.min(alpha, 1.0f); + this_zoom_value = (int)((1.0f-alpha) * start_zoom_value + alpha * target_zoom_value + 0.5f); + } + if( MyDebug.LOG ) + Log.d(TAG, "ZoomTo runnable, this_zoom_value: " + this_zoom_value); + camera_controller.setZoom(this_zoom_value, -1.0f); + if( time < duration ) { + zoom_transition_handler.postDelayed(this, delay); + } + } + }; + zoom_transition_runnable.run(); + } + else { + // if pinch zooming, pass through the "smooth" zoom factor so for Camera2 API we get perfectly smooth zoom, rather than it + // being snapped to the discrete zoom values + camera_controller.setZoom(new_zoom_factor, (allow_smooth_zoom && has_smooth_zoom) ? 
smooth_zoom : -1.0f); + } + applicationInterface.setZoomPref(new_zoom_factor); + clearFocusAreas(); + } + } + } + + public void setFocusDistance(float new_focus_distance, boolean is_target_distance, boolean show_toast) { + if( MyDebug.LOG ) { + Log.d(TAG, "setFocusDistance: " + new_focus_distance); + Log.d(TAG, "is_target_distance: " + is_target_distance); + } + if( camera_controller != null ) { + if( new_focus_distance < 0.0f ) + new_focus_distance = 0.0f; + else if( new_focus_distance > minimum_focus_distance ) + new_focus_distance = minimum_focus_distance; + boolean focus_changed = false; + if( is_target_distance ) { + focus_changed = true; + camera_controller.setFocusBracketingTargetDistance(new_focus_distance); + // also set the focus distance, so the user can see what the target distance looks like + camera_controller.setFocusDistance(new_focus_distance); + this.focus_set_for_target_distance = true; + this.focus_set_for_target_distance_ms = System.currentTimeMillis(); + if( applicationInterface.isFocusBracketingSourceAutoPref() ) { + // first record the current focus distance, in case needed for taking a photo whilst adjusting the target focus distance + camera_controller.setFocusBracketingSourceDistanceFromCurrent(); + camera_controller.setFocusValue("focus_mode_manual2"); + } + } + else if( camera_controller.setFocusDistance(new_focus_distance) ) { + focus_changed = true; + camera_controller.setFocusBracketingSourceDistance(new_focus_distance); + } + + if( focus_changed ) { + // now save + applicationInterface.setFocusDistancePref(new_focus_distance, is_target_distance); + if( show_toast ) + { + String focus_distance_s; + if( new_focus_distance > 0.0f ) { + float real_focus_distance = 1.0f / new_focus_distance; + focus_distance_s = decimal_format_2dp_force0.format(real_focus_distance) + getResources().getString(R.string.metres_abbreviation); + } + else { + focus_distance_s = getResources().getString(R.string.infinite); + } + int id = 
R.string.focus_distance; + if( this.supports_focus_bracketing && applicationInterface.isFocusBracketingPref() ) + id = is_target_distance ? R.string.focus_bracketing_target_distance : R.string.focus_bracketing_source_distance; + showToast(getResources().getString(id) + " " + focus_distance_s, true); + } + } + } + } + + public void stoppedSettingFocusDistance(boolean is_target_distance) { + if( MyDebug.LOG ) { + Log.d(TAG, "stoppedSettingFocusDistance"); + Log.d(TAG, "is_target_distance: " + is_target_distance); + } + if( is_target_distance && camera_controller != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "set manual focus distance back to start"); + camera_controller.setFocusDistance( camera_controller.getFocusBracketingSourceDistance() ); + this.focus_set_for_target_distance = false; + this.focus_set_for_target_distance_ms = System.currentTimeMillis(); + if( applicationInterface.isFocusBracketingSourceAutoPref() ) { + String focus_value = applicationInterface.getFocusPref(is_video); + if( !focus_value.isEmpty() ) { + camera_controller.setFocusValue(focus_value); // in case using focus bracketing in auto focus mode + } + } + } + } + + /** Returns whether the target focus distance is currently being set. 
+ */ + public boolean isSettingTargetFocusDistance() { + return this.focus_set_for_target_distance; + } + + public long getSettingTargetFocusDistanceTime() { + return this.focus_set_for_target_distance_ms; + } + + public void setExposure(int new_exposure) { + if( MyDebug.LOG ) + Log.d(TAG, "setExposure(): " + new_exposure); + if( camera_controller != null && ( min_exposure != 0 || max_exposure != 0 ) ) { + cancelAutoFocus(); + if( new_exposure < min_exposure ) + new_exposure = min_exposure; + else if( new_exposure > max_exposure ) + new_exposure = max_exposure; + if( camera_controller.setExposureCompensation(new_exposure) ) { + // now save + applicationInterface.setExposureCompensationPref(new_exposure); + showToast(null, getExposureCompensationString(new_exposure), 0, true); + } + } + } + + /** Set a manual white balance temperature. The white balance mode must be set to "manual" for + * this to have an effect. + */ + public void setWhiteBalanceTemperature(int new_temperature) { + if( MyDebug.LOG ) + Log.d(TAG, "seWhiteBalanceTemperature(): " + new_temperature); + if( camera_controller != null ) { + if( camera_controller.setWhiteBalanceTemperature(new_temperature) ) { + // now save + applicationInterface.setWhiteBalanceTemperaturePref(new_temperature); + showToast(null, getResources().getString(R.string.white_balance) + " " + new_temperature, 0, true); + } + } + } + + /** Try to parse the supplied manual ISO value + * @return The manual ISO value, or -1 if not recognised as a number. 
+ */ + public int parseManualISOValue(String value) { + int iso; + try { + if( MyDebug.LOG ) + Log.d(TAG, "setting manual iso"); + iso = Integer.parseInt(value); + if( MyDebug.LOG ) + Log.d(TAG, "iso: " + iso); + } + catch(NumberFormatException exception) { + if( MyDebug.LOG ) + Log.d(TAG, "iso invalid format, can't parse to int"); + iso = -1; + } + return iso; + } + + public void setISO(int new_iso) { + if( MyDebug.LOG ) + Log.d(TAG, "setISO(): " + new_iso); + if( camera_controller != null && supports_iso_range ) { + if( new_iso < min_iso ) + new_iso = min_iso; + else if( new_iso > max_iso ) + new_iso = max_iso; + if( camera_controller.setISO(new_iso) ) { + // now save + applicationInterface.setISOPref(String.valueOf(new_iso)); + showToast(null, getISOString(new_iso), 0, true); + } + } + } + + public void setExposureTime(long new_exposure_time) { + if( MyDebug.LOG ) + Log.d(TAG, "setExposureTime(): " + new_exposure_time); + if( camera_controller != null && supports_exposure_time ) { + if( new_exposure_time < getMinimumExposureTime() ) + new_exposure_time = getMinimumExposureTime(); + else if( new_exposure_time > getMaximumExposureTime() ) + new_exposure_time = getMaximumExposureTime(); + if( camera_controller.setExposureTime(new_exposure_time) ) { + // now save + applicationInterface.setExposureTimePref(new_exposure_time); + showToast(null, getExposureTimeString(new_exposure_time), 0, true); + } + } + } + + public String getExposureCompensationString(int exposure) { + float exposure_ev = exposure * exposure_step; + // show a "+" even for exactly 0, so that we have a consistent text length (useful for the toast when adjusting the exposure compensation slider) + return getResources().getString(R.string.exposure_compensation) + " " + (exposure >= 0 ? 
"+" : "") + decimal_format_2dp_force0.format(exposure_ev) + " EV"; + } + + public String getISOString(int iso) { + return getResources().getString(R.string.iso) + " " + iso; + } + + public String getExposureTimeString(long exposure_time) { + /*if( MyDebug.LOG ) + Log.d(TAG, "getExposureTimeString(): " + exposure_time);*/ + double exposure_time_s = exposure_time/1000000000.0; + String string; + if( exposure_time > 100000000 ) { + // show exposure times of more than 0.1s directly + string = decimal_format_1dp.format(exposure_time_s) + getResources().getString(R.string.seconds_abbreviation); + } + else { + double exposure_time_r = 1.0/exposure_time_s; + string = " 1/" + (int)(exposure_time_r + 0.5) + getResources().getString(R.string.seconds_abbreviation); + } + /*if( MyDebug.LOG ) + Log.d(TAG, "getExposureTimeString() return: " + string);*/ + return string; + } + + public String getFrameDurationString(long frame_duration) { + double frame_duration_s = frame_duration/1000000000.0; + double frame_duration_r = 1.0/frame_duration_s; + return getResources().getString(R.string.fps) + " " + decimal_format_1dp.format(frame_duration_r); + } + + /*private String getFocusOneDistanceString(float dist) { + if( dist == 0.0f ) + return "inf."; + float real_dist = 1.0f/dist; + return decimal_format_2dp.format(real_dist) + getResources().getString(R.string.metres_abbreviation); + } + + public String getFocusDistanceString(float dist_min, float dist_max) { + String f_s = "f "; + //if( dist_min == dist_max ) + // return f_s + getFocusOneDistanceString(dist_min); + //return f_s + getFocusOneDistanceString(dist_min) + "-" + getFocusOneDistanceString(dist_max); + // just always show max for now + return f_s + getFocusOneDistanceString(dist_max); + }*/ + + public boolean canSwitchCamera() { + if( this.phase == PHASE_TAKING_PHOTO || isVideoRecording() ) { + // just to be safe - risk of cancelling the autofocus before taking a photo, or otherwise messing things up + if( MyDebug.LOG ) + 
Log.d(TAG, "currently taking a photo"); + return false; + } + int n_cameras = camera_controller_manager.getNumberOfCameras(); + if( MyDebug.LOG ) + Log.d(TAG, "found " + n_cameras + " cameras"); + if( n_cameras == 0 ) + return false; + return true; + } + + public void setCamera(int cameraId, String cameraIdSPhysical) { + if( MyDebug.LOG ) + Log.d(TAG, "setCamera(): " + cameraId + " / " + cameraIdSPhysical); + if( cameraId < 0 || cameraId >= camera_controller_manager.getNumberOfCameras() ) { + if( MyDebug.LOG ) + Log.d(TAG, "invalid cameraId: " + cameraId); + cameraId = 0; + } + if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENING ) { + if( MyDebug.LOG ) + Log.d(TAG, "already opening camera in background thread"); + return; + } + if( canSwitchCamera() ) { + /*closeCamera(false, null); + applicationInterface.setCameraIdPref(cameraId); + this.openCamera();*/ + final int cameraId_f = cameraId; + final String cameraIdSPhysical_f = cameraIdSPhysical; + closeCamera(true, new CloseCameraCallback() { + @Override + public void onClosed() { + if( MyDebug.LOG ) + Log.d(TAG, "CloseCameraCallback.onClosed"); + applicationInterface.setCameraIdPref(cameraId_f, cameraIdSPhysical_f); + openCamera(); + } + }); + } + } + + public static int [] matchPreviewFpsToVideo(List fps_ranges, int video_frame_rate) { + if( MyDebug.LOG ) + Log.d(TAG, "matchPreviewFpsToVideo()"); + int selected_min_fps = -1, selected_max_fps = -1, selected_diff = -1; + for(int [] fps_range : fps_ranges) { + if( MyDebug.LOG ) { + Log.d(TAG, " supported fps range: " + fps_range[0] + " to " + fps_range[1]); + } + int min_fps = fps_range[0]; + int max_fps = fps_range[1]; + if( min_fps <= video_frame_rate && max_fps >= video_frame_rate ) { + int diff = max_fps - min_fps; + if( selected_diff == -1 || diff < selected_diff ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + selected_diff = diff; + } + } + } + if( selected_min_fps != -1 ) { + if( MyDebug.LOG ) { + Log.d(TAG, " chosen fps range: " 
+ selected_min_fps + " to " + selected_max_fps); + } + } + else { + selected_diff = -1; + int selected_dist = -1; + for(int [] fps_range : fps_ranges) { + int min_fps = fps_range[0]; + int max_fps = fps_range[1]; + int diff = max_fps - min_fps; + int dist; + if( max_fps < video_frame_rate ) + dist = video_frame_rate - max_fps; + else + dist = min_fps - video_frame_rate; + if( MyDebug.LOG ) { + Log.d(TAG, " supported fps range: " + min_fps + " to " + max_fps + " has dist " + dist + " and diff " + diff); + } + if( selected_dist == -1 || dist < selected_dist || ( dist == selected_dist && diff < selected_diff ) ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + selected_dist = dist; + selected_diff = diff; + } + } + if( MyDebug.LOG ) + Log.e(TAG, " can't find match for fps range, so choose closest: " + selected_min_fps + " to " + selected_max_fps); + } + return new int[]{selected_min_fps, selected_max_fps}; + } + + public static int [] chooseBestPreviewFps(List fps_ranges) { + if( MyDebug.LOG ) + Log.d(TAG, "chooseBestPreviewFps()"); + + // find value with lowest min that has max >= 30; if more than one of these, pick the one with highest max + int selected_min_fps = -1, selected_max_fps = -1; + for(int [] fps_range : fps_ranges) { + if( MyDebug.LOG ) { + Log.d(TAG, " supported fps range: " + fps_range[0] + " to " + fps_range[1]); + } + int min_fps = fps_range[0]; + int max_fps = fps_range[1]; + if( max_fps >= 30000 ) { + if( selected_min_fps == -1 || min_fps < selected_min_fps ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + } + else if( min_fps == selected_min_fps && max_fps > selected_max_fps ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + } + } + } + + if( selected_min_fps != -1 ) { + if( MyDebug.LOG ) { + Log.d(TAG, " chosen fps range: " + selected_min_fps + " to " + selected_max_fps); + } + } + else { + // just pick the widest range; if more than one, pick the one with highest max + int selected_diff = -1; + 
for(int [] fps_range : fps_ranges) { + int min_fps = fps_range[0]; + int max_fps = fps_range[1]; + int diff = max_fps - min_fps; + if( selected_diff == -1 || diff > selected_diff ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + selected_diff = diff; + } + else if( diff == selected_diff && max_fps > selected_max_fps ) { + selected_min_fps = min_fps; + selected_max_fps = max_fps; + selected_diff = diff; + } + } + if( MyDebug.LOG ) + Log.d(TAG, " can't find fps range 30fps or better, so picked widest range: " + selected_min_fps + " to " + selected_max_fps); + } + return new int[]{selected_min_fps, selected_max_fps}; + } + + /* It's important to set a preview FPS using chooseBestPreviewFps() rather than just leaving it to the default, as some devices + * have a poor choice of default - e.g., Nexus 5 and Nexus 6 on original Camera API default to (15000, 15000), which means very dark + * preview and photos in low light, as well as a less smooth framerate in good light. + * See http://stackoverflow.com/questions/18882461/why-is-the-default-android-camera-preview-smoother-than-my-own-camera-preview . + */ + private void setPreviewFps() { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewFps()"); + VideoProfile profile = getVideoProfile(); + List fps_ranges = camera_controller.getSupportedPreviewFpsRange(); + if( fps_ranges == null || fps_ranges.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "fps_ranges not available"); + return; + } + int [] selected_fps = null; + if( camera_controller.isCameraExtension() ) { + // don't set preview fps if using camera extension + // (important not to return here however - still want to call + // camera_controller.clearPreviewFpsRange() to clear a previously set fps) + } + else if( this.is_video ) { + // For Nexus 5 and Nexus 6, we need to set the preview fps using matchPreviewFpsToVideo to avoid problem of dark preview in low light, as described above. 
+ // When the video recording starts, the preview automatically adjusts, but still good to avoid too-dark preview before the user starts recording. + // However I'm wary of changing the behaviour for all devices at the moment, since some devices can be + // very picky about what works when it comes to recording video - e.g., corruption in preview or resultant video. + // So for now, I'm just fixing the Nexus 5/6 behaviour without changing behaviour for other devices. Later we can test on other devices, to see if we can + // use chooseBestPreviewFps() more widely. + // Update for v1.31: we no longer seem to need this - I no longer get a dark preview in photo or video mode if we don't set the fps range; + // but leaving the code as it is, to be safe. + // Update for v1.43: implementing setPreviewFpsRange() for CameraController2 caused the dark preview problem on + // OnePlus 3T. So enable the preview_too_dark for all devices on Camera2. + // Update for v1.43.3: had reports of problems (e.g., setting manual mode with video on camera2) since 1.43. It's unclear + // if there is any benefit to setting the preview fps when we aren't requesting a specific fps value, so seems safest to + // revert to the old behaviour (where CameraController2.setPreviewFpsRange() did nothing). + boolean preview_too_dark = using_android_l || Build.MODEL.equals("Nexus 5") || Build.MODEL.equals("Nexus 6"); + String fps_value = applicationInterface.getVideoFPSPref(); + if( MyDebug.LOG ) { + Log.d(TAG, "preview_too_dark? 
" + preview_too_dark); + Log.d(TAG, "fps_value: " + fps_value); + } + if( fps_value.equals("default") && using_android_l ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't set preview fps for camera2 and default fps video"); + } + else if( fps_value.equals("default") && preview_too_dark ) { + selected_fps = chooseBestPreviewFps(fps_ranges); + } + else { + selected_fps = matchPreviewFpsToVideo(fps_ranges, (int)(profile.videoCaptureRate*1000)); + } + } + else { + // note that setting an fps here in continuous video focus mode causes preview to not restart after taking a photo on Galaxy Nexus + // but we need to do this, to get good light for Nexus 5 or 6 + // we could hardcode behaviour like we do for video, but this is the same way that Google Camera chooses preview fps for photos + // or I could hardcode behaviour for Galaxy Nexus, but since it's an old device (and an obscure bug anyway - most users don't really need continuous focus in photo mode), better to live with the bug rather than complicating the code + // Update for v1.29: this doesn't seem to happen on Galaxy Nexus with continuous picture focus mode, which is what we now use + // Update for v1.31: we no longer seem to need this for old API - I no longer get a dark preview in photo or video mode if we don't set the fps range; + // but leaving the code as it is, to be safe. 
+ // Update for v1.43.3: as noted above, setPreviewFpsRange() was implemented for CameraController2 in v1.43, but no evidence this + // is needed for anything, so thinking about it, best to keep things as they were before for Camera2 + if( using_android_l ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't set preview fps for camera2 and photo"); + } + else { + selected_fps = chooseBestPreviewFps(fps_ranges); + } + } + if( selected_fps != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "set preview fps range: " + Arrays.toString(selected_fps)); + camera_controller.setPreviewFpsRange(selected_fps[0], selected_fps[1]); + } + else if( using_android_l ) { + camera_controller.clearPreviewFpsRange(); + } + } + + public void switchVideo(boolean during_startup, boolean change_user_pref) { + if( MyDebug.LOG ) + Log.d(TAG, "switchVideo()"); + if( camera_controller == null && during_startup ) { + // if during_startup==false at least, we should allow switching to/from video mode if + // camera failed to open (it may be that the failure to open is specific to video mode + // for example, so should allow user to switch back to photo mode - e.g., setting + // video profile to sRGB on Pixel 6 Pro) + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return; + } + if( !is_video && !supports_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "video not supported"); + return; + } + boolean old_is_video = is_video; + if( this.is_video ) { + if( video_recorder != null ) { + stopVideo(false); + } + this.is_video = false; + } + else { + if( this.isOnTimer() ) { + cancelTimer(); + this.is_video = true; + } + else if( this.phase == PHASE_TAKING_PHOTO ) { + // wait until photo taken + if( MyDebug.LOG ) + Log.d(TAG, "wait until photo taken"); + } + else { + this.is_video = true; + } + } + + if( is_video != old_is_video ) { + initZoom(); // needed for when in Panorama mode, but we're in video, when starting up, as zoom should still be shown - see testTakePhotoPanorama + setFocusPref(false); // first restore 
the saved focus for the new photo/video mode; don't do autofocus, as it'll be cancelled when restarting preview + /*if( !is_video ) { + // changing from video to photo mode + setFocusPref(false); // first restore the saved focus for the new photo/video mode; don't do autofocus, as it'll be cancelled when restarting preview + }*/ + + if( change_user_pref ) { + // now save + applicationInterface.setVideoPref(is_video); + } + if( !during_startup ) { + // if during startup, updateFlashForVideo() needs to always be explicitly called anyway + updateFlashForVideo(); + } + + if( !during_startup ) { + if( MyDebug.LOG ) { + String focus_value = current_focus_index != -1 ? supported_focus_values.get(current_focus_index) : null; + Log.d(TAG, "focus_value is " + focus_value); + } + // Although in theory we only need to stop and start preview, which should be faster, reopening the camera allows that to + // run on the background thread, thus not freezing the UI + // Also workaround for bug on Nexus 6 at least where switching to video and back to photo mode causes continuous picture mode to stop - + // at the least, we need to reopen camera when: ( !is_video && focus_value != null && focus_value.equals("focus_mode_continuous_picture") ). + // Lastly, note that it's important to still call setupCamera() when switching between photo and video modes (see comment for setupCamera()). + // So if we ever allow stopping/starting the preview again, we still need to call setupCamera() again. 
+ // Update: and even if we want to go back to just stopping/starting the preview, it's likely still a good idea to reopen the camera when + // switching from/to vendor camera extensions, otherwise risk of hangs/crashes on at least some devices (see note in MainActivity.updateForSettings) + this.reopenCamera(); + } + + /*if( is_video ) { + // changing from photo to video mode + setFocusPref(false); + }*/ + if( is_video ) { + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && applicationInterface.getRecordAudioPref() ) { + // check for audio permission now, rather than when user starts video recording + // we restrict the checks to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener() + // only request permission if record audio preference is enabled + if( MyDebug.LOG ) + Log.d(TAG, "check for record audio permission"); + if( ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ) { + if( MyDebug.LOG ) + Log.d(TAG, "record audio permission not available"); + applicationInterface.requestRecordAudioPermission(); + // we can now carry on - if the user starts recording video, we'll check then if the permission was granted + } + } + } + } + } + + private boolean focusIsVideo() { + if( camera_controller != null ) { + return camera_controller.focusIsVideo(); + } + return false; + } + + public void setFocusPref(boolean auto_focus) { + if( MyDebug.LOG ) + Log.d(TAG, "setFocusPref()"); + String focus_value = applicationInterface.getFocusPref(is_video); + if( !focus_value.isEmpty() ) { + if( MyDebug.LOG ) + Log.d(TAG, "found existing focus_value: " + focus_value); + if( !updateFocus(focus_value, true, false, auto_focus) ) { // don't need to save, as this is the value that's already saved + if( MyDebug.LOG ) + Log.d(TAG, "focus value no longer supported!"); + // don't save, as we may be in a temporary mode where the saved focus isn't supported - e.g., this could happen if 
switching to a specific physical camera + updateFocus(0, true, false, auto_focus); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "found no existing focus_value"); + // here we set the default values for focus mode + // note if updating default focus value for photo mode, also update MainActivityTest.setToDefault() + if( !updateFocus(is_video ? "focus_mode_continuous_video" : "focus_mode_continuous_picture", true, true, auto_focus) ) { + if( MyDebug.LOG ) + Log.d(TAG, "continuous focus not supported, so fall back to first"); + updateFocus(0, true, true, auto_focus); + } + } + } + + /** If in video mode, update the focus mode if necessary to be continuous video focus mode (if that mode is available). + * Normally we remember the user-specified focus value. And even setting the default is done in setFocusPref(). + * This method is used as a workaround for a bug on Samsung Galaxy S5 with UHD, where if the user switches to another + * (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the + * video is corrupted. + * @return If the focus mode is changed, this returns the previous focus mode; else it returns null. 
+ */ + private String updateFocusForVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "updateFocusForVideo()"); + String old_focus_mode = null; + if( this.supported_focus_values != null && camera_controller != null && is_video ) { + boolean focus_is_video = focusIsVideo(); + if( MyDebug.LOG ) { + Log.d(TAG, "focus_is_video: " + focus_is_video + " , is_video: " + is_video); + } + if( focus_is_video != is_video ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to change focus mode"); + old_focus_mode = this.getCurrentFocusValue(); + updateFocus("focus_mode_continuous_video", true, false, false); // don't save, as we're just changing focus mode temporarily for the Samsung S5 video hack + } + } + return old_focus_mode; + } + + /** If we've switched to video mode, ensures that we're not in a flash mode other than torch. + * This only changes the internal user setting, we don't tell the application interface to change + * the flash mode. + */ + private void updateFlashForVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "updateFlashForVideo()"); + if( is_video ) { + // check flash is not auto or on + String current_flash = getCurrentFlashValue(); + if( current_flash != null && !isFlashSupportedForVideo(current_flash) ) { + if( MyDebug.LOG ) + Log.d(TAG, "disable flash for video mode"); + current_flash_index = -1; // reset to initial, to prevent toast from showing + updateFlash("flash_off", false); + } + } + } + + /** Whether the flash mode is supported in video mode. 
+ */ + public static boolean isFlashSupportedForVideo(String flash_mode) { + return flash_mode != null && ( flash_mode.equals("flash_off") || flash_mode.equals("flash_torch") || flash_mode.equals("flash_frontscreen_torch") ); + } + + public String getErrorFeatures(VideoProfile profile) { + boolean was_4k = false, was_bitrate = false, was_fps = false, was_slow_motion = false; + if( profile.videoFrameWidth == 3840 && profile.videoFrameHeight == 2160 && applicationInterface.getForce4KPref() ) { + was_4k = true; + } + String bitrate_value = applicationInterface.getVideoBitratePref(); + if( !bitrate_value.equals("default") ) { + was_bitrate = true; + } + String fps_value = applicationInterface.getVideoFPSPref(); + if( applicationInterface.getVideoCaptureRateFactor() < 1.0f-1.0e-5f ) { + was_slow_motion = true; + } + else if( !fps_value.equals("default") ) { + was_fps = true; + } + String features = ""; + if( was_4k || was_bitrate || was_fps || was_slow_motion ) { + if( was_4k ) { + features = getContext().getResources().getString(R.string.error_features_4k); + } + if( was_bitrate ) { + if( features.isEmpty() ) + features = getContext().getResources().getString(R.string.error_features_bitrate); + else + features += "/" + getContext().getResources().getString(R.string.error_features_bitrate); + } + if( was_fps ) { + if( features.isEmpty() ) + features = getContext().getResources().getString(R.string.error_features_frame_rate); + else + features += "/" + getContext().getResources().getString(R.string.error_features_frame_rate); + } + if( was_slow_motion ) { + if( features.isEmpty() ) + features = getContext().getResources().getString(R.string.error_features_slow_motion); + else + features += "/" + getContext().getResources().getString(R.string.error_features_slow_motion); + } + } + return features; + } + + public void updateFlash(String flash_value) { + if( MyDebug.LOG ) + Log.d(TAG, "updateFlash(): " + flash_value); + if( this.phase == PHASE_TAKING_PHOTO && !is_video ) { 
+ // just to be safe - risk of cancelling the autofocus before taking a photo, or otherwise messing things up + if( MyDebug.LOG ) + Log.d(TAG, "currently taking a photo"); + return; + } + updateFlash(flash_value, true); + } + + private boolean updateFlash(String flash_value, boolean save) { + if( MyDebug.LOG ) + Log.d(TAG, "updateFlash(): " + flash_value); + if( supported_flash_values != null ) { + int new_flash_index = supported_flash_values.indexOf(flash_value); + if( MyDebug.LOG ) + Log.d(TAG, "new_flash_index: " + new_flash_index); + if( new_flash_index != -1 ) { + updateFlash(new_flash_index, save); + return true; + } + } + return false; + } + + public void cycleFlash(boolean skip_torch, boolean save) { + if( MyDebug.LOG ) + Log.d(TAG, "cycleFlash()"); + if( supported_flash_values != null ) { + int new_flash_index = (current_flash_index+1) % supported_flash_values.size(); + int start_index = new_flash_index; + boolean done = false; + while( !done ) { + done = true; + + if( skip_torch && supported_flash_values.get(new_flash_index).equals("flash_torch") ) { + if( MyDebug.LOG ) + Log.d(TAG, "cycle past torch"); + new_flash_index = (new_flash_index+1) % supported_flash_values.size(); + // don't bother setting done to false as we shouldn't have two torches in a row... 
+ } + + if( is_video ) { + // check supported for video + String new_flash_value = supported_flash_values.get(new_flash_index); + if( !isFlashSupportedForVideo(new_flash_value) ) { + if( MyDebug.LOG ) + Log.d(TAG, "cycle past flash mode not supported for video: " + new_flash_value); + new_flash_index = (new_flash_index+1) % supported_flash_values.size(); + done = false; + } + } + + if( !done && new_flash_index == start_index ) { + // just in case, prevent infinite loop + Log.e(TAG, "flash looped to start - couldn't find valid flash!"); + break; + } + } + + if( done ) { + updateFlash(new_flash_index, save); + } + } + } + + private void updateFlash(int new_flash_index, boolean save) { + if( MyDebug.LOG ) + Log.d(TAG, "updateFlash(): " + new_flash_index); + // updates the Flash button, and Flash camera mode + if( supported_flash_values != null && new_flash_index != current_flash_index ) { + boolean initial = current_flash_index==-1; + current_flash_index = new_flash_index; + if( MyDebug.LOG ) + Log.d(TAG, " current_flash_index is now " + current_flash_index + " (initial " + initial + ")"); + + //Activity activity = (Activity)this.getContext(); + String [] flash_entries = getResources().getStringArray(R.array.flash_entries); + //String [] flash_icons = getResources().getStringArray(R.array.flash_icons); + String flash_value = supported_flash_values.get(current_flash_index); + if( MyDebug.LOG ) + Log.d(TAG, " flash_value: " + flash_value); + String [] flash_values = getResources().getStringArray(R.array.flash_values); + for(int i=0;i 0 ) { // check in case this isn't cancelled by time we take the photo + applicationInterface.timerBeep(remaining_time); + } + remaining_time -= 1000; + } + } + beepTimer.schedule(beepTimerTask = new BeepTimerTask(), 0, 1000); + } + + private void flashVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "flashVideo"); + // getFlashValue() may return "" if flash not supported! 
+ String flash_value = camera_controller.getFlashValue(); + if( flash_value.isEmpty() ) + return; + String flash_value_ui = getCurrentFlashValue(); + if( flash_value_ui == null ) + return; + if( flash_value_ui.equals("flash_torch") ) + return; + if( flash_value.equals("flash_torch") ) { + // shouldn't happen? but set to what the UI is + cancelAutoFocus(); + camera_controller.setFlashValue(flash_value_ui); + return; + } + // turn on torch + cancelAutoFocus(); + camera_controller.setFlashValue("flash_torch"); + try { + Thread.sleep(100); + } + catch(InterruptedException e) { + MyDebug.logStackTrace(TAG, "InterruptedException from sleep", e); + } + // turn off torch + cancelAutoFocus(); + camera_controller.setFlashValue(flash_value_ui); + } + + private void onVideoInfo(int what, int extra) { + if( MyDebug.LOG ) + Log.d(TAG, "onVideoInfo: " + what + " extra: " + extra); + // n.b., we shouldn't refactor "Build.VERSION.SDK_INT >= Build.VERSION_CODES.O" to a single variable, as it means we'll then get the Android + // warnings of "Call requires API level 26" + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING && video_restart_on_max_filesize ) { + if( MyDebug.LOG ) + Log.d(TAG, "seamless restart due to max filesize approaching - try setNextOutputFile"); + if( video_recorder == null ) { + // just in case? + if( MyDebug.LOG ) + Log.d(TAG, "video_recorder is null!"); + } + else if( applicationInterface.getVideoMaxDurationPref() > 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "don't use setNextOutputFile with setMaxDuration"); + // using setNextOutputFile with setMaxDuration seems to be buggy: + // OnePlus3T: setMaxDuration is ignored if we hit max filesize and call setNextOutputFile before + // this would cause testTakeVideoMaxFileSize3 to fail + // Nokia 8: the camera server dies when restarting with setNextOutputFile, if setMaxDuration has been set! 
+ } + else { + // First we need to see if there's enough free storage left - it might be that we hit the max filesize that was + // set in MyApplicationInterface.getVideoMaxFileSizePref() due to the remaining disk space. + // Potentially we could just modify getVideoMaxFileSizePref() to not set VideoMaxFileSize.auto_restart if the + // max file size was set due to remaining disk space rather than user preference, but worth rechecking in case + // disk space has been freed up; also we might encounter a device limit on max filesize that's less than the + // remaining disk space (in which case, we do want to restart). + // See testTakeVideoAvailableMemory(). + boolean has_free_space = false; + try { + // don't care about the return, we're just looking for NoFreeStorageException + applicationInterface.getVideoMaxFileSizePref(); + has_free_space = true; + } + catch(NoFreeStorageException e) { + if( MyDebug.LOG ) + Log.d(TAG, "don't call setNextOutputFile, not enough space remaining"); + } + + final VideoProfile profile = getVideoProfile(); + if( profile.fileExtension.equals("3gp") ) { + // at least on Nokia 8 with Camera2, 3gpp format crashes with IllegalStateException in setNextOutputFile below + // if we try to do seamless restart + if( MyDebug.LOG ) + Log.d(TAG, "seamless restart not supported for 3gpp"); + } + else if( has_free_space ) { + VideoFileInfo info = createVideoFile(profile.fileExtension); + // only assign to videoFileInfo after setNextOutputFile in case it throws an exception (in which case, + // we don't want to overwrite the current videoFileInfo). 
+ if( info != null ) { + try { + //if( true ) + // throw new IOException(); // test + if( info.video_method == ApplicationInterface.VideoMethod.FILE ) { + video_recorder.setNextOutputFile(new File(info.video_filename)); + } + else { + video_recorder.setNextOutputFile(info.video_pfd_saf.getFileDescriptor()); + } + if( MyDebug.LOG ) + Log.d(TAG, "setNextOutputFile succeeded"); + test_called_next_output_file = true; + nextVideoFileInfo = info; + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to setNextOutputFile", e); + info.close(); + } + } + } + } + // no need to explicitly stop if createVideoFile() or setNextOutputFile() fails - just let video reach max filesize + // normally + } + else if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && what == MediaRecorder.MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED && video_restart_on_max_filesize ) { + if( MyDebug.LOG ) + Log.d(TAG, "seamless restart with setNextOutputFile has now occurred"); + if( nextVideoFileInfo == null ) { + Log.e(TAG, "received MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED but nextVideoFileInfo is null"); + } + else { + videoFileInfo.close(); + video_time_last_maxfilesize_restart = getVideoTime(false); + applicationInterface.restartedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename); + videoFileInfo = nextVideoFileInfo; + nextVideoFileInfo = null; + test_started_next_output_file = true; + } + } + else if( what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED && video_restart_on_max_filesize ) { + // note, if the restart was handled via MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING, then we shouldn't ever + // receive MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED + if( MyDebug.LOG ) + Log.d(TAG, "restart due to max filesize reached - do manual restart"); + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the 
same time + // but still need to check that the camera hasn't closed + if( camera_controller != null ) + restartVideo(true); + else { + if( MyDebug.LOG ) + Log.d(TAG, "don't restart video, as already cancelled"); + } + } + }); + } + else if( what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED ) { + if( MyDebug.LOG ) + Log.d(TAG, "reached max duration - see if we need to restart?"); + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the same time + // but still need to check that the camera hasn't closed + if( camera_controller != null ) + restartVideo(false); // n.b., this will only restart if remaining_restart_video > 0 + else { + if( MyDebug.LOG ) + Log.d(TAG, "don't restart video, as already cancelled"); + } + } + }); + } + else if( what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED ) { + stopVideo(false); + } + applicationInterface.onVideoInfo(what, extra); // call this last, so that toasts show up properly (as we're hogging the UI thread here, and mediarecorder takes time to stop) + } + + private void onVideoError(int what, int extra) { + if( MyDebug.LOG ) + Log.d(TAG, "onVideoError: " + what + " extra: " + extra); + stopVideo(false); + applicationInterface.onVideoError(what, extra); // call this last, so that toasts show up properly (as we're hogging the UI thread here, and mediarecorder takes time to stop) + } + + /** Initiate "take picture" command. In video mode this means starting video command. In photo mode this may involve first + * autofocusing. + * @param photo_snapshot If true, then the user has requested taking a photo whilst video + * recording. If false, either take a photo or start/stop video depending + * on the current mode. + * @param continuous_fast_burst If true, then start a continuous fast burst. 
+ */ + private void takePicture(boolean max_filesize_restart, boolean photo_snapshot, boolean continuous_fast_burst) { + if( MyDebug.LOG ) + Log.d(TAG, "takePicture"); + //this.thumbnail_anim = false; + if( !is_video || photo_snapshot ) + this.phase = PHASE_TAKING_PHOTO; + else { + if( phase == PHASE_TIMER ) + this.phase = PHASE_NORMAL; // in case we were previously on timer for starting the video + } + synchronized( this ) { + // synchronise for consistency (keep FindBugs happy) + take_photo_after_autofocus = false; + } + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + this.phase = PHASE_NORMAL; + applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true); + return; + } + if( !this.has_surface ) { + if( MyDebug.LOG ) + Log.d(TAG, "preview surface not yet available"); + this.phase = PHASE_NORMAL; + applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true); + return; + } + if( preview_started_state == PREVIEW_IS_STARTING ) { + // just in case? 
+ if( MyDebug.LOG ) + Log.d(TAG, "don't take photo, preview is still opening"); + this.phase = PHASE_NORMAL; + applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true); + return; + } + + boolean store_location = applicationInterface.getGeotaggingPref(); + if( store_location ) { + boolean require_location = applicationInterface.getRequireLocationPref(); + if( require_location ) { + if( applicationInterface.getLocation() != null ) { + // fine, we have location + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "location data required, but not available"); + showToast(null, R.string.location_not_available, true); + if( !is_video || photo_snapshot ) + this.phase = PHASE_NORMAL; + applicationInterface.cameraInOperation(false, false); + if( is_video ) + applicationInterface.cameraInOperation(false, true); + return; + } + } + } + + if( is_video && !photo_snapshot ) { + if( MyDebug.LOG ) + Log.d(TAG, "start video recording"); + startVideoRecording(max_filesize_restart); + return; + } + + takePhoto(false, continuous_fast_burst); + if( MyDebug.LOG ) + Log.d(TAG, "takePicture exit"); + } + + private VideoFileInfo createVideoFile(String extension) { + if( MyDebug.LOG ) + Log.d(TAG, "createVideoFile"); + VideoFileInfo video_file_info = null; + ParcelFileDescriptor video_pfd_saf = null; + try { + ApplicationInterface.VideoMethod method = applicationInterface.createOutputVideoMethod(); + Uri video_uri = null; + String video_filename = null; + if( MyDebug.LOG ) + Log.d(TAG, "method? 
" + method); + if( method == ApplicationInterface.VideoMethod.FILE ) { + /*if( true ) + throw new IOException(); // test*/ + File videoFile = applicationInterface.createOutputVideoFile(extension); + video_filename = videoFile.getAbsolutePath(); + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + video_filename); + } + else { + Uri uri; + if( method == ApplicationInterface.VideoMethod.SAF ) { + uri = applicationInterface.createOutputVideoSAF(extension); + } + else if( method == ApplicationInterface.VideoMethod.MEDIASTORE ) { + uri = applicationInterface.createOutputVideoMediaStore(extension); + } + else { + uri = applicationInterface.createOutputVideoUri(); + } + if( MyDebug.LOG ) + Log.d(TAG, "save to: " + uri); + video_pfd_saf = getContext().getContentResolver().openFileDescriptor(uri, "rw"); + video_uri = uri; + } + + video_file_info = new VideoFileInfo(method, video_uri, video_filename, video_pfd_saf); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "couldn't create media video file; check storage permissions?", e); + } + finally { + if( video_file_info == null && video_pfd_saf != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "failed, so clean up video_pfd_saf"); + try { + video_pfd_saf.close(); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to close video_pfd_saf", e); + } + } + } + return video_file_info; + } + + /** Start video recording. 
+ */ + private void startVideoRecording(final boolean max_filesize_restart) { + if( MyDebug.LOG ) + Log.d(TAG, "startVideoRecording"); + focus_success = FOCUS_DONE; // clear focus rectangle (don't do for taking photos yet) + test_called_next_output_file = false; + test_started_next_output_file = false; + nextVideoFileInfo = null; + final VideoProfile profile = getVideoProfile(); + VideoFileInfo info = createVideoFile(profile.fileExtension); + if( info == null ) { + videoFileInfo = new VideoFileInfo(); + applicationInterface.onFailedCreateVideoFileError(); + applicationInterface.cameraInOperation(false, true); + } + else { + videoFileInfo = info; + if( MyDebug.LOG ) { + Log.d(TAG, "current_video_quality: " + this.video_quality_handler.getCurrentVideoQualityIndex()); + if (this.video_quality_handler.getCurrentVideoQualityIndex() != -1) + Log.d(TAG, "current_video_quality value: " + this.video_quality_handler.getCurrentVideoQuality()); + Log.d(TAG, "resolution " + profile.videoFrameWidth + " x " + profile.videoFrameHeight); + Log.d(TAG, "bit rate " + profile.videoBitRate); + } + + boolean enable_sound = applicationInterface.getShutterSoundPref(); + if( MyDebug.LOG ) + Log.d(TAG, "enable_sound? 
" + enable_sound); + camera_controller.enableShutterSound(enable_sound); // Camera2 API can disable video sound too + + MediaRecorder local_video_recorder = new MediaRecorder(); + this.camera_controller.unlock(); + if( MyDebug.LOG ) + Log.d(TAG, "set video listeners"); + + local_video_recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() { + @Override + public void onInfo(MediaRecorder mr, int what, int extra) { + if( MyDebug.LOG ) + Log.d(TAG, "MediaRecorder info: " + what + " extra: " + extra); + final int final_what = what; + final int final_extra = extra; + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the same time + onVideoInfo(final_what, final_extra); + } + }); + } + }); + local_video_recorder.setOnErrorListener(new MediaRecorder.OnErrorListener() { + public void onError(MediaRecorder mr, int what, int extra) { + final int final_what = what; + final int final_extra = extra; + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the same time + onVideoError(final_what, final_extra); + } + }); + } + }); + + camera_controller.initVideoRecorderPrePrepare(local_video_recorder); + if( profile.no_audio_permission ) { + showToast(null, R.string.permission_record_audio_not_available, true); + } + + boolean store_location = applicationInterface.getGeotaggingPref(); + if( store_location && applicationInterface.getLocation() != null ) { + Location location = applicationInterface.getLocation(); + // don't log location, in case of privacy! 
+ local_video_recorder.setLocation((float)location.getLatitude(), (float)location.getLongitude()); + } + + if( MyDebug.LOG ) + Log.d(TAG, "copy video profile to media recorder"); + + profile.copyToMediaRecorder(local_video_recorder); + + boolean told_app_starting = false; // true if we called applicationInterface.startingVideo() + try { + ApplicationInterface.VideoMaxFileSize video_max_filesize = applicationInterface.getVideoMaxFileSizePref(); + long max_filesize = video_max_filesize.max_filesize; + //max_filesize = 15*1024*1024; // test + if( max_filesize > 0 ) { + if( MyDebug.LOG ) + Log.d(TAG, "set max file size of: " + max_filesize); + try { + local_video_recorder.setMaxFileSize(max_filesize); + } + catch(RuntimeException e) { + // Google Camera warns this can happen - for example, if 64-bit filesizes not supported + MyDebug.logStackTrace(TAG, "failed to set max filesize of: " + max_filesize, e); + } + } + video_restart_on_max_filesize = video_max_filesize.auto_restart; // note, we set this even if max_filesize==0, as it will still apply when hitting device max filesize limit + + // handle restart timer + long video_max_duration = applicationInterface.getVideoMaxDurationPref(); + if( MyDebug.LOG ) + Log.d(TAG, "user preference video_max_duration: " + video_max_duration); + if( max_filesize_restart ) { + if( video_max_duration > 0 ) { + video_max_duration -= video_accumulated_time; + // this should be greater or equal to min_safe_restart_video_time, as too short remaining time should have been caught in restartVideo() + if( video_max_duration < min_safe_restart_video_time ) { + if( MyDebug.LOG ) + Log.e(TAG, "trying to restart video with too short a time: " + video_max_duration); + video_max_duration = min_safe_restart_video_time; + } + } + } + else { + video_accumulated_time = 0; + } + if( MyDebug.LOG ) + Log.d(TAG, "actual video_max_duration: " + video_max_duration); + local_video_recorder.setMaxDuration((int)video_max_duration); + + if( 
videoFileInfo.video_method == ApplicationInterface.VideoMethod.FILE ) { + local_video_recorder.setOutputFile(videoFileInfo.video_filename); + } + else { + local_video_recorder.setOutputFile(videoFileInfo.video_pfd_saf.getFileDescriptor()); + } + applicationInterface.cameraInOperation(true, true); + told_app_starting = true; + applicationInterface.startingVideo(); + /*if( true ) // test + throw new IOException();*/ + cameraSurface.setVideoRecorder(local_video_recorder); + + local_video_recorder.setOrientationHint(getImageVideoRotation()); + if( MyDebug.LOG ) + Log.d(TAG, "about to prepare video recorder"); + + local_video_recorder.prepare(); + if( test_video_ioexception ) { + if( MyDebug.LOG ) + Log.d(TAG, "test_video_ioexception is true"); + throw new IOException(); + } + + boolean want_photo_video_recording = supportsPhotoVideoRecording() && applicationInterface.usePhotoVideoRecording(); + + camera_controller.initVideoRecorderPostPrepare(local_video_recorder, want_photo_video_recording); + if( test_video_cameracontrollerexception ) { + if( MyDebug.LOG ) + Log.d(TAG, "test_video_cameracontrollerexception is true"); + throw new CameraControllerException(); + } + + if( MyDebug.LOG ) + Log.d(TAG, "about to start video recorder"); + + try { + local_video_recorder.start(); + if( test_video_failure ) { + if( MyDebug.LOG ) + Log.d(TAG, "test_video_failure is true"); + throw new RuntimeException(); + } + this.video_recorder = local_video_recorder; + videoRecordingStarted(max_filesize_restart); + } + catch(RuntimeException e) { + // needed for emulator at least - although MediaRecorder not meant to work with emulator, it's good to fail gracefully + MyDebug.logStackTrace(TAG, "runtime exception starting video recorder", e); + this.video_recorder = local_video_recorder; // still assign, so failedToStartVideoRecorder() will release the video_recorder + // told_app_starting must be true if we're here + applicationInterface.stoppingVideo(); + failedToStartVideoRecorder(profile); 
+ } + + /*final MediaRecorder local_video_recorder_f = local_video_recorder; + new AsyncTask() { + private static final String TAG = "video_recorder.start"; + + @Override + protected Boolean doInBackground(Void... voids) { + if( MyDebug.LOG ) + Log.d(TAG, "doInBackground, async task: " + this); + try { + local_video_recorder_f.start(); + } + catch(RuntimeException e) { + // needed for emulator at least - although MediaRecorder not meant to work with emulator, it's good to fail gracefully + MyDebug.logStackTrace(TAG, "runtime exception starting video recorder", e); + return false; + } + return true; + } + + @Override + protected void onPostExecute(Boolean success) { + if( MyDebug.LOG ) { + Log.d(TAG, "onPostExecute, async task: " + this); + Log.d(TAG, "success: " + success); + } + // still assign even if success==false, so failedToStartVideoRecorder() will release the video_recorder + Preview.this.video_recorder = local_video_recorder_f; + if( success ) { + videoRecordingStarted(max_filesize_restart); + } + else { + // told_app_starting must be true if we're here + applicationInterface.stoppingVideo(); + failedToStartVideoRecorder(profile); + } + } + }.execute();*/ + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to save video", e); + this.video_recorder = local_video_recorder; + if( told_app_starting ) { + applicationInterface.stoppingVideo(); + } + applicationInterface.onFailedCreateVideoFileError(); + video_recorder.reset(); + video_recorder.release(); + video_recorder = null; + video_recorder_is_paused = false; + applicationInterface.deleteUnusedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename); + videoFileInfo = new VideoFileInfo(); + applicationInterface.cameraInOperation(false, true); + this.reconnectCamera(true); + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "camera exception starting video recorder", e); + this.video_recorder = local_video_recorder; // still assign, so 
failedToStartVideoRecorder() will release the video_recorder + if( told_app_starting ) { + applicationInterface.stoppingVideo(); + } + failedToStartVideoRecorder(profile); + } + catch(NoFreeStorageException e) { + MyDebug.logStackTrace(TAG, "nofreestorageexception starting video recorder", e); + this.video_recorder = local_video_recorder; + if( told_app_starting ) { + applicationInterface.stoppingVideo(); + } + video_recorder.reset(); + video_recorder.release(); + video_recorder = null; + video_recorder_is_paused = false; + applicationInterface.deleteUnusedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename); + videoFileInfo = new VideoFileInfo(); + applicationInterface.cameraInOperation(false, true); + this.reconnectCamera(true); + this.showToast(null, R.string.video_no_free_space); + } + } + } + + private void videoRecordingStarted(boolean max_filesize_restart) { + if( MyDebug.LOG ) + Log.d(TAG, "video recorder started"); + video_recorder_is_paused = false; + + if( this.using_face_detection && !this.using_android_l ) { + if( MyDebug.LOG ) + Log.d(TAG, "restart face detection"); + // doing MediaRecorder.start() seems to stop face detection on old Camera API + camera_controller.startFaceDetection(); + faces_detected = null; + } + + video_start_time = System.currentTimeMillis(); + video_start_time_set = true; + video_time_last_maxfilesize_restart = max_filesize_restart ? video_accumulated_time : 0; + applicationInterface.startedVideo(); + // Don't send intent for ACTION_MEDIA_SCANNER_SCAN_FILE yet - wait until finished, so we get completed file. + // Don't do any further calls after applicationInterface.startedVideo() that might throw an error - instead video error + // should be handled by including a call to stopVideo() (since the video_recorder has started). 
+ + // handle restarts + if( remaining_restart_video == 0 && !max_filesize_restart ) { + remaining_restart_video = applicationInterface.getVideoRestartTimesPref(); + if( MyDebug.LOG ) + Log.d(TAG, "initialised remaining_restart_video to: " + remaining_restart_video); + } + + if( applicationInterface.getVideoFlashPref() && supportsFlash() ) { + class FlashVideoTimerTask extends TimerTask { + public void run() { + if( MyDebug.LOG ) + Log.e(TAG, "FlashVideoTimerTask"); + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the same time + // but still need to check that the camera hasn't closed or the task halted, since TimerTask.run() started + if( camera_controller != null && flashVideoTimerTask != null ) + flashVideo(); + else { + if( MyDebug.LOG ) + Log.d(TAG, "flashVideoTimerTask: don't flash video, as already cancelled"); + } + } + }); + } + } + flashVideoTimer.schedule(flashVideoTimerTask = new FlashVideoTimerTask(), 0, 1000); + } + + if( applicationInterface.getVideoLowPowerCheckPref() ) { + /* When a device shuts down due to power off, the application will receive shutdown signals, and normally the video + * should stop and be valid. However it can happen that the video ends up corrupted (I've had people telling me this + * can happen; Googling finds plenty of stories of this happening on Android devices). I think the issue is that for + * very large videos, a lot of time is spent processing during the MediaRecorder.stop() call - if that doesn't complete + * by the time the device switches off, the video may be corrupt. + * So we add an extra safety net - devices typically turn off at about 1%, but we stop video at 3% to be safe. The user + * can try recording more videos after that if they want, but this reduces the risk that really long videos are entirely + * lost. 
+ */ + class BatteryCheckVideoTimerTask extends TimerTask { + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "BatteryCheckVideoTimerTask"); + + // only check periodically - unclear if checking is costly in any way + // note that it's fine to call registerReceiver repeatedly - we pass a null receiver, so this is fine as a "one shot" use + Intent batteryStatus = getContext().registerReceiver(null, battery_ifilter); + int battery_level = batteryStatus.getIntExtra(BatteryManager.EXTRA_LEVEL, -1); + int battery_scale = batteryStatus.getIntExtra(BatteryManager.EXTRA_SCALE, -1); + double battery_frac = battery_level/(double)battery_scale; + if( MyDebug.LOG ) + Log.d(TAG, "batteryCheckVideoTimerTask: battery level at: " + battery_frac); + + if( battery_frac <= 0.03 ) { + if( MyDebug.LOG ) + Log.d(TAG, "batteryCheckVideoTimerTask: battery at critical level, switching off video"); + Activity activity = (Activity)Preview.this.getContext(); + activity.runOnUiThread(new Runnable() { + public void run() { + // we run on main thread to avoid problem of camera closing at the same time + // but still need to check that the camera hasn't closed or the task halted, since TimerTask.run() started + if( camera_controller != null && batteryCheckVideoTimerTask != null ) { + stopVideo(false); + String toast = getContext().getResources().getString(R.string.video_power_critical); + showToast(null, toast); // show the toast afterwards, as we're hogging the UI thread here, and media recorder takes time to stop + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "batteryCheckVideoTimerTask: don't stop video, as already cancelled"); + } + } + }); + } + } + } + final long battery_check_interval_ms = 60 * 1000; + // Since we only first check after battery_check_interval_ms, this means users will get some video recorded even if the battery is already too low. 
+ // But this is fine, as typically short videos won't be corrupted if the device shuts off, and good to allow users to try to record a bit more if they want. + batteryCheckVideoTimer.schedule(batteryCheckVideoTimerTask = new BatteryCheckVideoTimerTask(), battery_check_interval_ms, battery_check_interval_ms); + } + } + + private void failedToStartVideoRecorder(VideoProfile profile) { + applicationInterface.onVideoRecordStartError(profile); + video_recorder.reset(); + video_recorder.release(); + video_recorder = null; + video_recorder_is_paused = false; + applicationInterface.deleteUnusedVideo(videoFileInfo.video_method, videoFileInfo.video_uri, videoFileInfo.video_filename); + videoFileInfo = new VideoFileInfo(); + applicationInterface.cameraInOperation(false, true); + this.reconnectCamera(true); + } + + /** Pauses the video recording - or unpauses if already paused. + * This does nothing if isVideoRecording() returns false, or not on Android 7 or higher. + */ + public void pauseVideo() { + if( MyDebug.LOG ) + Log.d(TAG, "pauseVideo"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) { + Log.e(TAG, "pauseVideo called but requires Android N"); + } + else if( this.isVideoRecording() ) { + if( video_recorder_is_paused ) { + if( MyDebug.LOG ) + Log.d(TAG, "resuming..."); + video_recorder.resume(); + video_recorder_is_paused = false; + video_start_time = System.currentTimeMillis(); + this.showToast(pause_video_toast, R.string.video_resume, true); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "pausing..."); + video_recorder.pause(); + video_recorder_is_paused = true; + long last_time = System.currentTimeMillis() - video_start_time; + video_accumulated_time += last_time; + if( MyDebug.LOG ) { + Log.d(TAG, "last_time: " + last_time); + Log.d(TAG, "video_accumulated_time is now: " + video_accumulated_time); + } + this.showToast(pause_video_toast, R.string.video_pause, true); + } + } + else { + Log.e(TAG, "pauseVideo called but not video recording"); + } + } + + /** 
Take photo. The caller should already have set the phase to PHASE_TAKING_PHOTO.
     */
    private void takePhoto(boolean skip_autofocus, final boolean continuous_fast_burst) {
        if( MyDebug.LOG )
            Log.d(TAG, "takePhoto");
        if( camera_controller == null ) {
            Log.e(TAG, "camera not opened in takePhoto!");
            return;
        }
        if( preview_started_state == PREVIEW_IS_STARTING ) {
            // just in case?
            if( MyDebug.LOG )
                Log.d(TAG, "don't take photo, preview is still opening");
            return;
        }
        applicationInterface.cameraInOperation(true, false);
        String current_ui_focus_value = getCurrentFocusValue();
        if( MyDebug.LOG )
            Log.d(TAG, "current_ui_focus_value is " + current_ui_focus_value);

        if( autofocus_in_continuous_mode ) {
            if( MyDebug.LOG )
                Log.d(TAG, "continuous mode where user touched to focus");

            boolean wait_for_focus;

            synchronized(this) {
                // as below, if an autofocus is in progress, then take photo when it's completed
                if( focus_success == FOCUS_WAITING ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "autofocus_in_continuous_mode: take photo after current focus");
                    wait_for_focus = true;
                    take_photo_after_autofocus = true;
                }
                else {
                    // when autofocus_in_continuous_mode==true, it means the user recently touched to focus in continuous focus mode, so don't do another focus
                    if( MyDebug.LOG )
                        Log.d(TAG, "autofocus_in_continuous_mode: no need to refocus");
                    wait_for_focus = false;
                }
            }

            // call CameraController outside the lock
            if( wait_for_focus ) {
                camera_controller.setCaptureFollowAutofocusHint(true);
            }
            else {
                takePhotoWhenFocused(continuous_fast_burst);
            }
        }
        else if( camera_controller.focusIsContinuous() ) {
            boolean optimise_for_latency = applicationInterface.optimiseFocusForLatency();
            if( optimise_for_latency ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "take photo under continuous focus mode [optimise for latency]");
                takePhotoWhenFocused(continuous_fast_burst);
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "call autofocus for continuous focus mode [optimise for quality]");
                // we call via autoFocus(), to avoid risk of taking photo while the continuous focus is focusing - risk of blurred photo, also sometimes get bug in such situations where we end up repeatedly focusing
                // this is the case even if skip_autofocus is true (as we still can't guarantee that continuous focusing might be occurring)
                // note: if the user touches to focus in continuous mode, the camera controller may be in auto focus mode, so we should only enter this codepath if the camera_controller is in continuous focus mode
                CameraController.AutoFocusCallback autoFocusCallback = new CameraController.AutoFocusCallback() {
                    @Override
                    public void onAutoFocus(boolean success) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "continuous mode autofocus complete: " + success);
                        takePhotoWhenFocused(continuous_fast_burst);
                    }
                };
                camera_controller.autoFocus(autoFocusCallback, true);
            }
        }
        else if( skip_autofocus || this.recentlyFocused() ) {
            if( MyDebug.LOG ) {
                if( skip_autofocus ) {
                    Log.d(TAG, "skip_autofocus flag set");
                }
                else {
                    Log.d(TAG, "recently focused successfully, so no need to refocus");
                }
            }
            takePhotoWhenFocused(continuous_fast_burst);
        }
        else if( current_ui_focus_value != null && ( current_ui_focus_value.equals("focus_mode_auto") || current_ui_focus_value.equals("focus_mode_macro") ) ) {
            boolean wait_for_focus;
            // n.b., we check focus_value rather than camera_controller.supportsAutoFocus(), as we want to discount focus_mode_locked
            synchronized(this) {
                if( focus_success == FOCUS_WAITING ) {
                    // Needed to fix bug (on Nexus 6, old camera API): if flash was on, pointing at a dark scene, and we take photo when already autofocusing, the autofocus never returned so we got stuck!
                    // In general, probably a good idea to not redo a focus - just use the one that's already in progress
                    if( MyDebug.LOG )
                        Log.d(TAG, "take photo after current focus");
                    wait_for_focus = true;
                    take_photo_after_autofocus = true;
                }
                else {
                    wait_for_focus = false;
                    focus_success = FOCUS_DONE; // clear focus rectangle for new refocus
                }
            }

            // call CameraController outside the lock
            if( wait_for_focus ) {
                camera_controller.setCaptureFollowAutofocusHint(true);
            }
            else {
                CameraController.AutoFocusCallback autoFocusCallback = new CameraController.AutoFocusCallback() {
                    @Override
                    public void onAutoFocus(boolean success) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "autofocus complete: " + success);
                        ensureFlashCorrect(); // need to call this in case user takes picture before startup focus completes!
                        prepareAutoFocusPhoto();
                        takePhotoWhenFocused(continuous_fast_burst);
                    }
                };
                if( MyDebug.LOG )
                    Log.d(TAG, "start autofocus to take picture");
                camera_controller.autoFocus(autoFocusCallback, true);
                count_cameraAutoFocus++;
            }
        }
        else {
            // remaining focus modes (e.g., infinity, fixed, locked without pending focus): just take the photo
            takePhotoWhenFocused(continuous_fast_burst);
        }
    }

    /** Should be called when taking a photo immediately after an autofocus.
     * This is needed for a workaround for Camera2 bug (at least on Nexus 6) where photos sometimes come out dark when using flash
     * auto, when the flash fires. This happens when taking a photo in autofocus mode (including when continuous mode has
     * transitioned to autofocus mode due to touching to focus). Seems to happen with scenes that have bright and dark regions,
     * i.e., on verge of flash firing.
     * Seems to be fixed if we have a short delay...
     */
    private void prepareAutoFocusPhoto() {
        if( MyDebug.LOG )
            Log.d(TAG, "prepareAutoFocusPhoto");
        if( using_android_l ) {
            String flash_value = camera_controller.getFlashValue();
            // getFlashValue() may return "" if flash not supported!
            if( !flash_value.isEmpty() && ( flash_value.equals("flash_auto") || flash_value.equals("flash_red_eye") ) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "wait for a bit...");
                try {
                    Thread.sleep(100);
                }
                catch(InterruptedException e) {
                    MyDebug.logStackTrace(TAG, "InterruptedException from sleep", e);
                }
            }
        }
    }

    /** Take photo, assumes any autofocus has already been taken care of, and that applicationInterface.cameraInOperation(true, false) has
     * already been called.
     * Note that even if a caller wants to take a photo without focusing, you probably want to call takePhoto() with skip_autofocus
     * set to true (so that things work okay in continuous picture focus mode).
     */
    private void takePhotoWhenFocused(boolean continuous_fast_burst) {
        // should be called when auto-focused
        if( MyDebug.LOG )
            Log.d(TAG, "takePhotoWhenFocused");
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
            this.phase = PHASE_NORMAL;
            applicationInterface.cameraInOperation(false, false);
            return;
        }
        if( !this.has_surface ) {
            if( MyDebug.LOG )
                Log.d(TAG, "preview surface not yet available");
            this.phase = PHASE_NORMAL;
            applicationInterface.cameraInOperation(false, false);
            return;
        }
        if( preview_started_state == PREVIEW_IS_STARTING ) {
            // just in case?
            if( MyDebug.LOG )
                Log.d(TAG, "don't take photo, preview is still opening");
            this.phase = PHASE_NORMAL;
            applicationInterface.cameraInOperation(false, false);
            return;
        }

        // capture the UI focus mode now, as the photo callbacks below consult it after the capture completes
        final String focus_value = current_focus_index != -1 ? supported_focus_values.get(current_focus_index) : null;
        if( MyDebug.LOG ) {
            Log.d(TAG, "focus_value is " + focus_value);
            Log.d(TAG, "focus_success is " + focus_success);
        }

        if( focus_value != null && focus_value.equals("focus_mode_locked") && focus_success == FOCUS_WAITING ) {
            // make sure there isn't an autofocus in progress - can happen if in locked mode we take a photo while autofocusing - see testTakePhotoLockedFocus() (although that test doesn't always properly test the bug...)
            // we only cancel when in locked mode and if still focusing, as I had 2 bug reports for v1.16 that the photo was being taken out of focus; both reports said it worked fine in 1.15, and one confirmed that it was due to the cancelAutoFocus() line, and that it's now fixed with this fix
            // they said this happened in every focus mode, including locked - so possible that on some devices, cancelAutoFocus() actually pulls the camera out of focus, or reverts to preview focus?
            cancelAutoFocus();
        }
        removePendingContinuousFocusReset(); // to avoid switching back to continuous focus mode while taking a photo - instead we'll always make sure we switch back after taking a photo
        updateParametersFromLocation(); // do this now, not before, so we don't set location parameters during focus (sometimes get RuntimeException)

        focus_success = FOCUS_DONE; // clear focus rectangle if not already done
        successfully_focused = false; // so next photo taken will require an autofocus
        if( MyDebug.LOG )
            Log.d(TAG, "remaining_repeat_photos: " + remaining_repeat_photos);

        // if focus_set_for_target_distance==true, then we stick with the last set focus bracketing source distance, as the current focus distance will
        // be set to the target
        if( applicationInterface.isFocusBracketingPref() && applicationInterface.isFocusBracketingSourceAutoPref() && !focus_set_for_target_distance ) {
            camera_controller.setFocusBracketingSourceDistanceFromCurrent();
        }

        // Callback handling all capture results (JPEG/RAW/burst/extension); also responsible for
        // restarting or pausing the preview once the capture completes, and for kicking off any
        // remaining repeat-mode photos.
        CameraController.PictureCallback pictureCallback = new CameraController.PictureCallback() {
            private boolean success = false; // whether jpeg callback succeeded
            private boolean has_date = false;
            private Date current_date = null;

            public void onStarted() {
                if( MyDebug.LOG )
                    Log.d(TAG, "onStarted");
                applicationInterface.onCaptureStarted();
                if( applicationInterface.getBurstForNoiseReduction() && applicationInterface.getNRModePref() == ApplicationInterface.NRModePref.NRMODE_LOW_LIGHT ) {
                    if( camera_controller.getBurstTotal() >= CameraController.N_IMAGES_NR_DARK_LOW_LIGHT ) {
                        showToast(null, R.string.preference_nr_mode_low_light_message, true);
                    }
                }
            }

            public void onCompleted() {
                if( MyDebug.LOG )
                    Log.d(TAG, "onCompleted");
                applicationInterface.onPictureCompleted();
                if( !using_android_l ) {
                    //is_preview_started = false; // preview automatically stopped due to taking photo on original Camera API
                    preview_started_state = PREVIEW_NOT_STARTED; // preview automatically stopped due to taking photo on original Camera API
                }
                phase = PHASE_NORMAL; // need to set this even if remaining repeat photos, so we can restart the preview
                if( remaining_repeat_photos == -1 || remaining_repeat_photos > 0 ) {
                    //if( !is_preview_started ) {
                    if( preview_started_state == PREVIEW_NOT_STARTED ) {
                        // we need to restart the preview; and we do this in the callback, as we need to restart after saving the image
                        // (otherwise this can fail, at least on Nexus 7)
                        if( MyDebug.LOG )
                            Log.d(TAG, "repeat mode photos remaining: onPictureTaken about to start preview: " + remaining_repeat_photos);
                        startCameraPreview(true, null);
                        if( MyDebug.LOG )
                            Log.d(TAG, "repeat mode photos remaining: onPictureTaken started preview: " + remaining_repeat_photos);
                    }
                    applicationInterface.cameraInOperation(false, false);
                }
                else {
                    phase = PHASE_NORMAL;
                    boolean pause_preview = applicationInterface.getPausePreviewPref();
                    if( MyDebug.LOG )
                        Log.d(TAG, "pause_preview? " + pause_preview);
                    if( pause_preview && success ) {
                        //if( is_preview_started ) {
                        if( preview_started_state == PREVIEW_STARTED ) {
                            // need to manually stop preview on Android L Camera2
                            // also note: even though we now draw the last image on top of the screen instead of relying on the
                            // camera preview being paused, it's still good practice to pause the preview/camera for privacy reasons
                            if( camera_controller != null ) {
                                camera_controller.stopPreview();
                            }
                            //is_preview_started = false;
                            preview_started_state = PREVIEW_NOT_STARTED;
                        }
                        setPreviewPaused(true);
                    }
                    else {
                        //if( !is_preview_started ) {
                        if( preview_started_state == PREVIEW_NOT_STARTED ) {
                            // we need to restart the preview; and we do this in the callback, as we need to restart after saving the image
                            // (otherwise this can fail, at least on Nexus 7)
                            startCameraPreview(true, null);
                        }
                        applicationInterface.cameraInOperation(false, false);
                        if( MyDebug.LOG )
                            Log.d(TAG, "onPictureTaken started preview");
                    }
                }
                continuousFocusReset(); // in case we took a photo after user had touched to focus (causing us to switch from continuous to autofocus mode)
                if( camera_controller != null && focus_value != null && ( focus_value.equals("focus_mode_continuous_picture") || focus_value.equals("focus_mode_continuous_video") ) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "cancelAutoFocus to restart continuous focusing");
                    camera_controller.cancelAutoFocus(); // needed to restart continuous focusing
                }

                if( camera_controller != null && camera_controller.getBurstType() == CameraController.BurstType.BURSTTYPE_CONTINUOUS ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "continuous burst mode ended, so revert to standard mode");
                    setupBurstMode();
                }

                if( MyDebug.LOG )
                    Log.d(TAG, "do we need to take another photo? remaining_repeat_photos: " + remaining_repeat_photos);
                if( remaining_repeat_photos == -1 || remaining_repeat_photos > 0 ) {
                    takeRemainingRepeatPhotos();
                }
            }

            /** Ensures we get the same date for both JPEG and RAW; and that we set the date ASAP so that it corresponds to actual
             *  photo time.
             */
            private void initDate() {
                if( !has_date ) {
                    has_date = true;
                    current_date = new Date();
                    if( MyDebug.LOG )
                        Log.d(TAG, "picture taken on date: " + current_date);
                }
            }

            public void onPictureTaken(byte[] data) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onPictureTaken");
                initDate();
                if( !applicationInterface.onPictureTaken(data, current_date) ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "applicationInterface.onPictureTaken failed");
                    success = false;
                }
                else {
                    success = true;
                }
            }

            public void onRawPictureTaken(RawImage raw_image) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onRawPictureTaken");
                initDate();
                if( !applicationInterface.onRawPictureTaken(raw_image, current_date) ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "applicationInterface.onRawPictureTaken failed");
                }
            }

            // NOTE(review): generic type parameters on the two List parameters below appear to have been
            // stripped by the patch rendering; restored to match the types forwarded to applicationInterface.
            public void onBurstPictureTaken(List<byte[]> images) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onBurstPictureTaken");
                initDate();

                success = true;
                if( !applicationInterface.onBurstPictureTaken(images, current_date) ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "applicationInterface.onBurstPictureTaken failed");
                    success = false;
                }
            }

            public void onRawBurstPictureTaken(List<RawImage> raw_images) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onRawBurstPictureTaken");
                initDate();

                if( !applicationInterface.onRawBurstPictureTaken(raw_images, current_date) ) {
                    if( MyDebug.LOG )
                        Log.e(TAG, "applicationInterface.onRawBurstPictureTaken failed");
                }
            }

            public void onExtensionProgress(int progress) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onExtensionProgress: " + progress);
                applicationInterface.onExtensionProgress(progress);
            }

            public boolean imageQueueWouldBlock(int n_raw, int n_jpegs) {
                if( MyDebug.LOG )
                    Log.d(TAG, "imageQueueWouldBlock");
                return applicationInterface.imageQueueWouldBlock(n_raw, n_jpegs);
            }

            public void onFrontScreenTurnOn() {
                if( MyDebug.LOG )
                    Log.d(TAG, "onFrontScreenTurnOn");
                applicationInterface.turnFrontScreenFlashOn();
            }
        };
        CameraController.ErrorCallback errorCallback = new CameraController.ErrorCallback() {
            public void onError() {
                if( MyDebug.LOG )
                    Log.e(TAG, "error from takePicture");
                count_cameraTakePicture--; // cancel out the increment from after the takePicture() call
                if( MyDebug.LOG ) {
                    Log.d(TAG, "count_cameraTakePicture is now: " + count_cameraTakePicture);
                }
                applicationInterface.onPhotoError();
                phase = PHASE_NORMAL;
                startCameraPreview(true, null);
                applicationInterface.cameraInOperation(false, false);
            }
        };
        {
            camera_controller.setRotation(getImageVideoRotation());

            boolean enable_sound = applicationInterface.getShutterSoundPref();
            if( is_video && isVideoRecording() )
                enable_sound = false; // always disable shutter sound if we're taking a photo while recording video
            if( MyDebug.LOG )
                Log.d(TAG, "enable_sound? " + enable_sound);
            camera_controller.enableShutterSound(enable_sound);
            if( using_android_l ) {
                boolean camera2_dummy_capture_hack = applicationInterface.useCamera2DummyCaptureHack();
                if( MyDebug.LOG )
                    Log.d(TAG, "camera2_dummy_capture_hack? " + camera2_dummy_capture_hack);
                camera_controller.setDummyCaptureHack( camera2_dummy_capture_hack );

                boolean use_camera2_fast_burst = applicationInterface.useCamera2FastBurst();
                if( MyDebug.LOG )
                    Log.d(TAG, "use_camera2_fast_burst? " + use_camera2_fast_burst);
                camera_controller.setUseExpoFastBurst( use_camera2_fast_burst );
            }
            if( continuous_fast_burst ) {
                camera_controller.setBurstType(CameraController.BurstType.BURSTTYPE_CONTINUOUS);
            }

            if( MyDebug.LOG )
                Log.d(TAG, "about to call takePicture");
            camera_controller.takePicture(pictureCallback, errorCallback);
            count_cameraTakePicture++;
            if( MyDebug.LOG ) {
                Log.d(TAG, "count_cameraTakePicture is now: " + count_cameraTakePicture);
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, "takePhotoWhenFocused exit");
    }

    /** Takes the next photo in repeat mode (called from the picture callback after each shot),
     *  rescheduling itself while the application is still busy processing, and honouring the
     *  user's repeat-interval preference.
     */
    private void takeRemainingRepeatPhotos() {
        if( MyDebug.LOG )
            Log.d(TAG, "takeRemainingRepeatPhotos");
        if( remaining_repeat_photos == -1 || remaining_repeat_photos > 0 ) {
            if( camera_controller == null ) {
                Log.e(TAG, "remaining_repeat_photos still set, but camera is closed!: " + remaining_repeat_photos);
                cancelRepeat();
            }
            else if( preview_started_state == PREVIEW_IS_STARTING ) {
                // just in case?
                Log.e(TAG, "remaining_repeat_photos still set, but preview is still opening!: " + remaining_repeat_photos);
                cancelRepeat();
            }
            else {
                // check it's okay to take a photo
                if( !applicationInterface.canTakeNewPhoto() ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "takeRemainingRepeatPhotos: still processing...");
                    // wait a bit then check again
                    final Handler handler = new Handler();
                    handler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            if( MyDebug.LOG )
                                Log.d(TAG, "takeRemainingRepeatPhotos: check again from post delayed runnable");
                            takeRemainingRepeatPhotos();
                        }
                    }, 500);
                    return;
                }

                // remaining_repeat_photos == -1 means repeat forever, so don't decrement
                if( remaining_repeat_photos > 0 )
                    remaining_repeat_photos--;
                if( MyDebug.LOG )
                    Log.d(TAG, "takeRemainingRepeatPhotos: remaining_repeat_photos is now: " + remaining_repeat_photos);

                long timer_delay = applicationInterface.getRepeatIntervalPref();
                if( timer_delay == 0 ) {
                    // we set skip_autofocus to go straight to taking a photo rather than refocusing, for speed
                    // need to manually set the phase
                    phase = PHASE_TAKING_PHOTO;
                    takePhoto(true, false);
                }
                else {
                    takePictureOnTimer(timer_delay, true);
                }
            }
        }
    }

    /** Requests a user-initiated autofocus, cancelling any autofocus already in progress first. */
    public void requestAutoFocus() {
        if( MyDebug.LOG )
            Log.d(TAG, "requestAutoFocus");
        cancelAutoFocus();
        tryAutoFocus(false, true);
    }

    private void tryAutoFocus(final boolean startup, final boolean manual) {
        // manual: whether user has requested autofocus (e.g., by touching screen, or volume focus, or hardware focus button)
        // consider whether you want to call requestAutoFocus() instead (which properly cancels any in-progress auto-focus first)
        if( MyDebug.LOG ) {
            Log.d(TAG, "tryAutoFocus");
            Log.d(TAG, "startup? " + startup);
            Log.d(TAG, "manual? " + manual);
        }
        if( camera_controller == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "camera not opened!");
        }
        else if( !this.has_surface ) {
            if( MyDebug.LOG )
                Log.d(TAG, "preview surface not yet available");
        }
        //else if( !this.is_preview_started ) {
        else if( preview_started_state != PREVIEW_STARTED ) {
            if( MyDebug.LOG )
                Log.d(TAG, "preview not yet started");
        }
        else if( !(manual && this.is_video) && (this.isVideoRecording() || this.isTakingPhotoOrOnTimer()) ) {
            // if taking a video, we allow manual autofocuses
            // autofocus may cause problem if there is a video corruption problem, see testTakeVideoBitrate() on Nexus 7 at 30Mbs or 50Mbs, where the startup autofocus would cause a problem here
            if( MyDebug.LOG )
                Log.d(TAG, "currently taking a photo");
        }
        else {
            if( manual ) {
                // remove any previous request to switch back to continuous
                removePendingContinuousFocusReset();
            }
            if( manual && !is_video && camera_controller.focusIsContinuous() && supportedFocusValue("focus_mode_auto") ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "switch from continuous to autofocus mode for touch focus");
                camera_controller.setFocusValue("focus_mode_auto"); // switch to autofocus
                autofocus_in_continuous_mode = true;
                // we switch back to continuous via a new reset_continuous_focus_runnable in autoFocusCompleted()
            }
            // it's only worth doing autofocus when autofocus has an effect (i.e., auto or macro mode)
            // but also for continuous focus mode, triggering an autofocus is still important to fire flash when touching the screen
            if( camera_controller.supportsAutoFocus() ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "try to start autofocus");
                if( !using_android_l ) {
                    set_flash_value_after_autofocus = "";
                    String old_flash_value = camera_controller.getFlashValue();
                    // getFlashValue() may return "" if flash not supported!
                    if( startup && !old_flash_value.isEmpty() && !old_flash_value.equals("flash_off") && !old_flash_value.equals("flash_torch") ) {
                        set_flash_value_after_autofocus = old_flash_value;
                        camera_controller.setFlashValue("flash_off");
                    }
                    if( MyDebug.LOG )
                        Log.d(TAG, "set_flash_value_after_autofocus is now: " + set_flash_value_after_autofocus);
                }
                CameraController.AutoFocusCallback autoFocusCallback = new CameraController.AutoFocusCallback() {
                    @Override
                    public void onAutoFocus(boolean success) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "autofocus complete: " + success);
                        autoFocusCompleted(manual, success, false);
                    }
                };

                this.focus_success = FOCUS_WAITING;
                if( MyDebug.LOG )
                    Log.d(TAG, "set focus_success to " + focus_success);
                this.focus_complete_time = -1;
                this.successfully_focused = false;
                camera_controller.autoFocus(autoFocusCallback, false);
                count_cameraAutoFocus++;
                this.focus_started_time = System.currentTimeMillis();
                if( MyDebug.LOG )
                    Log.d(TAG, "autofocus started, count now: " + count_cameraAutoFocus);
            }
            else if( has_focus_area ) {
                // do this so we get the focus box, for focus modes that support focus area, but don't support autofocus
                focus_success = FOCUS_SUCCESS;
                focus_complete_time = System.currentTimeMillis();
                // n.b., don't set focus_started_time as that may be used for application to show autofocus animation
            }
        }
    }

    /** If the user touches the
screen in continuous focus mode, we switch the camera_controller to autofocus mode.
     * After the autofocus completes, we set a reset_continuous_focus_runnable to switch the camera_controller
     * back to continuous focus after a short delay.
     * This function removes any pending reset_continuous_focus_runnable.
     */
    private void removePendingContinuousFocusReset() {
        if( MyDebug.LOG )
            Log.d(TAG, "removePendingContinuousFocusReset");
        if( reset_continuous_focus_runnable != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "remove pending reset_continuous_focus_runnable");
            reset_continuous_focus_handler.removeCallbacks(reset_continuous_focus_runnable);
            reset_continuous_focus_runnable = null;
        }
    }

    /** If the user touches the screen in continuous focus mode, we switch the camera_controller to autofocus mode.
     * This function is called to see if we should switch from autofocus mode back to continuous focus mode.
     * If this isn't required, calling this function does nothing.
     */
    private void continuousFocusReset() {
        if( MyDebug.LOG )
            Log.d(TAG, "switch back to continuous focus after autofocus?");
        if( camera_controller != null && autofocus_in_continuous_mode ) {
            autofocus_in_continuous_mode = false;
            // check again
            String current_ui_focus_value = getCurrentFocusValue();
            if( current_ui_focus_value != null && !camera_controller.getFocusValue().equals(current_ui_focus_value) && camera_controller.getFocusValue().equals("focus_mode_auto") ) {
                camera_controller.cancelAutoFocus();
                if( MyDebug.LOG )
                    Log.d(TAG, "switch back to: " + current_ui_focus_value);
                camera_controller.setFocusValue(current_ui_focus_value);
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "no need to switch back to continuous focus after autofocus, mode already changed");
            }
        }
    }

    /** Cancels any in-progress autofocus, and marks the focus state as cancelled via autoFocusCompleted(). */
    private void cancelAutoFocus() {
        if( MyDebug.LOG )
            Log.d(TAG, "cancelAutoFocus");
        if( camera_controller != null ) {
            camera_controller.cancelAutoFocus();
            autoFocusCompleted(false, false, true);
        }
    }

    private void ensureFlashCorrect() {
        // ensures flash is in correct mode, in case where we had to turn flash temporarily off for startup autofocus
        if( !set_flash_value_after_autofocus.isEmpty() && camera_controller != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set flash back to: " + set_flash_value_after_autofocus);
            camera_controller.setFlashValue(set_flash_value_after_autofocus);
            set_flash_value_after_autofocus = "";
        }
    }

    /** Central completion handler for every autofocus attempt (manual, startup or cancelled).
     *  Updates the focus state/timestamps, schedules the delayed switch back to continuous focus
     *  where needed, restores the flash mode, and takes any photo that was queued to follow the
     *  autofocus (take_photo_after_autofocus).
     *  @param manual    whether the autofocus was user-initiated
     *  @param success   whether the autofocus succeeded (ignored if cancelled)
     *  @param cancelled whether the autofocus was cancelled rather than completed
     */
    private void autoFocusCompleted(boolean manual, boolean success, boolean cancelled) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "autoFocusCompleted");
            Log.d(TAG, " manual? " + manual);
            Log.d(TAG, " success? " + success);
            Log.d(TAG, " cancelled? " + cancelled);
        }
        if( cancelled ) {
            focus_success = FOCUS_DONE;
        }
        else {
            focus_success = success ? FOCUS_SUCCESS : FOCUS_FAILED;
            focus_complete_time = System.currentTimeMillis();
        }
        if( manual && !cancelled && ( success || applicationInterface.isTestAlwaysFocus() ) ) {
            successfully_focused = true;
            successfully_focused_time = focus_complete_time;
        }
        if( manual && camera_controller != null && autofocus_in_continuous_mode ) {
            String current_ui_focus_value = getCurrentFocusValue();
            if( MyDebug.LOG )
                Log.d(TAG, "current_ui_focus_value: " + current_ui_focus_value);
            if( current_ui_focus_value != null && !camera_controller.getFocusValue().equals(current_ui_focus_value) && camera_controller.getFocusValue().equals("focus_mode_auto") ) {
                reset_continuous_focus_runnable = new Runnable() {
                    @Override
                    public void run() {
                        if( MyDebug.LOG )
                            Log.d(TAG, "reset_continuous_focus_runnable running...");
                        reset_continuous_focus_runnable = null;
                        continuousFocusReset();
                    }
                };
                reset_continuous_focus_handler.postDelayed(reset_continuous_focus_runnable, 3000);
            }
        }
        ensureFlashCorrect();
        if( this.using_face_detection && !cancelled ) {
            // On some devices such as mtk6589, face detection does not resume as written in documentation so we have
            // to cancel focus when focus is finished
            if( camera_controller != null ) {
                camera_controller.cancelAutoFocus();
            }
        }

        boolean local_take_photo_after_autofocus;
        synchronized(this) {
            local_take_photo_after_autofocus = take_photo_after_autofocus;
            take_photo_after_autofocus = false;
        }
        // call CameraController outside the lock
        if( local_take_photo_after_autofocus ) {
            if( MyDebug.LOG )
                Log.d(TAG, "take_photo_after_autofocus is set");
            prepareAutoFocusPhoto();
            takePhotoWhenFocused(false);
        }
        if( MyDebug.LOG )
            Log.d(TAG, "autoFocusCompleted exit");
    }

    /** Start the camera preview.
     * @param wait_until_started For CameraController.startPreview().
     * @param preview_opened For CameraController.startPreview().
     */
    public void startCameraPreview(boolean wait_until_started, Runnable preview_opened) {
        long debug_time = 0;
        if( MyDebug.LOG ) {
            Log.d(TAG, "startCameraPreview");
            debug_time = System.currentTimeMillis();
        }
        // If a preview start is already in flight, don't kick off another one; the in-flight
        // start will run its own callbacks, so we deliberately skip preview_opened here too.
        if( preview_started_state == PREVIEW_IS_STARTING ) {
            if( MyDebug.LOG )
                Log.d(TAG, "preview is already starting!");
            // avoid opening preview again - don't even run the preview_opened callback
            return;
        }
        //if( camera_controller != null && !this.isTakingPhotoOrOnTimer() && !is_preview_started ) {
        if( camera_controller != null && !this.isTakingPhotoOrOnTimer() && preview_started_state == PREVIEW_NOT_STARTED ) {
            if( MyDebug.LOG )
                Log.d(TAG, "starting the camera preview");
            {
                if( MyDebug.LOG )
                    Log.d(TAG, "setRecordingHint: " + is_video);
                // hint must be set before starting the preview, so the driver can pick a suitable configuration
                camera_controller.setRecordingHint(this.is_video);
            }
            setPreviewFps();
            try {
                // mark as starting before the (possibly asynchronous) startPreview call, so that
                // re-entrant calls hit the PREVIEW_IS_STARTING early-out above
                preview_started_state = PREVIEW_IS_STARTING;
                camera_controller.startPreview(wait_until_started, new Runnable() {
                    @Override
                    public void run() {
                        // success runnable - preview is now up
                        if( MyDebug.LOG )
                            Log.d(TAG, "startCameraPreview: runnable for starting camera preview");

                        if( camera_controller == null ) {
                            // Although at CameraController2 we have some checks for the camera closing in the meantime, this isn't
                            // robust as if wait_until_started==false, both starting the preview and closing the camera may be running
                            // on background threads.
                            // However closeCamera() will set camera_controller to null on the UI thread before closing the camera
                            // on the background thread.
                            if( MyDebug.LOG )
                                Log.d(TAG, "but camera closed in meantime");
                            return;
                        }

                        count_cameraStartPreview++;

                        //this.is_preview_started = true;
                        preview_started_state = PREVIEW_STARTED;
                        if( using_face_detection ) {
                            if( MyDebug.LOG )
                                Log.d(TAG, "start face detection");
                            // face detection must be (re)started after each preview start
                            camera_controller.startFaceDetection();
                            faces_detected = null;
                        }
                        setPreviewPaused(false);
                        setupContinuousFocusMove();

                        if( preview_opened != null ) {
                            preview_opened.run();
                        }
                    }
                }, new Runnable() {
                    @Override
                    public void run() {
                        // failure runnable - preview could not be started
                        Log.e(TAG, "startCameraPreview: runnable for failing to start camera preview");
                        if( camera_controller == null ) {
                            // see comment above
                            if( MyDebug.LOG )
                                Log.d(TAG, "but camera closed in meantime");
                            return;
                        }
                        // if we update this code, remember to also update the CameraControllerException code below
                        preview_started_state = PREVIEW_NOT_STARTED;
                        applicationInterface.onFailedStartPreview();
                        if( preview_opened != null ) {
                            // unclear if we need to run the caller's runnable on failure, but do so for consistency (either
                            // with wait_until_started==true, or when a CameraControllerException was thrown instead)
                            preview_opened.run();
                        }
                    }
                });
            }
            catch(CameraControllerException e) {
                MyDebug.logStackTrace(TAG, "CameraControllerException trying to startPreview", e);
                // if we update this code, remember to also update the on_failed runnable above
                preview_started_state = PREVIEW_NOT_STARTED;
                applicationInterface.onFailedStartPreview();
                return;
            }
            if( MyDebug.LOG ) {
                Log.d(TAG, "startCameraPreview: time after starting camera preview: " + (System.currentTimeMillis() - debug_time));
            }
        }
        else {
            // unclear if we still need to run the following here?
            this.setPreviewPaused(false);
            this.setupContinuousFocusMove();
            if( preview_opened != null ) {
                preview_opened.run();
            }
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "startCameraPreview: total time for startCameraPreview: " + (System.currentTimeMillis() - debug_time));
        }
    }

    /** Sets or clears the "preview paused" state, updating the phase and notifying the
     *  application interface so the GUI can be reconfigured accordingly.
     */
    private void setPreviewPaused(boolean paused) {
        if( MyDebug.LOG )
            Log.d(TAG, "setPreviewPaused: " + paused);
        applicationInterface.hasPausedPreview(paused);
        if( paused ) {
            this.phase = PHASE_PREVIEW_PAUSED;
            // shouldn't call applicationInterface.cameraInOperation(true, ...), as should already have done when we started to take a photo (or above when exiting immersive mode)
        }
        else {
            this.phase = PHASE_NORMAL;
            /*applicationInterface.cameraInOperation(false, false);
            if( is_video )
                applicationInterface.cameraInOperation(false, true);*/
            // Need to call cameraInOperation for when taking photo with pause preview option;
            // also needed so that the GUI is set up correctly (via MainUI.showGUI()), for things like on-screen icons that are
            // only shown depending on user options and device support.
            applicationInterface.cameraInOperation(false, false);
        }
    }

    /** Accelerometer callback: smooths gravity with a low-pass filter, then derives the pitch
     *  angle and the "level" (roll) angle from the filtered gravity vector.
     */
    public void onAccelerometerSensorChanged(SensorEvent event) {
        /*if( MyDebug.LOG )
            Log.d(TAG, "onAccelerometerSensorChanged: " + event.values[0] + ", " + event.values[1] + ", " + event.values[2]);*/

        this.has_gravity = true;
        for(int i=0;i<3;i++) {
            //this.gravity[i] = event.values[i];
            // exponential smoothing of the raw accelerometer samples
            this.gravity[i] = sensor_alpha * this.gravity[i] + (1.0f-sensor_alpha) * event.values[i];
        }
        calculateGeoDirection();

        double x = gravity[0];
        double y = gravity[1];
        double z = gravity[2];
        double mag = Math.sqrt(x*x + y*y + z*z);
        /*if( MyDebug.LOG )
            Log.d(TAG, "xyz: " + x + ", " + y + ", " + z);*/

        this.has_pitch_angle = false;
        if( mag > 1.0e-8 ) {
            this.has_pitch_angle = true;
            this.pitch_angle = Math.asin(- z / mag) * 180.0 / Math.PI;
            /*if( MyDebug.LOG )
                Log.d(TAG, "pitch: " + pitch_angle);*/

            this.has_level_angle = true;
            // roll angle from the in-plane gravity components, normalised to [0, 360)
            this.natural_level_angle = Math.atan2(-x, y) * 180.0 / Math.PI;
            if( this.natural_level_angle < -0.0 ) {
                this.natural_level_angle += 360.0;
            }
            //natural_level_angle = 0.0f; // test zero angle

            updateLevelAngles();
        }
        else {
            // degenerate gravity vector - can't derive any angles from it
            Log.e(TAG, "accel sensor has zero mag: " + mag);
            this.has_level_angle = false;
        }

    }

    /** This method should be called when the natural level angle, or the calibration angle, has been updated, to update the other level angle variables.
     */
    public void updateLevelAngles() {
        if( has_level_angle ) {
            this.level_angle = this.natural_level_angle;
            // subtract the user's calibration offset first, then the device orientation
            double calibrated_level_angle = applicationInterface.getCalibratedLevelAngle();
            this.level_angle -= calibrated_level_angle;
            this.orig_level_angle = this.level_angle;
            this.level_angle -= (float) this.current_orientation;
            // normalise to (-180, 180]
            if( this.level_angle < -180.0 ) {
                this.level_angle += 360.0;
            }
            else if( this.level_angle > 180.0 ) {
                this.level_angle -= 360.0;
            }
            /*if( MyDebug.LOG )
                Log.d(TAG, "level_angle is now: " + level_angle);*/
        }
    }

    public boolean hasLevelAngle() {
        return this.has_level_angle;
    }

    /* Returns true if we have the level angle ("roll"), but the pitch is not near vertically up or down (70 degrees to level).
     * This is useful as the level angle becomes unstable when device is near vertical
     */
    public boolean hasLevelAngleStable() {
        if( !is_test && has_pitch_angle && Math.abs(pitch_angle) > 70.0 ) {
            // note that if is_test, we always set the level angle - since the device typically lies face down when running tests...
            return false;
        }
        return this.has_level_angle;
    }

    /** Returns the uncalibrated level angle in degrees.
     *  Note: unlike updateLevelAngles(), this is not normalised to (-180, 180].
     */
    public double getLevelAngleUncalibrated() {
        return this.natural_level_angle - this.current_orientation;
    }

    /** Returns the level angle in degrees.
     */
    public double getLevelAngle() {
        return this.level_angle;
    }

    /** Returns the original level angle in degrees.
     */
    public double getOrigLevelAngle() {
        return this.orig_level_angle;
    }

    public boolean hasPitchAngle() {
        return this.has_pitch_angle;
    }

    /** Returns the pitch angle in degrees.
     */
    public double getPitchAngle() {
        return this.pitch_angle;
    }

    /** Magnetometer callback: smooths the geomagnetic vector and recomputes the compass
     *  direction.
     */
    public void onMagneticSensorChanged(SensorEvent event) {
        this.has_geomagnetic = true;
        for(int i=0;i<3;i++) {
            //this.geomagnetic[i] = event.values[i];
            // exponential smoothing of the raw magnetometer samples
            this.geomagnetic[i] = sensor_alpha * this.geomagnetic[i] + (1.0f-sensor_alpha) * event.values[i];
        }
        calculateGeoDirection();
    }

    /** Computes geo_direction from the current gravity and geomagnetic vectors, applying a
     *  low-pass filter against the previous direction to reduce jitter. Requires both sensor
     *  readings to be available; otherwise does nothing.
     */
    private void calculateGeoDirection() {
        if( !this.has_gravity || !this.has_geomagnetic ) {
            return;
        }
        if( !SensorManager.getRotationMatrix(this.deviceRotation, this.deviceInclination, this.gravity, this.geomagnetic) ) {
            return;
        }
        SensorManager.remapCoordinateSystem(this.deviceRotation, SensorManager.AXIS_X, SensorManager.AXIS_Z, this.cameraRotation);
        boolean has_old_geo_direction = has_geo_direction;
        this.has_geo_direction = true;
        //SensorManager.getOrientation(cameraRotation, geo_direction);
        SensorManager.getOrientation(cameraRotation, new_geo_direction);
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "###");
            Log.d(TAG, "old geo_direction: " + (geo_direction[0]*180/Math.PI) + ", " + (geo_direction[1]*180/Math.PI) + ", " + (geo_direction[2]*180/Math.PI));
        }*/
        for(int i=0;i<3;i++) {
            // filter in degrees, then convert back to radians for storage
            float old_compass = (float)Math.toDegrees(geo_direction[i]);
            float new_compass = (float)Math.toDegrees(new_geo_direction[i]);
            if( has_old_geo_direction ) {
                old_compass = lowPassFilter(old_compass, new_compass, 0.1f, 10.0f);
            }
            else {
                // no previous direction to filter against - take the new value as-is
                old_compass = new_compass;
            }
            geo_direction[i] = (float)Math.toRadians(old_compass);
        }
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "new_geo_direction: " + (new_geo_direction[0]*180/Math.PI) + ", " + (new_geo_direction[1]*180/Math.PI) + ", " + (new_geo_direction[2]*180/Math.PI));
            Log.d(TAG, "geo_direction: " + (geo_direction[0]*180/Math.PI) + ", " + (geo_direction[1]*180/Math.PI) + ", " + (geo_direction[2]*180/Math.PI));
        }*/
    }

    /** Low pass filter, for geomagnetic angles.
     * @param old_value Old value in degrees.
     * @param new_value New value in degrees.
+ */ + private float lowPassFilter(float old_value, float new_value, float smooth, float threshold) { + // see http://stackoverflow.com/questions/4699417/android-compass-orientation-on-unreliable-low-pass-filter + float diff = Math.abs(new_value - old_value); + /*if( MyDebug.LOG ) + Log.d(TAG, "diff: " + diff);*/ + if( diff < 180.0f ) { + if( diff > threshold ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "jump to new value");*/ + old_value = new_value; + } + else { + old_value = old_value + smooth * (new_value - old_value); + } + } + else { + if( 360.0f - diff > threshold ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "jump to new value");*/ + old_value = new_value; + } + else { + if( old_value > new_value ) { + old_value = (old_value + smooth * ((360 + new_value - old_value) % 360) + 360) % 360; + } + else { + old_value = (old_value - smooth * ((360 - new_value + old_value) % 360) + 360) % 360; + } + } + } + return old_value; + } + + public boolean hasGeoDirection() { + return has_geo_direction; + } + + /** Returns the geo direction in radians. + */ + public double getGeoDirection() { + return geo_direction[0]; + } + + public boolean supportsFaceDetection() { + // don't log this, as we call from DrawPreview! + return supports_face_detection; + } + + /** Whether optical image stabilization (OIS) is supported by the device. + */ + public boolean supportsOpticalStabilization() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsOpticalStabilization"); + return supports_optical_stabilization; + } + + public boolean getOpticalStabilization() { + if( MyDebug.LOG ) + Log.d(TAG, "getOpticalStabilization"); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return false; + } + return camera_controller.getOpticalStabilization(); + } + + /** Whether video digital stabilization is supported by the device. 
+ */ + public boolean supportsVideoStabilization() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsVideoStabilization"); + return supports_video_stabilization; + } + + public boolean getVideoStabilization() { + if( MyDebug.LOG ) + Log.d(TAG, "getVideoStabilization"); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return false; + } + return camera_controller.getVideoStabilization(); + } + + public boolean supportsPhotoVideoRecording() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsPhotoVideoRecording"); + return supports_photo_video_recording && !video_high_speed; + } + + /** Returns true iff we're in video mode, and a high speed fps video mode is selected. + */ + public boolean isVideoHighSpeed() { + if( MyDebug.LOG ) + Log.d(TAG, "isVideoHighSpeed"); + return is_video && video_high_speed; + } + + public boolean canDisableShutterSound() { + if( MyDebug.LOG ) + Log.d(TAG, "canDisableShutterSound"); + return can_disable_shutter_sound; + } + + public int getTonemapMaxCurvePoints() { + if( MyDebug.LOG ) + Log.d(TAG, "getTonemapMaxCurvePoints"); + return tonemap_max_curve_points; + } + + public boolean supportsTonemapCurve() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsTonemapCurve"); + return supports_tonemap_curve; + } + + /** Return the supported apertures for this camera. 
+ */ + public float [] getSupportedApertures() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedApertures"); + return supported_apertures; + } + + public List getSupportedColorEffects() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedColorEffects"); + return this.color_effects; + } + + public List getSupportedSceneModes() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedSceneModes"); + return this.scene_modes; + } + + public List getSupportedWhiteBalances() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedWhiteBalances"); + return this.white_balances; + } + + public List getSupportedAntiBanding() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedAntiBanding"); + return this.antibanding; + } + + public List getSupportedEdgeModes() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedEdgeModes"); + return this.edge_modes; + } + + public List getSupportedNoiseReductionModes() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedNoiseReductionModes"); + return this.noise_reduction_modes; + } + + public String getISOKey() { + if( MyDebug.LOG ) + Log.d(TAG, "getISOKey"); + return camera_controller == null ? "" : camera_controller.getISOKey(); + } + + /** Whether manual white balance temperatures can be specified via setWhiteBalanceTemperature(). + */ + public boolean supportsWhiteBalanceTemperature() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsWhiteBalanceTemperature"); + return this.supports_white_balance_temperature; + } + + /** Minimum allowed white balance temperature. + */ + public int getMinimumWhiteBalanceTemperature() { + if( MyDebug.LOG ) + Log.d(TAG, "getMinimumWhiteBalanceTemperature"); + return this.min_temperature; + } + + /** Maximum allowed white balance temperature. + */ + public int getMaximumWhiteBalanceTemperature() { + if( MyDebug.LOG ) + Log.d(TAG, "getMaximumWhiteBalanceTemperature"); + return this.max_temperature; + } + + /** Returns whether a range of manual ISO values can be set. 
If this returns true, use + * getMinimumISO() and getMaximumISO() to return the valid range of values. If this returns + * false, getSupportedISOs() to find allowed ISO values. + */ + public boolean supportsISORange() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsISORange"); + return this.supports_iso_range; + } + + /** If supportsISORange() returns false, use this method to return a list of supported ISO values: + * - If this is null, then manual ISO isn't supported. + * - If non-null, this will include "auto" to indicate auto-ISO, and one or more numerical ISO + * values. + * If supportsISORange() returns true, then this method should not be used (and it will return + * null). Instead use getMinimumISO() and getMaximumISO(). + */ + public List getSupportedISOs() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedISOs"); + return this.isos; + } + + /** Returns minimum ISO value. Only relevant if supportsISORange() returns true. + */ + public int getMinimumISO() { + if( MyDebug.LOG ) + Log.d(TAG, "getMinimumISO"); + return this.min_iso; + } + + /** Returns maximum ISO value. Only relevant if supportsISORange() returns true. 
+ */ + public int getMaximumISO() { + if( MyDebug.LOG ) + Log.d(TAG, "getMaximumISO"); + return this.max_iso; + } + + public float getMinimumFocusDistance() { + return this.minimum_focus_distance; + } + + public boolean supportsExposureTime() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsExposureTime"); + return this.supports_exposure_time; + } + + public long getMinimumExposureTime() { + if( MyDebug.LOG ) + Log.d(TAG, "getMinimumExposureTime: " + min_exposure_time); + return this.min_exposure_time; + } + + public long getMaximumExposureTime() { + if( MyDebug.LOG ) + Log.d(TAG, "getMaximumExposureTime: " + max_exposure_time); + long max = max_exposure_time; + if( applicationInterface.isExpoBracketingPref() || applicationInterface.isFocusBracketingPref() || applicationInterface.isCameraBurstPref() ) { + // doesn't make sense to allow long exposure times in these modes + if( applicationInterface.getBurstForNoiseReduction() ) + max = Math.min(max_exposure_time, 1000000000L*2); // limit to 2s + else + max = Math.min(max_exposure_time, 1000000000L/2); // limit to 0.5s + } + if( MyDebug.LOG ) + Log.d(TAG, "max: " + max); + return max; + } + + public boolean supportsExposures() { + if( MyDebug.LOG ) + Log.d(TAG, "supportsExposures"); + return this.exposures != null; + } + + public int getMinimumExposure() { + if( MyDebug.LOG ) + Log.d(TAG, "getMinimumExposure"); + return this.min_exposure; + } + + public int getMaximumExposure() { + if( MyDebug.LOG ) + Log.d(TAG, "getMaximumExposure"); + return this.max_exposure; + } + + public int getCurrentExposure() { + if( MyDebug.LOG ) + Log.d(TAG, "getCurrentExposure"); + if( camera_controller == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return 0; + } + return camera_controller.getExposureCompensation(); + } + + /*List getSupportedExposures() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedExposures"); + return this.exposures; + }*/ + + public boolean supportsExpoBracketing() { + /*if( MyDebug.LOG ) + 
            Log.d(TAG, "supportsExpoBracketing");*/
        return this.supports_expo_bracketing;
    }

    public int maxExpoBracketingNImages() {
        if( MyDebug.LOG )
            Log.d(TAG, "maxExpoBracketingNImages");
        return this.max_expo_bracketing_n_images;
    }

    public boolean supportsFocusBracketing() {
        return this.supports_focus_bracketing;
    }

    public boolean supportsBurst() {
        return this.supports_burst;
    }

    /** Whether the Camera vendor extension is supported (see
     * https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics ).
     */
    public boolean supportsCameraExtension(int extension) {
        if( extension == CameraExtensionCharacteristics.EXTENSION_HDR ) {
            // blocked for now, as have yet to be able to test this (seems to have no effect on Galaxy S10e;
            // not available on Pixel 6 Pro or Galaxy S24+)
            return false;
        }
        return this.supported_extensions != null && this.supported_extensions.contains(extension);
    }

    /** Whether the camera vendor extensions supports zoom.
     */
    public boolean supportsZoomForCameraExtension(int extension) {
        return this.supported_extensions_zoom != null && this.supported_extensions_zoom.contains(extension);
    }

    public boolean supportsJpegR() {
        return this.supports_jpeg_r;
    }

    public boolean supportsRaw() {
        return this.supports_raw;
    }

    /** Returns the horizontal angle of view in degrees (when unzoomed).
     */
    /*public float getViewAngleX() {
        return this.view_angle_x;
    }*/

    /** Returns the vertical angle of view in degrees (when unzoomed).
     */
    /*public float getViewAngleY() {
        return this.view_angle_y;
    }*/

    /** Returns the horizontal angle of view in degrees (when unzoomed).
     *  The reported view_angle_x is corrected for the aspect ratio of the preview or picture
     *  size, since a narrower aspect ratio crops the horizontal field of view.
     */
    public float getViewAngleX(boolean for_preview) {
        if( MyDebug.LOG )
            Log.d(TAG, "getViewAngleX: " + for_preview);
        CameraController.Size size = for_preview ? this.getCurrentPreviewSize() : this.getCurrentPictureSize();
        if( size == null ) {
            // fall back to the uncorrected angle when no size is available
            Log.e(TAG, "can't find view angle x size");
            return this.view_angle_x;
        }
        float view_aspect_ratio = view_angle_x/view_angle_y;
        float actual_aspect_ratio = ((float)size.width)/(float)size.height;
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "view_angle_x: " + view_angle_x);
            Log.d(TAG, "view_angle_y: " + view_angle_y);
            Log.d(TAG, "view_aspect_ratio: " + view_aspect_ratio);
            Log.d(TAG, "actual_aspect_ratio: " + actual_aspect_ratio);
        }*/
        if( Math.abs(actual_aspect_ratio - view_aspect_ratio) < 1.0e-5f ) {
            // aspect ratios match - no correction needed
            return this.view_angle_x;
        }
        else if( actual_aspect_ratio > view_aspect_ratio ) {
            // wider than the sensor's native aspect ratio - horizontal FOV is unchanged
            return this.view_angle_x;
        }
        else {
            // narrower aspect ratio - horizontal FOV is cropped; scale via the tangent so the
            // angle is geometrically correct rather than linearly scaled
            float aspect_ratio_scale = actual_aspect_ratio/view_aspect_ratio;
            //float actual_view_angle_x = view_angle_x*aspect_ratio_scale;
            float actual_view_angle_x = (float)Math.toDegrees(2.0 * Math.atan(aspect_ratio_scale * Math.tan(Math.toRadians(view_angle_x) / 2.0)));
            /*if( MyDebug.LOG )
                Log.d(TAG, "actual_view_angle_x: " + actual_view_angle_x);*/
            return actual_view_angle_x;
        }
    }

    /** Returns the vertical angle of view in degrees (when unzoomed).
     *  Mirror image of getViewAngleX(): a wider aspect ratio crops the vertical field of view.
     */
    public float getViewAngleY(boolean for_preview) {
        if( MyDebug.LOG )
            Log.d(TAG, "getViewAngleY: " + for_preview);
        CameraController.Size size = for_preview ? this.getCurrentPreviewSize() : this.getCurrentPictureSize();
        if( size == null ) {
            // fall back to the uncorrected angle when no size is available
            Log.e(TAG, "can't find view angle y size");
            return this.view_angle_y;
        }
        float view_aspect_ratio = view_angle_x/view_angle_y;
        float actual_aspect_ratio = ((float)size.width)/(float)size.height;
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "view_angle_x: " + view_angle_x);
            Log.d(TAG, "view_angle_y: " + view_angle_y);
            Log.d(TAG, "view_aspect_ratio: " + view_aspect_ratio);
            Log.d(TAG, "actual_aspect_ratio: " + actual_aspect_ratio);
        }*/
        if( Math.abs(actual_aspect_ratio - view_aspect_ratio) < 1.0e-5f ) {
            // aspect ratios match - no correction needed
            return this.view_angle_y;
        }
        else if( actual_aspect_ratio > view_aspect_ratio ) {
            // wider aspect ratio - vertical FOV is cropped
            float aspect_ratio_scale = view_aspect_ratio/actual_aspect_ratio;
            //float actual_view_angle_y = view_angle_y*aspect_ratio_scale;
            float actual_view_angle_y = (float)Math.toDegrees(2.0 * Math.atan(aspect_ratio_scale * Math.tan(Math.toRadians(view_angle_y) / 2.0)));
            /*if( MyDebug.LOG )
                Log.d(TAG, "actual_view_angle_y: " + actual_view_angle_y);*/
            return actual_view_angle_y;
        }
        else {
            return this.view_angle_y;
        }
    }

    public List getSupportedPreviewSizes() {
        if( MyDebug.LOG )
            Log.d(TAG, "getSupportedPreviewSizes");
        return this.supported_preview_sizes;
    }

    public CameraController.Size getCurrentPreviewSize() {
        return new CameraController.Size(preview_w, preview_h);
    }

    public double getCurrentPreviewAspectRatio() {
        return ((double)preview_w)/(double)preview_h;
    }

    /**
     * @param check_supported If true, and a burst mode is in use (fast burst, expo, HDR), or
     *                        a camera vendor extension mode, and/or a constraint was set via
     *                        getCameraResolutionPref(), then the returned list will be filtered to
     *                        remove sizes that don't support burst and/or these constraints.
+ */ + public List getSupportedPictureSizes(boolean check_supported) { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedPictureSizes"); + boolean is_burst = ( camera_controller != null && camera_controller.isCaptureFastBurst() ); + boolean is_extension = ( camera_controller != null && camera_controller.isCameraExtension() ); + int extension = is_extension ? camera_controller.getCameraExtension() : -1; + boolean has_constraints = photo_size_constraints != null && photo_size_constraints.hasConstraints(); + if( check_supported && ( is_burst || is_extension || has_constraints ) ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to filter picture sizes for burst mode and/or extension mode and/or constraints"); + List filtered_sizes = new ArrayList<>(); + for(CameraController.Size size : photo_sizes) { + if( !size.supportsRequirements(is_burst, is_extension, extension) ) { + // burst or extension mode not supported + } + else if( !photo_size_constraints.satisfies(size) ) { + // doesn't satisfy imposed constraints + } + else { + filtered_sizes.add(size); + } + } + return filtered_sizes; + } + return this.photo_sizes; + } + + /*public int getCurrentPictureSizeIndex() { + if( MyDebug.LOG ) + Log.d(TAG, "getCurrentPictureSizeIndex"); + return this.current_size_index; + }*/ + + public CameraController.Size getCurrentPictureSize() { + if( current_size_index == -1 || photo_sizes == null ) + return null; + return photo_sizes.get(current_size_index); + } + + public VideoQualityHandler getVideoQualityHander() { + return this.video_quality_handler; + } + + /** Returns the supported video "qualities", but unlike + * getVideoQualityHander().getSupportedVideoQuality(), allows filtering to the supplied + * fps_value. + * @param fps_value If not "default", the returned video qualities will be filtered to those that supported the requested + * frame rate. 
     */
    public List getSupportedVideoQuality(String fps_value) {
        if( MyDebug.LOG )
            Log.d(TAG, "getSupportedVideoQuality: " + fps_value);
        if( !fps_value.equals("default") && supports_video_high_speed ) {
            try {
                int fps = Integer.parseInt(fps_value);
                if( MyDebug.LOG )
                    Log.d(TAG, "fps: " + fps);
                // keep only qualities whose video size supports the requested frame rate
                List filtered_video_quality = new ArrayList<>();
                for(String quality : video_quality_handler.getSupportedVideoQuality()) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "quality: " + quality);
                    CamcorderProfile profile = getCamcorderProfile(quality);
                    if( MyDebug.LOG ) {
                        Log.d(TAG, "    width: " + profile.videoFrameWidth);
                        Log.d(TAG, "    height: " + profile.videoFrameHeight);
                    }
                    CameraController.Size best_video_size = video_quality_handler.findVideoSizeForFrameRate(profile.videoFrameWidth, profile.videoFrameHeight, fps, false);
                    if( best_video_size != null ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "    requested frame rate is supported");
                        filtered_video_quality.add(quality);
                    }
                    else {
                        if( MyDebug.LOG )
                            Log.d(TAG, "    requested frame rate is NOT supported");
                    }
                }
                return filtered_video_quality;
            }
            catch(NumberFormatException exception) {
                // fall through and return the unfiltered list
                if( MyDebug.LOG )
                    Log.d(TAG, "fps invalid format, can't parse to int: " + fps_value);
            }
        }
        return video_quality_handler.getSupportedVideoQuality();
    }

    /** Returns whether the user's fps preference is both non-default, and is considered a
     *  "high-speed" frame rate, but not a normal frame rate. (Note, we go by the supplied
     *  fps_value, and not what the user's preference necessarily is; so this doesn't say whether
     *  the Preview is currently set to normal or high speed video mode.)
     */
    public boolean fpsIsHighSpeed(String fps_value) {
        if( MyDebug.LOG )
            Log.d(TAG, "fpsIsHighSpeed: " + fps_value);
        if( !fps_value.equals("default") && supports_video_high_speed ) {
            try {
                int fps = Integer.parseInt(fps_value);
                if( MyDebug.LOG )
                    Log.d(TAG, "fps: " + fps);
                // need to check both, e.g., 30fps on Nokia 8 is in fps ranges of both normal and high speed video sizes
                if( video_quality_handler.videoSupportsFrameRate(fps) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "fps is normal");
                    return false;
                }
                else if( video_quality_handler.videoSupportsFrameRateHighSpeed(fps) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "fps is high speed");
                    return true;
                }
                else {
                    // shouldn't be here?!
                    Log.e(TAG, "fps is neither normal nor high speed");
                    return false;
                }
            }
            catch(NumberFormatException exception) {
                // fall through and treat as not high speed
                if( MyDebug.LOG )
                    Log.d(TAG, "fps invalid format, can't parse to int: " + fps_value);
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, "fps is not high speed");
        return false;
    }

    public boolean supportsVideoHighSpeed() {
        return this.supports_video_high_speed;
    }

    public List getSupportedFlashValues() {
        return supported_flash_values;
    }

    public List getSupportedFocusValues() {
        return supported_focus_values;
    }

    /** Returns the current camera ID, or 0 if the camera isn't opened.
     */
    public int getCameraId() {
        if( camera_controller == null )
            return 0;
        return camera_controller.getCameraId();
    }

    public String getCameraAPI() {
        if( camera_controller == null )
            return "None";
        return camera_controller.getAPI();
    }

    /** Call when activity is resumed.
+ */ + public void onResume() { + if( MyDebug.LOG ) + Log.d(TAG, "onResume"); + recreatePreviewBitmap(); + this.app_is_paused = false; + this.is_paused = false; + cameraSurface.onResume(); + if( canvasView != null ) + canvasView.onResume(); + + if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_CLOSING ) { + // when pausing, we close the camera on a background thread - so if this is still happening when we resume, + // we won't be able to open the camera, so need to open camera when it's closed + if( MyDebug.LOG ) + Log.d(TAG, "camera still closing"); + if( close_camera_task != null ) { // just to be safe + close_camera_task.reopen = true; + } + else { + Log.e(TAG, "onResume: state is CAMERAOPENSTATE_CLOSING, but close_camera_task is null"); + } + } + else { + this.openCamera(); + } + } + + /** Call when activity is paused. + */ + public void onPause() { + onPause(true); + } + + /** Call when activity is paused, or the application wants to put the Preview into a paused + * state (closing the camera etc). + * @param activity_is_pausing Set to true if this is called because the activity is being paused; + * set to false if the activity is not pausing. 
+ */ + public void onPause(boolean activity_is_pausing) { + if( MyDebug.LOG ) + Log.d(TAG, "onPause"); + this.is_paused = true; + if( activity_is_pausing ) + this.app_is_paused = true; // note, if activity_is_paused==false, we don't change app_is_paused, in case app was paused indicated via a separate call to onPause + if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENING ) { + if( MyDebug.LOG ) + Log.d(TAG, "cancel open_camera_task"); + if( open_camera_task != null ) { // just to be safe + this.open_camera_task.cancel(true); + } + else { + Log.e(TAG, "onPause: state is CAMERAOPENSTATE_OPENING, but open_camera_task is null"); + } + } + //final boolean use_background_thread = false; + final boolean use_background_thread = true; + this.closeCamera(use_background_thread, null); + cameraSurface.onPause(); + if( canvasView != null ) + canvasView.onPause(); + freePreviewBitmap(); + } + + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + + if( refreshPreviewBitmapTaskIsRunning() ) { + // if we're being destroyed, better to wait until completion rather than just cancelling + try { + refreshPreviewBitmapTask.get(); // forces thread to complete + } + catch(ExecutionException | InterruptedException e) { + MyDebug.logStackTrace(TAG, "exception while waiting for background_task to finish", e); + } + } + freePreviewBitmap(); // in case onDestroy() called directly without onPause() + + if( camera_open_state == CameraOpenState.CAMERAOPENSTATE_CLOSING ) { + // If the camera is currently closing on a background thread, then wait until the camera has closed to be safe + if( MyDebug.LOG ) { + Log.d(TAG, "wait for close_camera_task"); + } + if( close_camera_task != null ) { // just to be safe + long time_s = System.currentTimeMillis(); + try { + close_camera_task.get(3000, TimeUnit.MILLISECONDS); // set timeout to avoid ANR (camera resource should be freed by the OS when destroyed anyway) + } + catch(ExecutionException | InterruptedException | 
TimeoutException e) { + MyDebug.logStackTrace(TAG, "exception while waiting for close_camera_task to finish", e); + } + if( MyDebug.LOG ) { + Log.d(TAG, "done waiting for close_camera_task"); + Log.d(TAG, "### time after waiting for close_camera_task: " + (System.currentTimeMillis() - time_s)); + } + } + else { + Log.e(TAG, "onResume: state is CAMERAOPENSTATE_CLOSING, but close_camera_task is null"); + } + } + } + + /*void updateUIPlacement() { + // we cache the preference_ui_placement to save having to check it in the draw() method + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getContext()); + String ui_placement = sharedPreferences.getString(MainActivity.getUIPlacementPreferenceKey(), "ui_right"); + this.ui_placement_right = ui_placement.equals("ui_right"); + }*/ + + public void onSaveInstanceState(Bundle state) { + if( MyDebug.LOG ) + Log.d(TAG, "onSaveInstanceState"); + } + + private final Handler fake_toast_handler = new Handler(); + private TextView active_fake_toast = null; + + public void clearActiveFakeToast() { + clearActiveFakeToast(false); + } + + /** Removes any fake toast, if it exists. + * @param called_from_handler Should be false, unless called from the fake_toast_handler. 
     */
    private void clearActiveFakeToast(boolean called_from_handler) {
        if( !called_from_handler ) {
            // important to remove the callback, otherwise when it runs, it may end up deleting a
            // new fake toast that is created after this method call, but before the callback runs
            fake_toast_handler.removeCallbacksAndMessages(null);
        }
        // run on UI thread, to avoid threading issues
        final Activity activity = (Activity)this.getContext();
        activity.runOnUiThread(new Runnable() {
            public void run() {
                if( active_fake_toast != null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "remove fake toast: " + active_fake_toast);
                    // detach the toast view from whatever layout currently holds it
                    ViewParent parent = active_fake_toast.getParent();
                    if( parent != null ) {
                        ((ViewGroup)parent).removeView(active_fake_toast);
                    }
                    active_fake_toast = null;
                }
            }
        });
    }

    public void showToast(final ToastBoxer clear_toast, final int message_id) {
        showToast(clear_toast, getResources().getString(message_id), false);
    }

    public void showToast(final ToastBoxer clear_toast, final int message_id, final boolean use_fake_toast) {
        showToast(clear_toast, getResources().getString(message_id), use_fake_toast);
    }

    public void showToast(final ToastBoxer clear_toast, final String message) {
        showToast(clear_toast, message, false);
    }

    @SuppressWarnings("WeakerAccess")
    public void showToast(final String message, final boolean use_fake_toast) {
        showToast(null, message, use_fake_toast);
    }

    public void showToast(final ToastBoxer clear_toast, final String message, final boolean use_fake_toast) {
        // 32 is the default y-offset (in dp) from the centre of the screen
        showToast(clear_toast, message, 32, use_fake_toast);
    }

    public void showToast(final ToastBoxer clear_toast, final String message, final boolean use_fake_toast, boolean dont_clear) {
        showToast(clear_toast, message, 32, use_fake_toast, dont_clear);
    }

    /*public void showToast(final ToastBoxer clear_toast, final String message, final boolean use_fake_toast, int duration) {
        showToast(clear_toast, message, 32, use_fake_toast,
duration); + }*/ + + /*public void showToast(final String message, final int offset_y_dp, final boolean use_fake_toast) { + showToast(null, message, offset_y_dp, use_fake_toast); + }*/ + + public void showToast(final ToastBoxer clear_toast, final String message, final int offset_y_dp, final boolean use_fake_toast) { + showToast(clear_toast, message, offset_y_dp, use_fake_toast, false); + } + + /** Displays a "toast", but has several advantages over calling Android's Toast API directly. + * We use a custom view, to rotate the toast to account for the device orientation (since + * Open Camera always runs in landscape). + * @param clear_toast Only relevant if use_fake_toast is false. If non-null, calls to this method + * with the same clear_toast value will overwrite the previous ones rather than + * being queued. Note that toasts no longer seem to be queued anyway on + * Android 9+. + * (N.B., some callers with use_fake_toast==true still supply a use_fake_toast + * for historical reasons, from when previously those calls weren't using a fake + * toast.) + * @param message The message to display. + * @param offset_y_dp The y-offset from the centre of the screen. Only relevant if use_fake_toast is + * true. + * @param use_fake_toast If true, don't use Android's Toast system at all, and instead display a message + * on the Preview. + * This is due to problems on Android 9+ where rapidly displaying toasts (e.g., to + * display values from a seekbar being modified) cause problems where toast sometimes + * disappear (this happens whether using clear_toast or not). Note that using + * use_fake_toast means that the toasts don't have the fade out effect. + * Update: Toasts with custom views (Toast.setView()) are now deprecated. So + * use_fake_toast==false no longer uses a custom view. 
 *                        So we should now only set
 *                        use_fake_toast==false for when we really want to use the system toast (e.g.,
 *                        anything that isn't when the Preview is showing such as from Settings, or when
 *                        we want the Android toast look such as for an error message).
 *                        Usages where we want to display info on the Preview should always set
 *                        use_fake_toast==true for a consistent look.
 *  @param dont_clear If true, then the toast will remain until explicitly cleared via
 *                    clearActiveFakeToast(). Only supported if use_fake_toast==true.
 */
public void showToast(final ToastBoxer clear_toast, final String message, final int offset_y_dp, final boolean use_fake_toast, boolean dont_clear) {
    //final boolean use_fake_toast = true;
    //final boolean use_fake_toast = old_use_fake_toast;
    if( !applicationInterface.getShowToastsPref() ) {
        // user has disabled toasts entirely
        return;
    }

    if( MyDebug.LOG ) {
        Log.d(TAG, "showToast: " + message);
        Log.d(TAG, "use_fake_toast: " + use_fake_toast);
    }

    if( this.app_is_paused && use_fake_toast ) {
        if( MyDebug.LOG )
            Log.e(TAG, "don't show fake toast as application is paused: " + message);
        // When targeting Android 11+, toasts with custom views won't be shown in background anyway - in theory we
        // shouldn't be making toasts when in background, but check just in case.
        // However we no longer use custom views when use_fake_toast==false, so fine to allow those - and indeed this
        // is useful for cases where the toast is created shortly before Open Camera resumes, e.g., cancelling SAF
        // (see toast in MainActivity.onActivityResult()), or denying location permission (see toast from
        // PermissionHandler.onRequestPermissionsResult()).
        return;
    }

    final Activity activity = (Activity)this.getContext();
    // We get a crash on emulator at least if Toast constructor isn't run on main thread (e.g., the toast for taking a photo when on timer).
    // Also see http://stackoverflow.com/questions/13267239/toast-from-a-non-ui-thread
    // Also for the use_fake_toast code, running the creation code, and the postDelayed code (and the code in clearActiveFakeToast()), on the UI thread avoids threading issues
    activity.runOnUiThread(new Runnable() {
        public void run() {
            // re-check paused state - it may have changed before this runnable was scheduled
            if( Preview.this.app_is_paused && use_fake_toast ) {
                if( MyDebug.LOG )
                    Log.e(TAG, "don't show fake toast as application is paused: " + message);
                // see note above
                return;
            }

            final float scale = Preview.this.getResources().getDisplayMetrics().density;
            final int offset_y = (int) (offset_y_dp * scale + 0.5f); // convert dps to pixels
            float shadow_radius = (2.0f * scale + 0.5f); // convert dp to pixels
            shadow_radius = Math.max(shadow_radius, 1.0f);
            if( MyDebug.LOG )
                Log.d(TAG, "shadow_radius: " + shadow_radius);

            if( use_fake_toast ) {
                if( active_fake_toast != null ) {
                    // re-use existing fake toast
                    if( MyDebug.LOG )
                        Log.d(TAG, "re-use fake toast: " + active_fake_toast);
                    active_fake_toast.setText(message);
                    active_fake_toast.setPadding(0, offset_y, 0, 0);
                    active_fake_toast.invalidate(); // make sure the view is redrawn
                }
                else {
                    Activity activity = (Activity) Preview.this.getContext();
                    @SuppressLint("InflateParams") // we add the view a few lines below
                    final View view = LayoutInflater.from(activity).inflate(R.layout.toast_textview, null);
                    active_fake_toast = view.findViewById(R.id.text_view);
                    active_fake_toast.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);
                    active_fake_toast.setPadding(0, offset_y, 0, 0);
                    active_fake_toast.setText(message);
                    if( MyDebug.LOG )
                        Log.d(TAG, "create new fake toast: " + active_fake_toast);
                    final FrameLayout rootLayout = activity.findViewById(android.R.id.content);
                    rootLayout.addView(active_fake_toast);
                }

                // in theory the fake_toast_handler should only have a callback on it if re-using an existing fake toast,
                // but we remove callbacks always just in case
                fake_toast_handler.removeCallbacksAndMessages(null);

                if( !dont_clear ) {
                    fake_toast_handler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            if( MyDebug.LOG )
                                Log.d(TAG, "destroy fake toast due to time expired");
                            clearActiveFakeToast(true);
                        }
                    }, 2000); // supposedly matches Toast.LENGTH_SHORT
                }

                return;
            }

            /*if( clear_toast != null && clear_toast.toast != null )
                clear_toast.toast.cancel();

            Toast toast = new Toast(activity);
            if( clear_toast != null )
                clear_toast.toast = toast;*/
            if( MyDebug.LOG ) {
                Log.d(TAG, "clear_toast: " + clear_toast);
                if( clear_toast != null )
                    Log.d(TAG, "clear_toast.toast: " + clear_toast.toast);
                Log.d(TAG, "last_toast: " + last_toast);
                Log.d(TAG, "last_toast_time_ms: " + last_toast_time_ms);
            }
            // This method is better, as otherwise a previous toast (with different or no clear_toast) never seems to clear if we repeatedly issue new toasts - this doesn't happen if we reuse existing toasts if possible
            // However should only do this if the previous toast was the most recent toast (to avoid messing up ordering)
            Toast toast;
            long time_now = System.currentTimeMillis();
            /*
            // We recreate a toast every 2s, to workaround Android toast bug that calling show() no longer seems to extend the toast duration!
            // (E.g., see bug where toasts for sliders disappear after a while if continually moving the slider.)
            if( clear_toast != null && clear_toast.toast != null && clear_toast.toast == last_toast && time_now < last_toast_time_ms+2000) {
                if( MyDebug.LOG )
                    Log.d(TAG, "reuse last toast: " + last_toast);
                toast = clear_toast.toast;
                // for performance, important to reuse the same view, instead of creating a new one (otherwise we get jerky preview update e.g. for changing manual focus slider)
                TextView view = (TextView)toast.getView();
                view.setText(message);
                view.setPadding(0, offset_y, 0, 0);
                view.invalidate(); // make sure the toast is redrawn
                toast.setView(view);
            }
            else*/ {
                if( clear_toast != null && clear_toast.toast != null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "cancel last toast: " + clear_toast.toast);
                    clear_toast.toast.cancel();
                }
                //toast = new Toast(activity);
                toast = Toast.makeText(activity, message, Toast.LENGTH_SHORT);
                if( MyDebug.LOG )
                    Log.d(TAG, "created new toast: " + toast);
                if( clear_toast != null )
                    clear_toast.toast = toast;
                /*@SuppressLint("InflateParams") // we add the view to the toast
                final View view = LayoutInflater.from(activity).inflate(R.layout.toast_textview, null);
                TextView text = view.findViewById(R.id.text_view);
                text.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);
                text.setText(message);
                view.setPadding(0, offset_y, 0, 0);
                toast.setView(text);
                toast.setGravity(Gravity.CENTER, 0, 0);*/
                last_toast_time_ms = time_now;
            }
            //toast.setDuration(Toast.LENGTH_SHORT);
            if( !((Activity)getContext()).isFinishing() ) {
                // Workaround for crash due to bug in Android 7.1 when activity is closing whilst toast shows.
                // This was fixed in Android 8, but still good to fix the crash on Android 7.1! See
                // https://stackoverflow.com/questions/47548317/what-belong-is-badtokenexception-at-classes-of-project and
                // https://github.com/drakeet/ToastCompat#why .
                toast.show();
            }
            last_toast = toast;
        }
    });
}

/** Stores the UI rotation to apply, in degrees. */
public void setUIRotation(int ui_rotation) {
    if( MyDebug.LOG )
        Log.d(TAG, "setUIRotation: " + ui_rotation);
    this.ui_rotation = ui_rotation;
}

public int getUIRotation() {
    return this.ui_rotation;
}

/** If geotagging is enabled, pass the location info to the camera controller (for photos).
 */
private void updateParametersFromLocation() {
    if( MyDebug.LOG )
        Log.d(TAG, "updateParametersFromLocation");
    if( camera_controller != null ) {
        boolean store_location = applicationInterface.getGeotaggingPref();
        if( store_location && applicationInterface.getLocation() != null ) {
            Location location = applicationInterface.getLocation();
            if( MyDebug.LOG ) {
                Log.d(TAG, "updating parameters from location...");
                // don't log location, in case of privacy!
            }
            camera_controller.setLocationInfo(location);
        }
        else {
            // geotagging disabled or no location available - make sure no stale location is kept
            if( MyDebug.LOG )
                Log.d(TAG, "removing location data from parameters...");
            camera_controller.removeLocationInfo();
        }
    }
}

/** Enables grabbing of preview frames into bitmaps. Only has an effect when the preview
 *  surface is a TextureView (otherwise this call does nothing).
 * @param use_preview_bitmap_small Whether to maintain a downscaled preview bitmap.
 * @param use_preview_bitmap_full  Whether to maintain a full-resolution preview bitmap
 *                                 (used for the pre-shots ring buffer).
 */
public void enablePreviewBitmap(boolean use_preview_bitmap_small, boolean use_preview_bitmap_full) {
    if( MyDebug.LOG )
        Log.d(TAG, "enablePreviewBitmap");
    if( cameraSurface instanceof TextureView ) {
        want_preview_bitmap = true;
        this.use_preview_bitmap_small = use_preview_bitmap_small;
        this.use_preview_bitmap_full = use_preview_bitmap_full;
        recreatePreviewBitmap();
    }
}

/** Disables preview bitmaps and frees the associated bitmap resources. */
public void disablePreviewBitmap() {
    if( MyDebug.LOG )
        Log.d(TAG, "disablePreviewBitmap");
    freePreviewBitmap();
    want_preview_bitmap = false;
    use_preview_bitmap_small = false;
    use_preview_bitmap_full = false;
}

public boolean isPreviewBitmapEnabled() {
    return this.want_preview_bitmap;
}

public boolean usePreviewBitmapSmall() {
    return this.want_preview_bitmap && this.use_preview_bitmap_small;
}

public boolean usePreviewBitmapFull() {
    return this.want_preview_bitmap && this.use_preview_bitmap_full;
}

// non-null refreshPreviewBitmapTask means the background task is still running
public boolean refreshPreviewBitmapTaskIsRunning() {
    return refreshPreviewBitmapTask != null;
}

/** Runs the supplied runnable, but waits until the refreshPreviewBitmapTask is no longer running.
 */
private void runForPreviewTask(final Runnable runnable) {
    if( MyDebug.LOG )
        Log.d(TAG, "runForPreviewTask");
    if( !refreshPreviewBitmapTaskIsRunning() ) {
        if( MyDebug.LOG )
            Log.d(TAG, "refreshPreviewBitmapTask not running, can run runnable");
        runnable.run();
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, "refreshPreviewBitmapTask still running, wait before running runnable");
        // poll every 500ms until the background task has completed, then run the runnable
        final Handler handler = new Handler();
        final long delay = 500;
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                if( !refreshPreviewBitmapTaskIsRunning() ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "refreshPreviewBitmapTask not running now, can run runnable");
                    runnable.run();
                }
                else {
                    if( MyDebug.LOG )
                        Log.d(TAG, "refreshPreviewBitmapTask still running, wait again before running runnable");
                    handler.postDelayed(this, delay);
                }
            }
        }, delay);
    }
}

/* Recycles the supplied bitmap, but if the refreshPreviewBitmapTask is running, waits until
   it isn't running.
 */
private void recycleBitmapForPreviewTask(final Bitmap bitmap) {
    if( MyDebug.LOG )
        Log.d(TAG, "recycleBitmapForPreviewTask");
    // Don't want to recycle bitmap whilst thread is running!
    // See test testPreviewBitmap().
    runForPreviewTask(new Runnable() {
        @Override
        public void run() {
            bitmap.recycle();
        }
    });
}

/** Frees the preview bitmap and all derived resources (histogram, zebra stripes,
 *  focus peaking buffers, pre-shots ring buffer). */
private void freePreviewBitmap() {
    if( MyDebug.LOG )
        Log.d(TAG, "freePreviewBitmap");
    cancelRefreshPreviewBitmap();
    histogram = null;
    if( preview_bitmap != null ) {
        recycleBitmapForPreviewTask(preview_bitmap);
        // It's okay to set preview_bitmap to null even if refreshPreviewBitmapTask is currently running in the background
        // as it takes its own reference. But we shouldn't recycle until the background thread is complete.
        preview_bitmap = null;
    }

    // It's okay to set these to -1 even if refreshPreviewBitmapTask is currently running in the background
    // as it takes its own reference. But we shouldn't recycle until the background thread is complete.
    preview_bitmap_full_w = -1;
    preview_bitmap_full_h = -1;
    ring_buffer.flush(); // even if we're recreating the preview_bitmap_full, it might be at a different resolution, so safest to flush the previous pre-shots
    if( use_preview_bitmap_full ) {
        runForPreviewTask(new Runnable() {
            @Override
            public void run() {
                ring_buffer.flush(); // important to flush again, in case the refreshPreviewBitmapTask already running in the background added a new image
            }
        });
    }

    freeZebraStripesBitmap();
    freeFocusPeakingBitmap();
}

private void recreatePreviewBitmap() {
    if( MyDebug.LOG ) {
        Log.d(TAG, "recreatePreviewBitmap");
        Log.d(TAG, "textureview_w: " + textureview_w);
        Log.d(TAG, "textureview_h: " + textureview_h);
        Log.d(TAG, "want_preview_bitmap: " + want_preview_bitmap);
        Log.d(TAG, "use_preview_bitmap_small: " + use_preview_bitmap_small);
        Log.d(TAG, "use_preview_bitmap_full: " + use_preview_bitmap_full);
    }
    freePreviewBitmap();

    // Note we need to take into account getDisplayRotationDegrees(), as TextureView.getBitmap()
    // returns the texture in the "natural" orientation of the device - it doesn't take the transform
    // we've applied in configureTransform() into account.
    if( want_preview_bitmap && use_preview_bitmap_small ) {
        if( MyDebug.LOG )
            Log.d(TAG, "create preview_bitmap");
        final int downscale = 4;
        int bitmap_width = textureview_w / downscale;
        int bitmap_height = textureview_h / downscale;
        int rotation = getDisplayRotationDegrees(false);
        if( rotation == 90 || rotation == 270 ) {
            // swap width/height to get the bitmap in the device's natural orientation
            int dummy = bitmap_width;
            //noinspection SuspiciousNameCombination
            bitmap_width = bitmap_height;
            bitmap_height = dummy;
        }
        if( MyDebug.LOG ) {
            Log.d(TAG, "bitmap_width: " + bitmap_width);
            Log.d(TAG, "bitmap_height: " + bitmap_height);
            Log.d(TAG, "rotation: " + rotation);
        }
        try {
            /*if( true )
                throw new IllegalArgumentException(); // test*/
            preview_bitmap = Bitmap.createBitmap(bitmap_width, bitmap_height, Bitmap.Config.ARGB_8888);
        }
        catch(IllegalArgumentException e) {
            MyDebug.logStackTrace(TAG, "failed to create preview_bitmap", e);
            // Note if we failed to create the preview_bitmap, we don't call disablePreviewBitmap() or set want_preview_bitmap to false,
            // otherwise DrawPreview will keep trying.
+ } + createZebraStripesBitmap(); + createFocusPeakingBitmap(); + } + if( want_preview_bitmap && use_preview_bitmap_full ) { + if( MyDebug.LOG ) + Log.d(TAG, "set up preview_bitmap_full"); + int bitmap_width = textureview_w; + int bitmap_height = textureview_h; + int rotation = getDisplayRotationDegrees(false); + if( rotation == 90 || rotation == 270 ) { + int dummy = bitmap_width; + //noinspection SuspiciousNameCombination + bitmap_width = bitmap_height; + bitmap_height = dummy; + } + if( MyDebug.LOG ) { + Log.d(TAG, "bitmap_width: " + bitmap_width); + Log.d(TAG, "bitmap_height: " + bitmap_height); + Log.d(TAG, "rotation: " + rotation); + } + this.preview_bitmap_full_w = bitmap_width; + this.preview_bitmap_full_h = bitmap_height; + } + } + + private void freeZebraStripesBitmap() { + if( MyDebug.LOG ) + Log.d(TAG, "freeZebraStripesBitmap"); + if( zebra_stripes_bitmap_buffer != null ) { + recycleBitmapForPreviewTask(zebra_stripes_bitmap_buffer); + zebra_stripes_bitmap_buffer = null; + } + if( zebra_stripes_bitmap != null ) { + zebra_stripes_bitmap.recycle(); + zebra_stripes_bitmap = null; + } + } + + private void createZebraStripesBitmap() { + if( MyDebug.LOG ) + Log.d(TAG, "createZebraStripesBitmap"); + // n.b., preview_bitmap might be null if we failed to create the bitmap + if( want_zebra_stripes && preview_bitmap != null ) { + try { + /*if( true ) + throw new IllegalArgumentException(); // test*/ + zebra_stripes_bitmap_buffer = Bitmap.createBitmap(preview_bitmap.getWidth(), preview_bitmap.getHeight(), Bitmap.Config.ARGB_8888); + // zebra_stripes_bitmap itself is created dynamically when generating the zebra stripes + } + catch(IllegalArgumentException e) { + MyDebug.logStackTrace(TAG, "failed to create zebra_stripes_bitmap_buffer", e); + } + } + } + + private void freeFocusPeakingBitmap() { + if( MyDebug.LOG ) + Log.d(TAG, "freeFocusPeakingBitmap"); + if( focus_peaking_bitmap_buffer != null ) { + recycleBitmapForPreviewTask(focus_peaking_bitmap_buffer); + 
focus_peaking_bitmap_buffer = null; + } + if( focus_peaking_bitmap_buffer_temp != null ) { + recycleBitmapForPreviewTask(focus_peaking_bitmap_buffer_temp); + focus_peaking_bitmap_buffer_temp = null; + } + if( focus_peaking_bitmap != null ) { + focus_peaking_bitmap.recycle(); + focus_peaking_bitmap = null; + } + } + + private void createFocusPeakingBitmap() { + if( MyDebug.LOG ) + Log.d(TAG, "createFocusPeakingBitmap"); + // n.b., preview_bitmap might be null if we failed to create the bitmap + if( want_focus_peaking & preview_bitmap != null ) { + try { + /*if( true ) + throw new IllegalArgumentException(); // test*/ + focus_peaking_bitmap_buffer = Bitmap.createBitmap(preview_bitmap.getWidth(), preview_bitmap.getHeight(), Bitmap.Config.ARGB_8888); + focus_peaking_bitmap_buffer_temp = Bitmap.createBitmap(preview_bitmap.getWidth(), preview_bitmap.getHeight(), Bitmap.Config.ARGB_8888); + // focus_peaking_bitmap itself is created dynamically when generating + } + catch(IllegalArgumentException e) { + MyDebug.logStackTrace(TAG, "failed to create focus_peaking_bitmap_buffers", e); + } + } + } + + public void enableHistogram(HistogramType histogram_type) { + this.want_histogram = true; + this.histogram_type = histogram_type; + } + + public void disableHistogram() { + this.want_histogram = false; + } + + public int [] getHistogram() { + return this.histogram; + } + + public void enableZebraStripes(int zebra_stripes_threshold, int zebra_stripes_color_foreground, int zebra_stripes_color_background) { + this.want_zebra_stripes = true; + this.zebra_stripes_threshold = zebra_stripes_threshold; + this.zebra_stripes_color_foreground = zebra_stripes_color_foreground; + this.zebra_stripes_color_background = zebra_stripes_color_background; + if( this.zebra_stripes_bitmap_buffer == null ) { + createZebraStripesBitmap(); + } + } + + public void disableZebraStripes() { + if( this.want_zebra_stripes ) { + this.want_zebra_stripes = false; + freeZebraStripesBitmap(); + } + } + + public 
Bitmap getZebraStripesBitmap() { + return this.zebra_stripes_bitmap; + } + + public void enableFocusPeaking() { + this.want_focus_peaking = true; + if( this.focus_peaking_bitmap_buffer == null ) { + createFocusPeakingBitmap(); + } + } + + public void disableFocusPeaking() { + if( this.want_focus_peaking ) { + this.want_focus_peaking = false; + freeFocusPeakingBitmap(); + } + } + + public void enablePreShots() { + this.want_pre_shots = true; + } + + public void disablePreShots() { + if( want_pre_shots ) { + this.want_pre_shots = false; + ring_buffer.flush(); // so we don't have old pre-shots hanging around if it's later enabled + } + } + + public Bitmap getFocusPeakingBitmap() { + return this.focus_peaking_bitmap; + } + + public static class RingBuffer { + final int max_size_c = 12; + final List bitmaps = new ArrayList<>(); + + private void flush() { + if( MyDebug.LOG ) + Log.d(TAG, "RingBuffer.flush()"); + while( !bitmaps.isEmpty() ) { + Bitmap bm = bitmaps.remove(0); + bm.recycle(); + } + } + + private void add(Bitmap bitmap) { + while( bitmaps.size() >= max_size_c ) { + Bitmap bm = bitmaps.remove(0); + bm.recycle(); + } + bitmaps.add(bitmap); + } + + public boolean hasBitmaps() { + return !bitmaps.isEmpty(); + } + + public int getNBitmaps() { + return bitmaps.size(); + } + + public Bitmap get() { + return bitmaps.remove(0); + } + } + + private final RingBuffer ring_buffer = new RingBuffer(); + + public RingBuffer getPreShotsRingBuffer() { + return ring_buffer; + } + + private static class RefreshPreviewBitmapTaskResult { + int [] new_histogram; + Bitmap new_zebra_stripes_bitmap; + Bitmap new_focus_peaking_bitmap; + Bitmap preview_bitmap_full_copy; + } + + // use static class, and WeakReferences, to avoid memory leaks: https://stackoverflow.com/questions/44309241/warning-this-asynctask-class-should-be-static-or-leaks-might-occur/46166223 + private static class RefreshPreviewBitmapTask extends AsyncTask { + private static final String TAG = "RefreshPreviewBmTask"; 
+ private final WeakReference previewReference; + // we take references to the bitmaps, so the Preview class can set this to null even whilst the background thread is running + private final WeakReference preview_bitmapReference; + private final WeakReference zebra_stripes_bitmap_bufferReference; + private final WeakReference focus_peaking_bitmap_bufferReference; + private final WeakReference focus_peaking_bitmap_buffer_tempReference; + private final boolean update_histogram; + private final boolean update_preshot; + private final int preview_bitmap_full_w; + private final int preview_bitmap_full_h; + + RefreshPreviewBitmapTask(Preview preview, boolean update_histogram, boolean update_preshot, int preview_bitmap_full_w, int preview_bitmap_full_h) { + this.previewReference = new WeakReference<>(preview); + this.preview_bitmapReference = new WeakReference<>(preview.preview_bitmap); + this.zebra_stripes_bitmap_bufferReference = new WeakReference<>(preview.zebra_stripes_bitmap_buffer); + this.focus_peaking_bitmap_bufferReference = new WeakReference<>(preview.focus_peaking_bitmap_buffer); + this.focus_peaking_bitmap_buffer_tempReference = new WeakReference<>(preview.focus_peaking_bitmap_buffer_temp); + this.update_histogram = update_histogram; + this.update_preshot = update_preshot; + this.preview_bitmap_full_w = preview_bitmap_full_w; + this.preview_bitmap_full_h = preview_bitmap_full_h; + } + + @Override + protected RefreshPreviewBitmapTaskResult doInBackground(Void... 
voids) {
        long debug_time = 0;
        if( MyDebug.LOG ) {
            Log.d(TAG, "doInBackground, async task: " + this);
            debug_time = System.currentTimeMillis();
        }

        // bail out if the Preview has been garbage collected (weak reference cleared)
        Preview preview = previewReference.get();
        if( preview == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "preview is null");
            return null;
        }
        Bitmap preview_bitmap = preview_bitmapReference.get();
        Bitmap zebra_stripes_bitmap_buffer = zebra_stripes_bitmap_bufferReference.get();
        Bitmap focus_peaking_bitmap_buffer = focus_peaking_bitmap_bufferReference.get();
        Bitmap focus_peaking_bitmap_buffer_temp = focus_peaking_bitmap_buffer_tempReference.get();
        Activity activity = (Activity)preview.getContext();
        if( activity == null || activity.isFinishing() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "activity is null or finishing");
            return null;
        }

        RefreshPreviewBitmapTaskResult result = new RefreshPreviewBitmapTaskResult();

        try {
            if( MyDebug.LOG )
                Log.d(TAG, "time before getBitmap: " + (System.currentTimeMillis() - debug_time));
            TextureView textureView = (TextureView)preview.cameraSurface;
            if( preview_bitmap != null ) {
                // grab the current preview frame (downscaled) into preview_bitmap
                textureView.getBitmap(preview_bitmap);
                if( MyDebug.LOG )
                    Log.d(TAG, "time after getBitmap: " + (System.currentTimeMillis() - debug_time));
            }
            if( preview_bitmap_full_w != -1 && preview_bitmap_full_h != -1 && update_preshot ) {
                // much faster to create a fresh preview_bitmap_full to read into, instead of copying it after
                try {
                    if( MyDebug.LOG )
                        Log.d(TAG, "time before creating preview_bitmap_full_copy: " + (System.currentTimeMillis() - debug_time));
                    result.preview_bitmap_full_copy = Bitmap.createBitmap(preview_bitmap_full_w, preview_bitmap_full_h, Bitmap.Config.ARGB_8888);
                    if( MyDebug.LOG )
                        Log.d(TAG, "time after creating preview_bitmap_full_copy: " + (System.currentTimeMillis() - debug_time));
                    textureView.getBitmap(result.preview_bitmap_full_copy);
                    if( MyDebug.LOG )
                        Log.d(TAG, "time after getBitmap for preview_bitmap_full_copy: " + (System.currentTimeMillis() - debug_time));
                    // See comments below for zebra stripes for why we need to rotate
                    // But since rotating is slower (and presumably more CPU intensive) than taking a copy, we leave this to the ImageSaver thread -
                    // although that'll also be just as slow, better to only do it when we're actually saving pre-shots,
                    // rather than having this run all the time.
                    // Also see above - it's much faster to create a new bitmap to read into, than to copy a bitmap
                }
                catch(IllegalArgumentException e) {
                    MyDebug.logStackTrace(TAG, "failed to create preview_bitmap_full_copy", e);
                }
            }

            if( MyDebug.LOG )
                Log.d(TAG, "time after createFromBitmap: " + (System.currentTimeMillis() - debug_time));

            if( update_histogram && preview_bitmap != null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "generate histogram");

                long debug_time_histogram = 0;
                if( MyDebug.LOG ) {
                    debug_time_histogram = System.currentTimeMillis();
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "time before computeHistogram: " + (System.currentTimeMillis() - debug_time));

                {
                    // map the Preview's histogram type onto the image-processing function's type
                    JavaImageFunctions.ComputeHistogramApplyFunction.Type java_type;
                    switch( preview.histogram_type ) {
                        case HISTOGRAM_TYPE_RGB:
                            java_type = JavaImageFunctions.ComputeHistogramApplyFunction.Type.TYPE_RGB;
                            break;
                        case HISTOGRAM_TYPE_LUMINANCE:
                            java_type = JavaImageFunctions.ComputeHistogramApplyFunction.Type.TYPE_LUMINANCE;
                            break;
                        case HISTOGRAM_TYPE_VALUE:
                            java_type = JavaImageFunctions.ComputeHistogramApplyFunction.Type.TYPE_VALUE;
                            break;
                        case HISTOGRAM_TYPE_INTENSITY:
                            java_type = JavaImageFunctions.ComputeHistogramApplyFunction.Type.TYPE_INTENSITY;
                            break;
                        case HISTOGRAM_TYPE_LIGHTNESS:
                            java_type = JavaImageFunctions.ComputeHistogramApplyFunction.Type.TYPE_LIGHTNESS;
                            break;
                        default:
                            throw new RuntimeException("unknown histogram type: " + preview.histogram_type);
                    }
                    JavaImageFunctions.ComputeHistogramApplyFunction function = new JavaImageFunctions.ComputeHistogramApplyFunction(java_type);
                    JavaImageProcessing.applyFunction(function, preview_bitmap, null, 0, 0, preview_bitmap.getWidth(), preview_bitmap.getHeight());
                    result.new_histogram = function.getHistogram();
                }

                if( MyDebug.LOG ) {
                    Log.d(TAG, "time for computeHistogram: " + (System.currentTimeMillis() - debug_time_histogram));
                    Log.d(TAG, "time after computeHistogram: " + (System.currentTimeMillis() - debug_time));
                }
            }

            if( preview.want_zebra_stripes && preview_bitmap != null && zebra_stripes_bitmap_buffer != null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "generate zebra stripes bitmap");

                long debug_time_zebra = 0;
                if( MyDebug.LOG ) {
                    debug_time_zebra = System.currentTimeMillis();
                }

                int zebra_stripes_width = zebra_stripes_bitmap_buffer.getWidth()/20;

                JavaImageFunctions.ZebraStripesApplyFunction function = new JavaImageFunctions.ZebraStripesApplyFunction(preview.zebra_stripes_threshold, preview.zebra_stripes_color_foreground, preview.zebra_stripes_color_background, zebra_stripes_width);
                JavaImageProcessing.applyFunction(function, preview_bitmap, zebra_stripes_bitmap_buffer, 0, 0, preview_bitmap.getWidth(), preview_bitmap.getHeight());

                // The original orientation of the bitmap we get from textureView.getBitmap() needs to be rotated to
                // account for the orientation of camera vs device, but not to account for the current orientation
                // of the device.
                // This is because TextureView.getBitmap() returns the texture in the "natural" orientation of the device - it doesn't take the transform
                // we've applied in configureTransform() into account.
                int rotation_degrees = preview.getDisplayRotationDegrees(false);
                /*if( MyDebug.LOG ) {
                    Log.d(TAG, "orientation of display relative to natural orientation: " + rotation_degrees);
                }*/
                if( MyDebug.LOG )
                    Log.d(TAG, "time before creating new_zebra_stripes_bitmap: " + (System.currentTimeMillis() - debug_time));
                Matrix matrix = new Matrix();
                matrix.postRotate(-rotation_degrees);
                result.new_zebra_stripes_bitmap = Bitmap.createBitmap(zebra_stripes_bitmap_buffer, 0, 0,
                        zebra_stripes_bitmap_buffer.getWidth(), zebra_stripes_bitmap_buffer.getHeight(), matrix, false);

                if( MyDebug.LOG )
                    Log.d(TAG, "time after creating new_zebra_stripes_bitmap: " + (System.currentTimeMillis() - debug_time));

                if( MyDebug.LOG ) {
                    Log.d(TAG, "time for zebra stripes: " + (System.currentTimeMillis() - debug_time_zebra));
                }
                /*
                // test:
                //File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/zebra_stripes_bitmap_buffer.jpg");
                File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/zebra_stripes_bitmap.jpg");
                try {
                    OutputStream outputStream = new FileOutputStream(file);
                    //zebra_stripes_bitmap_buffer.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
                    preview.zebra_stripes_bitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
                    outputStream.close();
                    MainActivity mActivity = (MainActivity) preview.getContext();
                    mActivity.getStorageUtils().broadcastFile(file, true, false, true);
                }
                catch(IOException e) {
                    MyDebug.logStackTrace(TAG, "failed to save image", e);
                }
                */
            }

            if( preview.want_focus_peaking && preview_bitmap != null && focus_peaking_bitmap_buffer != null && focus_peaking_bitmap_buffer_temp != null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "generate focus peaking bitmap");

                long debug_time_focus_peaking = 0;
                if( MyDebug.LOG ) {
                    debug_time_focus_peaking = System.currentTimeMillis();
                }

                // two-pass: focus peaking into the temp buffer, then a filtered pass into the final buffer
                JavaImageFunctions.FocusPeakingApplyFunction function = new JavaImageFunctions.FocusPeakingApplyFunction(preview_bitmap);
                JavaImageProcessing.applyFunction(function, preview_bitmap, focus_peaking_bitmap_buffer_temp, 0, 0, preview_bitmap.getWidth(), preview_bitmap.getHeight());

                JavaImageFunctions.FocusPeakingFilteredApplyFunction function_filtered = new JavaImageFunctions.FocusPeakingFilteredApplyFunction(focus_peaking_bitmap_buffer_temp);
                JavaImageProcessing.applyFunction(function_filtered, focus_peaking_bitmap_buffer_temp, focus_peaking_bitmap_buffer, 0, 0, preview_bitmap.getWidth(), preview_bitmap.getHeight());

                // See comments above for zebra stripes
                int rotation_degrees = preview.getDisplayRotationDegrees(false);
                if( MyDebug.LOG )
                    Log.d(TAG, "time before creating new_focus_peaking_bitmap: " + (System.currentTimeMillis() - debug_time));
                Matrix matrix = new Matrix();
                matrix.postRotate(-rotation_degrees);
                result.new_focus_peaking_bitmap = Bitmap.createBitmap(focus_peaking_bitmap_buffer, 0, 0,
                        focus_peaking_bitmap_buffer.getWidth(), focus_peaking_bitmap_buffer.getHeight(), matrix, false);
                if( MyDebug.LOG )
                    Log.d(TAG, "time after creating new_focus_peaking_bitmap: " + (System.currentTimeMillis() - debug_time));

                if( MyDebug.LOG ) {
                    Log.d(TAG, "time for focus peaking: " + (System.currentTimeMillis() - debug_time_focus_peaking));
                }
            }
        }
        catch(IllegalStateException e) {
            // TextureView.getBitmap() can throw IllegalStateException - treat as a skipped frame
            MyDebug.logStackTrace(TAG, "failed to getBitmap", e);
        }

        if( MyDebug.LOG ) {
            Log.d(TAG, "time taken for RefreshPreviewBitmapTaskResult: " + (System.currentTimeMillis() - debug_time));
        }
        return result;
    }

    /** The system calls this to perform work in the UI thread and delivers
     *  the result from doInBackground() */
    @Override
    protected void onPostExecute(RefreshPreviewBitmapTaskResult result) {
        if( MyDebug.LOG )
            Log.d(TAG, "onPostExecute, async task: " + this);

        Preview preview = previewReference.get();
        if( preview == null ) {
            return;
        }
        Activity activity =
(Activity)preview.getContext(); + if( activity == null || activity.isFinishing() ) { + return; + } + if( result == null ) { + return; + } + + if( result.new_histogram != null ) + preview.histogram = result.new_histogram; + /*if( MyDebug.LOG && preview.histogram != null ) { + for(int i=0;i last_preview_bitmap_time_ms + refresh_time ) { + if( MyDebug.LOG ) + Log.d(TAG, "refreshPreviewBitmap"); + // even if we're running the background task at a faster rate (due to zebra stripes etc), we still update the histogram + // at the standard rate + boolean update_histogram = want_histogram && time_now > last_histogram_time_ms + refresh_histogram_rate_ms; + if( MyDebug.LOG ) { + Log.d(TAG, "update_histogram: " + update_histogram); + Log.d(TAG, "want_histogram: " + want_histogram); + Log.d(TAG, "time_now: " + time_now); + Log.d(TAG, "last_preview_bitmap_time_ms: " + last_preview_bitmap_time_ms); + Log.d(TAG, "last_histogram_time_ms: " + last_histogram_time_ms); + } + + this.last_preview_bitmap_time_ms = time_now; + if( update_histogram ) { + this.last_histogram_time_ms = time_now; + } + + boolean update_preshot = false; + if( camera_controller == null || camera_controller.shouldCoverPreview() ) { + // don't take preshot - instead flush + ring_buffer.flush(); + } + else if( want_pre_shots ) { + update_preshot = true; + } + + refreshPreviewBitmapTask = new RefreshPreviewBitmapTask(this, update_histogram, update_preshot, preview_bitmap_full_w, preview_bitmap_full_h); + refreshPreviewBitmapTask.execute(); + } + } + + private void cancelRefreshPreviewBitmap() { + if( MyDebug.LOG ) + Log.d(TAG, "cancelRefreshPreviewBitmap"); + if( refreshPreviewBitmapTaskIsRunning() ) { + refreshPreviewBitmapTask.cancel(true); + // we don't set refreshPreviewBitmapTask to null - this will be done by the task itself when it completes; + // and we want to know when the task is no longer running (e.g., for freePreviewBitmap()). + } + } + + /** Whether we are in video mode, or photo mode. 
+ */ + public boolean isVideo() { + return is_video; + } + + public boolean isVideoRecording() { + return video_recorder != null && video_start_time_set; + } + + public boolean isVideoRecordingPaused() { + return isVideoRecording() && video_recorder_is_paused; + } + + /** Returns the time of the current video. + * In case of restarting due to max filesize (whether on Android 8+ or not), this includes the + * total time of all the previous video files too, unless this_file_only==true; + */ + public long getVideoTime(boolean this_file_only) { + long offset = this_file_only ? video_time_last_maxfilesize_restart : 0; + if( this.isVideoRecordingPaused() ) { + return video_accumulated_time - offset; + } + long time_now = System.currentTimeMillis(); + return time_now - video_start_time + video_accumulated_time - offset; + } + + public long getVideoAccumulatedTime() { + return video_accumulated_time; + } + + public int getMaxAmplitude() { + return video_recorder != null ? video_recorder.getMaxAmplitude() : 0; + } + + /** Returns the frame rate that the preview's surface or canvas view should be updated. + */ + public long getFrameRate() { + /* See https://stackoverflow.com/questions/44594711/slow-rendering-when-updating-textview , + https://stackoverflow.com/questions/44233870/how-to-fix-slow-rendering-android-vitals - + there is evidence that using an infrequent update actually results in poorer performance, + due to devices running in a lower power state, but Google Play analytics do not take this + into consideration. Thus we are forced to request updates at 60fps whether we need them + or not. I can reproducing this giving improved performance on OnePlus 3T for old and + Camera2 API. Testing suggests this does not seem to adversely affect battery life. + This is limited to Android 7+, to avoid causing problems on older devices (which don't + contribute to Google Analytics anyway). 
+ If we ever are able to use lower frame rates in future, remember we'll still need a high + frame rate when applying the dimming effect when reopening or updating the camera (see + DrawPreview.setDimPreview()) (especially for MainActivity.updateForSettings() when we + pause/unpause the preview instead of reopening the camera). + Update: On more recent Android versions, this effect no longer seems to happen, and on + Android 13 (at least Pixel 6 Pro), we see the reverse (but more reasonable) behaviour + where we have fewer janky frames with a longer frame rate. Behaviour is much better at + 32ms compared to 16ms; and we shouldn't go any slower (firstly so that UI still runs + smoothly; secondly for dimming effect as noted above). + */ + // + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) { + return 32; + } + else if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N ) { + if( is_test_junit4 ) { + // see https://stackoverflow.com/questions/29550508/espresso-freezing-on-view-with-looping-animation + return 32; + } + return 16; + } + // old behaviour: avoid overloading ui thread when taking photo + return this.isTakingPhoto() ? 500 : 100; + } + + public boolean isTakingPhoto() { + return this.phase == PHASE_TAKING_PHOTO; + } + + public boolean usingCamera2API() { + return this.using_android_l; + } + + public CameraController getCameraController() { + return this.camera_controller; + } + + public CameraControllerManager getCameraControllerManager() { + return this.camera_controller_manager; + } + + public boolean supportsFocus() { + return this.supported_focus_values != null; + } + + /** Whether flash is supported by the camera. 
+ */ + public boolean supportsFlash() { + return this.supported_flash_values != null; + } + + public boolean supportsExposureLock() { + return this.is_exposure_lock_supported; + } + + public boolean isExposureLocked() { + return this.is_exposure_locked; + } + + public boolean supportsWhiteBalanceLock() { + return this.is_white_balance_lock_supported; + } + + public boolean isWhiteBalanceLocked() { + return this.is_white_balance_locked; + } + + public boolean supportsZoom() { + return this.has_zoom; + } + + public int getMaxZoom() { + return this.max_zoom_factor; + } + + public boolean hasFocusArea() { + return this.has_focus_area; + } + + public Pair getFocusPos() { + // note, we don't store the screen coordinates, as they may become out of date in the + // screen orientation changes (if MainActivity.lock_to_landscape==false) + float [] coords = {focus_camera_x, focus_camera_y}; + final Matrix matrix = getCameraToPreviewMatrix(); + matrix.mapPoints(coords); + return new Pair<>((int)coords[0], (int)coords[1]); + } + + public int getMaxNumFocusAreas() { + return this.max_num_focus_areas; + } + + public boolean isTakingPhotoOrOnTimer() { + return this.phase == PHASE_TAKING_PHOTO || this.phase == PHASE_TIMER; + } + + public boolean isOnTimer() { + return this.phase == PHASE_TIMER; + } + + public long getTimerEndTime() { + return take_photo_time; + } + + /** Note this means the state of being paused after taking a photo (when pause preview option is + * enabled). Callers wanting to know if the camera preview is started or not should use + * isPreviewStarted(). 
+ */ + public boolean isPreviewPaused() { + return this.phase == PHASE_PREVIEW_PAUSED; + } + + public boolean isPreviewStarted() { + //return this.is_preview_started; + return preview_started_state == PREVIEW_STARTED; + } + + public boolean isPreviewStarting() { + return preview_started_state == PREVIEW_IS_STARTING; + } + + public boolean isFocusWaiting() { + return focus_success == FOCUS_WAITING; + } + + public boolean isFocusRecentSuccess() { + return focus_success == FOCUS_SUCCESS; + } + + public long timeSinceStartedAutoFocus() { + if( focus_started_time != -1 ) + return System.currentTimeMillis() - focus_started_time; + return 0; + } + + public boolean isFocusRecentFailure() { + return focus_success == FOCUS_FAILED; + } + + /** Whether we can skip the autofocus before taking a photo. + */ + private boolean recentlyFocused() { + return this.successfully_focused && System.currentTimeMillis() < this.successfully_focused_time + 5000; + } + + /** If non-null, this returned array will stored the currently detected faces (if face recognition + * is enabled). The face.temp rect will store the face rectangle in screen coordinates. + */ + public CameraController.Face [] getFacesDetected() { + if( faces_detected != null && faces_detected.length > 0 ) { + // note, we don't store the screen coordinates, as they may become out of date in the + // screen orientation changes (if MainActivity.lock_to_landscape==false) + final Matrix matrix = getCameraToPreviewMatrix(); + for(CameraController.Face face : faces_detected) { + face_rect.set(face.rect); + matrix.mapRect(face_rect); + face_rect.round(face.temp); + } + } + // FindBugs warns about returning the array directly, but in fact we need to return direct access rather than copying, so that the on-screen display of faces rectangles updates + return this.faces_detected; + } + + /** Returns the current zoom factor of the camera. Always returns 1.0f if zoom isn't supported. 
+ */ + public float getZoomRatio() { + if( zoom_ratios == null ) + return 1.0f; + int zoom_factor = camera_controller.getZoom(); + return this.zoom_ratios.get(zoom_factor)/100.0f; + } + + public float getZoomRatio(int index) { + if( zoom_ratios == null ) + return 1.0f; + return this.zoom_ratios.get(index)/100.0f; + } + + public float getMinZoomRatio() { + if( zoom_ratios == null ) + return 1.0f; + return this.zoom_ratios.get(0)/100.0f; + } + + public float getMaxZoomRatio() { + if( zoom_ratios == null ) + return 1.0f; + return this.zoom_ratios.get(max_zoom_factor)/100.0f; + } + + public boolean hasPhysicalCameras() { + return this.physical_camera_ids != null; + } + + public final Set getPhysicalCameras() { + return this.physical_camera_ids; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/VideoProfile.java b/app/src/main/java/net/sourceforge/opencamera/preview/VideoProfile.java new file mode 100644 index 0000000..6fae2d6 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/VideoProfile.java @@ -0,0 +1,114 @@ +package net.sourceforge.opencamera.preview; + +import android.media.CamcorderProfile; +import android.media.MediaRecorder; +import android.util.Log; + +import androidx.annotation.NonNull; + +import net.sourceforge.opencamera.MyDebug; + +/** This is essentially similar to CamcorderProfile in that it encapsulates a set of video settings + * to be passed to MediaRecorder, but allows us to store additional fields. 
+ */ +public class VideoProfile { + private static final String TAG = "VideoProfile"; + + public boolean record_audio; + public boolean no_audio_permission; // set to true if record_audio==false, but where the user had requested audio and we don't have microphone permission + public int audioSource; + public int audioCodec; + public int audioChannels; + @SuppressWarnings("WeakerAccess") + public int audioBitRate; + @SuppressWarnings("WeakerAccess") + public int audioSampleRate; + public int fileFormat; + public String fileExtension = "mp4"; + public int videoSource; + public int videoCodec; + public int videoFrameRate; + public double videoCaptureRate; + public int videoBitRate; + public int videoFrameHeight; + public int videoFrameWidth; + + /** Returns a dummy video profile, used if video isn't supported. + */ + VideoProfile() { + } + + VideoProfile(CamcorderProfile camcorderProfile) { + this.record_audio = true; + this.no_audio_permission = false; + this.audioSource = MediaRecorder.AudioSource.CAMCORDER; + this.audioCodec = camcorderProfile.audioCodec; + this.audioChannels = camcorderProfile.audioChannels; + this.audioBitRate = camcorderProfile.audioBitRate; + this.audioSampleRate = camcorderProfile.audioSampleRate; + this.fileFormat = camcorderProfile.fileFormat; + this.videoSource = MediaRecorder.VideoSource.CAMERA; + this.videoCodec = camcorderProfile.videoCodec; + this.videoFrameRate = camcorderProfile.videoFrameRate; + this.videoCaptureRate = camcorderProfile.videoFrameRate; + this.videoBitRate = camcorderProfile.videoBitRate; + this.videoFrameHeight = camcorderProfile.videoFrameHeight; + this.videoFrameWidth = camcorderProfile.videoFrameWidth; + } + + @NonNull + public String toString() { + return ("\nAudioSource: " + this.audioSource + + "\nVideoSource: " + this.videoSource + + "\nFileFormat: " + this.fileFormat + + "\nFileExtension: " + this.fileExtension + + "\nAudioCodec: " + this.audioCodec + + "\nAudioChannels: " + this.audioChannels + + 
"\nAudioBitrate: " + this.audioBitRate + + "\nAudioSampleRate: " + this.audioSampleRate + + "\nVideoCodec: " + this.videoCodec + + "\nVideoFrameRate: " + this.videoFrameRate + + "\nVideoCaptureRate: " + this.videoCaptureRate + + "\nVideoBitRate: " + this.videoBitRate + + "\nVideoWidth: " + this.videoFrameWidth + + "\nVideoHeight: " + this.videoFrameHeight + ); + } + + /** + * Copies the fields of this profile to a MediaRecorder instance. + */ + public void copyToMediaRecorder(MediaRecorder media_recorder) { + if( MyDebug.LOG ) + Log.d(TAG, "copyToMediaRecorder: " + media_recorder); + if( record_audio ) { + if( MyDebug.LOG ) + Log.d(TAG, "record audio"); + media_recorder.setAudioSource(this.audioSource); + } + media_recorder.setVideoSource(this.videoSource); + // n.b., order may be important - output format should be first, at least + // also match order of MediaRecorder.setProfile() just to be safe, see https://stackoverflow.com/questions/5524672/is-it-possible-to-use-camcorderprofile-without-audio-source + media_recorder.setOutputFormat(this.fileFormat); + if( MyDebug.LOG ) + Log.d(TAG, "set frame rate: " + this.videoFrameRate); + media_recorder.setVideoFrameRate(this.videoFrameRate); + // it's probably safe to always call setCaptureRate, but to be safe (and keep compatibility with old Open Camera versions), we only do so when needed + if( this.videoCaptureRate != (double)this.videoFrameRate ) { + if( MyDebug.LOG ) + Log.d(TAG, "set capture rate: " + this.videoCaptureRate); + media_recorder.setCaptureRate(this.videoCaptureRate); + } + media_recorder.setVideoSize(this.videoFrameWidth, this.videoFrameHeight); + media_recorder.setVideoEncodingBitRate(this.videoBitRate); + media_recorder.setVideoEncoder(this.videoCodec); + if( record_audio ) { + media_recorder.setAudioEncodingBitRate(this.audioBitRate); + media_recorder.setAudioChannels(this.audioChannels); + media_recorder.setAudioSamplingRate(this.audioSampleRate); + media_recorder.setAudioEncoder(this.audioCodec); 
+ } + if( MyDebug.LOG ) + Log.d(TAG, "done: " + media_recorder); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/VideoQualityHandler.java b/app/src/main/java/net/sourceforge/opencamera/preview/VideoQualityHandler.java new file mode 100644 index 0000000..a458b18 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/VideoQualityHandler.java @@ -0,0 +1,226 @@ +package net.sourceforge.opencamera.preview; + +import android.media.CamcorderProfile; +import android.util.Log; + +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.MyDebug; + +import java.io.Serial; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +/** Handles video quality options. + * Note that this class should avoid calls to the Android API, so we can perform local unit testing + * on it. + */ +public class VideoQualityHandler { + private static final String TAG = "VideoQualityHandler"; + + public static class Dimension2D { + final int width; + final int height; + + public Dimension2D(int width, int height) { + this.width = width; + this.height = height; + } + } + + // video_quality can either be: + // - an int, in which case it refers to a CamcorderProfile + // - of the form [CamcorderProfile]_r[width]x[height] - we use the CamcorderProfile as a base, and override the video resolution - this is needed to support resolutions which don't have corresponding camcorder profiles + private List video_quality; + private int current_video_quality = -1; // this is an index into the video_quality array, or -1 if not found (though this shouldn't happen?) 
+ private List video_sizes; + private List video_sizes_high_speed; // may be null if high speed not supported + + void resetCurrentQuality() { + video_quality = null; + current_video_quality = -1; + } + + /** Initialises the class with the available video profiles and resolutions. The user should first + * set the video sizes via setVideoSizes(). + * @param profiles A list of qualities (see CamcorderProfile.QUALITY_*). Should be supplied in + * order from highest to lowest quality. + * @param dimensions A corresponding list of the width/height for that quality (as given by + * videoFrameWidth, videoFrameHeight in the profile returned by CamcorderProfile.get()). + */ + public void initialiseVideoQualityFromProfiles(List profiles, List dimensions) { + if( MyDebug.LOG ) + Log.d(TAG, "initialiseVideoQualityFromProfiles()"); + video_quality = new ArrayList<>(); + boolean[] done_video_size = null; + if( video_sizes != null ) { + done_video_size = new boolean[video_sizes.size()]; + for(int i=0;i, Serializable { + @Serial + private static final long serialVersionUID = 5802214721033718212L; + + @Override + public int compare(final CameraController.Size a, final CameraController.Size b) { + return b.width * b.height - a.width * a.height; + } + } + + public void sortVideoSizes() { + if( MyDebug.LOG ) + Log.d(TAG, "sortVideoSizes()"); + Collections.sort(this.video_sizes, new SortVideoSizesComparator()); + if( MyDebug.LOG ) { + for(CameraController.Size size : video_sizes) { + Log.d(TAG, " supported video size: " + size.width + ", " + size.height); + } + } + } + + private void addVideoResolutions(boolean[] done_video_size, int base_profile, int min_resolution_w, int min_resolution_h) { + if( video_sizes == null ) { + return; + } + if( MyDebug.LOG ) + Log.d(TAG, "profile " + base_profile + " is resolution " + min_resolution_w + " x " + min_resolution_h); + for(int i=0;i= min_resolution_w*min_resolution_h ) { + String str = base_profile + "_r" + size.width + "x" + size.height; + 
video_quality.add(str); + done_video_size[i] = true; + if( MyDebug.LOG ) + Log.d(TAG, "added: " + i + ":" + str); + } + } + } + + public List getSupportedVideoQuality() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedVideoQuality"); + return this.video_quality; + } + + int getCurrentVideoQualityIndex() { + if( MyDebug.LOG ) + Log.d(TAG, "getCurrentVideoQualityIndex"); + return this.current_video_quality; + } + + void setCurrentVideoQualityIndex(int current_video_quality) { + if( MyDebug.LOG ) + Log.d(TAG, "setCurrentVideoQualityIndex: " + current_video_quality); + this.current_video_quality = current_video_quality; + } + + public String getCurrentVideoQuality() { + if( current_video_quality == -1 ) + return null; + return video_quality.get(current_video_quality); + } + + public List getSupportedVideoSizes() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedVideoSizes"); + return this.video_sizes; + } + + public List getSupportedVideoSizesHighSpeed() { + if( MyDebug.LOG ) + Log.d(TAG, "getSupportedVideoSizesHighSpeed"); + return this.video_sizes_high_speed; + } + + /** Whether the requested fps is supported, without relying on high-speed mode. + * Typically caller should first check videoSupportsFrameRateHighSpeed(). + */ + public boolean videoSupportsFrameRate(int fps) { + return CameraController.CameraFeatures.supportsFrameRate(this.video_sizes, fps); + } + + /** Whether the requested fps is supported as a high-speed mode. 
+ */ + public boolean videoSupportsFrameRateHighSpeed(int fps) { + return CameraController.CameraFeatures.supportsFrameRate(this.video_sizes_high_speed, fps); + } + + CameraController.Size findVideoSizeForFrameRate(int width, int height, double fps, boolean return_closest) { + if( MyDebug.LOG ) { + Log.d(TAG, "findVideoSizeForFrameRate"); + Log.d(TAG, "width: " + width); + Log.d(TAG, "height: " + height); + Log.d(TAG, "fps: " + fps); + } + CameraController.Size requested_size = new CameraController.Size(width, height); + CameraController.Size best_video_size = CameraController.CameraFeatures.findSize(this.getSupportedVideoSizes(), requested_size, fps, return_closest); + if( best_video_size == null && this.getSupportedVideoSizesHighSpeed() != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to check high speed sizes"); + // check high speed + best_video_size = CameraController.CameraFeatures.findSize(this.getSupportedVideoSizesHighSpeed(), requested_size, fps, return_closest); + } + return best_video_size; + } + + private static CameraController.Size getMaxVideoSize(List sizes) { + int max_width = -1, max_height = -1; + for(CameraController.Size size : sizes) { + if( max_width == -1 || size.width*size.height > max_width*max_height ) { + max_width = size.width; + max_height = size.height; + } + } + return new CameraController.Size(max_width, max_height); + } + + /** Returns the maximum supported (non-high-speed) video size. + */ + CameraController.Size getMaxSupportedVideoSize() { + return getMaxVideoSize(video_sizes); + } + + /** Returns the maximum supported high speed video size. 
+ */ + CameraController.Size getMaxSupportedVideoSizeHighSpeed() { + return getMaxVideoSize(video_sizes_high_speed); + } + + public void setVideoSizes(List video_sizes) { + this.video_sizes = video_sizes; + this.sortVideoSizes(); + } + + public void setVideoSizesHighSpeed(List video_sizes_high_speed) { + this.video_sizes_high_speed = video_sizes_high_speed; + } + +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/CameraSurface.java b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/CameraSurface.java new file mode 100644 index 0000000..826238b --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/CameraSurface.java @@ -0,0 +1,19 @@ +package net.sourceforge.opencamera.preview.camerasurface; + +import net.sourceforge.opencamera.cameracontroller.CameraController; + +import android.graphics.Matrix; +import android.media.MediaRecorder; +import android.view.View; + +/** Provides support for the surface used for the preview - this can either be + * a SurfaceView or a TextureView. 
+ */ +public interface CameraSurface { + View getView(); + void setPreviewDisplay(CameraController camera_controller); // n.b., uses double-dispatch similar to Visitor pattern - behaviour depends on type of CameraSurface and CameraController + void setVideoRecorder(MediaRecorder video_recorder); + void setTransform(Matrix matrix); + void onPause(); + void onResume(); +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MySurfaceView.java b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MySurfaceView.java new file mode 100644 index 0000000..56e5b14 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MySurfaceView.java @@ -0,0 +1,117 @@ +package net.sourceforge.opencamera.preview.camerasurface; + +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.CameraControllerException; +import net.sourceforge.opencamera.preview.Preview; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.media.MediaRecorder; +import android.os.Handler; +import android.util.Log; +import android.view.MotionEvent; +import android.view.SurfaceView; +import android.view.View; + +import androidx.annotation.NonNull; + +/** Provides support for the surface used for the preview, using a SurfaceView. 
+ */ +public class MySurfaceView extends SurfaceView implements CameraSurface { + private static final String TAG = "MySurfaceView"; + + private final Preview preview; + private final int [] measure_spec = new int[2]; + private final Handler handler = new Handler(); + private final Runnable tick; + + public + MySurfaceView(Context context, final Preview preview) { + super(context); + this.preview = preview; + if( MyDebug.LOG ) { + Log.d(TAG, "new MySurfaceView"); + } + + // Install a SurfaceHolder.Callback so we get notified when the + // underlying surface is created and destroyed. + getHolder().addCallback(preview); + // deprecated setting, but required on Android versions prior to 3.0 + //getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // deprecated + + tick = new Runnable() { + public void run() { + /*if( MyDebug.LOG ) + Log.d(TAG, "invalidate()");*/ + preview.test_ticker_called = true; + invalidate(); + handler.postDelayed(this, preview.getFrameRate()); + } + }; + } + + @Override + public View getView() { + return this; + } + + @Override + public void setPreviewDisplay(CameraController camera_controller) { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewDisplay"); + try { + camera_controller.setPreviewDisplay(this.getHolder()); + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "failed to set preview display", e); + } + } + + @Override + public void setVideoRecorder(MediaRecorder video_recorder) { + video_recorder.setPreviewDisplay(this.getHolder().getSurface()); + } + + @SuppressLint("ClickableViewAccessibility") + @Override + public boolean onTouchEvent(MotionEvent event) { + return preview.touchEvent(event); + } + + @Override + public void onDraw(@NonNull Canvas canvas) { + preview.draw(canvas); + } + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + if( MyDebug.LOG ) + Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec); + preview.getMeasureSpec(measure_spec, widthSpec, heightSpec); + 
super.onMeasure(measure_spec[0], measure_spec[1]); + } + + @Override + public void setTransform(Matrix matrix) { + if( MyDebug.LOG ) + Log.d(TAG, "setting transforms not supported for MySurfaceView"); + throw new RuntimeException(); + } + + @Override + public void onPause() { + if( MyDebug.LOG ) + Log.d(TAG, "onPause()"); + handler.removeCallbacks(tick); + } + + @Override + public void onResume() { + if( MyDebug.LOG ) + Log.d(TAG, "onResume()"); + tick.run(); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MyTextureView.java b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MyTextureView.java new file mode 100644 index 0000000..586c9b8 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/preview/camerasurface/MyTextureView.java @@ -0,0 +1,90 @@ +package net.sourceforge.opencamera.preview.camerasurface; + +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.cameracontroller.CameraControllerException; +import net.sourceforge.opencamera.preview.Preview; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Matrix; +import android.media.MediaRecorder; +import android.util.Log; +import android.view.MotionEvent; +import android.view.TextureView; +import android.view.View; + +/** Provides support for the surface used for the preview, using a TextureView. + */ +public class MyTextureView extends TextureView implements CameraSurface { + private static final String TAG = "MyTextureView"; + + private final Preview preview; + private final int [] measure_spec = new int[2]; + + public MyTextureView(Context context, Preview preview) { + super(context); + this.preview = preview; + if( MyDebug.LOG ) { + Log.d(TAG, "new MyTextureView"); + } + + // Install a TextureView.SurfaceTextureListener so we get notified when the + // underlying surface is created and destroyed. 
+ this.setSurfaceTextureListener(preview); + } + + @Override + public View getView() { + return this; + } + + @Override + public void setPreviewDisplay(CameraController camera_controller) { + if( MyDebug.LOG ) + Log.d(TAG, "setPreviewDisplay"); + try { + camera_controller.setPreviewTexture(this); + } + catch(CameraControllerException e) { + MyDebug.logStackTrace(TAG, "failed to set preview display", e); + } + } + + @Override + public void setVideoRecorder(MediaRecorder video_recorder) { + // should be no need to do anything (see documentation for MediaRecorder.setPreviewDisplay()) + } + + @SuppressLint("ClickableViewAccessibility") + @Override + public boolean onTouchEvent(MotionEvent event) { + return preview.touchEvent(event); + } + + /*@Override + public void onDraw(Canvas canvas) { + preview.draw(canvas); + }*/ + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + if( MyDebug.LOG ) + Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec); + preview.getMeasureSpec(measure_spec, widthSpec, heightSpec); + super.onMeasure(measure_spec[0], measure_spec[1]); + } + + @Override + public void setTransform(Matrix matrix) { + super.setTransform(matrix); + } + + @Override + public void onPause() { + } + + @Override + public void onResume() { + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothLeService.java b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothLeService.java new file mode 100644 index 0000000..f7fde91 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothLeService.java @@ -0,0 +1,532 @@ +package net.sourceforge.opencamera.remotecontrol; + +import net.sourceforge.opencamera.MyDebug; + +import android.Manifest; +import android.app.Service; +import android.bluetooth.BluetoothAdapter; +import android.bluetooth.BluetoothDevice; +import android.bluetooth.BluetoothGatt; +import android.bluetooth.BluetoothGattCallback; +import 
android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import androidx.core.content.ContextCompat;

import android.util.Log;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;

/** Bound service that manages the Bluetooth LE link to a remote control device
 *  (currently the Kraken underwater housing). Button presses and sensor readings
 *  received over GATT are forwarded to the rest of the app as broadcast intents
 *  (ACTION_REMOTE_COMMAND / ACTION_SENSOR_VALUE etc).
 */
public class BluetoothLeService extends Service {
    private final static String TAG = "BluetoothLeService";

    private boolean is_bound; // whether service is bound
    private BluetoothManager bluetoothManager;
    private BluetoothAdapter bluetoothAdapter;
    private String device_address; // MAC address of the remote we connect/reconnect to
    private BluetoothGatt bluetoothGatt;
    private String remote_device_type;
    private final Handler bluetoothHandler = new Handler();
    // characteristics we have enabled notifications for, keyed by UUID string
    private final HashMap<String, BluetoothGattCharacteristic> subscribed_characteristics = new HashMap<>();
    // queue of characteristics still waiting to have notifications enabled (GATT only
    // allows one descriptor write in flight, see onDescriptorWrite)
    private final List<BluetoothGattCharacteristic> charsToSubscribe = new ArrayList<>();

    // last sensor readings; -1 means "no reading yet" (reset on every connection)
    private double currentTemp = -1;
    private double currentDepth = -1;

    /*private static final int STATE_DISCONNECTED = 0;
    private static final int STATE_CONNECTING = 1;
    private static final int STATE_CONNECTED = 2;*/

    public final static String ACTION_GATT_CONNECTED =
            "net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_CONNECTED";
    public final static String ACTION_GATT_DISCONNECTED =
            "net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_DISCONNECTED";
    public final static String ACTION_GATT_SERVICES_DISCOVERED =
            "net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_SERVICES_DISCOVERED";
    public final static String ACTION_DATA_AVAILABLE =
            "net.sourceforge.opencamera.Remotecontrol.ACTION_DATA_AVAILABLE";
    public final static String ACTION_REMOTE_COMMAND =
            "net.sourceforge.opencamera.Remotecontrol.COMMAND";
    public final static String ACTION_SENSOR_VALUE =
            "net.sourceforge.opencamera.Remotecontrol.SENSOR";
    public final static String SENSOR_TEMPERATURE =
            "net.sourceforge.opencamera.Remotecontrol.TEMPERATURE";
    public final static String SENSOR_DEPTH =
            "net.sourceforge.opencamera.Remotecontrol.DEPTH";
    public final static String EXTRA_DATA =
            "net.sourceforge.opencamera.Remotecontrol.EXTRA_DATA";
    public final static int COMMAND_SHUTTER = 32;
    public final static int COMMAND_MODE = 16;
    public final static int COMMAND_MENU = 48;
    public final static int COMMAND_AFMF = 97;
    public final static int COMMAND_UP = 64;
    public final static int COMMAND_DOWN = 80;

    /* This forces a gratuitous BLE scan to help the device
     * connect to the remote faster. This is due to limitations of the
     * Android BLE stack and API (just knowing the MAC is not enough on
     * many phones).*/
    private void triggerScan() {
        if( MyDebug.LOG )
            Log.d(TAG, "triggerScan");

        if( !is_bound ) {
            // Don't allow calls to startLeScan() (which requires location permission) when service
            // not bound, as application may be in background!
            // In theory this shouldn't be needed here, as we also check is_bound in connect(), but
            // have it here too just to be safe.
            Log.e(TAG, "triggerScan shouldn't be called when service not bound");
            return;
        }

        // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
        if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) {
                Log.e(TAG, "bluetooth scan permission not granted!");
                return;
            }
        }

        // Stops scanning after a pre-defined scan period.
        bluetoothHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
                if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
                    if( ContextCompat.checkSelfPermission(BluetoothLeService.this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) {
                        Log.e(TAG, "bluetooth scan permission not granted!");
                        return;
                    }
                }
                bluetoothAdapter.stopLeScan(null);
            }
        }, 10000);
        bluetoothAdapter.startLeScan(null);
    }

    /** Sets the remote device model, which determines which GATT characteristics
     *  are subscribed to in subscribeToServices(). */
    public void setRemoteDeviceType(String remote_device_type) {
        if( MyDebug.LOG )
            Log.d(TAG, "Setting remote type: " + remote_device_type);
        this.remote_device_type = remote_device_type;
    }

    private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
        @Override
        public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
            String intentAction;
            if( newState == BluetoothProfile.STATE_CONNECTED ) {
                intentAction = ACTION_GATT_CONNECTED;
                broadcastUpdate(intentAction);
                if( MyDebug.LOG ) {
                    Log.d(TAG, "Connected to GATT server, call discoverServices()");
                }

                // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
                boolean has_bluetooth_permission = true;
                if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
                    if( ContextCompat.checkSelfPermission(BluetoothLeService.this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                        Log.e(TAG, "bluetooth scan permission not granted!");
                        has_bluetooth_permission = false;
                    }
                }

                if( has_bluetooth_permission ) {
                    bluetoothGatt.discoverServices();
                }

                // forget stale sensor readings from a previous connection
                currentDepth = -1;
                currentTemp = -1;

            }
            else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
                intentAction = ACTION_GATT_DISCONNECTED;
                if( MyDebug.LOG )
                    Log.d(TAG, "Disconnected from GATT server, reattempting every 5 seconds.");
                broadcastUpdate(intentAction);
                attemptReconnect();
            }
        }

        /** Schedules a single reconnection attempt in 5 seconds (no-op when unbound). */
        void attemptReconnect() {
            if( !is_bound ) {
                // We check is_bound in connect() itself, but seems pointless to even try if we
                // know the service is unbound (and if it's later bound again, we'll try connecting
                // again anyway without needing this).
                Log.e(TAG, "don't attempt to reconnect when service not bound");
                // fix: previously fell through and scheduled the reconnect Timer anyway,
                // contradicting the comment above and leaking a timer thread per attempt
                return;
            }

            Timer timer = new Timer();
            timer.schedule(new TimerTask() {
                public void run() {
                    if( MyDebug.LOG )
                        Log.d(TAG, "Attempting to reconnect to remote.");
                    connect(device_address);
                }
            }, 5000);
        }

        @Override
        public void onServicesDiscovered(BluetoothGatt gatt, int status) {
            if( status == BluetoothGatt.GATT_SUCCESS ) {
                broadcastUpdate(ACTION_GATT_SERVICES_DISCOVERED);
                subscribeToServices();
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "onServicesDiscovered received: " + status);
            }
        }

        @Override
        public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) {
            if( status == BluetoothGatt.GATT_SUCCESS ) {
                broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
            }
        }

        @Override
        public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
            if( MyDebug.LOG )
                Log.d(TAG,"Got notification");
            broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
        }

        @Override
        public void onDescriptorWrite(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int status) {
            // We need to wait for this callback before enabling the next notification in case we
            // have several in our list
            if( !charsToSubscribe.isEmpty() ) {
                setCharacteristicNotification(charsToSubscribe.remove(0), true);
            }
        }
    };

    /**
     * Subscribe to the services/characteristics we need depending
     * on the remote device model
     *
     */
    private void subscribeToServices() {
        List<BluetoothGattService> gattServices = getSupportedGattServices();
        if (gattServices == null) return;
        List<UUID> mCharacteristicsWanted;

        //noinspection SwitchStatementWithTooFewBranches
        switch( remote_device_type ) {
            case "preference_remote_type_kraken":
                mCharacteristicsWanted = KrakenGattAttributes.getDesiredCharacteristics();
                break;
            default:
                // fix: was Collections.singletonList(UUID.fromString("0000")), but "0000" is not
                // a valid UUID string so fromString() would always throw IllegalArgumentException;
                // an unknown remote type simply wants no characteristics
                mCharacteristicsWanted = Collections.emptyList();
                break;
        }

        for(BluetoothGattService gattService : gattServices) {
            List<BluetoothGattCharacteristic> gattCharacteristics =
                    gattService.getCharacteristics();
            for(BluetoothGattCharacteristic gattCharacteristic : gattCharacteristics) {
                UUID uuid = gattCharacteristic.getUuid();
                if( mCharacteristicsWanted.contains(uuid) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "Found characteristic to subscribe to: " + uuid);
                    charsToSubscribe.add(gattCharacteristic);
                }
            }
        }
        // fix: guard against no matching characteristics - remove(0) on an empty list
        // would throw IndexOutOfBoundsException
        if( !charsToSubscribe.isEmpty() ) {
            setCharacteristicNotification(charsToSubscribe.remove(0), true);
        }
    }

    /** Broadcasts a simple state-change intent with no payload. */
    private void broadcastUpdate(final String action) {
        final Intent intent = new Intent(action);
        sendBroadcast(intent);
    }

    /** Decodes a characteristic update (button press or sensor reading) and broadcasts
     *  the corresponding ACTION_REMOTE_COMMAND / ACTION_SENSOR_VALUE intent. */
    private void broadcastUpdate(String ignoredAction, final BluetoothGattCharacteristic characteristic) {
        UUID uuid = characteristic.getUuid();
        final int format_uint8 = BluetoothGattCharacteristic.FORMAT_UINT8;
        final int format_uint16 = BluetoothGattCharacteristic.FORMAT_UINT16;
        int remoteCommand = -1;

        if( KrakenGattAttributes.KRAKEN_BUTTONS_CHARACTERISTIC.equals(uuid) ) {
            if( MyDebug.LOG )
                Log.d(TAG,"Got Kraken button press");
            final int buttonCode= characteristic.getIntValue(format_uint8, 0);
            if( MyDebug.LOG )
                Log.d(TAG, String.format("Received Button press: %d", buttonCode));
            // Note: we stay at a fairly generic level here and will manage variants
            // on the various button actions in MainActivity, because those will change depending
            // on the current state of the app, and we don't want to know anything about that state
            // from the Bluetooth LE service
            // TODO: update to remove all those tests and just forward buttonCode since value is identical
            // but this is more readable if we want to implement other drivers
            if( buttonCode == 32 ) {
                // Shutter press
                remoteCommand = COMMAND_SHUTTER;
            }
            else if( buttonCode == 16 ) {
                // "Mode" button: either "back" action or "Photo/Camera" switch
                remoteCommand = COMMAND_MODE;
            }
            else if( buttonCode == 48 ) {
                // "Menu" button
                remoteCommand = COMMAND_MENU;
            }
            else if( buttonCode == 97 ) {
                // AF/MF button
                remoteCommand = COMMAND_AFMF;
            }
            else if( buttonCode == 96 ) {
                // Long press on MF/AF button.
                // Note: the camera issues button code 97 first, then
                // 96 after one second of continuous press
            }
            else if( buttonCode == 64 ) {
                // Up button
                remoteCommand = COMMAND_UP;
            } else if (buttonCode == 80) {
                // Down button
                remoteCommand = COMMAND_DOWN;
            }
            // Only send forward if we have something to say
            if( remoteCommand > -1 ) {
                final Intent intent = new Intent(ACTION_REMOTE_COMMAND);
                intent.putExtra(EXTRA_DATA, remoteCommand);
                sendBroadcast(intent);
            }
        }
        else if( KrakenGattAttributes.KRAKEN_SENSORS_CHARACTERISTIC.equals(uuid) ) {
            // The housing returns four bytes.
            // Byte 0-1: depth = (Byte 0 + Byte 1 << 8) / 10 / density
            // Byte 2-3: temperature = (Byte 2 + Byte 3 << 8) / 10
            //
            // Depth is valid for fresh water by default ( makes you wonder whether the sensor
            // is really designed for saltwater at all), and the value has to be divided by the density
            // of saltwater. A commonly accepted value is 1030 kg/m3 (1.03 density)

            double temperature = characteristic.getIntValue(format_uint16, 2) / 10.0;
            double depth = characteristic.getIntValue(format_uint16, 0) / 10.0;

            // skip the broadcast when nothing changed since the last reading
            if( temperature == currentTemp && depth == currentDepth )
                return;

            currentDepth = depth;
            currentTemp = temperature;

            if( MyDebug.LOG )
                Log.d(TAG, "Got new Kraken sensor reading. Temperature: " + temperature + " Depth:" + depth);

            final Intent intent = new Intent(ACTION_SENSOR_VALUE);
            intent.putExtra(SENSOR_TEMPERATURE, temperature);
            intent.putExtra(SENSOR_DEPTH, depth);
            sendBroadcast(intent);
        }

    }

    public class LocalBinder extends Binder {
        public BluetoothLeService getService() {
            return BluetoothLeService.this;
        }
    }

    private final IBinder mBinder = new LocalBinder();

    @Override
    public IBinder onBind(Intent intent) {
        if( MyDebug.LOG )
            Log.d(TAG, "onBind");
        return mBinder;
    }

    @Override
    public boolean onUnbind(Intent intent) {
        if( MyDebug.LOG )
            Log.d(TAG, "onUnbind");
        this.is_bound = false;
        close();
        return super.onUnbind(intent);
    }

    /** Only call this after service is bound (from ServiceConnection.onServiceConnected())!
     *  @return true if the bluetooth manager and adapter could be obtained
     */
    public boolean initialize() {
        if( MyDebug.LOG )
            Log.d(TAG, "initialize");

        // in theory we'd put this in onBind(), to be more symmetric with onUnbind() where we
        // set to false - but unclear whether onBind() is always called before
        // ServiceConnection.onServiceConnected().
        this.is_bound = true;

        if( bluetoothManager == null ) {
            bluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
            if( bluetoothManager == null ) {
                Log.e(TAG, "Unable to initialize BluetoothManager.");
                return false;
            }
        }

        bluetoothAdapter = bluetoothManager.getAdapter();
        if( bluetoothAdapter == null ) {
            Log.e(TAG, "Unable to obtain a BluetoothAdapter.");
            return false;
        }

        return true;
    }

    /** Connects (or reconnects) to the remote device with the given MAC address.
     *  @return true if a connection attempt was started
     */
    public boolean connect(final String address) {
        if( MyDebug.LOG )
            Log.d(TAG, "connect: " + address);
        if( bluetoothAdapter == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "bluetoothAdapter is null");
            return false;
        }
        else if( address == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "address is null");
            return false;
        }
        else if( !is_bound ) {
            // Don't allow calls to startLeScan() via triggerScan() (which requires location
            // permission) when service not bound, as application may be in background!
            // And it doesn't seem sensible to even allow connecting if service not bound.
            // Under normal operation this isn't needed, but there are calls to connect() that can
            // happen from postDelayed() or TimerTask in this class, so a risk that they call
            // connect() after the service is unbound!
            Log.e(TAG, "connect shouldn't be called when service not bound");
            return false;
        }

        // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
        if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                Log.e(TAG, "bluetooth scan permission not granted!");
                return false;
            }
        }

        // test code for infinite looping, seeing if this runs in background:
        /*if( address.equals("undefined") ) {
            Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                public void run() {
                    if( MyDebug.LOG )
                        Log.d(TAG, "trying connect again from postdelayed");
                    connect(address);
                }
            }, 1000);
        }

        if( address.equals("undefined") ) {
            // test - only needed if we've hacked BluetoothRemoteControl.remoteEnabled() to not check for being undefined
            if( MyDebug.LOG )
                Log.d(TAG, "address is undefined");
            return false;
        }*/

        // drop any previous GATT connection to the same device before reconnecting
        if( address.equals(device_address) && bluetoothGatt != null ) {
            bluetoothGatt.disconnect();
            bluetoothGatt.close();
            bluetoothGatt = null;
        }

        final BluetoothDevice device = bluetoothAdapter.getRemoteDevice(address);
        if( device == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "device not found");
            // retry in 5 seconds (connect() re-checks is_bound so this is safe if unbound meanwhile)
            Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                public void run() {
                    if( MyDebug.LOG )
                        Log.d(TAG, "attempt to connect to remote");
                    connect(address);
                }
            }, 5000);
            return false;
        }

        // It looks like Android won't connect to BLE devices properly without scanning
        // for them first, even when connecting by explicit MAC address. Since we're using
        // BLE for underwater housings and we want rock solid connectivity, we trigger
        // a scan for 10 seconds
        triggerScan();

        bluetoothGatt = device.connectGatt(this, true, mGattCallback);
        device_address = address;
        return true;
    }

    /** Releases the GATT connection, if any. */
    private void close() {
        if( bluetoothGatt == null ) {
            return;
        }

        // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
        if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                Log.e(TAG, "bluetooth scan permission not granted!");
                return;
            }
        }

        bluetoothGatt.close();
        bluetoothGatt = null;
    }

    /** Enables/disables notifications for the characteristic, and writes the
     *  client-characteristic-config descriptor (onDescriptorWrite continues the queue). */
    private void setCharacteristicNotification(BluetoothGattCharacteristic characteristic, boolean enabled) {
        if( bluetoothAdapter == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "bluetoothAdapter is null");
            return;
        }
        else if( bluetoothGatt == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "bluetoothGatt is null");
            return;
        }

        // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
        if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                Log.e(TAG, "bluetooth scan permission not granted!");
                return;
            }
        }

        String uuid = characteristic.getUuid().toString();
        bluetoothGatt.setCharacteristicNotification(characteristic, enabled);
        if( enabled ) {
            subscribed_characteristics.put(uuid, characteristic);
        }
        else {
            subscribed_characteristics.remove(uuid);
        }

        BluetoothGattDescriptor descriptor = characteristic.getDescriptor(KrakenGattAttributes.CLIENT_CHARACTERISTIC_CONFIG);
        // fix: getDescriptor() returns null if the characteristic lacks the CCC descriptor -
        // previously this would NPE
        if( descriptor != null ) {
            descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
            bluetoothGatt.writeDescriptor(descriptor);
        }
        else {
            Log.e(TAG, "characteristic has no client characteristic config descriptor: " + uuid);
        }
    }

    private List<BluetoothGattService> getSupportedGattServices() {
        if( bluetoothGatt == null )
            return null;

        return bluetoothGatt.getServices();
    }
}
return null; + + return bluetoothGatt.getServices(); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothRemoteControl.java b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothRemoteControl.java new file mode 100644 index 0000000..9714a76 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/BluetoothRemoteControl.java @@ -0,0 +1,309 @@ +package net.sourceforge.opencamera.remotecontrol; + +import static android.content.Context.RECEIVER_NOT_EXPORTED; + +import android.content.BroadcastReceiver; +import android.content.ComponentName; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.content.ServiceConnection; +import android.content.SharedPreferences; +import android.os.Build; +import android.os.Handler; +import android.os.IBinder; +import android.preference.PreferenceManager; +import android.util.Log; + +import net.sourceforge.opencamera.MainActivity; +import net.sourceforge.opencamera.MyApplicationInterface; +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.PreferenceKeys; +import net.sourceforge.opencamera.ui.MainUI; + +/** Class for handling the Bluetooth LE remote control functionality. + */ +public class BluetoothRemoteControl { + private final static String TAG = "BluetoothRemoteControl"; + + private final MainActivity main_activity; + + private BluetoothLeService bluetoothLeService; + private String remoteDeviceAddress; + private String remoteDeviceType; + private boolean is_connected; + + public BluetoothRemoteControl(MainActivity main_activity) { + this.main_activity = main_activity; + } + + // class to manage the Service lifecycle for remote control. 
+ private final ServiceConnection mServiceConnection = new ServiceConnection() { + + @Override + public void onServiceConnected(ComponentName componentName, IBinder service) { + if( MyDebug.LOG ) + Log.d(TAG, "onServiceConnected"); + if( main_activity.isAppPaused() ) { + if( MyDebug.LOG ) + Log.d(TAG, "but app is now paused"); + // Unclear if this could happen - possibly if app pauses immediately after starting + // the service, but before we connect? In theory we should then unbind the service, + // but seems safer not to try to call initialize or connect. + // This will mean the BluetoothLeService still thinks it's unbound (is_bound will + // be left false), but find, that just means we'll enforce not trying to connect at + // a later stage). + return; + } + bluetoothLeService = ((BluetoothLeService.LocalBinder) service).getService(); + if( !bluetoothLeService.initialize() ) { + Log.e(TAG, "Unable to initialize Bluetooth"); + stopRemoteControl(); + } + // connect to the device + bluetoothLeService.connect(remoteDeviceAddress); + } + + /** Called when a connection to the Service has been lost. This typically happens when the + * process hosting the service has crashed or been killed. + * So in particular, note this isn't the inverse to onServiceConnected() - whilst + * onServiceConnected is always called (after the service receives onBind()), upon normal + * disconnection (after we call unbindService()), the service receives onUnbind(), but + * onServiceDisconnected is not called under normal operation. + */ + @Override + public void onServiceDisconnected(ComponentName componentName) { + if( MyDebug.LOG ) + Log.d(TAG, "onServiceDisconnected"); + Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + public void run() { + bluetoothLeService.connect(remoteDeviceAddress); + } + }, 5000); + + } + + }; + + /** + * Receives event from the remote command handler through intents + * Handles various events fired by the Service. 
+ */ + private final BroadcastReceiver remoteControlCommandReceiver = new BroadcastReceiver() { + @Override + public void onReceive(Context context, Intent intent) { + final String action = intent.getAction(); + MyApplicationInterface applicationInterface = main_activity.getApplicationInterface(); + MainUI mainUI = main_activity.getMainUI(); + if( BluetoothLeService.ACTION_GATT_CONNECTED.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote connected"); + // Tell the Bluetooth service what type of remote we want to use + bluetoothLeService.setRemoteDeviceType(remoteDeviceType); + main_activity.setBrightnessForCamera(false); + } + else if( BluetoothLeService.ACTION_GATT_DISCONNECTED.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote disconnected"); + is_connected = false; + applicationInterface.getDrawPreview().onExtraOSDValuesChanged("-- \u00B0C", "-- m"); + mainUI.updateRemoteConnectionIcon(); + main_activity.setBrightnessToMinimumIfWanted(); + if (mainUI.isExposureUIOpen()) + mainUI.toggleExposureUI(); + } + else if( BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED.equals(action) ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote services discovered"); + // We let the BluetoothLEService subscribe to what is relevant, so we + // do nothing here, but we wait until this is done to update the UI + // icon + is_connected = true; + mainUI.updateRemoteConnectionIcon(); + } + else if( BluetoothLeService.ACTION_SENSOR_VALUE.equals(action) ) { + double temp = intent.getDoubleExtra(BluetoothLeService.SENSOR_TEMPERATURE, -1); + double depth = intent.getDoubleExtra(BluetoothLeService.SENSOR_DEPTH, -1) / main_activity.getWaterDensity(); + depth = (Math.round(depth* 10)) / 10.0; // Round to 1 decimal + if( MyDebug.LOG ) + Log.d(TAG, "Sensor values: depth: " + depth + " - temp: " + temp); + // Create two OSD lines + String line1 = temp + " \u00B0C"; + String line2 = depth + " m"; + applicationInterface.getDrawPreview().onExtraOSDValuesChanged(line1, line2); + } + else 
if( BluetoothLeService.ACTION_REMOTE_COMMAND.equals(action) ) { + int command = intent.getIntExtra(BluetoothLeService.EXTRA_DATA, -1); + // TODO: we could abstract this into a method provided by each remote control model + switch( command ) { + case BluetoothLeService.COMMAND_SHUTTER: + // Easiest - just take a picture (or start/stop camera) + main_activity.takePicture(false); + break; + case BluetoothLeService.COMMAND_MODE: + // "Mode" key :either toggles photo/video mode, or + // closes the settings screen that is currently open + if( mainUI.popupIsOpen() ) { + mainUI.togglePopupSettings(); + } + else if( mainUI.isExposureUIOpen() ) { + mainUI.toggleExposureUI(); + } + else { + main_activity.clickedSwitchVideo(null); + } + break; + case BluetoothLeService.COMMAND_MENU: + // Open the exposure UI (ISO/Exposure) or + // select the current line on an open UI or + // select the current option on a button on a selected line + if( !mainUI.popupIsOpen() ) { + if( !mainUI.isExposureUIOpen() ) { + mainUI.toggleExposureUI(); + } + else { + mainUI.commandMenuExposure(); + } + } + else { + mainUI.commandMenuPopup(); + } + break; + case BluetoothLeService.COMMAND_UP: + if( !mainUI.processRemoteUpButton() ) { + // Default up behaviour: + // - if we are on manual focus, then adjust focus. + // - if we are on autofocus, then adjust zoom. 
+ if( main_activity.getPreview().getCurrentFocusValue() != null && main_activity.getPreview().getCurrentFocusValue().equals("focus_mode_manual2") ) { + main_activity.changeFocusDistance(-25, false); + } + else { + // Adjust zoom + main_activity.zoomIn(); + } + } + break; + case BluetoothLeService.COMMAND_DOWN: + if( !mainUI.processRemoteDownButton() ) { + if( main_activity.getPreview().getCurrentFocusValue() != null && main_activity.getPreview().getCurrentFocusValue().equals("focus_mode_manual2") ) { + main_activity.changeFocusDistance(25, false); + } + else { + // Adjust zoom + main_activity.zoomOut(); + } + } + break; + case BluetoothLeService.COMMAND_AFMF: + // Open the camera settings popup menu (not the app settings) + // or selects the current line/icon in the popup menu, and finally + // clicks the icon + //if( !mainUI.popupIsOpen() ) { + mainUI.togglePopupSettings(); + //} + break; + default: + break; + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "Other remote event"); + } + } + }; + + public boolean remoteConnected() { + /*if( true ) + return true; // test*/ + return is_connected; + } + + // TODO: refactor for a filter than receives generic remote control intents + private static IntentFilter makeRemoteCommandIntentFilter() { + final IntentFilter intentFilter = new IntentFilter(); + intentFilter.addAction(BluetoothLeService.ACTION_GATT_CONNECTED); + intentFilter.addAction(BluetoothLeService.ACTION_GATT_DISCONNECTED); + intentFilter.addAction(BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED); + intentFilter.addAction(BluetoothLeService.ACTION_DATA_AVAILABLE); + intentFilter.addAction(BluetoothLeService.ACTION_REMOTE_COMMAND); + intentFilter.addAction(BluetoothLeService.ACTION_SENSOR_VALUE); + return intentFilter; + } + + /** + * Starts or stops the remote control layer + */ + public void startRemoteControl() { + if( MyDebug.LOG ) + Log.d(TAG, "BLE Remote control service start check..."); + Intent gattServiceIntent = new Intent(main_activity, 
BluetoothLeService.class); + // Check isAppPaused() just to be safe - in theory shouldn't be needed, but don't want to + // start up the service if we're in background! (And we might as well then try to stop the + // service instead.) + if( !main_activity.isAppPaused() && remoteEnabled() ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote enabled, starting service"); + main_activity.bindService(gattServiceIntent, mServiceConnection, Context.BIND_AUTO_CREATE); + // For Android 14 (UPSIDE_DOWN_CAKE) onwards, a flag of RECEIVER_EXPORTED or RECEIVER_NOT_EXPORTED must be specified when using + // registerReceiver with non-system intents, otherwise a SecurityException will be thrown. + // The if condition is for TIRAMISU as there seems no harm doing this for earlier versions too, but RECEIVER_NOT_EXPORTED + // requires Android 13. + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) { + main_activity.registerReceiver(remoteControlCommandReceiver, makeRemoteCommandIntentFilter(), RECEIVER_NOT_EXPORTED); + } + else { + // n.b., this gets an Android lint warning, even though this can only be fixed for TIRAMISU onwards (as + // RECEIVER_NOT_EXPORTED not available on older versions)! 
+ main_activity.registerReceiver(remoteControlCommandReceiver, makeRemoteCommandIntentFilter()); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "Remote disabled, stopping service"); + // Stop the service if necessary + try { + main_activity.unregisterReceiver(remoteControlCommandReceiver); + main_activity.unbindService(mServiceConnection); + is_connected = false; // Unbinding closes the connection, of course + main_activity.getMainUI().updateRemoteConnectionIcon(); + } + catch(IllegalArgumentException e){ + if( MyDebug.LOG ) + Log.d(TAG, "Remote Service was not running, that's fine"); + } + } + } + + public void stopRemoteControl() { + if( MyDebug.LOG ) + Log.d(TAG, "BLE Remote control service shutdown..."); + if( remoteEnabled()) { + // Stop the service if necessary + try { + main_activity.unregisterReceiver(remoteControlCommandReceiver); + main_activity.unbindService(mServiceConnection); + is_connected = false; // Unbinding closes the connection, of course + main_activity.getMainUI().updateRemoteConnectionIcon(); + } + catch(IllegalArgumentException e){ + MyDebug.logStackTrace(TAG, "Remote Service was not running, that's strange", e); + } + } + } + + /** + * Checks if remote control is enabled in the settings, and the remote control address + * is also defined + * @return true if this is the case + */ + public boolean remoteEnabled() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + boolean remote_enabled = sharedPreferences.getBoolean(PreferenceKeys.EnableRemote, false); + remoteDeviceType = sharedPreferences.getString(PreferenceKeys.RemoteType, "undefined"); + remoteDeviceAddress = sharedPreferences.getString(PreferenceKeys.RemoteName, "undefined"); + //return remote_enabled; // test - if using this, also need to enable test code in BluetoothLeService.connect() + return remote_enabled && !remoteDeviceAddress.equals("undefined"); + } +} diff --git 
a/app/src/main/java/net/sourceforge/opencamera/remotecontrol/DeviceScanner.java b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/DeviceScanner.java new file mode 100644 index 0000000..8cfbe97 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/DeviceScanner.java @@ -0,0 +1,542 @@ +package net.sourceforge.opencamera.remotecontrol; + +import android.Manifest; +import android.app.Activity; +import android.app.AlertDialog; +//import android.app.ListActivity; +import android.bluetooth.BluetoothAdapter; +import android.bluetooth.BluetoothDevice; +import android.bluetooth.BluetoothManager; +import android.content.Context; +import android.content.DialogInterface; +import android.content.Intent; +import android.content.SharedPreferences; +import android.content.pm.PackageManager; +import android.os.Build; +import android.os.Bundle; +import android.os.Handler; +import android.preference.PreferenceManager; +import androidx.annotation.NonNull; +import androidx.appcompat.app.AppCompatActivity; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.AdapterView; +import android.widget.BaseAdapter; +import android.widget.Button; +import android.widget.ListView; +import android.widget.TextView; +import android.widget.Toast; + +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.PreferenceKeys; +import net.sourceforge.opencamera.R; + +import java.util.ArrayList; + +//public class DeviceScanner extends ListActivity { +//public class DeviceScanner extends Activity { +public class DeviceScanner extends AppCompatActivity { + private static final String TAG = "OC-BLEScanner"; + private LeDeviceListAdapter leDeviceListAdapter; + private BluetoothAdapter bluetoothAdapter; + private boolean is_scanning; + private Handler bluetoothHandler; + private 
SharedPreferences mSharedPreferences; + + private static final int REQUEST_ENABLE_BT = 1; + private static final int REQUEST_LOCATION_PERMISSIONS = 2; + private static final int REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS = 3; + + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_device_select); + bluetoothHandler = new Handler(); + + if( !getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE) ) { + Toast.makeText(this, R.string.ble_not_supported, Toast.LENGTH_SHORT).show(); + finish(); + } + + final BluetoothManager bluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE); + bluetoothAdapter = bluetoothManager.getAdapter(); + + if( bluetoothAdapter == null ) { + Toast.makeText(this, R.string.bluetooth_not_supported, Toast.LENGTH_SHORT).show(); + finish(); + return; + } + + Button startScanningButton = findViewById(R.id.StartScanButton); + startScanningButton.setOnClickListener(new View.OnClickListener() { + public void onClick(View v) { + startScanning(); + } + }); + + mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getApplicationContext()); + String preference_remote_device_name = PreferenceKeys.RemoteName; + String remote_name = mSharedPreferences.getString(preference_remote_device_name, "none"); + if( MyDebug.LOG ) + Log.d(TAG, "preference_remote_device_name: " + remote_name); + + TextView currentRemote = findViewById(R.id.currentRemote); + currentRemote.setText(getResources().getString(R.string.bluetooth_current_remote) + " " + remote_name); + } + + @Override + public void onContentChanged() { + if( MyDebug.LOG ) + Log.d(TAG, "onContentChanged"); + + super.onContentChanged(); + + ListView list = findViewById(R.id.list); + list.setOnItemClickListener(new AdapterView.OnItemClickListener() { + public void onItemClick(AdapterView parent, View v, int position, long id) { + onListItemClick(position); + } + }); + } + + 
    /** Returns whether we can use the new Android 12 permissions for bluetooth (BLUETOOTH_SCAN,
     * BLUETOOTH_CONNECT) - if so, we should use these and NOT location permissions.
     * See https://developer.android.com/guide/topics/connectivity/bluetooth/permissions .
     */
    static boolean useAndroid12BluetoothPermissions() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.S;
    }

    /** If bluetooth is currently disabled, fires the system dialog asking the user to enable it.
     *  No-op when the required connect permission is missing. */
    private void checkBluetoothEnabled() {
        if( MyDebug.LOG )
            Log.d(TAG, "checkBluetoothEnabled");
        // BLUETOOTH_CONNECT permission is needed for BluetoothAdapter.ACTION_REQUEST_ENABLE.
        // Callers should have already checked for bluetooth permission, but we have this check
        // just in case - and also to avoid the Android lint error that we'd get.
        if( useAndroid12BluetoothPermissions() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "check for bluetooth connect permission");
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                Log.e(TAG, "bluetooth connect permission not granted!");
                return;
            }
        }
        if( !bluetoothAdapter.isEnabled() ) {
            // fire an intent to display a dialog asking the user to grant permission to enable Bluetooth
            // n.b., on Android 12 need BLUETOOTH_CONNECT permission for this
            if( MyDebug.LOG )
                Log.d(TAG, "request to enable bluetooth");
            Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
        }
    }

    /** Checks permissions and kicks off the BLE device scan, requesting permissions first
     *  if they haven't been granted. */
    private void startScanning() {
        if( MyDebug.LOG )
            Log.d(TAG, "Start scanning");

        // In real life most of bluetooth LE devices associated with location, so without this
        // permission the sample shows nothing in most cases
        // Also see https://stackoverflow.com/questions/33045581/location-needs-to-be-enabled-for-bluetooth-low-energy-scanning-on-android-6-0
        // Update: on Android 10+, ACCESS_FINE_LOCATION is needed: https://developer.android.com/about/versions/10/privacy/changes#location-telephony-bluetooth-wifi
        // Update: on Android 12+, we use the new bluetooth permissions instead of location permissions.
        boolean has_permission = false;
        if( useAndroid12BluetoothPermissions() ) {
            // Android 12+: need both SCAN and CONNECT
            if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) == PackageManager.PERMISSION_GRANTED
                    &&
                    ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) == PackageManager.PERMISSION_GRANTED
            ) {
                has_permission = true;
            }
        }
        else {
            // pre-Android 12: location permission gates BLE scanning (fine on Q+, coarse before)
            String permission_needed = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ? Manifest.permission.ACCESS_FINE_LOCATION : Manifest.permission.ACCESS_COARSE_LOCATION;

            int permissionCoarse = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ?
                    ContextCompat
                            .checkSelfPermission(this, permission_needed) :
                    PackageManager.PERMISSION_GRANTED;

            if( permissionCoarse == PackageManager.PERMISSION_GRANTED ) {
                has_permission = true;
            }
        }

        if( has_permission ) {
            checkBluetoothEnabled();
        }

        // (re)create the list adapter even if we can't scan yet, so the list is in a known state
        leDeviceListAdapter = new LeDeviceListAdapter();
        //setListAdapter(leDeviceListAdapter);
        ListView list = findViewById(R.id.list);
        list.setAdapter(leDeviceListAdapter);

        if( has_permission ) {
            scanLeDevice(true);
        }
        else {
            askForDeviceScannerPermission();
        }
    }

    /** Request permissions needed for bluetooth (BLUETOOTH_SCAN and BLUETOOTH_CONNECT on Android
     * 12+, else location permission).
     */
    private void askForDeviceScannerPermission() {
        if( MyDebug.LOG )
            Log.d(TAG, "askForDeviceScannerPermission");
        // n.b., we only need ACCESS_COARSE_LOCATION, but it's simpler to request both to be consistent with Open Camera's
        // location permission requests in PermissionHandler.
If we only request ACCESS_COARSE_LOCATION here, and later the + // user enables something that needs ACCESS_FINE_LOCATION, Android ends up showing the "rationale" dialog - and once + // that's dismissed, the permission seems to be granted without showing the permission request dialog (so it works, + // but is confusing for the user) + // Also note that if we did want to only request ACCESS_COARSE_LOCATION here, we'd need to declare that permission + // explicitly in the AndroidManifest.xml, otherwise the dialog to request permission is never shown (and the permission + // is denied automatically). + // Update: on Android 10+, ACCESS_FINE_LOCATION is needed anyway: https://developer.android.com/about/versions/10/privacy/changes#location-telephony-bluetooth-wifi + // Update: on Android 12+, we use the new bluetooth permissions instead of location permissions. + if( useAndroid12BluetoothPermissions() ) { + if( ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.BLUETOOTH_SCAN) || + ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.BLUETOOTH_CONNECT) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. 
+ showRequestBluetoothScanConnectPermissionRationale(); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting bluetooth scan/connect permissions..."); + ActivityCompat.requestPermissions(this, + new String[]{Manifest.permission.BLUETOOTH_SCAN, Manifest.permission.BLUETOOTH_CONNECT}, + REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS); + } + } + else { + if( ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.ACCESS_FINE_LOCATION) || + ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.ACCESS_COARSE_LOCATION) ) { + // Show an explanation to the user *asynchronously* -- don't block + // this thread waiting for the user's response! After the user + // sees the explanation, try again to request the permission. + showRequestLocationPermissionRationale(); + } + else { + // Can go ahead and request the permission + if( MyDebug.LOG ) + Log.d(TAG, "requesting location permissions..."); + ActivityCompat.requestPermissions(this, + new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION}, + REQUEST_LOCATION_PERMISSIONS); + } + } + } + + private void showRequestBluetoothScanConnectPermissionRationale() { + if( MyDebug.LOG ) + Log.d(TAG, "showRequestBluetoothScanConnectPermissionRationale"); + if( !useAndroid12BluetoothPermissions() ) { + // just in case! 
+ Log.e(TAG, "shouldn't be requesting bluetooth scan/connect permissions!"); + return; + } + + String [] permissions = new String[]{Manifest.permission.BLUETOOTH_SCAN, Manifest.permission.BLUETOOTH_CONNECT}; + int message_id = R.string.permission_rationale_bluetooth_scan_connect; + + final String [] permissions_f = permissions; + new AlertDialog.Builder(this) + .setTitle(R.string.permission_rationale_title) + .setMessage(message_id) + .setIcon(android.R.drawable.ic_dialog_alert) + .setPositiveButton(android.R.string.ok, null) + .setOnDismissListener(new DialogInterface.OnDismissListener() { + public void onDismiss(DialogInterface dialog) { + if( MyDebug.LOG ) + Log.d(TAG, "requesting permission..."); + ActivityCompat.requestPermissions(DeviceScanner.this, permissions_f, REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS); + } + }).show(); + } + + private void showRequestLocationPermissionRationale() { + if( MyDebug.LOG ) + Log.d(TAG, "showRequestLocationPermissionRationale"); + if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) { + if( MyDebug.LOG ) + Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!"); + return; + } + + String [] permissions = new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION}; + int message_id = R.string.permission_rationale_location; + + final String [] permissions_f = permissions; + new AlertDialog.Builder(this) + .setTitle(R.string.permission_rationale_title) + .setMessage(message_id) + .setIcon(android.R.drawable.ic_dialog_alert) + .setPositiveButton(android.R.string.ok, null) + .setOnDismissListener(new DialogInterface.OnDismissListener() { + public void onDismiss(DialogInterface dialog) { + if( MyDebug.LOG ) + Log.d(TAG, "requesting permission..."); + ActivityCompat.requestPermissions(DeviceScanner.this, permissions_f, REQUEST_LOCATION_PERMISSIONS); + } + }).show(); + } + + @Override + public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, + @NonNull 
int[] grantResults) { + if( MyDebug.LOG ) + Log.d(TAG, "onRequestPermissionsResult: requestCode " + requestCode); + + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + + switch (requestCode) { + case REQUEST_LOCATION_PERMISSIONS: { + if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { + if( MyDebug.LOG ) + Log.d(TAG, "location permission granted"); + checkBluetoothEnabled(); + scanLeDevice(true); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "location permission denied"); + } + + break; + } + case REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS: { + if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { + if( MyDebug.LOG ) + Log.d(TAG, "bluetooth scan/connect permission granted"); + checkBluetoothEnabled(); + scanLeDevice(true); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "bluetooth scan/connect permission denied"); + } + + break; + } + } + } + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + if( MyDebug.LOG ) + Log.d(TAG, "onActivityResult"); + // user decided to cancel the enabling of Bluetooth, so exit + if( requestCode == REQUEST_ENABLE_BT && resultCode == Activity.RESULT_CANCELED ) { + finish(); + return; + } + super.onActivityResult(requestCode, resultCode, data); + } + + @Override + protected void onPause() { + if( MyDebug.LOG ) + Log.d(TAG, "onPause"); + super.onPause(); + if( is_scanning ) { + scanLeDevice(false); + leDeviceListAdapter.clear(); + } + } + + @Override + protected void onStop() { + if( MyDebug.LOG ) + Log.d(TAG, "onStop"); + super.onStop(); + + // we do this in onPause, but done here again just to be certain! + if( is_scanning ) { + scanLeDevice(false); + leDeviceListAdapter.clear(); + } + } + + @Override + protected void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + + // we do this in onPause, but done here again just to be certain! 
+ if( is_scanning ) { + scanLeDevice(false); + leDeviceListAdapter.clear(); + } + + super.onDestroy(); + } + + //@Override + protected void onListItemClick(int position) { + final BluetoothDevice device = leDeviceListAdapter.getDevice(position); + if( device == null ) + return; + if( MyDebug.LOG ) { + Log.d(TAG, "onListItemClick"); + Log.d(TAG, device.getAddress()); + } + String preference_remote_device_name = PreferenceKeys.RemoteName; + SharedPreferences.Editor editor = mSharedPreferences.edit(); + editor.putString(preference_remote_device_name, device.getAddress()); + editor.apply(); + scanLeDevice(false); + finish(); + } + + private void scanLeDevice(final boolean enable) { + if( MyDebug.LOG ) + Log.d(TAG, "scanLeDevice: " + enable); + + // BLUETOOTH_SCAN permission is needed for bluetoothAdapter.startLeScan and + // bluetoothAdapter.stopLeScan. Callers should have already checked for bluetooth + // permission, but we have this check just in case - and also to avoid the Android lint + // error that we'd get. 
+ if( useAndroid12BluetoothPermissions() ) { + if( MyDebug.LOG ) + Log.d(TAG, "check for bluetooth scan permission"); + if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) { + Log.e(TAG, "bluetooth scan permission not granted!"); + return; + } + } + + if( enable ) { + // stop scanning after certain time + bluetoothHandler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "stop scanning after delay"); + /*is_scanning = false; + bluetoothAdapter.stopLeScan(mLeScanCallback); + invalidateOptionsMenu();*/ + scanLeDevice(false); + } + }, 10000); + + is_scanning = true; + bluetoothAdapter.startLeScan(mLeScanCallback); + } + else { + is_scanning = false; + bluetoothAdapter.stopLeScan(mLeScanCallback); + } + invalidateOptionsMenu(); + } + + private class LeDeviceListAdapter extends BaseAdapter { + private final ArrayList mLeDevices; + private final LayoutInflater mInflator; + + LeDeviceListAdapter() { + super(); + mLeDevices = new ArrayList<>(); + mInflator = DeviceScanner.this.getLayoutInflater(); + } + + void addDevice(BluetoothDevice device) { + if( !mLeDevices.contains(device) ) { + mLeDevices.add(device); + } + } + + BluetoothDevice getDevice(int position) { + return mLeDevices.get(position); + } + + void clear() { + mLeDevices.clear(); + } + + @Override + public int getCount() { + return mLeDevices.size(); + } + + @Override + public Object getItem(int i) { + return mLeDevices.get(i); + } + + @Override + public long getItemId(int i) { + return i; + } + + @Override + public View getView(int i, View view, ViewGroup viewGroup) { + ViewHolder viewHolder; + if( view == null ) { + view = mInflator.inflate(R.layout.listitem_device, null); + viewHolder = new ViewHolder(); + viewHolder.deviceAddress = view.findViewById(R.id.device_address); + viewHolder.deviceName = view.findViewById(R.id.device_name); + view.setTag(viewHolder); + } + else { + viewHolder = 
(ViewHolder) view.getTag(); + } + + // BLUETOOTH_CONNECT permission is needed for device.getName. In theory we shouldn't + // have added to this list if bluetooth permission not available, but we have this + // check just in case - and also to avoid the Android lint error that we'd get. + boolean has_bluetooth_scan_permission = true; + if( useAndroid12BluetoothPermissions() ) { + if( MyDebug.LOG ) + Log.d(TAG, "check for bluetooth connect permission"); + if( ContextCompat.checkSelfPermission(DeviceScanner.this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) { + has_bluetooth_scan_permission = false; + } + } + + BluetoothDevice device = mLeDevices.get(i); + + if( !has_bluetooth_scan_permission ) { + Log.e(TAG, "bluetooth connect permission not granted!"); + viewHolder.deviceName.setText(R.string.unknown_device_no_permission); + } + else { + final String deviceName = device.getName(); + if( deviceName != null && !deviceName.isEmpty() ) + viewHolder.deviceName.setText(deviceName); + else + viewHolder.deviceName.setText(R.string.unknown_device); + } + + viewHolder.deviceAddress.setText(device.getAddress()); + + return view; + } + } + + private final BluetoothAdapter.LeScanCallback mLeScanCallback = new BluetoothAdapter.LeScanCallback() { + @Override + public void onLeScan(final BluetoothDevice device, int rssi, byte[] scanRecord) { + runOnUiThread(new Runnable() { + @Override + public void run() { + leDeviceListAdapter.addDevice(device); + leDeviceListAdapter.notifyDataSetChanged(); + } + }); + } + }; + + static class ViewHolder { + TextView deviceName; + TextView deviceAddress; + } +} \ No newline at end of file diff --git a/app/src/main/java/net/sourceforge/opencamera/remotecontrol/KrakenGattAttributes.java b/app/src/main/java/net/sourceforge/opencamera/remotecontrol/KrakenGattAttributes.java new file mode 100644 index 0000000..516c8c7 --- /dev/null +++ 
/**
 * This class includes the GATT attributes of the Kraken Smart Housing, which is
 * an underwater camera housing that communicates its key presses with the phone over
 * Bluetooth Low Energy
 */
class KrakenGattAttributes {
    /** Standard descriptor UUID used to enable notifications on a characteristic. */
    static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    //static final UUID KRAKEN_SENSORS_SERVICE = UUID.fromString("00001623-1212-efde-1523-785feabcd123");
    static final UUID KRAKEN_SENSORS_CHARACTERISTIC = UUID.fromString("00001625-1212-efde-1523-785feabcd123");
    //static final UUID KRAKEN_BUTTONS_SERVICE= UUID.fromString("00001523-1212-efde-1523-785feabcd123");
    static final UUID KRAKEN_BUTTONS_CHARACTERISTIC= UUID.fromString("00001524-1212-efde-1523-785feabcd123");
    //static final UUID BATTERY_SERVICE = UUID.fromString("180f");
    //static final UUID BATTERY_LEVEL = UUID.fromString("2a19");

    /** Returns the characteristics we want to subscribe to (buttons first, then sensors).
     * Fix: parameterized the previously-raw List as List&lt;UUID&gt; - source-compatible for
     * callers and binary-compatible via erasure, but removes unchecked-usage warnings.
     */
    static List<UUID> getDesiredCharacteristics() {
        return Arrays.asList(KRAKEN_BUTTONS_CHARACTERISTIC, KRAKEN_SENSORS_CHARACTERISTIC);
    }

}
/** This contains a custom preference to display a seekbar in place of a ListPreference.
 */
public class ArraySeekBarPreference extends DialogPreference {
    //private static final String TAG = "ArraySeekBarPreference";

    private SeekBar seekbar;
    private TextView textView;

    private CharSequence [] entries; // user readable strings
    private CharSequence [] values; // values corresponding to each string

    private final String default_value;
    private String value; // current saved value of this preference (note that this is intentionally not updated when the seekbar changes, as we don't save until the user clicks ok)
    private boolean value_set;

    /** Reads defaultValue, entries and entryValues from the android XML namespace attributes,
     *  mirroring how ListPreference is configured.
     */
    public ArraySeekBarPreference(Context context, AttributeSet attrs) {
        super(context, attrs);

        String namespace = "http://schemas.android.com/apk/res/android";
        this.default_value = attrs.getAttributeValue(namespace, "defaultValue");

        int entries_id = attrs.getAttributeResourceValue(namespace, "entries", 0);
        if( entries_id > 0 )
            this.setEntries(entries_id);
        int values_id = attrs.getAttributeResourceValue(namespace, "entryValues", 0);
        if( values_id > 0 )
            this.setEntryValues(values_id);

        setDialogLayoutResource(R.layout.arrayseekbarpreference);
    }

    /** Binds the seekbar and value label, positions the seekbar at the current value (falling
     *  back to the default if the stored value isn't in the values array), and wires up haptic
     *  feedback plus live label updates.
     */
    @Override
    protected void onBindDialogView(View view) {
        super.onBindDialogView(view);

        if( entries == null || values == null ) {
            throw new IllegalStateException("ArraySeekBarPreference requires entries and entryValues array");
        }
        else if( entries.length != values.length ) {
            throw new IllegalStateException("ArraySeekBarPreference requires entries and entryValues arrays of same length");
        }

        this.seekbar = view.findViewById(R.id.arrayseekbarpreference_seekbar);
        this.textView = view.findViewById(R.id.arrayseekbarpreference_value);

        seekbar.setMax(entries.length-1);
        {
            int index = getValueIndex();
            if( index == -1 ) {
                // If we're here, it means the stored value isn't in the values array.
                // ListPreference just shows a dialog with no selected entry, but that doesn't really work for
                // a seekbar that needs to show the current position! So instead, set the position to the default.
                if( default_value != null && values != null ) {
                    for(int i = values.length - 1; i >= 0; i--) {
                        if( values[i].equals(default_value) ) {
                            index = i;
                            break;
                        }
                    }
                }
            }
            if( index >= 0 )
                seekbar.setProgress(index);
        }
        seekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            private long last_haptic_time; // rate-limits haptic feedback whilst dragging

            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                String new_entry = entries[progress].toString();
                textView.setText(new_entry);
                if( fromUser ) {
                    last_haptic_time = MainActivity.performHapticFeedback(seekBar, last_haptic_time);
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });

        // initialise the label to match the starting seekbar position
        String new_entry = entries[seekbar.getProgress()].toString();
        textView.setText(new_entry);
    }

    /** Persists the value corresponding to the final seekbar position, but only on OK and only
     *  if the change listener accepts it.
     */
    @Override
    protected void onDialogClosed(boolean positiveResult) {
        super.onDialogClosed(positiveResult);

        if( positiveResult && values != null ) {
            int progress = seekbar.getProgress();
            String new_value = values[progress].toString();
            if( callChangeListener(new_value) ) {
                setValue(new_value);
            }
        }
    }

    public void setEntries(CharSequence[] entries) {
        this.entries = entries;
    }

    private void setEntries(int entries) {
        setEntries(getContext().getResources().getTextArray(entries));
    }

    public void setEntryValues(CharSequence[] values) {
        this.values = values;
    }

    private void setEntryValues(int values) {
        setEntryValues(getContext().getResources().getTextArray(values));
    }

    /** Returns the summary, formatting in the current entry's display string (same %s
     *  convention as ListPreference summaries).
     */
    @Override
    public CharSequence getSummary() {
        CharSequence summary = super.getSummary();
        if( summary != null ) {
            CharSequence entry = getEntry();
            return String.format(summary.toString(), entry == null ? "" : entry);
        }
        else
            return null;
    }

    /** Returns the index of the current value in the values array, or -1 if not found.
     */
    private int getValueIndex() {
        if( value != null && values != null ) {
            // go backwards for compatibility with ListPreference in cases with duplicate values
            for(int i = values.length - 1; i >= 0; i--) {
                if( values[i].equals(value) ) {
                    return i;
                }
            }
        }
        return -1;
    }

    /** Returns the human readable string of the current value.
     */
    private CharSequence getEntry() {
        int index = getValueIndex();
        return index >= 0 && entries != null ? entries[index] : null;
    }

    /** Updates and persists the value; notifies listeners only when it actually changed. */
    private void setValue(String value) {
        final boolean changed = !TextUtils.equals(this.value, value);
        if( changed || !value_set ) {
            this.value = value;
            value_set = true;
            persistString(value);
            if( changed ) {
                notifyChanged();
            }
        }
    }

    @Override
    protected Object onGetDefaultValue(TypedArray a, int index) {
        return a.getString(index);
    }

    @Override
    protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
        // same pattern as framework ListPreference: restore the persisted string, else use default
        setValue(restoreValue ? getPersistedString(value) : (String) defaultValue);
    }

    /** Saves the pending (unpersisted) value across configuration changes, unless the
     *  preference is persistent (in which case the stored value suffices).
     */
    @Override
    protected Parcelable onSaveInstanceState() {
        final Parcelable superState = super.onSaveInstanceState();
        if( isPersistent() ) {
            return superState;
        }

        final SavedState state = new SavedState(superState);
        state.value = value;
        return state;
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if( state == null || !state.getClass().equals(SavedState.class) ) {
            super.onRestoreInstanceState(state);
            return;
        }

        SavedState myState = (SavedState)state;
        super.onRestoreInstanceState(myState.getSuperState());
        setValue(myState.value);
    }

    /** Parcelable wrapper carrying the pending value through instance-state save/restore. */
    private static class SavedState extends BaseSavedState {
        String value;

        SavedState(Parcel source) {
            super(source);
            value = source.readString();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeString(value);
        }

        SavedState(Parcelable superState) {
            super(superState);
        }

        public static final Parcelable.Creator CREATOR =
                new Parcelable.Creator<>() {
                    public SavedState createFromParcel(Parcel in) {
                        return new SavedState(in);
                    }

                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                };
    }
}
net.sourceforge.opencamera.GyroSensor; +import net.sourceforge.opencamera.ImageSaver; +import net.sourceforge.opencamera.LocationSupplier; +import net.sourceforge.opencamera.MainActivity; +import net.sourceforge.opencamera.MyApplicationInterface; +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.PreferenceKeys; +import net.sourceforge.opencamera.preview.ApplicationInterface; +import net.sourceforge.opencamera.R; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.preview.Preview; + +import android.app.KeyguardManager; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.content.SharedPreferences; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.Point; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.Rect; +import android.graphics.RectF; +import android.graphics.Typeface; +import android.location.Location; +import android.net.Uri; +import android.os.BatteryManager; +import android.os.Handler; +import android.os.Looper; +import android.preference.PreferenceManager; +import android.util.Log; +import android.util.Pair; +import android.view.Surface; +import android.view.View; +import android.widget.RelativeLayout; + +public class DrawPreview { + private static final String TAG = "DrawPreview"; + + private final MainActivity main_activity; + private final MyApplicationInterface applicationInterface; + + // In some cases when reopening the camera or pausing preview, we apply a dimming effect (only + // supported when using Camera2 API, since we need to know when frames have been received). 
+ enum DimPreview { + DIM_PREVIEW_OFF, // don't dim the preview + DIM_PREVIEW_ON, // do dim the preview + DIM_PREVIEW_UNTIL // dim the preview until the camera_controller is non-null and has received frames, then switch to DIM_PREVIEW_OFF + } + private DimPreview dim_preview = DimPreview.DIM_PREVIEW_OFF; + + // Time for the dimming effect. This should be quick, because we call Preview.setupCamera() on + // the UI thread, which will block redraws: + // - When reopening the camera, we want the dimming to have occurred whilst reopening the + // camera, before we call setupCamera() on the UI thread. + // - When pausing the preview in MainActivity.updateForSettings(), we call setupCamera() after + // this delay - so we don't want to keep the user waiting too long. + public final static long dim_effect_time_c = 50; + + private boolean cover_preview; // whether to cover the preview for Camera2 API + private long camera_inactive_time_ms = -1; // if != -1, the time when the camera became inactive + + // store to avoid calling PreferenceManager.getDefaultSharedPreferences() repeatedly + private final SharedPreferences sharedPreferences; + + // cached preferences (need to call updateSettings() to refresh): + private boolean has_settings; + private MyApplicationInterface.PhotoMode photoMode; + private boolean show_time_pref; + private boolean show_camera_id_pref; + private boolean show_free_memory_pref; + private boolean show_iso_pref; + private boolean show_video_max_amp_pref; + private boolean show_zoom_pref; + private boolean show_battery_pref; + private boolean show_angle_pref; + private int angle_highlight_color_pref; + private boolean show_geo_direction_pref; + private boolean take_photo_border_pref; + private boolean preview_size_wysiwyg_pref; + private String crop_guide_pref; + private boolean store_location_pref; + private boolean show_angle_line_pref; + private boolean show_pitch_lines_pref; + private boolean show_geo_direction_lines_pref; + private boolean 
immersive_mode_everything_pref; + private boolean has_stamp_pref; + private boolean is_raw_pref; // whether in RAW+JPEG or RAW only mode + private boolean is_raw_only_pref; // whether in RAW only mode + private boolean is_face_detection_pref; + private boolean is_audio_enabled_pref; + private boolean is_high_speed; + private float capture_rate_factor; + private boolean auto_stabilise_pref; + private String preference_grid_pref; + private String ghost_image_pref; + private String ghost_selected_image_pref = ""; + private Bitmap ghost_selected_image_bitmap; + private int ghost_image_alpha; + private boolean want_histogram; + private Preview.HistogramType histogram_type; + private boolean want_zebra_stripes; + private int zebra_stripes_threshold; + private int zebra_stripes_color_foreground; + private int zebra_stripes_color_background; + private boolean want_focus_peaking; + private int focus_peaking_color_pref; + private boolean want_pre_shots; + + // avoid doing things that allocate memory every frame! 
+ private final Paint p = new Paint(); + private final RectF draw_rect = new RectF(); + private final int [] gui_location = new int[2]; + private final static DecimalFormat decimalFormat = new DecimalFormat("#0.0"); + private final float scale_font; // SP scaling + private final float scale_dp; // DP scaling + private final float stroke_width; // stroke_width used for various UI elements + private Calendar calendar; + private DateFormat dateFormatTimeInstance; + private final String ybounds_text; + private final int [] temp_histogram_channel = new int[256]; + private final LocationSupplier.LocationInfo locationInfo = new LocationSupplier.LocationInfo(); + private final int [] auto_stabilise_crop = new int [2]; + //private final DecimalFormat decimal_format_1dp_force0 = new DecimalFormat("0.0"); + // cached Rects for drawTextWithBackground() calls + private Rect text_bounds_time; + private Rect text_bounds_camera_id; + private Rect text_bounds_free_memory; + private Rect text_bounds_angle_single; + private Rect text_bounds_angle_double; + + private final static double close_level_angle = 1.0f; + private String angle_string; // cached for UI performance + private double cached_angle; // the angle that we used for the cached angle_string + private long last_angle_string_time; + + private float free_memory_gb = -1.0f; + private String free_memory_gb_string; + private long last_free_memory_time; + private Future free_memory_future; + // Important to call StorageUtils.freeMemory() on background thread: we've had ANRs reported + // from StorageUtils.freeMemory()->freeMemorySAF()->ContentResolver.openFileDescriptor(); also + // pauses can be seen if running on UI thread if there are a large number of files in the save + // folder. 
+ private final ExecutorService free_memory_executor = Executors.newSingleThreadExecutor(); + private final Runnable free_memory_runnable = new Runnable() { + final Handler handler = new Handler(Looper.getMainLooper()); + + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "free_memory_runnable: run"); + long free_mb = main_activity.getStorageUtils().freeMemory(); + if( free_mb >= 0 ) { + final float new_free_memory_gb = free_mb/1024.0f; + handler.post(new Runnable() { + @Override + public void run() { + onPostExecute(true, new_free_memory_gb); + } + }); + } + else { + handler.post(new Runnable() { + @Override + public void run() { + onPostExecute(false, -1.0f); + } + }); + } + } + + /** Runs on UI thread, after background work is complete. + */ + private void onPostExecute(boolean has_new_free_memory, float new_free_memory_gb) { + if( MyDebug.LOG ) + Log.d(TAG, "free_memory_runnable: onPostExecute"); + if( free_memory_future != null && free_memory_future.isCancelled() ) { + if( MyDebug.LOG ) + Log.d(TAG, "was cancelled"); + free_memory_future = null; + return; + } + + if( MyDebug.LOG ) { + Log.d(TAG, "has_new_free_memory: " + has_new_free_memory); + Log.d(TAG, "free_memory_gb: " + free_memory_gb); + Log.d(TAG, "new_free_memory_gb: " + new_free_memory_gb); + } + if( has_new_free_memory && Math.abs(new_free_memory_gb - free_memory_gb) > 0.001f ) { + free_memory_gb = new_free_memory_gb; + free_memory_gb_string = decimalFormat.format(free_memory_gb) + getContext().getResources().getString(R.string.gb_abbreviation); + } + + free_memory_future = null; + } + }; + + private String current_time_string; + private long last_current_time_time; + + private String camera_id_string; + private long last_camera_id_time; + + private String iso_exposure_string; + private boolean is_scanning; + private long last_iso_exposure_time; + + private boolean need_flash_indicator = false; + private long last_need_flash_indicator_time; + + private final IntentFilter 
battery_ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); + private boolean has_battery_frac; + private float battery_frac; + private long last_battery_time; + + private boolean has_video_max_amp; + private int video_max_amp; + private long last_video_max_amp_time; + private int video_max_amp_prev2; + private int video_max_amp_peak; + + private Bitmap location_bitmap; + private Bitmap location_off_bitmap; + private Bitmap raw_jpeg_bitmap; + private Bitmap raw_only_bitmap; + private Bitmap auto_stabilise_bitmap; + private Bitmap dro_bitmap; + private Bitmap hdr_bitmap; + private Bitmap panorama_bitmap; + private Bitmap expo_bitmap; + //private Bitmap focus_bracket_bitmap; + // no longer bother with a focus bracketing icon - hard to come up with a clear icon, and should be obvious from the two on-screen seekbars + private Bitmap burst_bitmap; + private Bitmap nr_bitmap; + private Bitmap x_night_bitmap; + private Bitmap x_bokeh_bitmap; + private Bitmap x_beauty_bitmap; + private Bitmap photostamp_bitmap; + private Bitmap flash_bitmap; + private Bitmap face_detection_bitmap; + private Bitmap audio_disabled_bitmap; + private Bitmap high_speed_fps_bitmap; + private Bitmap slow_motion_bitmap; + private Bitmap time_lapse_bitmap; + private Bitmap rotate_left_bitmap; + private Bitmap rotate_right_bitmap; + + private final Rect icon_dest = new Rect(); + private long needs_flash_time = -1; // time when flash symbol comes on (used for fade-in effect) + private final Path path = new Path(); + + private Bitmap last_thumbnail; // thumbnail of last picture taken + private volatile boolean thumbnail_anim; // whether we are displaying the thumbnail animation; must be volatile for test project reading the state + private long thumbnail_anim_start_ms = -1; // time that the thumbnail animation started + public volatile int test_thumbnail_anim_count; + private final RectF thumbnail_anim_src_rect = new RectF(); + private final RectF thumbnail_anim_dst_rect = new RectF(); + 
private final Matrix thumbnail_anim_matrix = new Matrix(); + private boolean last_thumbnail_is_video; // whether thumbnail is for video + + private boolean show_last_image; // whether to show the last image as part of "pause preview" + private final RectF last_image_src_rect = new RectF(); + private final RectF last_image_dst_rect = new RectF(); + private final Matrix last_image_matrix = new Matrix(); + private boolean allow_ghost_last_image; // whether to allow ghosting the last image + + private long ae_started_scanning_ms = -1; // time when ae started scanning + + private boolean taking_picture; // true iff camera is in process of capturing a picture (including any necessary prior steps such as autofocus, flash/precapture) + private boolean capture_started; // true iff the camera is capturing + private boolean front_screen_flash; // true iff the front screen display should maximise to simulate flash + private boolean image_queue_full; // whether we can no longer take new photos due to image queue being full (or rather, would become full if a new photo taken) + + private boolean continuous_focus_moving; + private long continuous_focus_moving_ms; + + private boolean enable_gyro_target_spot; + private final List gyro_directions = new ArrayList<>(); + private final float [] transformed_gyro_direction = new float[3]; + private final float [] gyro_direction_up = new float[3]; + private final float [] transformed_gyro_direction_up = new float[3]; + + // call updateCachedViewAngles() before reading these values + private float view_angle_x_preview; + private float view_angle_y_preview; + private long last_view_angles_time; + + private int take_photo_top; // coordinate (in canvas x coordinates, or y coords if system_orientation_portrait==true) of top of the take photo icon + private long last_take_photo_top_time; + + private int top_icon_shift; // shift that may be needed for on-screen text to avoid clashing with icons (when arranged "along top") + private long 
last_top_icon_shift_time; + + private int focus_seekbars_margin_left = -1; // margin left that's been set for the focus seekbars + + private long last_update_focus_seekbar_auto_time; + + // OSD extra lines + private String OSDLine1; + private String OSDLine2; + + private final static int histogram_width_dp = 100; + private final static int histogram_height_dp = 60; + + private final static int crop_shading_alpha_c = 160; // alpha to use for shading areas not of interest + + public DrawPreview(MainActivity main_activity, MyApplicationInterface applicationInterface) { + if( MyDebug.LOG ) + Log.d(TAG, "DrawPreview"); + this.main_activity = main_activity; + this.sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + this.applicationInterface = applicationInterface; + // n.b., don't call updateSettings() here, as it may rely on things that aren't yet initialise (e.g., the preview) + // see testHDRRestart + + p.setAntiAlias(true); + p.setTypeface(Typeface.create(Typeface.DEFAULT, Typeface.BOLD)); + p.setStrokeCap(Paint.Cap.ROUND); + scale_dp = getContext().getResources().getDisplayMetrics().density; + scale_font = getContext().getResources().getDisplayMetrics().scaledDensity; + //noinspection PointlessArithmeticExpression + this.stroke_width = (1.0f * scale_dp + 0.5f); // convert dps to pixels + p.setStrokeWidth(this.stroke_width); + + location_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_gps_fixed_white_48dp); + location_off_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_gps_off_white_48dp); + raw_jpeg_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.raw_icon); + raw_only_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.raw_only_icon); + auto_stabilise_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.auto_stabilise_icon); + dro_bitmap = 
BitmapFactory.decodeResource(getContext().getResources(), R.drawable.dro_icon); + hdr_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_hdr_on_white_48dp); + panorama_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_panorama_horizontal_white_48); + expo_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.expo_icon); + //focus_bracket_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.focus_bracket_icon); + burst_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_burst_mode_white_48dp); + nr_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.nr_icon); + x_night_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_bedtime_white_48); + x_bokeh_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_portrait_white_48); + x_beauty_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_face_retouching_natural_white_48); + photostamp_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_text_format_white_48dp); + flash_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.flash_on); + face_detection_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_face_white_48dp); + audio_disabled_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_mic_off_white_48dp); + high_speed_fps_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_fast_forward_white_48dp); + slow_motion_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_slow_motion_video_white_48dp); + time_lapse_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.ic_timelapse_white_48dp); + rotate_left_bitmap = 
BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_rotate_left_white_48); + rotate_right_bitmap = BitmapFactory.decodeResource(getContext().getResources(), R.drawable.baseline_rotate_right_white_48); + + ybounds_text = getContext().getResources().getString(R.string.zoom) + getContext().getResources().getString(R.string.angle) + getContext().getResources().getString(R.string.direction); + } + + public void onDestroy() { + if( MyDebug.LOG ) + Log.d(TAG, "onDestroy"); + if( free_memory_future != null ) { + if( MyDebug.LOG ) + Log.d(TAG, "cancel free_memory_future"); + free_memory_future.cancel(true); + } + // clean up just in case + if( location_bitmap != null ) { + location_bitmap.recycle(); + location_bitmap = null; + } + if( location_off_bitmap != null ) { + location_off_bitmap.recycle(); + location_off_bitmap = null; + } + if( raw_jpeg_bitmap != null ) { + raw_jpeg_bitmap.recycle(); + raw_jpeg_bitmap = null; + } + if( raw_only_bitmap != null ) { + raw_only_bitmap.recycle(); + raw_only_bitmap = null; + } + if( auto_stabilise_bitmap != null ) { + auto_stabilise_bitmap.recycle(); + auto_stabilise_bitmap = null; + } + if( dro_bitmap != null ) { + dro_bitmap.recycle(); + dro_bitmap = null; + } + if( hdr_bitmap != null ) { + hdr_bitmap.recycle(); + hdr_bitmap = null; + } + if( panorama_bitmap != null ) { + panorama_bitmap.recycle(); + panorama_bitmap = null; + } + if( expo_bitmap != null ) { + expo_bitmap.recycle(); + expo_bitmap = null; + } + /*if( focus_bracket_bitmap != null ) { + focus_bracket_bitmap.recycle(); + focus_bracket_bitmap = null; + }*/ + if( burst_bitmap != null ) { + burst_bitmap.recycle(); + burst_bitmap = null; + } + if( nr_bitmap != null ) { + nr_bitmap.recycle(); + nr_bitmap = null; + } + if( x_night_bitmap != null ) { + x_night_bitmap.recycle(); + x_night_bitmap = null; + } + if( x_bokeh_bitmap != null ) { + x_bokeh_bitmap.recycle(); + x_bokeh_bitmap = null; + } + if( x_beauty_bitmap != null ) { + 
x_beauty_bitmap.recycle(); + x_beauty_bitmap = null; + } + if( photostamp_bitmap != null ) { + photostamp_bitmap.recycle(); + photostamp_bitmap = null; + } + if( flash_bitmap != null ) { + flash_bitmap.recycle(); + flash_bitmap = null; + } + if( face_detection_bitmap != null ) { + face_detection_bitmap.recycle(); + face_detection_bitmap = null; + } + if( audio_disabled_bitmap != null ) { + audio_disabled_bitmap.recycle(); + audio_disabled_bitmap = null; + } + if( high_speed_fps_bitmap != null ) { + high_speed_fps_bitmap.recycle(); + high_speed_fps_bitmap = null; + } + if( slow_motion_bitmap != null ) { + slow_motion_bitmap.recycle(); + slow_motion_bitmap = null; + } + if( time_lapse_bitmap != null ) { + time_lapse_bitmap.recycle(); + time_lapse_bitmap = null; + } + if( rotate_left_bitmap != null ) { + rotate_left_bitmap.recycle(); + rotate_left_bitmap = null; + } + if( rotate_right_bitmap != null ) { + rotate_right_bitmap.recycle(); + rotate_right_bitmap = null; + } + + if( ghost_selected_image_bitmap != null ) { + ghost_selected_image_bitmap.recycle(); + ghost_selected_image_bitmap = null; + } + ghost_selected_image_pref = ""; + } + + private Context getContext() { + return main_activity; + } + + /** Computes the x coordinate on screen of left side of the view, equivalent to + * view.getLocationOnScreen(), but we undo the effect of the view's rotation. + * This is because getLocationOnScreen() will return the coordinates of the view's top-left + * *after* applying the rotation, when we want the top left of the icon as shown on screen. + * This should not be called every frame but instead should be cached, due to cost of calling + * view.getLocationOnScreen(). + * Update: For supporting landscape and portrait (if MainActivity.lock_to_landscape==false), + * instead this returns the top side if in portrait. 
Note though we still need to take rotation + * into account, as we still apply rotation to the icons when changing orienations (e.g., this + * is needed when rotating from reverse landscape to portrait, for on-screen text like level + * angle to be offset correctly above the shutter button (see take_photo_top) when the preview + * has a wide aspect ratio. + */ + private int getViewOnScreenX(View view) { + view.getLocationOnScreen(gui_location); + + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + boolean system_orientation_portrait = system_orientation == MainActivity.SystemOrientation.PORTRAIT; + int xpos = gui_location[system_orientation_portrait ? 1 : 0]; + int rotation = Math.round(view.getRotation()); + // rotation can be outside [0, 359] if the user repeatedly rotates in same direction! + rotation = (rotation % 360 + 360) % 360; // version of (rotation % 360) that work if rotation is -ve + /*if( MyDebug.LOG ) + Log.d(TAG, " mod rotation: " + rotation);*/ + // undo annoying behaviour that getLocationOnScreen takes the rotation into account + if( system_orientation_portrait ) { + if( rotation == 180 || rotation == 270 ) { + xpos -= view.getHeight(); + } + } + else { + if( rotation == 90 || rotation == 180 ) { + xpos -= view.getWidth(); + } + } + return xpos; + } + + /** Sets a current thumbnail for a photo or video just taken. Used for thumbnail animation, + * and when ghosting the last image. 
+ */ + public void updateThumbnail(Bitmap thumbnail, boolean is_video, boolean want_thumbnail_animation) { + if( MyDebug.LOG ) + Log.d(TAG, "updateThumbnail"); + if( want_thumbnail_animation && applicationInterface.getThumbnailAnimationPref() ) { + if( MyDebug.LOG ) + Log.d(TAG, "thumbnail_anim started"); + thumbnail_anim = true; + thumbnail_anim_start_ms = System.currentTimeMillis(); + test_thumbnail_anim_count++; + if( MyDebug.LOG ) + Log.d(TAG, "test_thumbnail_anim_count is now: " + test_thumbnail_anim_count); + } + Bitmap old_thumbnail = this.last_thumbnail; + this.last_thumbnail = thumbnail; + this.last_thumbnail_is_video = is_video; + this.allow_ghost_last_image = true; + if( old_thumbnail != null ) { + // only recycle after we've set the new thumbnail + old_thumbnail.recycle(); + } + } + + public boolean hasThumbnailAnimation() { + return this.thumbnail_anim; + } + + /** Displays the thumbnail as a fullscreen image (used for pause preview option). + */ + public void showLastImage() { + if( MyDebug.LOG ) + Log.d(TAG, "showLastImage"); + this.show_last_image = true; + } + + public void clearLastImage() { + if( MyDebug.LOG ) + Log.d(TAG, "clearLastImage"); + this.show_last_image = false; + } + + public void allowGhostImage() { + if( MyDebug.LOG ) + Log.d(TAG, "allowGhostImage"); + if( last_thumbnail != null ) + this.allow_ghost_last_image = true; + } + + public void clearGhostImage() { + if( MyDebug.LOG ) + Log.d(TAG, "clearGhostImage"); + this.allow_ghost_last_image = false; + } + + public void cameraInOperation(boolean in_operation) { + if( in_operation && !main_activity.getPreview().isVideo() ) { + taking_picture = true; + } + else { + taking_picture = false; + front_screen_flash = false; + capture_started = false; + } + } + + public void setImageQueueFull(boolean image_queue_full) { + this.image_queue_full = image_queue_full; + } + + public void turnFrontScreenFlashOn() { + if( MyDebug.LOG ) + Log.d(TAG, "turnFrontScreenFlashOn"); + front_screen_flash = 
true; + } + + public void onCaptureStarted() { + if( MyDebug.LOG ) + Log.d(TAG, "onCaptureStarted"); + capture_started = true; + } + + public void onContinuousFocusMove(boolean start) { + if( MyDebug.LOG ) + Log.d(TAG, "onContinuousFocusMove: " + start); + if( start ) { + if( !continuous_focus_moving ) { // don't restart the animation if already in motion + continuous_focus_moving = true; + continuous_focus_moving_ms = System.currentTimeMillis(); + } + } + // if we receive start==false, we don't stop the animation - let it continue + } + + public void clearContinuousFocusMove() { + if( MyDebug.LOG ) + Log.d(TAG, "clearContinuousFocusMove"); + if( continuous_focus_moving ) { + continuous_focus_moving = false; + continuous_focus_moving_ms = 0; + } + } + + public void setGyroDirectionMarker(float x, float y, float z) { + enable_gyro_target_spot = true; + this.gyro_directions.clear(); + addGyroDirectionMarker(x, y, z); + gyro_direction_up[0] = 0.f; + gyro_direction_up[1] = 1.f; + gyro_direction_up[2] = 0.f; + } + + public void addGyroDirectionMarker(float x, float y, float z) { + float [] vector = new float[]{x, y, z}; + this.gyro_directions.add(vector); + } + + public void clearGyroDirectionMarker() { + enable_gyro_target_spot = false; + } + + /** For performance reasons, some of the SharedPreferences settings are cached. This method + * should be used when the settings may have changed. 
+ */ + public void updateSettings() { + if( MyDebug.LOG ) + Log.d(TAG, "updateSettings"); + + photoMode = applicationInterface.getPhotoMode(); + if( MyDebug.LOG ) + Log.d(TAG, "photoMode: " + photoMode); + + show_time_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowTimePreferenceKey, true); + // reset in case user changes the preference: + dateFormatTimeInstance = DateFormat.getTimeInstance(); + current_time_string = null; + last_current_time_time = 0; + text_bounds_time = null; + + show_camera_id_pref = main_activity.isMultiCam() && sharedPreferences.getBoolean(PreferenceKeys.ShowCameraIDPreferenceKey, true); + //show_camera_id_pref = true; // test + show_free_memory_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowFreeMemoryPreferenceKey, true); + show_iso_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowISOPreferenceKey, true); + show_video_max_amp_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowVideoMaxAmpPreferenceKey, false); + show_zoom_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowZoomPreferenceKey, true); + show_battery_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowBatteryPreferenceKey, true); + + show_angle_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowAnglePreferenceKey, false); + String angle_highlight_color = sharedPreferences.getString(PreferenceKeys.ShowAngleHighlightColorPreferenceKey, "#14e715"); + angle_highlight_color_pref = Color.parseColor(angle_highlight_color); + show_geo_direction_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowGeoDirectionPreferenceKey, false); + + take_photo_border_pref = sharedPreferences.getBoolean(PreferenceKeys.TakePhotoBorderPreferenceKey, true); + preview_size_wysiwyg_pref = sharedPreferences.getString(PreferenceKeys.PreviewSizePreferenceKey, "preference_preview_size_wysiwyg").equals("preference_preview_size_wysiwyg"); + crop_guide_pref = sharedPreferences.getString(PreferenceKeys.ShowCropGuidePreferenceKey, "crop_guide_none"); + store_location_pref = 
sharedPreferences.getBoolean(PreferenceKeys.LocationPreferenceKey, false); + + show_angle_line_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowAngleLinePreferenceKey, false); + show_pitch_lines_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowPitchLinesPreferenceKey, false); + show_geo_direction_lines_pref = sharedPreferences.getBoolean(PreferenceKeys.ShowGeoDirectionLinesPreferenceKey, false); + + String immersive_mode = sharedPreferences.getString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_off"); + immersive_mode_everything_pref = immersive_mode.equals("immersive_mode_everything"); + + has_stamp_pref = applicationInterface.getStampPref().equals("preference_stamp_yes"); + is_raw_pref = applicationInterface.getRawPref() != ApplicationInterface.RawPref.RAWPREF_JPEG_ONLY; + is_raw_only_pref = applicationInterface.isRawOnly(); + is_face_detection_pref = applicationInterface.getFaceDetectionPref(); + is_audio_enabled_pref = applicationInterface.getRecordAudioPref(); + + is_high_speed = applicationInterface.fpsIsHighSpeed(); + capture_rate_factor = applicationInterface.getVideoCaptureRateFactor(); + + auto_stabilise_pref = applicationInterface.getAutoStabilisePref(); + + preference_grid_pref = sharedPreferences.getString(PreferenceKeys.ShowGridPreferenceKey, "preference_grid_none"); + + ghost_image_pref = sharedPreferences.getString(PreferenceKeys.GhostImagePreferenceKey, "preference_ghost_image_off"); + if( ghost_image_pref.equals("preference_ghost_image_selected") ) { + String new_ghost_selected_image_pref = sharedPreferences.getString(PreferenceKeys.GhostSelectedImageSAFPreferenceKey, ""); + if( MyDebug.LOG ) + Log.d(TAG, "new_ghost_selected_image_pref: " + new_ghost_selected_image_pref); + + KeyguardManager keyguard_manager = (KeyguardManager)main_activity.getSystemService(Context.KEYGUARD_SERVICE); + boolean is_locked = keyguard_manager != null && keyguard_manager.inKeyguardRestrictedInputMode(); + if( MyDebug.LOG ) + Log.d(TAG, 
"is_locked?: " + is_locked); + + if( is_locked ) { + // don't show selected image when device locked, as this could be a security flaw + if( ghost_selected_image_bitmap != null ) { + ghost_selected_image_bitmap.recycle(); + ghost_selected_image_bitmap = null; + ghost_selected_image_pref = ""; // so we'll load the bitmap again when unlocked + } + } + else if( !new_ghost_selected_image_pref.equals(ghost_selected_image_pref) ) { + if( MyDebug.LOG ) + Log.d(TAG, "ghost_selected_image_pref has changed"); + ghost_selected_image_pref = new_ghost_selected_image_pref; + if( ghost_selected_image_bitmap != null ) { + ghost_selected_image_bitmap.recycle(); + ghost_selected_image_bitmap = null; + } + Uri uri = Uri.parse(ghost_selected_image_pref); + try { + ghost_selected_image_bitmap = loadBitmap(uri); + } + catch(IOException e) { + MyDebug.logStackTrace(TAG, "failed to load ghost_selected_image uri: " + uri, e); + ghost_selected_image_bitmap = null; + // don't set ghost_selected_image_pref to null, as we don't want to repeatedly try loading the invalid uri + } + } + } + else { + if( ghost_selected_image_bitmap != null ) { + ghost_selected_image_bitmap.recycle(); + ghost_selected_image_bitmap = null; + } + ghost_selected_image_pref = ""; + } + ghost_image_alpha = applicationInterface.getGhostImageAlpha(); + + String histogram_pref = sharedPreferences.getString(PreferenceKeys.HistogramPreferenceKey, "preference_histogram_off"); + want_histogram = !histogram_pref.equals("preference_histogram_off") && main_activity.supportsPreviewBitmaps(); + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_VALUE; + if( want_histogram ) { + switch( histogram_pref ) { + case "preference_histogram_rgb": + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_RGB; + break; + case "preference_histogram_luminance": + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_LUMINANCE; + break; + case "preference_histogram_value": + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_VALUE; + break; 
+ case "preference_histogram_intensity": + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_INTENSITY; + break; + case "preference_histogram_lightness": + histogram_type = Preview.HistogramType.HISTOGRAM_TYPE_LIGHTNESS; + break; + } + } + + String zebra_stripes_value = sharedPreferences.getString(PreferenceKeys.ZebraStripesPreferenceKey, "0"); + try { + zebra_stripes_threshold = Integer.parseInt(zebra_stripes_value); + } + catch(NumberFormatException e) { + MyDebug.logStackTrace(TAG, "failed to parse zebra_stripes_value: " + zebra_stripes_value, e); + zebra_stripes_threshold = 0; + } + want_zebra_stripes = zebra_stripes_threshold != 0 & main_activity.supportsPreviewBitmaps(); + + String zebra_stripes_color_foreground_value = sharedPreferences.getString(PreferenceKeys.ZebraStripesForegroundColorPreferenceKey, "#ff000000"); + zebra_stripes_color_foreground = Color.parseColor(zebra_stripes_color_foreground_value); + String zebra_stripes_color_background_value = sharedPreferences.getString(PreferenceKeys.ZebraStripesBackgroundColorPreferenceKey, "#ffffffff"); + zebra_stripes_color_background = Color.parseColor(zebra_stripes_color_background_value); + + want_focus_peaking = applicationInterface.getFocusPeakingPref(); + String focus_peaking_color = sharedPreferences.getString(PreferenceKeys.FocusPeakingColorPreferenceKey, "#ffffff"); + focus_peaking_color_pref = Color.parseColor(focus_peaking_color); + + want_pre_shots = applicationInterface.getPreShotsPref(photoMode); + + last_camera_id_time = 0; // in case camera id changed + last_view_angles_time = 0; // force view angles to be recomputed + last_take_photo_top_time = 0; // force take_photo_top to be recomputed + last_top_icon_shift_time = 0; // for top_icon_shift to be recomputed + + focus_seekbars_margin_left = -1; // needed as the focus seekbars can only be updated when visible + + has_settings = true; + } + + /** Indicates that navigation gaps have changed, as a hint to avoid cached data. 
+ */ + public void onNavigationGapChanged() { + // needed for OnePlus Pad when rotating, to avoid delay in updating last_take_photo_top_time (affects placement of on-screen text e.g. zoom) + this.last_take_photo_top_time = 0; + } + + private void updateCachedViewAngles(long time_ms) { + if( last_view_angles_time == 0 || time_ms > last_view_angles_time + 10000 ) { + if( MyDebug.LOG ) + Log.d(TAG, "update cached view angles"); + // don't call this too often, for UI performance + // note that updateSettings will force the time to reset anyway, but we check every so often + // again just in case... + Preview preview = main_activity.getPreview(); + view_angle_x_preview = preview.getViewAngleX(true); + view_angle_y_preview = preview.getViewAngleY(true); + last_view_angles_time = time_ms; + } + } + + /** Loads the bitmap from the uri. + * The image will be downscaled if required to be comparable to the preview width. + */ + private Bitmap loadBitmap(Uri uri) throws IOException { + if( MyDebug.LOG ) + Log.d(TAG, "loadBitmap: " + uri); + Bitmap bitmap; + try { + //bitmap = MediaStore.Images.Media.getBitmap(main_activity.getContentResolver(), uri); + + int sample_size = 1; + { + // attempt to compute appropriate scaling + BitmapFactory.Options bounds = new BitmapFactory.Options(); + bounds.inJustDecodeBounds = true; + InputStream input = main_activity.getContentResolver().openInputStream(uri); + BitmapFactory.decodeStream(input, null, bounds); + if( input != null ) + input.close(); + + if( bounds.outWidth != -1 && bounds.outHeight != -1 ) { + // compute appropriate scaling + int image_size = Math.max(bounds.outWidth, bounds.outHeight); + + Point point = new Point(); + applicationInterface.getDisplaySize(point, true); + int display_size = Math.max(point.x, point.y); + + int ratio = (int) Math.ceil((double) image_size / display_size); + sample_size = Integer.highestOneBit(ratio); + if( MyDebug.LOG ) { + Log.d(TAG, "display_size: " + display_size); + Log.d(TAG, "image_size: " + 
image_size); + Log.d(TAG, "ratio: " + ratio); + Log.d(TAG, "sample_size: " + sample_size); + } + } + else { + if( MyDebug.LOG ) + Log.e(TAG, "failed to obtain width/height of bitmap"); + } + } + + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inMutable = false; + options.inSampleSize = sample_size; + InputStream input = main_activity.getContentResolver().openInputStream(uri); + bitmap = BitmapFactory.decodeStream(input, null, options); + if( input != null ) + input.close(); + if( MyDebug.LOG && bitmap != null ) { + Log.d(TAG, "bitmap width: " + bitmap.getWidth()); + Log.d(TAG, "bitmap height: " + bitmap.getHeight()); + } + } + catch(Exception e) { + // Although Media.getBitmap() is documented as only throwing FileNotFoundException, IOException + // (with the former being a subset of IOException anyway), I've had SecurityException from + // Google Play - best to catch everything just in case. + MyDebug.logStackTrace(TAG, "MediaStore.Images.Media.getBitmap exception", e); + throw new IOException(); + } + if( bitmap == null ) { + // just in case! 
+ Log.e(TAG, "MediaStore.Images.Media.getBitmap returned null"); + throw new IOException(); + } + + // now need to take exif orientation into account, as some devices or camera apps store the orientation in the exif tag, + // which getBitmap() doesn't account for + bitmap = main_activity.rotateForExif(bitmap, uri); + + return bitmap; + } + + private String getTimeStringFromSeconds(long time) { + int secs = (int)(time % 60); + time /= 60; + int mins = (int)(time % 60); + time /= 60; + long hours = time; + return hours + ":" + String.format(Locale.getDefault(), "%02d", mins) + ":" + String.format(Locale.getDefault(), "%02d", secs); + } + + private void drawGrids(Canvas canvas) { + Preview preview = main_activity.getPreview(); + CameraController camera_controller = preview.getCameraController(); + if( camera_controller == null ) { + return; + } + + p.setStrokeWidth(stroke_width); + + switch( preference_grid_pref ) { + case "preference_grid_3x3": + p.setColor(Color.WHITE); + canvas.drawLine(canvas.getWidth() / 3.0f, 0.0f, canvas.getWidth() / 3.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(2.0f * canvas.getWidth() / 3.0f, 0.0f, 2.0f * canvas.getWidth() / 3.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(0.0f, canvas.getHeight() / 3.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 3.0f, p); + canvas.drawLine(0.0f, 2.0f * canvas.getHeight() / 3.0f, canvas.getWidth() - 1.0f, 2.0f * canvas.getHeight() / 3.0f, p); + break; + case "preference_grid_phi_3x3": + p.setColor(Color.WHITE); + canvas.drawLine(canvas.getWidth() / 2.618f, 0.0f, canvas.getWidth() / 2.618f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(1.618f * canvas.getWidth() / 2.618f, 0.0f, 1.618f * canvas.getWidth() / 2.618f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(0.0f, canvas.getHeight() / 2.618f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.618f, p); + canvas.drawLine(0.0f, 1.618f * canvas.getHeight() / 2.618f, canvas.getWidth() - 1.0f, 1.618f * canvas.getHeight() / 2.618f, p); + 
break; + case "preference_grid_4x2": + p.setColor(Color.GRAY); + canvas.drawLine(canvas.getWidth() / 4.0f, 0.0f, canvas.getWidth() / 4.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(canvas.getWidth() / 2.0f, 0.0f, canvas.getWidth() / 2.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(3.0f * canvas.getWidth() / 4.0f, 0.0f, 3.0f * canvas.getWidth() / 4.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(0.0f, canvas.getHeight() / 2.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.0f, p); + p.setColor(Color.WHITE); + int crosshairs_radius = (int) (20 * scale_dp + 0.5f); // convert dps to pixels + + canvas.drawLine(canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f - crosshairs_radius, canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f + crosshairs_radius, p); + canvas.drawLine(canvas.getWidth() / 2.0f - crosshairs_radius, canvas.getHeight() / 2.0f, canvas.getWidth() / 2.0f + crosshairs_radius, canvas.getHeight() / 2.0f, p); + break; + case "preference_grid_crosshair": + p.setColor(Color.WHITE); + canvas.drawLine(canvas.getWidth() / 2.0f, 0.0f, canvas.getWidth() / 2.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(0.0f, canvas.getHeight() / 2.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.0f, p); + break; + case "preference_grid_golden_spiral_right": + case "preference_grid_golden_spiral_left": + case "preference_grid_golden_spiral_upside_down_right": + case "preference_grid_golden_spiral_upside_down_left": + canvas.save(); + switch( preference_grid_pref ) { + case "preference_grid_golden_spiral_left": + canvas.scale(-1.0f, 1.0f, canvas.getWidth() * 0.5f, canvas.getHeight() * 0.5f); + break; + case "preference_grid_golden_spiral_right": + // no transformation needed + break; + case "preference_grid_golden_spiral_upside_down_left": + canvas.rotate(180.0f, canvas.getWidth() * 0.5f, canvas.getHeight() * 0.5f); + break; + case "preference_grid_golden_spiral_upside_down_right": + canvas.scale(1.0f, -1.0f, canvas.getWidth() * 0.5f, 
canvas.getHeight() * 0.5f); + break; + } + p.setColor(Color.WHITE); + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + int fibb = 34; + int fibb_n = 21; + int left = 0, top = 0; + int full_width = canvas.getWidth(); + int full_height = canvas.getHeight(); + int width = (int) (full_width * ((double) fibb_n) / (double) (fibb)); + int height = full_height; + + for (int count = 0; count < 2; count++) { + canvas.save(); + draw_rect.set(left, top, left + width, top + height); + canvas.clipRect(draw_rect); + canvas.drawRect(draw_rect, p); + draw_rect.set(left, top, left + 2 * width, top + 2 * height); + canvas.drawOval(draw_rect, p); + canvas.restore(); + + int old_fibb = fibb; + fibb = fibb_n; + fibb_n = old_fibb - fibb; + + left += width; + full_width = full_width - width; + width = full_width; + height = (int) (height * ((double) fibb_n) / (double) (fibb)); + + canvas.save(); + draw_rect.set(left, top, left + width, top + height); + canvas.clipRect(draw_rect); + canvas.drawRect(draw_rect, p); + draw_rect.set(left - width, top, left + width, top + 2 * height); + canvas.drawOval(draw_rect, p); + canvas.restore(); + + old_fibb = fibb; + fibb = fibb_n; + fibb_n = old_fibb - fibb; + + top += height; + full_height = full_height - height; + height = full_height; + width = (int) (width * ((double) fibb_n) / (double) (fibb)); + left += full_width - width; + + canvas.save(); + draw_rect.set(left, top, left + width, top + height); + canvas.clipRect(draw_rect); + canvas.drawRect(draw_rect, p); + draw_rect.set(left - width, top - height, left + width, top + height); + canvas.drawOval(draw_rect, p); + canvas.restore(); + + old_fibb = fibb; + fibb = fibb_n; + fibb_n = old_fibb - fibb; + + full_width = full_width - width; + width = full_width; + left -= width; + height = (int) (height * ((double) fibb_n) / (double) (fibb)); + top += full_height - height; + + canvas.save(); + draw_rect.set(left, top, left + width, top + height); + canvas.clipRect(draw_rect); + 
canvas.drawRect(draw_rect, p); + draw_rect.set(left, top - height, left + 2 * width, top + height); + canvas.drawOval(draw_rect, p); + canvas.restore(); + + old_fibb = fibb; + fibb = fibb_n; + fibb_n = old_fibb - fibb; + + full_height = full_height - height; + height = full_height; + top -= height; + width = (int) (width * ((double) fibb_n) / (double) (fibb)); + } + + canvas.restore(); + p.setStyle(Paint.Style.FILL); // reset + + break; + case "preference_grid_golden_triangle_1": + case "preference_grid_golden_triangle_2": + p.setColor(Color.WHITE); + double theta = Math.atan2(canvas.getWidth(), canvas.getHeight()); + double dist = canvas.getHeight() * Math.cos(theta); + float dist_x = (float) (dist * Math.sin(theta)); + float dist_y = (float) (dist * Math.cos(theta)); + if( preference_grid_pref.equals("preference_grid_golden_triangle_1") ) { + canvas.drawLine(0.0f, canvas.getHeight() - 1.0f, canvas.getWidth() - 1.0f, 0.0f, p); + canvas.drawLine(0.0f, 0.0f, dist_x, canvas.getHeight() - dist_y, p); + canvas.drawLine(canvas.getWidth() - 1.0f - dist_x, dist_y - 1.0f, canvas.getWidth() - 1.0f, canvas.getHeight() - 1.0f, p); + } + else { + canvas.drawLine(0.0f, 0.0f, canvas.getWidth() - 1.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(canvas.getWidth() - 1.0f, 0.0f, canvas.getWidth() - 1.0f - dist_x, canvas.getHeight() - dist_y, p); + canvas.drawLine(dist_x, dist_y - 1.0f, 0.0f, canvas.getHeight() - 1.0f, p); + } + break; + case "preference_grid_diagonals": + p.setColor(Color.WHITE); + canvas.drawLine(0.0f, 0.0f, canvas.getHeight() - 1.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(canvas.getHeight() - 1.0f, 0.0f, 0.0f, canvas.getHeight() - 1.0f, p); + int diff = canvas.getWidth() - canvas.getHeight(); + // n.b., diff is -ve in portrait orientation + canvas.drawLine(diff, 0.0f, diff + canvas.getHeight() - 1.0f, canvas.getHeight() - 1.0f, p); + canvas.drawLine(diff + canvas.getHeight() - 1.0f, 0.0f, diff, canvas.getHeight() - 1.0f, p); + break; + } + } + + 
private void drawCropGuides(Canvas canvas) { + Preview preview = main_activity.getPreview(); + CameraController camera_controller = preview.getCameraController(); + if( preview.isVideo() || preview_size_wysiwyg_pref ) { + if( camera_controller != null && preview.getTargetRatio() > 0.0 && !crop_guide_pref.equals("crop_guide_none") ) { + double crop_ratio = -1.0; + switch(crop_guide_pref) { + case "crop_guide_1": + crop_ratio = 1.0; + break; + case "crop_guide_1.25": + crop_ratio = 1.25; + break; + case "crop_guide_1.33": + crop_ratio = 1.33333333; + break; + case "crop_guide_1.4": + crop_ratio = 1.4; + break; + case "crop_guide_1.5": + crop_ratio = 1.5; + break; + case "crop_guide_1.78": + crop_ratio = 1.77777778; + break; + case "crop_guide_1.85": + crop_ratio = 1.85; + break; + case "crop_guide_2": + crop_ratio = 2.0; + break; + case "crop_guide_2.33": + crop_ratio = 2.33333333; + break; + case "crop_guide_2.35": + crop_ratio = 2.35006120; // actually 1920:817 + break; + case "crop_guide_2.4": + crop_ratio = 2.4; + break; + case "crop_guide_2.71": + crop_ratio = 2.70833333; // actually 65:24 + break; + case "crop_guide_3": + crop_ratio = 3.0; + break; + } + if( crop_ratio > 0.0 ) { + // we should compare to getCurrentPreviewAspectRatio() not getTargetRatio(), as the actual preview + // aspect ratio may differ to the requested photo/video resolution's aspect ratio, in which case it's still useful + // to display the crop guide + double preview_aspect_ratio = preview.getCurrentPreviewAspectRatio(); + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + boolean system_orientation_portrait = system_orientation == MainActivity.SystemOrientation.PORTRAIT; + if( system_orientation_portrait ) { + // crop ratios are always drawn as if in landscape + crop_ratio = 1.0/crop_ratio; + preview_aspect_ratio = 1.0/preview_aspect_ratio; + } + if( Math.abs(preview_aspect_ratio - crop_ratio) > 1.0e-5 ) { + /*if( MyDebug.LOG ) { + Log.d(TAG, 
"crop_ratio: " + crop_ratio); + Log.d(TAG, "preview_aspect_ratio: " + preview_aspect_ratio); + Log.d(TAG, "canvas width: " + canvas.getWidth()); + Log.d(TAG, "canvas height: " + canvas.getHeight()); + }*/ + p.setStyle(Paint.Style.FILL); + p.setColor(Color.rgb(0, 0, 0)); + p.setAlpha(crop_shading_alpha_c); + int left = 1, top = 1, right = canvas.getWidth()-1, bottom = canvas.getHeight()-1; + if( crop_ratio > preview_aspect_ratio ) { + // crop ratio is wider, so we have to crop top/bottom + double new_hheight = ((double)canvas.getWidth()) / (2.0f*crop_ratio); + top = (canvas.getHeight()/2 - (int)new_hheight); + bottom = (canvas.getHeight()/2 + (int)new_hheight); + // draw shaded area + canvas.drawRect(0, 0, canvas.getWidth(), top, p); + canvas.drawRect(0, bottom, canvas.getWidth(), canvas.getHeight(), p); + } + else { + // crop ratio is taller, so we have to crop left/right + double new_hwidth = (((double)canvas.getHeight()) * crop_ratio) / 2.0f; + left = (canvas.getWidth()/2 - (int)new_hwidth); + right = (canvas.getWidth()/2 + (int)new_hwidth); + // draw shaded area + canvas.drawRect(0, 0, left, canvas.getHeight(), p); + canvas.drawRect(right, 0, canvas.getWidth(), canvas.getHeight(), p); + } + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + p.setColor(Color.rgb(255, 235, 59)); // Yellow 500 + canvas.drawRect(left, top, right, bottom, p); + p.setStyle(Paint.Style.FILL); // reset + p.setAlpha(255); // reset + } + } + } + } + } + + private void onDrawInfoLines(Canvas canvas, final int top_x, final int top_y, final int bottom_y, final int device_ui_rotation, long time_ms) { + Preview preview = main_activity.getPreview(); + CameraController camera_controller = preview.getCameraController(); + int ui_rotation = preview.getUIRotation(); + + // set up text etc for the multiple lines of "info" (time, free mem, etc) + p.setTextSize(16 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.LEFT); + int location_x = top_x; + int 
location_y = top_y; + final int gap_x = (int) (8 * scale_font + 0.5f); // convert dps to pixels + final int gap_y = (int) (0 * scale_font + 0.5f); // convert dps to pixels + final int icon_gap_y = (int) (2 * scale_dp + 0.5f); // convert dps to pixels + if( ui_rotation == 90 || ui_rotation == 270 ) { + // n.b., this is only for when lock_to_landscape==true, so we don't look at device_ui_rotation + int diff = canvas.getWidth() - canvas.getHeight(); + location_x += diff/2; + location_y -= diff/2; + } + if( device_ui_rotation == 90 ) { + location_y = canvas.getHeight() - location_y - (int) (20 * scale_font + 0.5f); + } + boolean align_right = false; + if( device_ui_rotation == 180 ) { + location_x = canvas.getWidth() - location_x; + p.setTextAlign(Paint.Align.RIGHT); + align_right = true; + } + + int first_line_height = 0; + int first_line_xshift = 0; + if( show_time_pref ) { + if( current_time_string == null || time_ms/1000 > last_current_time_time/1000 ) { + // avoid creating a new calendar object every time + if( calendar == null ) + calendar = Calendar.getInstance(); + else + calendar.setTimeInMillis(time_ms); + + current_time_string = dateFormatTimeInstance.format(calendar.getTime()); + //current_time_string = DateUtils.formatDateTime(getContext(), c.getTimeInMillis(), DateUtils.FORMAT_SHOW_TIME); + last_current_time_time = time_ms; + } + // n.b., DateFormat.getTimeInstance() ignores user preferences such as 12/24 hour or date format, but this is an Android bug. + // Whilst DateUtils.formatDateTime doesn't have that problem, it doesn't print out seconds! 
See: + // http://stackoverflow.com/questions/15981516/simpledateformat-gettimeinstance-ignores-24-hour-format + // http://daniel-codes.blogspot.co.uk/2013/06/how-to-correctly-format-datetime.html + // http://code.google.com/p/android/issues/detail?id=42104 + // update: now seems to be fixed + // also possibly related https://code.google.com/p/android/issues/detail?id=181201 + //int height = applicationInterface.drawTextWithBackground(canvas, p, current_time_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP); + if( text_bounds_time == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "compute text_bounds_time"); + text_bounds_time = new Rect(); + // better to not use a fixed string like "00:00:00" as don't want to make assumptions - e.g., in 12 hour format we'll have the appended am/pm to account for! + Calendar calendar = Calendar.getInstance(); + calendar.set(100, 0, 1, 10, 59, 59); + String bounds_time_string = dateFormatTimeInstance.format(calendar.getTime()); + if( MyDebug.LOG ) + Log.d(TAG, "bounds_time_string:" + bounds_time_string); + p.getTextBounds(bounds_time_string, 0, bounds_time_string.length(), text_bounds_time); + } + first_line_xshift += text_bounds_time.width() + gap_x; + int height = applicationInterface.drawTextWithBackground(canvas, p, current_time_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE, text_bounds_time); + height += gap_y; + // don't update location_y yet, as we have time and cameraid shown on the same line + first_line_height = Math.max(first_line_height, height); + } + if( show_camera_id_pref && camera_controller != null ) { + if( camera_id_string == null || time_ms > last_camera_id_time + 10000 ) { + // cache string for performance + + camera_id_string = getContext().getResources().getString(R.string.camera_id) + ":" + preview.getCameraId(); // intentionally don't put a space + 
last_camera_id_time = time_ms; + } + if( text_bounds_camera_id == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "compute text_bounds_camera_id"); + text_bounds_camera_id = new Rect(); + p.getTextBounds(camera_id_string, 0, camera_id_string.length(), text_bounds_camera_id); + } + int xpos = align_right ? location_x - first_line_xshift : location_x + first_line_xshift; + int height = applicationInterface.drawTextWithBackground(canvas, p, camera_id_string, Color.WHITE, Color.BLACK, xpos, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE, text_bounds_camera_id); + height += gap_y; + // don't update location_y yet, as we have time and cameraid shown on the same line + first_line_height = Math.max(first_line_height, height); + } + // update location_y for first line (time and camera id) + if( device_ui_rotation == 90 ) { + // upside-down portrait + location_y -= first_line_height; + } + else { + location_y += first_line_height; + } + + if( camera_controller != null && show_free_memory_pref ) { + if( (last_free_memory_time == 0 || time_ms > last_free_memory_time + 10000) && free_memory_future == null ) { + // don't call this too often, for UI performance + + free_memory_future = free_memory_executor.submit(free_memory_runnable); + + last_free_memory_time = time_ms; // always set this, so that in case of free memory not being available, we aren't calling freeMemory() every frame + } + if( free_memory_gb >= 0.0f && free_memory_gb_string != null ) { + //int height = applicationInterface.drawTextWithBackground(canvas, p, free_memory_gb_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP); + if( text_bounds_free_memory == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "compute text_bounds_free_memory"); + text_bounds_free_memory = new Rect(); + p.getTextBounds(free_memory_gb_string, 0, free_memory_gb_string.length(), text_bounds_free_memory); + } + int height = 
applicationInterface.drawTextWithBackground(canvas, p, free_memory_gb_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE, text_bounds_free_memory); + height += gap_y; + if( device_ui_rotation == 90 ) { + location_y -= height; + } + else { + location_y += height; + } + } + } + + // Now draw additional info on the lower left corner if needed + int y_offset = (int) (27 * scale_font + 0.5f); + p.setTextSize(24 * scale_font + 0.5f); // convert dps to pixels + if (OSDLine1 != null && !OSDLine1.isEmpty() ) { + applicationInterface.drawTextWithBackground(canvas, p, OSDLine1, + Color.WHITE, Color.BLACK, location_x, bottom_y - y_offset, + MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE); + } + if (OSDLine2 != null && !OSDLine2.isEmpty() ) { + applicationInterface.drawTextWithBackground(canvas, p, OSDLine2, + Color.WHITE, Color.BLACK, location_x, bottom_y, + MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE); + } + p.setTextSize(16 * scale_font + 0.5f); // Restore text size + + if( camera_controller != null && show_iso_pref ) { + if( iso_exposure_string == null || time_ms > last_iso_exposure_time + 500 ) { + iso_exposure_string = ""; + if( camera_controller.captureResultHasIso() ) { + int iso = camera_controller.captureResultIso(); + if( !iso_exposure_string.isEmpty() ) + iso_exposure_string += " "; + iso_exposure_string += preview.getISOString(iso); + } + if( camera_controller.captureResultHasExposureTime() ) { + long exposure_time = camera_controller.captureResultExposureTime(); + if( !iso_exposure_string.isEmpty() ) + iso_exposure_string += " "; + iso_exposure_string += preview.getExposureTimeString(exposure_time); + } + if( preview.isVideoRecording() && camera_controller.captureResultHasFrameDuration() ) { + long frame_duration = 
camera_controller.captureResultFrameDuration(); + if( !iso_exposure_string.isEmpty() ) + iso_exposure_string += " "; + iso_exposure_string += preview.getFrameDurationString(frame_duration); + } + /*if( camera_controller.captureResultHasAperture() ) { + float aperture = camera_controller.captureResultAperture(); + if( iso_exposure_string.length() > 0 ) + iso_exposure_string += " F"; + iso_exposure_string += decimal_format_1dp_force0.format(aperture); + }*/ + + is_scanning = false; + if( camera_controller.captureResultIsAEScanning() ) { + // only show as scanning if in auto ISO mode (problem on Nexus 6 at least that if we're in manual ISO mode, after pausing and + // resuming, the camera driver continually reports CONTROL_AE_STATE_SEARCHING) + String value = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT); + if( value.equals("auto") ) { + is_scanning = true; + } + } + + last_iso_exposure_time = time_ms; + } + + if( !iso_exposure_string.isEmpty() ) { + int text_color = Color.rgb(255, 235, 59); // Yellow 500 + if( is_scanning ) { + // we only change the color if ae scanning is at least a certain time, otherwise we get a lot of flickering of the color + if( ae_started_scanning_ms == -1 ) { + ae_started_scanning_ms = time_ms; + } + else if( time_ms - ae_started_scanning_ms > 500 ) { + text_color = Color.rgb(244, 67, 54); // Red 500 + } + } + else { + ae_started_scanning_ms = -1; + } + // can't cache the bounds rect, as the width may change significantly as the ISO or exposure values change + int height = applicationInterface.drawTextWithBackground(canvas, p, iso_exposure_string, text_color, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, ybounds_text, MyApplicationInterface.Shadow.SHADOW_OUTLINE); + height += gap_y; + // only move location_y if we actually print something (because on old camera API, even if the ISO option has + // been enabled, we'll never be able to display the on-screen 
ISO) + if( device_ui_rotation == 90 ) { + location_y -= height; + } + else { + location_y += height; + } + } + } + + // padding to align with earlier text + final int flash_padding = (int) (1 * scale_font + 0.5f); // convert dps to pixels + + if( camera_controller != null ) { + // draw info icons + + int location_x2 = location_x - flash_padding; + final int icon_size = (int) (16 * scale_dp + 0.5f); // convert dps to pixels + if( device_ui_rotation == 180 ) { + location_x2 = location_x - icon_size + flash_padding; + } + + if( store_location_pref ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + + Location location = applicationInterface.getLocation(locationInfo); + if( location != null ) { + canvas.drawBitmap(location_bitmap, null, icon_dest, p); + int location_radius = icon_size / 10; + int indicator_x = location_x2 + icon_size - (int)(location_radius*1.5); + int indicator_y = location_y + (int)(location_radius*1.5); + p.setColor(locationInfo.LocationWasCached() ? Color.rgb(127, 127, 127) : + location.getAccuracy() < 25.01f ? 
Color.rgb(37, 155, 36) : + Color.rgb(255, 235, 59)); // Green 500 or Yellow 500 + canvas.drawCircle(indicator_x, indicator_y, location_radius, p); + } + else { + canvas.drawBitmap(location_off_bitmap, null, icon_dest, p); + } + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( + is_raw_pref && + preview.supportsRaw() + // RAW can be enabled, even if it isn't available for this camera (e.g., user enables RAW for back camera, but then + // switches to front camera which doesn't support it) + ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(is_raw_only_pref ? raw_only_bitmap : raw_jpeg_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( is_face_detection_pref && preview.supportsFaceDetection() ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(face_detection_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( auto_stabilise_pref && preview.hasLevelAngleStable() ) { // auto-level is supported for photos taken in video mode + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(auto_stabilise_bitmap, null, icon_dest, p); + + if( device_ui_rotation 
== 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( ( + photoMode == MyApplicationInterface.PhotoMode.DRO || + photoMode == MyApplicationInterface.PhotoMode.HDR || + photoMode == MyApplicationInterface.PhotoMode.Panorama || + photoMode == MyApplicationInterface.PhotoMode.ExpoBracketing || + //photoMode == MyApplicationInterface.PhotoMode.FocusBracketing || + photoMode == MyApplicationInterface.PhotoMode.FastBurst || + photoMode == MyApplicationInterface.PhotoMode.NoiseReduction || + photoMode == MyApplicationInterface.PhotoMode.X_Night || + photoMode == MyApplicationInterface.PhotoMode.X_Bokeh || + photoMode == MyApplicationInterface.PhotoMode.X_Beauty + ) && + !applicationInterface.isVideoPref() ) { // these photo modes not supported for video mode + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + Bitmap bitmap = photoMode == MyApplicationInterface.PhotoMode.DRO ? dro_bitmap : + photoMode == MyApplicationInterface.PhotoMode.HDR ? hdr_bitmap : + photoMode == MyApplicationInterface.PhotoMode.Panorama ? panorama_bitmap : + photoMode == MyApplicationInterface.PhotoMode.ExpoBracketing ? expo_bitmap : + //photoMode == MyApplicationInterface.PhotoMode.FocusBracketing ? focus_bracket_bitmap : + photoMode == MyApplicationInterface.PhotoMode.FastBurst ? burst_bitmap : + photoMode == MyApplicationInterface.PhotoMode.NoiseReduction ? nr_bitmap : + photoMode == MyApplicationInterface.PhotoMode.X_Night ? x_night_bitmap : + photoMode == MyApplicationInterface.PhotoMode.X_Bokeh ? x_bokeh_bitmap : + photoMode == MyApplicationInterface.PhotoMode.X_Beauty ? 
x_beauty_bitmap : + null; + if( bitmap != null ) { + if( photoMode == MyApplicationInterface.PhotoMode.NoiseReduction && applicationInterface.getNRModePref() == ApplicationInterface.NRModePref.NRMODE_LOW_LIGHT ) { + p.setColorFilter(new PorterDuffColorFilter(Color.rgb(255, 235, 59), PorterDuff.Mode.SRC_IN)); // Yellow 500 + } + canvas.drawBitmap(bitmap, null, icon_dest, p); + p.setColorFilter(null); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + } + + + // photo-stamp is supported for photos taken in video mode + // but it isn't supported in RAW-only mode + if( has_stamp_pref && !( is_raw_only_pref && preview.supportsRaw() ) ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(photostamp_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( !is_audio_enabled_pref && applicationInterface.isVideoPref() ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(audio_disabled_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + // icons for slow motion, time lapse or high speed video + if( Math.abs(capture_rate_factor - 1.0f) > 1.0e-5 && applicationInterface.isVideoPref() ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + 
canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(capture_rate_factor < 1.0f ? slow_motion_bitmap : time_lapse_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + else if( is_high_speed && applicationInterface.isVideoPref() ) { + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255); + canvas.drawBitmap(high_speed_fps_bitmap, null, icon_dest, p); + + if( device_ui_rotation == 180 ) { + location_x2 -= icon_size + flash_padding; + } + else { + location_x2 += icon_size + flash_padding; + } + } + + if( time_ms > last_need_flash_indicator_time + 100 ) { + need_flash_indicator = false; + String flash_value = preview.getCurrentFlashValue(); + // note, flash_frontscreen_auto not yet support for the flash symbol (as camera_controller.needsFlash() only returns info on the built-in actual flash, not frontscreen flash) + if( flash_value != null && + ( flash_value.equals("flash_on") + || ( (flash_value.equals("flash_auto") || flash_value.equals("flash_red_eye")) && camera_controller.needsFlash() ) + || camera_controller.needsFrontScreenFlash() ) && + !applicationInterface.isVideoPref() ) { // flash-indicator not supported for photos taken in video mode + need_flash_indicator = true; + } + + last_need_flash_indicator_time = time_ms; + } + if( need_flash_indicator ) { + if( needs_flash_time != -1 ) { + final long fade_ms = 500; + float alpha = (time_ms - needs_flash_time)/(float)fade_ms; + if( time_ms - needs_flash_time >= fade_ms ) + alpha = 1.0f; + icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size); + + /*if( MyDebug.LOG ) + Log.d(TAG, "alpha: " + alpha);*/ + p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + 
p.setAlpha((int)(64*alpha)); + canvas.drawRect(icon_dest, p); + p.setAlpha((int)(255*alpha)); + canvas.drawBitmap(flash_bitmap, null, icon_dest, p); + p.setAlpha(255); + } + else { + needs_flash_time = time_ms; + } + } + else { + needs_flash_time = -1; + } + + if( device_ui_rotation == 90 ) { + location_y -= icon_gap_y; + } + else { + location_y += (icon_size+icon_gap_y); + } + } + + if( camera_controller != null && !show_last_image ) { + // draw histogram + if( preview.isPreviewBitmapEnabled() ) { + int [] histogram = preview.getHistogram(); + if( histogram != null ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "histogram length: " + histogram.length);*/ + final int histogram_width = (int) (histogram_width_dp * scale_dp + 0.5f); // convert dps to pixels + final int histogram_height = (int) (histogram_height_dp * scale_dp + 0.5f); // convert dps to pixels + // n.b., if changing the histogram_height, remember to update focus_seekbar and + // focus_bracketing_target_seekbar margins in activity_main.xml + int location_x2 = location_x - flash_padding; + if( device_ui_rotation == 180 ) { + location_x2 = location_x - histogram_width + flash_padding; + } + icon_dest.set(location_x2 - flash_padding, location_y, location_x2 - flash_padding + histogram_width, location_y + histogram_height); + if( device_ui_rotation == 90 ) { + icon_dest.top -= histogram_height; + icon_dest.bottom -= histogram_height; + } + + p.setStyle(Paint.Style.FILL); + p.setColor(Color.argb(64, 0, 0, 0)); + canvas.drawRect(icon_dest, p); + + int max = 0; + for(int value : histogram) { + max = Math.max(max, value); + } + + if( histogram.length == 256*3 ) { + int c=0; + + /* For overlapping rgb, we'll have: + (1, (1-a2).(1-a1).a0.r, (1-a2).a1.g, a2.b) + If we wanted to have the alpha scaling the same (i.e., same r, g, b values + if r=g=b, then this gives: + a2 = 1/[2+1/a0] + a1 = 1 - a2/[a0.(1-a2)] + However this then means that for non-overlapping colours, red is too + strong whilst blue is too weak, so we 
instead adjust to: + a0' = (a0+a1)/2 + a1' = a1 + a2' = (a1+a2)/2 + */ + /*final int a0 = 255; + final int a1 = 128; + final int a2 = 85;*/ + //final int a0 = 191; + final int a0 = 151; + final int a1 = 110; + //final int a2 = 77; + final int a2 = 94; + /*final int a0 = 128; + final int a1 = 85; + final int a2 = 64;*/ + final int r = 255; + final int g = 255; + final int b = 255; + + for(int i=0;i<256;i++) + temp_histogram_channel[i] = histogram[c++]; + p.setColor(Color.argb(a0, r, 0, 0)); + drawHistogramChannel(canvas, temp_histogram_channel, max); + + for(int i=0;i<256;i++) + temp_histogram_channel[i] = histogram[c++]; + p.setColor(Color.argb(a1, 0, g, 0)); + drawHistogramChannel(canvas, temp_histogram_channel, max); + + for(int i=0;i<256;i++) + temp_histogram_channel[i] = histogram[c++]; + p.setColor(Color.argb(a2, 0, 0, b)); + drawHistogramChannel(canvas, temp_histogram_channel, max); + } + else { + p.setColor(Color.argb(192, 255, 255, 255)); + drawHistogramChannel(canvas, histogram, max); + } + } + } + } + } + + /** Draws histogram for a single color channel. + * @param canvas Canvas to draw onto. + * @param histogram_channel The histogram for this color. + * @param max The maximum value of histogram_channel, or if drawing multiple channels, this + * should be the maximum value of all histogram channels. 
+ */ + private void drawHistogramChannel(Canvas canvas, int [] histogram_channel, int max) { + /*long debug_time = 0; + if( MyDebug.LOG ) { + debug_time = System.currentTimeMillis(); + }*/ + + /*if( MyDebug.LOG ) + Log.d(TAG, "drawHistogramChannel, time before creating path: " + (System.currentTimeMillis() - debug_time));*/ + path.reset(); + path.moveTo(icon_dest.left, icon_dest.bottom); + for(int c=0;c last_take_photo_top_time + 1000 ) { + /*if( MyDebug.LOG ) + Log.d(TAG, "update cached take_photo_top");*/ + // don't call this too often, for UI performance (due to calling View.getLocationOnScreen()) + View view = main_activity.findViewById(R.id.take_photo); + // align with "top" of the take_photo button, but remember to take the rotation into account! + int view_left = getViewOnScreenX(view); + preview.getView().getLocationOnScreen(gui_location); + int this_left = gui_location[system_orientation_portrait ? 1 : 0]; + take_photo_top = view_left - this_left; + + last_take_photo_top_time = time_ms; + /*if( MyDebug.LOG ) { + Log.d(TAG, "device_ui_rotation: " + device_ui_rotation); + Log.d(TAG, "view_left: " + view_left); + Log.d(TAG, "this_left: " + this_left); + Log.d(TAG, "take_photo_top: " + take_photo_top); + }*/ + } + + // diff_x is the difference from the centre of the canvas to the position we want + int max_x = system_orientation_portrait ? 
canvas.getHeight() : canvas.getWidth(); + int mid_x = max_x/2; + int diff_x = take_photo_top - mid_x; + + /*if( MyDebug.LOG ) { + Log.d(TAG, "view left: " + view_left); + Log.d(TAG, "this left: " + this_left); + Log.d(TAG, "canvas is " + canvas.getWidth() + " x " + canvas.getHeight()); + Log.d(TAG, "compare offset_x: " + (preview.getView().getRootView().getRight()/2 - diff_x)/scale); + }*/ + + // diff_x is the difference from the centre of the canvas to the position we want + // assumes canvas is centered + // avoids calling getLocationOnScreen for performance + /*int offset_x = (int) (124 * scale + 0.5f); // convert dps to pixels + // offset_x should be enough such that on-screen level angle (this is the lowest display on-screen text) does not + // interfere with take photo icon when using at least a 16:9 preview aspect ratio + // should correspond to the logged "compare offset_x" above + int diff_x = preview.getView().getRootView().getRight()/2 - offset_x; + */ + + if( device_ui_rotation == 90 ) { + // so we don't interfere with the top bar info (datetime, free memory, ISO) when upside down + max_x -= (int)(2.5*gap_y); + } + /*if( MyDebug.LOG ) { + Log.d(TAG, "root view right: " + preview.getView().getRootView().getRight()); + Log.d(TAG, "diff_x: " + diff_x); + Log.d(TAG, "canvas.getWidth()/2 + diff_x: " + (canvas.getWidth()/2+diff_x)); + Log.d(TAG, "max_x: " + max_x); + }*/ + if( mid_x + diff_x > max_x ) { + // in case goes off the size of the canvas, for "black bar" cases (when preview aspect ratio < screen aspect ratio) + diff_x = max_x - mid_x; + } + text_base_y = canvas.getHeight()/2 + diff_x - (int)(0.5*gap_y); + } + + if( device_ui_rotation == 0 || device_ui_rotation == 180 ) { + // also avoid navigation bar in (reverse) landscape (for e.g. OnePlus Pad which has a landscape navigation bar when in landscape orientation) + int navigation_gap = device_ui_rotation == 0 ? 
main_activity.getNavigationGapLandscape() : main_activity.getNavigationGapReverseLandscape(); + text_base_y -= navigation_gap; + } + + if( avoid_ui ) { + // avoid parts of the UI + View view = main_activity.findViewById(R.id.focus_seekbar); + if(view.getVisibility() == View.VISIBLE ) { + text_base_y -= view.getHeight(); + } + view = main_activity.findViewById(R.id.focus_bracketing_target_seekbar); + if(view.getVisibility() == View.VISIBLE ) { + text_base_y -= view.getHeight(); + } + /*view = main_activity.findViewById(R.id.sliders_container); + if(view.getVisibility() == View.VISIBLE ) { + text_base_y -= view.getHeight(); + }*/ + } + + boolean draw_angle = has_level_angle && show_angle_pref; + boolean draw_geo_direction = has_geo_direction && show_geo_direction_pref; + if( draw_angle ) { + int color = Color.WHITE; + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + int pixels_offset_x; + if( draw_geo_direction ) { + pixels_offset_x = - (int) (35 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.LEFT); + } + else { + //p.setTextAlign(Paint.Align.CENTER); + // slightly better for performance to use Align.LEFT, due to avoid measureText() call in drawTextWithBackground() + pixels_offset_x = - (int) ((level_angle<0 ? 
16 : 14) * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.LEFT); + } + if( Math.abs(level_angle) <= close_level_angle ) { + color = angle_highlight_color_pref; + p.setUnderlineText(true); + } + if( angle_string == null || time_ms > this.last_angle_string_time + 500 ) { + // update cached string + /*if( MyDebug.LOG ) + Log.d(TAG, "update angle_string: " + angle_string);*/ + last_angle_string_time = time_ms; + String number_string = formatLevelAngle(level_angle); + //String number_string = "" + level_angle; + angle_string = number_string + (char)0x00B0; + cached_angle = level_angle; + //String angle_string = "" + level_angle; + } + //applicationInterface.drawTextWithBackground(canvas, p, angle_string, color, Color.BLACK, canvas.getWidth() / 2 + pixels_offset_x, text_base_y, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, ybounds_text, true); + if( text_bounds_angle_single == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "compute text_bounds_angle_single"); + text_bounds_angle_single = new Rect(); + String bounds_angle_string = "-9.0" + (char)0x00B0; + p.getTextBounds(bounds_angle_string, 0, bounds_angle_string.length(), text_bounds_angle_single); + } + if( text_bounds_angle_double == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "compute text_bounds_angle_double"); + text_bounds_angle_double = new Rect(); + String bounds_angle_string = "-45.0" + (char)0x00B0; + p.getTextBounds(bounds_angle_string, 0, bounds_angle_string.length(), text_bounds_angle_double); + } + applicationInterface.drawTextWithBackground(canvas, p, angle_string, color, Color.BLACK, canvas.getWidth() / 2 + pixels_offset_x, text_base_y, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, null, MyApplicationInterface.Shadow.SHADOW_OUTLINE, Math.abs(cached_angle) < 10.0 ? 
text_bounds_angle_single : text_bounds_angle_double); + p.setUnderlineText(false); + } + if( draw_geo_direction ) { + int color = Color.WHITE; + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + int pixels_offset_x; + if( draw_angle ) { + pixels_offset_x = (int) (10 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.LEFT); + } + else { + //p.setTextAlign(Paint.Align.CENTER); + // slightly better for performance to use Align.LEFT, due to avoid measureText() call in drawTextWithBackground() + pixels_offset_x = - (int) (14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.LEFT); + } + float geo_angle = (float)Math.toDegrees(geo_direction); + if( geo_angle < 0.0f ) { + geo_angle += 360.0f; + } + String string = String.valueOf(Math.round(geo_angle)) + (char)0x00B0; + applicationInterface.drawTextWithBackground(canvas, p, string, color, Color.BLACK, canvas.getWidth() / 2 + pixels_offset_x, text_base_y, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, ybounds_text, MyApplicationInterface.Shadow.SHADOW_OUTLINE); + } + if( preview.isOnTimer() ) { + long remaining_time = (preview.getTimerEndTime() - time_ms + 999)/1000; + if( MyDebug.LOG ) + Log.d(TAG, "remaining_time: " + remaining_time); + if( remaining_time > 0 ) { + p.setTextSize(42 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + String time_s; + if( remaining_time < 60 ) { + // simpler to just show seconds when less than a minute + time_s = String.valueOf(remaining_time); + } + else { + time_s = getTimeStringFromSeconds(remaining_time); + } + applicationInterface.drawTextWithBackground(canvas, p, time_s, Color.rgb(244, 67, 54), Color.BLACK, canvas.getWidth() / 2, canvas.getHeight() / 2); // Red 500 + } + } + else if( preview.isVideoRecording() ) { + long video_time = preview.getVideoTime(false); + String time_s = getTimeStringFromSeconds(video_time/1000); + /*if( MyDebug.LOG ) + Log.d(TAG, "video_time: " + 
video_time + " " + time_s);*/ + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + int pixels_offset_y = 2*text_y; // avoid overwriting the zoom + int color = Color.rgb(244, 67, 54); // Red 500 + if( main_activity.isScreenLocked() ) { + // writing in reverse order, bottom to top + applicationInterface.drawTextWithBackground(canvas, p, getContext().getResources().getString(R.string.screen_lock_message_2), color, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + pixels_offset_y += text_y; + applicationInterface.drawTextWithBackground(canvas, p, getContext().getResources().getString(R.string.screen_lock_message_1), color, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + pixels_offset_y += text_y; + } + if( !preview.isVideoRecordingPaused() || ((int)(time_ms / 500)) % 2 == 0 ) { // if video is paused, then flash the video time + applicationInterface.drawTextWithBackground(canvas, p, time_s, color, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + pixels_offset_y += text_y; + } + if( show_video_max_amp_pref && !preview.isVideoRecordingPaused() ) { + // audio amplitude + if( !this.has_video_max_amp || time_ms > this.last_video_max_amp_time + 50 ) { + has_video_max_amp = true; + int video_max_amp_prev1 = video_max_amp_prev2; + video_max_amp_prev2 = video_max_amp; + video_max_amp = preview.getMaxAmplitude(); + last_video_max_amp_time = time_ms; + if( MyDebug.LOG ) { + if( video_max_amp > 30000 ) { + Log.d(TAG, "max_amp: " + video_max_amp); + } + if( video_max_amp > 32767 ) { + Log.e(TAG, "video_max_amp greater than max: " + video_max_amp); + } + } + if( video_max_amp_prev2 > video_max_amp_prev1 && video_max_amp_prev2 > video_max_amp ) { + // new peak + video_max_amp_peak = video_max_amp_prev2; + } + //video_max_amp_peak = Math.max(video_max_amp_peak, video_max_amp); + } + float amp_frac = video_max_amp/32767.0f; + amp_frac = Math.max(amp_frac, 0.0f); + amp_frac 
= Math.min(amp_frac, 1.0f); + //applicationInterface.drawTextWithBackground(canvas, p, "" + max_amp, color, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + + pixels_offset_y += text_y; // allow extra space + int amp_width = (int) (160 * scale_dp + 0.5f); // convert dps to pixels + int amp_height = (int) (10 * scale_dp + 0.5f); // convert dps to pixels + int amp_x = (canvas.getWidth() - amp_width)/2; + p.setColor(Color.WHITE); + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + canvas.drawRect(amp_x, text_base_y - pixels_offset_y, amp_x+amp_width, text_base_y - pixels_offset_y+amp_height, p); + p.setStyle(Paint.Style.FILL); + canvas.drawRect(amp_x, text_base_y - pixels_offset_y, amp_x+amp_frac*amp_width, text_base_y - pixels_offset_y+amp_height, p); + if( amp_frac < 1.0f ) { + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(amp_x+amp_frac*amp_width+1, text_base_y - pixels_offset_y, amp_x+amp_width, text_base_y - pixels_offset_y+amp_height, p); + p.setAlpha(255); + } + if( video_max_amp_peak > video_max_amp ) { + float peak_frac = video_max_amp_peak/32767.0f; + peak_frac = Math.max(peak_frac, 0.0f); + peak_frac = Math.min(peak_frac, 1.0f); + p.setColor(Color.YELLOW); + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + canvas.drawLine(amp_x+peak_frac*amp_width, text_base_y - pixels_offset_y, amp_x+peak_frac*amp_width, text_base_y - pixels_offset_y+amp_height, p); + p.setColor(Color.WHITE); + } + } + } + else if( taking_picture && capture_started ) { + if( camera_controller.isCapturingBurst() ) { + int n_burst_taken = camera_controller.getNBurstTaken() + 1; + int n_burst_total = camera_controller.getBurstTotal(); + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + int pixels_offset_y = 2*text_y; // avoid overwriting the zoom + if( device_ui_rotation == 0 && applicationInterface.getPhotoMode() == MyApplicationInterface.PhotoMode.FocusBracketing ) 
{ + // avoid clashing with the target focus bracketing seekbar in landscape orientation + pixels_offset_y = 5*gap_y; + } + String text = getContext().getResources().getString(R.string.capturing) + " " + n_burst_taken; + if( n_burst_total > 0 ) { + text += " / " + n_burst_total; + } + applicationInterface.drawTextWithBackground(canvas, p, text, Color.WHITE, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + } + else if( camera_controller.isManualISO() ) { + // only show "capturing" text with time for manual exposure time >= 0.5s + long exposure_time = camera_controller.getExposureTime(); + if( exposure_time >= 500000000L ) { + if( ((int)(time_ms / 500)) % 2 == 0 ) { + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + int pixels_offset_y = 2*text_y; // avoid overwriting the zoom + int color = Color.rgb(244, 67, 54); // Red 500 + applicationInterface.drawTextWithBackground(canvas, p, getContext().getResources().getString(R.string.capturing), color, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + } + } + } + } + else if( image_queue_full ) { + if( ((int)(time_ms / 500)) % 2 == 0 ) { + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + int pixels_offset_y = 2 * text_y; // avoid overwriting the zoom + int n_images_to_save = applicationInterface.getImageSaver().getNRealImagesToSave(); + String string = getContext().getResources().getString(R.string.processing) + " (" + n_images_to_save + " " + getContext().getResources().getString(R.string.remaining) + ")"; + applicationInterface.drawTextWithBackground(canvas, p, string, Color.LTGRAY, Color.BLACK, canvas.getWidth() / 2, text_base_y - pixels_offset_y); + } + } + + if( preview.supportsZoom() && show_zoom_pref && preview.isPreviewStarted() ) { + // don't show if preview not started - otherwise if we're not waiting on UI thread for preview to start (see wait_until_started in + // 
Preview and CameraController), we may see incorrect zoom being shown until preview has started, as in Preview.setupCamera() we only + // set the default zoom for CameraController once preview has started + float zoom_ratio = preview.getZoomRatio(); + // only show when actually zoomed in - or out! + // but only show if zoomed in by at least 1.1x, to avoid showing when only very slightly + // zoomed in - otherwise on devices that support zooming out to ultrawide, it's hard to + // zoom back to exactly 1.0x + //if( zoom_ratio < 1.0f - 1.0e-5f || zoom_ratio > 1.0f + 1.0e-5f ) { + if( zoom_ratio < 1.0f - 1.0e-5f || zoom_ratio > 1.1f - 1.0e-5f ) { + // Convert the dps to pixels, based on density scale + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + applicationInterface.drawTextWithBackground(canvas, p, getContext().getResources().getString(R.string.zoom) + ": " + zoom_ratio +"x", Color.WHITE, Color.BLACK, canvas.getWidth() / 2, text_base_y - text_y, MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM, ybounds_text, MyApplicationInterface.Shadow.SHADOW_OUTLINE); + } + } + + } + else if( camera_controller == null ) { + /*if( MyDebug.LOG ) { + Log.d(TAG, "no camera!"); + Log.d(TAG, "width " + canvas.getWidth() + " height " + canvas.getHeight()); + }*/ + p.setColor(Color.WHITE); + p.setTextSize(14 * scale_font + 0.5f); // convert dps to pixels + p.setTextAlign(Paint.Align.CENTER); + int pixels_offset = (int) (20 * scale_font + 0.5f); // convert dps to pixels + if( preview.hasPermissions() ) { + if( preview.openCameraFailed() ) { + canvas.drawText(getContext().getResources().getString(R.string.failed_to_open_camera_1), canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f, p); + canvas.drawText(getContext().getResources().getString(R.string.failed_to_open_camera_2), canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f + pixels_offset, p); + 
canvas.drawText(getContext().getResources().getString(R.string.failed_to_open_camera_3), canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f + 2 * pixels_offset, p); + // n.b., use applicationInterface.getCameraIdPref(), as preview.getCameraId() returns 0 if camera_controller==null + canvas.drawText(getContext().getResources().getString(R.string.camera_id) + ":" + applicationInterface.getCameraIdPref(), canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f + 3 * pixels_offset, p); + } + } + else { + canvas.drawText(getContext().getResources().getString(R.string.no_permission), canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f, p); + } + //canvas.drawRect(0.0f, 0.0f, 100.0f, 100.0f, p); + //canvas.drawRGB(255, 0, 0); + //canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + } + + int top_x = (int) (5 * scale_dp + 0.5f); // convert dps to pixels + int top_y = (int) (5 * scale_dp + 0.5f); // convert dps to pixels + View top_icon = main_activity.getMainUI().getTopIcon(); + if( top_icon != null ) { + if( last_top_icon_shift_time == 0 || time_ms > last_top_icon_shift_time + 1000 ) { + // avoid computing every time, due to cost of calling View.getLocationOnScreen() + /*if( MyDebug.LOG ) + Log.d(TAG, "update cached top_icon_shift");*/ + int top_margin = getViewOnScreenX(top_icon); + if( system_orientation == MainActivity.SystemOrientation.LANDSCAPE ) + top_margin += top_icon.getWidth(); + else if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) + top_margin += top_icon.getHeight(); + // n.b., don't adjust top_margin for icon width/height for an reverse orientation + preview.getView().getLocationOnScreen(gui_location); + int preview_left = gui_location[system_orientation_portrait ? 
1 : 0]; + if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) + preview_left += preview.getView().getWidth(); // actually want preview-right for reverse landscape + this.top_icon_shift = top_margin - preview_left; + if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) + this.top_icon_shift = -this.top_icon_shift; + /*if( MyDebug.LOG ) { + Log.d(TAG, "top_icon.getRotation(): " + top_icon.getRotation()); + Log.d(TAG, "preview_left: " + preview_left); + Log.d(TAG, "top_margin: " + top_margin); + Log.d(TAG, "top_icon_shift: " + top_icon_shift); + }*/ + + last_top_icon_shift_time = time_ms; + } + + if( this.top_icon_shift > 0 ) { + if( device_ui_rotation == 90 || device_ui_rotation == 270 ) { + // portrait + top_y += top_icon_shift; + } + else { + // landscape + top_x += top_icon_shift; + } + } + } + + { + /*int focus_seekbars_margin_left_dp = 85; + if( want_histogram ) + focus_seekbars_margin_left_dp += DrawPreview.histogram_height_dp;*/ + // 135 needed to make room for on-screen info lines in DrawPreview.onDrawInfoLines(), including the histogram + // but we also need to take the top_icon_shift into account, for widescreen aspect ratios and "icons along top" UI placement + int focus_seekbars_margin_left_dp = 135; + int new_focus_seekbars_margin_left = (int) (focus_seekbars_margin_left_dp * scale_dp + 0.5f); // convert dps to pixels + if( top_icon_shift > 0 ) { + //noinspection SuspiciousNameCombination + new_focus_seekbars_margin_left += top_icon_shift; + } + + if( focus_seekbars_margin_left == -1 || new_focus_seekbars_margin_left != focus_seekbars_margin_left ) { + // we check whether focus_seekbars_margin_left has changed, in case there is a performance cost for setting layoutparams + this.focus_seekbars_margin_left = new_focus_seekbars_margin_left; + if( MyDebug.LOG ) + Log.d(TAG, "set focus_seekbars_margin_left to " + focus_seekbars_margin_left); + + // "left" and "right" here are written assuming we're in landscape 
system orientation + + View view = main_activity.findViewById(R.id.focus_seekbar); + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + preview.getView().getLocationOnScreen(gui_location); + int preview_left = gui_location[system_orientation_portrait ? 1 : 0]; + if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) + preview_left += preview.getView().getWidth(); // actually want preview-right for reverse landscape + + view.getLocationOnScreen(gui_location); + int seekbar_right = gui_location[system_orientation_portrait ? 1 : 0]; + if( system_orientation == MainActivity.SystemOrientation.LANDSCAPE || system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + // n.b., we read view.getWidth() even if system_orientation is portrait, because the seekbar is rotated in portrait orientation + seekbar_right += view.getWidth(); + } + else { + // and for reversed landscape, the seekbar is rotated 180 degrees, and getLocationOnScreen() returns the location after the rotation + seekbar_right -= view.getWidth(); + } + + int min_seekbar_width = (int) (150 * scale_dp + 0.5f); // convert dps to pixels + int new_seekbar_width; + if( system_orientation == MainActivity.SystemOrientation.LANDSCAPE || system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + new_seekbar_width = seekbar_right - (preview_left+focus_seekbars_margin_left); + } + else { + // reversed landscape + new_seekbar_width = preview_left - focus_seekbars_margin_left - seekbar_right; + } + new_seekbar_width = Math.max(new_seekbar_width, min_seekbar_width); + /*if( MyDebug.LOG ) { + Log.d(TAG, "preview_left: " + preview_left); + Log.d(TAG, "seekbar_right: " + seekbar_right); + Log.d(TAG, "new_seekbar_width: " + new_seekbar_width); + }*/ + layoutParams.width = new_seekbar_width; + view.setLayoutParams(layoutParams); + + view = main_activity.findViewById(R.id.focus_bracketing_target_seekbar); + layoutParams = 
(RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.width = new_seekbar_width; + view.setLayoutParams(layoutParams); + + // need to update due to changing width of focus seekbars + main_activity.getMainUI().setFocusSeekbarsRotation(); + } + } + + int battery_x = top_x; + int battery_y = top_y + (int) (5 * scale_dp + 0.5f); + int battery_width = (int) (5 * scale_dp + 0.5f); // convert dps to pixels + int battery_height = 4*battery_width; + if( ui_rotation == 90 || ui_rotation == 270 ) { + // n.b., this is only for when lock_to_landscape==true, so we don't look at device_ui_rotation + int diff = canvas.getWidth() - canvas.getHeight(); + battery_x += diff/2; + battery_y -= diff/2; + } + if( device_ui_rotation == 90 ) { + battery_y = canvas.getHeight() - battery_y - battery_height; + } + if( device_ui_rotation == 180 ) { + battery_x = canvas.getWidth() - battery_x - battery_width; + } + if( show_battery_pref ) { + if( !this.has_battery_frac || time_ms > this.last_battery_time + 60000 ) { + // only check periodically - unclear if checking is costly in any way + // note that it's fine to call registerReceiver repeatedly - we pass a null receiver, so this is fine as a "one shot" use + Intent batteryStatus = main_activity.registerReceiver(null, battery_ifilter); + int battery_level = batteryStatus.getIntExtra(BatteryManager.EXTRA_LEVEL, -1); + int battery_scale = batteryStatus.getIntExtra(BatteryManager.EXTRA_SCALE, -1); + has_battery_frac = true; + battery_frac = battery_level/(float)battery_scale; + last_battery_time = time_ms; + if( MyDebug.LOG ) + Log.d(TAG, "Battery status is " + battery_level + " / " + battery_scale + " : " + battery_frac); + } + //battery_frac = 0.2999f; // test + boolean draw_battery = true; + if( battery_frac <= 0.05f ) { + // flash icon at this low level + draw_battery = ((( time_ms / 1000 )) % 2) == 0; + } + if( draw_battery ) { + p.setColor(battery_frac > 0.15f ? 
                        Color.rgb(37, 155, 36) : Color.rgb(244, 67, 54)); // Green 500 or Red 500
                p.setStyle(Paint.Style.FILL);
                // fill the battery icon from the bottom up, proportionally to the remaining charge
                canvas.drawRect(battery_x, battery_y+(1.0f-battery_frac)*(battery_height-2), battery_x+battery_width, battery_y+battery_height, p);
                if( battery_frac < 1.0f ) {
                    // shade the empty portion of the battery icon with translucent black
                    p.setColor(Color.BLACK);
                    p.setAlpha(64);
                    canvas.drawRect(battery_x, battery_y, battery_x + battery_width, battery_y + (1.0f - battery_frac) * (battery_height - 2), p);
                    p.setAlpha(255);
                }
            }
            top_x += (int) (10 * scale_dp + 0.5f); // convert dps to pixels
        }

        onDrawInfoLines(canvas, top_x, top_y, text_base_y, device_ui_rotation, time_ms);

        canvas.restore();
    }

    /** Returns the angular step (in degrees) between adjacent pitch/geo-direction lines.
     *  The step shrinks as the zoom ratio grows, so finer gradations are drawn when a
     *  smaller field of view is visible.
     */
    private int getAngleStep() {
        Preview preview = main_activity.getPreview();
        int angle_step = 10;
        float zoom_ratio = preview.getZoomRatio();
        if( zoom_ratio >= 10.0f )
            angle_step = 1;
        else if( zoom_ratio >= 5.0f )
            angle_step = 2;
        else if( zoom_ratio >= 2.0f )
            angle_step = 5;
        return angle_step;
    }

    /** Draws the level-angle line, pitch lines, geo-direction lines and (when auto-level is
     *  enabled) the auto-stabilise crop guide on top of the preview.
     * @param canvas             the canvas to draw onto
     * @param device_ui_rotation UI rotation in degrees relative to the device's natural orientation
     * @param time_ms            current time in ms, used to refresh cached view angles
     */
    private void drawAngleLines(Canvas canvas, int device_ui_rotation, long time_ms) {
        Preview preview = main_activity.getPreview();
        CameraController camera_controller = preview.getCameraController();
        MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation();
        boolean system_orientation_portrait = system_orientation == MainActivity.SystemOrientation.PORTRAIT;
        boolean has_level_angle = preview.hasLevelAngle();
        boolean actual_show_angle_line_pref;
        if( photoMode == MyApplicationInterface.PhotoMode.Panorama ) {
            // in panorama mode, we show the level if and only if we aren't taking the panorama photos
            actual_show_angle_line_pref = !main_activity.getApplicationInterface().getGyroSensor().isRecording();
        }
        else
            actual_show_angle_line_pref = show_angle_line_pref;

        // don't draw anything when the camera isn't open, or the preview is paused
        boolean allow_angle_lines = camera_controller != null && !preview.isPreviewPaused();

        if( allow_angle_lines && has_level_angle && ( actual_show_angle_line_pref || show_pitch_lines_pref ||
                show_geo_direction_lines_pref ) ) {
            double level_angle = preview.getLevelAngle();
            boolean has_pitch_angle = preview.hasPitchAngle();
            double pitch_angle = preview.getPitchAngle();
            boolean has_geo_direction = preview.hasGeoDirection();
            double geo_direction = preview.getGeoDirection();
            // n.b., must draw this without the standard canvas rotation
            // lines should be shorter in portrait
            int radius_dps = (device_ui_rotation == 90 || device_ui_rotation == 270) ? 60 : 80;
            int radius = (int) (radius_dps * scale_dp + 0.5f); // convert dps to pixels
            int o_radius = (int) (10 * scale_dp + 0.5f); // convert dps to pixels
            double angle = - preview.getOrigLevelAngle();
            // compensate the drawn angle for the current display rotation
            // see http://android-developers.blogspot.co.uk/2010/09/one-screen-turn-deserves-another.html
            int rotation = main_activity.getDisplayRotation(false);
            switch (rotation) {
                case Surface.ROTATION_90:
                    angle -= 90.0;
                    break;
                case Surface.ROTATION_270:
                    angle += 90.0;
                    break;
                case Surface.ROTATION_180:
                    angle += 180.0;
                    break;
                case Surface.ROTATION_0:
                default:
                    break;
            }
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "system_orientation: " + system_orientation);
                Log.d(TAG, "rotation: " + rotation);
            }*/
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "orig_level_angle: " + preview.getOrigLevelAngle());
                Log.d(TAG, "angle: " + angle);
            }*/
            int cx = canvas.getWidth()/2;
            int cy = canvas.getHeight()/2;

            boolean is_level = false;
            if( has_level_angle && Math.abs(level_angle) <= close_level_angle ) { // n.b., use level_angle, not angle or orig_level_angle
                is_level = true;
            }

            final int line_alpha = 160;
            float hthickness = (0.5f * scale_dp + 0.5f); // convert dps to pixels
            float shadow_radius = hthickness;
            shadow_radius = Math.max(shadow_radius, 1.0f);
            p.setStyle(Paint.Style.FILL);

            if( actual_show_angle_line_pref && preview.hasLevelAngleStable() ) {
                // draw the non-rotated part of the level
                // only show the angle line if level angle "stable" (i.e., not pointing near vertically up or down)

                p.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);

                if( is_level ) {
                    p.setColor(angle_highlight_color_pref);
                }
                else {
                    p.setColor(Color.WHITE);
                }
                p.setAlpha(line_alpha);
                // short "end cap" segments either side of the main level line
                draw_rect.set(cx - radius - o_radius, cy - hthickness, cx - radius, cy + hthickness);
                canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
                draw_rect.set(cx + radius, cy - hthickness, cx + radius + o_radius, cy + hthickness);
                canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);

                p.clearShadowLayer();
            }

            canvas.save();
            canvas.rotate((float)angle, cx, cy);

            if( actual_show_angle_line_pref && preview.hasLevelAngleStable() ) {
                // only show the angle line if level angle "stable" (i.e., not pointing near vertically up or down)

                p.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);

                if( is_level ) {
                    p.setColor(angle_highlight_color_pref);
                }
                else {
                    p.setColor(Color.WHITE);
                }
                p.setAlpha(line_alpha);
                draw_rect.set(cx - radius, cy - hthickness, cx + radius, cy + hthickness);
                canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);

                // draw the vertical crossbar
                draw_rect.set(cx - hthickness, cy - radius / 2.0f, cx + hthickness, cy + radius / 2.0f);
                canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);

                if( is_level ) {
                    // draw a second line

                    p.setColor(angle_highlight_color_pref);
                    p.setAlpha(line_alpha);
                    draw_rect.set(cx - radius, cy - 6 * hthickness, cx + radius, cy - 4 * hthickness);
                    canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
                }

                p.clearShadowLayer();
            }
            updateCachedViewAngles(time_ms); // ensure view_angle_x_preview, view_angle_y_preview are computed and up to date
            float camera_angle_x, camera_angle_y;
            if( system_orientation_portrait ) {
                // in portrait the camera's view angles are swapped relative to the canvas axes
                //noinspection SuspiciousNameCombination
                camera_angle_x = this.view_angle_y_preview;
                //noinspection SuspiciousNameCombination
                camera_angle_y = this.view_angle_x_preview;
            }
            else {
                camera_angle_x = this.view_angle_x_preview;
                camera_angle_y = this.view_angle_y_preview;
            }
            // scale factors that (via tan) convert an angular offset into an on-screen pixel distance
            float angle_scale_x = (float)( canvas.getWidth() / (2.0 * Math.tan( Math.toRadians((camera_angle_x/2.0)) )) );
            float angle_scale_y = (float)( canvas.getHeight() / (2.0 * Math.tan( Math.toRadians((camera_angle_y/2.0)) )) );
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "camera_angle_x: " + camera_angle_x);
                Log.d(TAG, "camera_angle_y: " + camera_angle_y);
                Log.d(TAG, "angle_scale_x: " + angle_scale_x);
                Log.d(TAG, "angle_scale_y: " + angle_scale_y);
                Log.d(TAG, "angle_scale_x/scale: " + angle_scale_x/scale);
                Log.d(TAG, "angle_scale_y/scale: " + angle_scale_y/scale);
            }*/
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "has_pitch_angle?: " + has_pitch_angle);
                Log.d(TAG, "show_pitch_lines?: " + show_pitch_lines);
            }*/
            float angle_scale = (float)Math.sqrt( angle_scale_x*angle_scale_x + angle_scale_y*angle_scale_y );
            angle_scale *= preview.getZoomRatio();
            if( has_pitch_angle && show_pitch_lines_pref ) {
                // lines should be shorter in portrait
                int pitch_radius_dps = (device_ui_rotation == 90 || device_ui_rotation == 270) ? 80 : 100;
                int pitch_radius = (int) (pitch_radius_dps * scale_dp + 0.5f); // convert dps to pixels
                int angle_step = getAngleStep();
                for(int latitude_angle=-90;latitude_angle<=90;latitude_angle+=angle_step) {
                    double this_angle = pitch_angle - latitude_angle;
                    if( Math.abs(this_angle) < 90.0 ) {
                        float pitch_distance = angle_scale * (float)Math.tan( Math.toRadians(this_angle) ); // angle_scale is already in pixels rather than dps
                        /*if( MyDebug.LOG ) {
                            Log.d(TAG, "pitch_angle: " + pitch_angle);
                            Log.d(TAG, "pitch_distance_dp: " + pitch_distance_dp);
                        }*/
                        p.setColor(Color.WHITE);
                        p.setTextAlign(Paint.Align.LEFT);
                        // draw fully opaque when close to the 0/+90/-90 degree lines, translucent otherwise
                        if( latitude_angle == 0 && Math.abs(pitch_angle) < 1.0 ) {
                            p.setAlpha(255);
                        }
                        else if( latitude_angle == 90 && Math.abs(pitch_angle - 90) < 3.0 ) {
                            p.setAlpha(255);
                        }
                        else if( latitude_angle == -90 && Math.abs(pitch_angle + 90) < 3.0 ) {
                            p.setAlpha(255);
                        }
                        else {
                            p.setAlpha(line_alpha);
                        }
                        p.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);
                        // can't use drawRoundRect(left, top, right, bottom, ...) as that requires API 21
                        draw_rect.set(cx - pitch_radius, cy + pitch_distance - hthickness, cx + pitch_radius, cy + pitch_distance + hthickness);
                        canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
                        p.clearShadowLayer();
                        // draw pitch angle indicator
                        applicationInterface.drawTextWithBackground(canvas, p, latitude_angle + "\u00B0", p.getColor(), Color.BLACK, (int)(cx + pitch_radius + 4*hthickness), (int)(cy + pitch_distance - 2*hthickness), MyApplicationInterface.Alignment.ALIGNMENT_CENTRE);
                    }
                }
            }
            if( has_geo_direction && has_pitch_angle && show_geo_direction_lines_pref ) {
                // lines should be longer in portrait - n.b., this is opposite to behaviour of pitch lines, as
                // geo lines are drawn perpendicularly
                int geo_radius_dps = (device_ui_rotation == 90 || device_ui_rotation == 270) ? 100 : 80;
                int geo_radius = (int) (geo_radius_dps * scale_dp + 0.5f); // convert dps to pixels
                float geo_angle = (float)Math.toDegrees(geo_direction);
                int angle_step = getAngleStep();
                for(int longitude_angle=0;longitude_angle<360;longitude_angle+=angle_step) {
                    double this_angle = longitude_angle - geo_angle;
                    /*if( MyDebug.LOG ) {
                        Log.d(TAG, "longitude_angle: " + longitude_angle);
                        Log.d(TAG, "geo_angle: " + geo_angle);
                        Log.d(TAG, "this_angle: " + this_angle);
                    }*/
                    // normalise to be in interval [0, 360)
                    while( this_angle >= 360.0 )
                        this_angle -= 360.0;
                    while( this_angle < -360.0 )
                        this_angle += 360.0;
                    // pick shortest angle
                    if( this_angle > 180.0 )
                        this_angle = - (360.0 - this_angle);
                    if( Math.abs(this_angle) < 90.0 ) {
                        /*if( MyDebug.LOG ) {
                            Log.d(TAG, "this_angle is now: " + this_angle);
                        }*/
                        float geo_distance = angle_scale * (float)Math.tan( Math.toRadians(this_angle) ); // angle_scale is already in pixels rather than dps
                        p.setColor(Color.WHITE);
                        p.setTextAlign(Paint.Align.CENTER);
                        p.setAlpha(line_alpha);
                        p.setShadowLayer(shadow_radius, 0.0f, 0.0f, Color.BLACK);
                        // can't use drawRoundRect(left, top, right, bottom, ...) as that requires API 21
                        draw_rect.set(cx + geo_distance - hthickness, cy - geo_radius, cx + geo_distance + hthickness, cy + geo_radius);
                        canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
                        p.clearShadowLayer();
                        // draw geo direction angle indicator
                        applicationInterface.drawTextWithBackground(canvas, p, longitude_angle + "\u00B0", p.getColor(), Color.BLACK, (int)(cx + geo_distance), (int)(cy - geo_radius - 4*hthickness), MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM);
                    }
                }
            }

            p.setAlpha(255);
            p.setStyle(Paint.Style.FILL); // reset

            canvas.restore();
        }

        if( allow_angle_lines && auto_stabilise_pref && preview.hasLevelAngleStable() && !preview.isVideo() ) {
            // although auto-level is supported for photos taken in video mode, there's the risk that it's misleading to display
            // the guide when in video mode!
            double level_angle = preview.getLevelAngle();
            double auto_stabilise_level_angle = level_angle;
            //double auto_stabilise_level_angle = angle;
            // bring the angle into the range [-90, 90]
            while( auto_stabilise_level_angle < -90 )
                auto_stabilise_level_angle += 180;
            while( auto_stabilise_level_angle > 90 )
                auto_stabilise_level_angle -= 180;
            double level_angle_rad_abs = Math.abs( Math.toRadians(auto_stabilise_level_angle) );

            int w1 = canvas.getWidth();
            int h1 = canvas.getHeight();
            // dimensions of the bounding box of the canvas after rotating by the level angle
            double w0 = (w1 * Math.cos(level_angle_rad_abs) + h1 * Math.sin(level_angle_rad_abs));
            double h0 = (w1 * Math.sin(level_angle_rad_abs) + h1 * Math.cos(level_angle_rad_abs));

            if( ImageSaver.autoStabiliseCrop(auto_stabilise_crop, level_angle_rad_abs, w0, h0, w1, h1, canvas.getWidth(), canvas.getHeight()) ) {
                int w2 = auto_stabilise_crop[0];
                int h2 = auto_stabilise_crop[1];
                int cx = canvas.getWidth()/2;
                int cy = canvas.getHeight()/2;

                float left = (canvas.getWidth() - w2)/2.0f;
                float top = (canvas.getHeight() - h2)/2.0f;
                float right = (canvas.getWidth() + w2)/2.0f;
                float bottom = (canvas.getHeight() + h2)/2.0f;

                canvas.save();
                canvas.rotate((float)-level_angle, cx, cy);

                // draw shaded area
                float o_dist = (float)Math.sqrt(canvas.getWidth()*canvas.getWidth() + canvas.getHeight()*canvas.getHeight());
                float o_left = (canvas.getWidth() - o_dist)/2.0f;
                float o_top = (canvas.getHeight() - o_dist)/2.0f;
                float o_right = (canvas.getWidth() + o_dist)/2.0f;
                float o_bottom = (canvas.getHeight() + o_dist)/2.0f;
                p.setStyle(Paint.Style.FILL);
                p.setColor(Color.rgb(0, 0, 0));
                p.setAlpha(crop_shading_alpha_c);
                // shade the four regions outside the crop rectangle
                canvas.drawRect(o_left, o_top, left, o_bottom, p);
                canvas.drawRect(right, o_top, o_right, o_bottom, p);
                canvas.drawRect(left, o_top, right, top, p); // top
                canvas.drawRect(left, bottom, right, o_bottom, p); // bottom

                if( has_level_angle && Math.abs(level_angle) <= close_level_angle ) { // n.b., use level_angle, not angle or orig_level_angle
                    p.setColor(angle_highlight_color_pref);
                }
                else {
                    p.setColor(Color.WHITE);
                }
                p.setStyle(Paint.Style.STROKE);
                p.setStrokeWidth(stroke_width);

                // outline of the crop rectangle itself
                canvas.drawRect(left, top, right, bottom, p);

                canvas.restore();

                p.setStyle(Paint.Style.FILL); // reset
                p.setAlpha(255); // reset
            }
        }
    }

    /** Draws the animation of the most recently captured thumbnail moving from the centre of
     *  the canvas towards the gallery button, shrinking as it goes. Runs for 500ms from
     *  thumbnail_anim_start_ms, then clears the thumbnail_anim flag.
     * @param canvas  the canvas to draw onto
     * @param time_ms current time in ms
     */
    private void doThumbnailAnimation(Canvas canvas, long time_ms) {
        Preview preview = main_activity.getPreview();
        CameraController camera_controller = preview.getCameraController();
        // note, no need to check preferences here, as we do that when setting thumbnail_anim
        if( camera_controller != null && this.thumbnail_anim && last_thumbnail != null ) {
            int ui_rotation = preview.getUIRotation();
            long time = time_ms - this.thumbnail_anim_start_ms;
            final long duration = 500;
            if( time > duration ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "thumbnail_anim finished");
                this.thumbnail_anim = false;
            }
            else {
                thumbnail_anim_src_rect.left = 0;
                thumbnail_anim_src_rect.top = 0;
                thumbnail_anim_src_rect.right = last_thumbnail.getWidth();
                thumbnail_anim_src_rect.bottom = last_thumbnail.getHeight();
                View galleryButton = main_activity.findViewById(R.id.gallery);
                float alpha = ((float)time)/(float)duration; // animation progress in [0, 1]

                // interpolate the thumbnail centre from the canvas centre (st) to the gallery button centre (nd)
                int st_x = canvas.getWidth()/2;
                int st_y = canvas.getHeight()/2;
                int nd_x = galleryButton.getLeft() + galleryButton.getWidth()/2;
                int nd_y = galleryButton.getTop() + galleryButton.getHeight()/2;
                int thumbnail_x = (int)( (1.0f-alpha)*st_x + alpha*nd_x );
                int thumbnail_y = (int)( (1.0f-alpha)*st_y + alpha*nd_y );

                float st_w = canvas.getWidth();
                float st_h = canvas.getHeight();
                float nd_w = galleryButton.getWidth();
                float nd_h = galleryButton.getHeight();
                //int thumbnail_w = (int)( (1.0f-alpha)*st_w + alpha*nd_w );
                //int thumbnail_h = (int)( (1.0f-alpha)*st_h + alpha*nd_h );
                // size is interpolated reciprocally (1/(1+alpha*k)) rather than linearly
                float correction_w = st_w/nd_w - 1.0f;
                float correction_h = st_h/nd_h - 1.0f;
                int thumbnail_w = (int)(st_w/(1.0f+alpha*correction_w));
                int thumbnail_h = (int)(st_h/(1.0f+alpha*correction_h));
                thumbnail_anim_dst_rect.left = thumbnail_x - thumbnail_w/2.0f;
                thumbnail_anim_dst_rect.top = thumbnail_y - thumbnail_h/2.0f;
                thumbnail_anim_dst_rect.right = thumbnail_x + thumbnail_w/2.0f;
                thumbnail_anim_dst_rect.bottom = thumbnail_y + thumbnail_h/2.0f;
                //canvas.drawBitmap(this.thumbnail, thumbnail_anim_src_rect, thumbnail_anim_dst_rect, p);
                thumbnail_anim_matrix.setRectToRect(thumbnail_anim_src_rect, thumbnail_anim_dst_rect, Matrix.ScaleToFit.FILL);
                //thumbnail_anim_matrix.reset();
                if( ui_rotation == 90 || ui_rotation == 270 ) {
                    // pre-scale so the rotated thumbnail keeps its aspect ratio
                    float ratio = ((float)last_thumbnail.getWidth())/(float)last_thumbnail.getHeight();
                    thumbnail_anim_matrix.preScale(ratio, 1.0f/ratio, last_thumbnail.getWidth()/2.0f, last_thumbnail.getHeight()/2.0f);
                }
                thumbnail_anim_matrix.preRotate(ui_rotation, last_thumbnail.getWidth()/2.0f, last_thumbnail.getHeight()/2.0f);
                canvas.drawBitmap(last_thumbnail, thumbnail_anim_matrix, p);
            }
        }
    }

    /** Draws focus feedback: a pulsing circle while continuous focus is moving, and corner
     *  brackets at the focus position while autofocus is waiting or has recently
     *  succeeded/failed.
     * @param canvas  the canvas to draw onto
     * @param time_ms current time in ms
     */
    private void doFocusAnimation(Canvas canvas, long time_ms) {
        Preview preview = main_activity.getPreview();
        CameraController camera_controller = preview.getCameraController();
        if( camera_controller != null && continuous_focus_moving && !taking_picture ) {
            // we don't display the continuous focusing animation when taking a photo - and can also give the impression of having
            // frozen if we pause because the image saver queue is full
            long dt = time_ms - continuous_focus_moving_ms;
            final long length = 1000;
            /*if( MyDebug.LOG )
                Log.d(TAG, "continuous focus moving, dt: " + dt);*/
            if( dt <= length ) {
                float frac = ((float)dt) / (float)length;
                float pos_x = canvas.getWidth()/2.0f;
                float pos_y = canvas.getHeight()/2.0f;
                float min_radius = (40 * scale_dp + 0.5f); // convert dps to pixels
                float max_radius = (60 * scale_dp + 0.5f); // convert dps to pixels
                float radius;
                // radius grows from min to max over the first half of the animation, then shrinks back
                if( frac < 0.5f ) {
                    float alpha = frac*2.0f;
                    radius = (1.0f-alpha) * min_radius + alpha * max_radius;
                }
                else {
                    float alpha = (frac-0.5f)*2.0f;
                    radius = (1.0f-alpha) * max_radius + alpha * min_radius;
                }
                /*if( MyDebug.LOG ) {
                    Log.d(TAG, "dt: " + dt);
                    Log.d(TAG, "radius: " + radius);
                }*/
                p.setColor(Color.WHITE);
                p.setStyle(Paint.Style.STROKE);
                p.setStrokeWidth(stroke_width);
                canvas.drawCircle(pos_x, pos_y, radius, p);
                p.setStyle(Paint.Style.FILL); // reset
            }
            else {
                clearContinuousFocusMove();
            }
        }

        if( preview.isFocusWaiting() || preview.isFocusRecentSuccess() || preview.isFocusRecentFailure() ) {
            long time_since_focus_started = preview.timeSinceStartedAutoFocus();
            float min_radius = (40 * scale_dp + 0.5f); // convert dps to pixels
            float max_radius = (45 * scale_dp + 0.5f); // convert dps to pixels
            float radius = min_radius;
            if( time_since_focus_started > 0 ) {
                final long length = 500;
                float frac = ((float)time_since_focus_started) / (float)length;
                if( frac > 1.0f )
                    frac = 1.0f;
                // same grow-then-shrink pulse as the continuous focus circle above
                if( frac < 0.5f ) {
                    float alpha = frac*2.0f;
                    radius = (1.0f-alpha) * min_radius + alpha * max_radius;
                }
                else {
                    float alpha = (frac-0.5f)*2.0f;
                    radius = (1.0f-alpha) *
max_radius + alpha * min_radius; + } + } + int size = (int)radius; + + if( preview.isFocusRecentSuccess() ) + p.setColor(Color.rgb(20, 231, 21)); // Green A400 + else if( preview.isFocusRecentFailure() ) + p.setColor(Color.rgb(244, 67, 54)); // Red 500 + else + p.setColor(Color.WHITE); + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + int pos_x; + int pos_y; + if( preview.hasFocusArea() ) { + Pair focus_pos = preview.getFocusPos(); + pos_x = focus_pos.first; + pos_y = focus_pos.second; + } + else { + pos_x = canvas.getWidth() / 2; + pos_y = canvas.getHeight() / 2; + } + float frac = 0.5f; + // horizontal strokes + canvas.drawLine(pos_x - size, pos_y - size, pos_x - frac*size, pos_y - size, p); + canvas.drawLine(pos_x + frac*size, pos_y - size, pos_x + size, pos_y - size, p); + canvas.drawLine(pos_x - size, pos_y + size, pos_x - frac*size, pos_y + size, p); + canvas.drawLine(pos_x + frac*size, pos_y + size, pos_x + size, pos_y + size, p); + // vertical strokes + canvas.drawLine(pos_x - size, pos_y - size, pos_x - size, pos_y - frac*size, p); + canvas.drawLine(pos_x - size, pos_y + frac*size, pos_x - size, pos_y + size, p); + canvas.drawLine(pos_x + size, pos_y - size, pos_x + size, pos_y - frac*size, p); + canvas.drawLine(pos_x + size, pos_y + frac*size, pos_x + size, pos_y + size, p); + p.setStyle(Paint.Style.FILL); // reset + } + } + + public void setCoverPreview(boolean cover_preview) { + if( MyDebug.LOG ) + Log.d(TAG, "setCoverPreview: " + cover_preview); + this.cover_preview = cover_preview; + } + + public void setDimPreview(boolean on) { + if( MyDebug.LOG ) + Log.d(TAG, "setDimPreview: " + on); + if( on ) { + this.dim_preview = DimPreview.DIM_PREVIEW_ON; + } + else if( this.dim_preview == DimPreview.DIM_PREVIEW_ON ) { + this.dim_preview = DimPreview.DIM_PREVIEW_UNTIL; + } + } + + public void clearDimPreview() { + this.dim_preview = DimPreview.DIM_PREVIEW_OFF; + } + + public void onDrawPreview(Canvas canvas) { + /*if( MyDebug.LOG ) + 
Log.d(TAG, "onDrawPreview");*/ + /*if( MyDebug.LOG ) + Log.d(TAG, "onDrawPreview hardware accelerated: " + canvas.isHardwareAccelerated());*/ + + final long time_ms = System.currentTimeMillis(); + + if( !has_settings ) { + if( MyDebug.LOG ) + Log.d(TAG, "onDrawPreview: need to update settings"); + updateSettings(); + } + Preview preview = main_activity.getPreview(); + CameraController camera_controller = preview.getCameraController(); + int ui_rotation = preview.getUIRotation(); + + // set up preview bitmaps (histogram etc) + boolean want_preview_bitmap = want_histogram || want_zebra_stripes || want_focus_peaking || want_pre_shots; + boolean use_preview_bitmap_small = want_histogram || want_zebra_stripes || want_focus_peaking; + boolean use_preview_bitmap_full = want_pre_shots; + if( want_preview_bitmap != preview.isPreviewBitmapEnabled() || use_preview_bitmap_small != preview.usePreviewBitmapSmall() || use_preview_bitmap_full != preview.usePreviewBitmapFull() ) { + if( want_preview_bitmap ) { + preview.enablePreviewBitmap(use_preview_bitmap_small, use_preview_bitmap_full); + } + else + preview.disablePreviewBitmap(); + } + if( want_preview_bitmap ) { + if( want_histogram ) + preview.enableHistogram(histogram_type); + else + preview.disableHistogram(); + + if( want_zebra_stripes ) + preview.enableZebraStripes(zebra_stripes_threshold, zebra_stripes_color_foreground, zebra_stripes_color_background); + else + preview.disableZebraStripes(); + + if( want_focus_peaking ) + preview.enableFocusPeaking(); + else + preview.disableFocusPeaking(); + + if( want_pre_shots ) + preview.enablePreShots(); + else + preview.disablePreShots(); + } + + // See documentation for CameraController.shouldCoverPreview(). + // Note, originally we checked camera_controller.shouldCoverPreview() every frame, but this + // has the problem that we blank whenever the camera is being reopened, e.g., when switching + // cameras or changing photo modes that require a reopen. 
The intent however is to only + // cover up the camera when the application is pausing, and to keep it covered up until + // after we've resumed, and the camera has been reopened and we've received frames. + if( preview.usingCamera2API() ) { + boolean camera_is_active = camera_controller != null && !camera_controller.shouldCoverPreview(); + if( cover_preview ) { + // see if we have received a frame yet + if( camera_is_active ) { + if( MyDebug.LOG ) + Log.d(TAG, "no longer need to cover preview"); + cover_preview = false; + } + } + if( cover_preview ) { + // camera has never been active since last resuming + p.setColor(Color.BLACK); + //p.setColor(Color.RED); // test + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + } + else if( dim_preview == DimPreview.DIM_PREVIEW_ON || ( !camera_is_active && dim_preview == DimPreview.DIM_PREVIEW_UNTIL ) ) { + long time_now = System.currentTimeMillis(); + if( camera_inactive_time_ms == -1 ) { + camera_inactive_time_ms = time_now; + } + float frac = ((time_now - camera_inactive_time_ms) / (float)dim_effect_time_c); + frac = Math.min(frac, 1.0f); + int alpha = (int)(frac * 127); + /*if( MyDebug.LOG ) { + Log.d(TAG, "time diff: " + (time_now - camera_inactive_time_ms)); + Log.d(TAG, " frac: " + frac); + Log.d(TAG, " alpha: " + alpha); + }*/ + p.setColor(Color.BLACK); + p.setAlpha(alpha); + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + p.setAlpha(255); + } + else { + camera_inactive_time_ms = -1; + if( dim_preview == DimPreview.DIM_PREVIEW_UNTIL && camera_is_active ) { + dim_preview = DimPreview.DIM_PREVIEW_OFF; + } + } + } + + if( camera_controller!= null && front_screen_flash ) { + p.setColor(Color.WHITE); + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + } + else if( "flash_frontscreen_torch".equals(preview.getCurrentFlashValue()) ) { // getCurrentFlashValue() may return null + p.setColor(Color.WHITE); + p.setAlpha(200); // set alpha so user can still see 
some of the preview + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + p.setAlpha(255); + } + + if( main_activity.getMainUI().inImmersiveMode() ) { + if( immersive_mode_everything_pref ) { + // exit, to ensure we don't display anything! + // though note we still should do the front screen flash (since the user can take photos via volume keys when + // in immersive_mode_everything mode) + return; + } + } + + // If MainActivity.lock_to_landscape==true, then the ui_rotation represents the orientation of the + // device; if MainActivity.lock_to_landscape==false then ui_rotation is always 0 as we don't need to + // apply any orientation ourselves. However, we're we do want to know the true rotation of the + // device, as it affects how certain elements of the UI are layed out. + int device_ui_rotation; + if( MainActivity.lock_to_landscape ) { + device_ui_rotation = ui_rotation; + } + else { + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + device_ui_rotation = MainActivity.getRotationFromSystemOrientation(system_orientation); + } + + if( camera_controller != null && taking_picture && !front_screen_flash && take_photo_border_pref ) { + p.setColor(Color.WHITE); + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + float this_stroke_width = (5.0f * scale_dp + 0.5f); // convert dps to pixels + p.setStrokeWidth(this_stroke_width); + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); + p.setStyle(Paint.Style.FILL); // reset + p.setStrokeWidth(stroke_width); // reset + } + drawGrids(canvas); + + drawCropGuides(canvas); + + // n.b., don't display ghost image if front_screen_flash==true (i.e., frontscreen flash is in operation), otherwise + // the effectiveness of the "flash" is reduced + if( last_thumbnail != null && !last_thumbnail_is_video && camera_controller != null && ( show_last_image || ( allow_ghost_last_image && !front_screen_flash && 
ghost_image_pref.equals("preference_ghost_image_last") ) ) ) { + // If changing this code, ensure that pause preview still works when: + // - Taking a photo in portrait or landscape - and check rotating the device while preview paused + // - Taking a photo with lock to portrait/landscape options still shows the thumbnail with aspect ratio preserved + // Also check ghost last image works okay! + if( show_last_image ) { + p.setColor(Color.rgb(0, 0, 0)); // in case image doesn't cover the canvas (due to different aspect ratios) + canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p); // in case + } + setLastImageMatrix(canvas, last_thumbnail, ui_rotation, !show_last_image); + if( !show_last_image ) + p.setAlpha(ghost_image_alpha); + canvas.drawBitmap(last_thumbnail, last_image_matrix, p); + if( !show_last_image ) + p.setAlpha(255); + } + else if( camera_controller != null && !front_screen_flash && ghost_selected_image_bitmap != null ) { + setLastImageMatrix(canvas, ghost_selected_image_bitmap, ui_rotation, true); + p.setAlpha(ghost_image_alpha); + canvas.drawBitmap(ghost_selected_image_bitmap, last_image_matrix, p); + p.setAlpha(255); + } + + if( preview.isPreviewBitmapEnabled() && !show_last_image ) { + // draw additional real-time effects + + // draw zebra stripes + Bitmap zebra_stripes_bitmap = preview.getZebraStripesBitmap(); + if( zebra_stripes_bitmap != null ) { + setLastImageMatrix(canvas, zebra_stripes_bitmap, 0, false); + p.setAlpha(255); + canvas.drawBitmap(zebra_stripes_bitmap, last_image_matrix, p); + } + + // draw focus peaking + Bitmap focus_peaking_bitmap = preview.getFocusPeakingBitmap(); + if( focus_peaking_bitmap != null ) { + setLastImageMatrix(canvas, focus_peaking_bitmap, 0, false); + p.setAlpha(127); + if( focus_peaking_color_pref != Color.WHITE ) { + p.setColorFilter(new PorterDuffColorFilter(focus_peaking_color_pref, PorterDuff.Mode.SRC_IN)); + } + canvas.drawBitmap(focus_peaking_bitmap, last_image_matrix, p); + if( 
focus_peaking_color_pref != Color.WHITE ) { + p.setColorFilter(null); + } + p.setAlpha(255); + } + } + + doThumbnailAnimation(canvas, time_ms); + + drawUI(canvas, device_ui_rotation, time_ms); + + drawAngleLines(canvas, device_ui_rotation, time_ms); + + doFocusAnimation(canvas, time_ms); + + CameraController.Face [] faces_detected = preview.getFacesDetected(); + if( faces_detected != null ) { + p.setColor(Color.rgb(255, 235, 59)); // Yellow 500 + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + for(CameraController.Face face : faces_detected) { + // Android doc recommends filtering out faces with score less than 50 (same for both Camera and Camera2 APIs) + if( face.score >= 50 ) { + canvas.drawRect(face.temp, p); + } + } + p.setStyle(Paint.Style.FILL); // reset + } + + if( enable_gyro_target_spot && camera_controller != null ) { + GyroSensor gyroSensor = main_activity.getApplicationInterface().getGyroSensor(); + if( gyroSensor.isRecording() ) { + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + boolean system_orientation_portrait = system_orientation == MainActivity.SystemOrientation.PORTRAIT; + for(float [] gyro_direction : gyro_directions) { + gyroSensor.getRelativeInverseVector(transformed_gyro_direction, gyro_direction); + gyroSensor.getRelativeInverseVector(transformed_gyro_direction_up, gyro_direction_up); + // note that although X of gyro_direction represents left to right on the device, because we're in landscape mode, + // this is y coordinates on the screen + float angle_x, angle_y; + if( system_orientation_portrait ) { + angle_x = (float)Math.asin(transformed_gyro_direction[0]); + angle_y = - (float)Math.asin(transformed_gyro_direction[1]); + } + else { + angle_x = - (float)Math.asin(transformed_gyro_direction[1]); + angle_y = - (float)Math.asin(transformed_gyro_direction[0]); + } + if( Math.abs(angle_x) < 0.5f*Math.PI && Math.abs(angle_y) < 0.5f*Math.PI ) { + updateCachedViewAngles(time_ms); 
// ensure view_angle_x_preview, view_angle_y_preview are computed and up to date + float camera_angle_x, camera_angle_y; + if( system_orientation_portrait ) { + //noinspection SuspiciousNameCombination + camera_angle_x = this.view_angle_y_preview; + //noinspection SuspiciousNameCombination + camera_angle_y = this.view_angle_x_preview; + } + else { + camera_angle_x = this.view_angle_x_preview; + camera_angle_y = this.view_angle_y_preview; + } + float angle_scale_x = (float) (canvas.getWidth() / (2.0 * Math.tan(Math.toRadians((camera_angle_x / 2.0))))); + float angle_scale_y = (float) (canvas.getHeight() / (2.0 * Math.tan(Math.toRadians((camera_angle_y / 2.0))))); + angle_scale_x *= preview.getZoomRatio(); + angle_scale_y *= preview.getZoomRatio(); + float distance_x = angle_scale_x * (float) Math.tan(angle_x); // angle_scale is already in pixels rather than dps + float distance_y = angle_scale_y * (float) Math.tan(angle_y); // angle_scale is already in pixels rather than dps + p.setColor(Color.WHITE); + drawGyroSpot(canvas, 0.0f, 0.0f, -1.0f, 0.0f, 48, true); // draw spot for the centre of the screen, to help the user orient the device + p.setColor(Color.BLUE); + float dir_x = -transformed_gyro_direction_up[1]; + float dir_y = -transformed_gyro_direction_up[0]; + drawGyroSpot(canvas, distance_x, distance_y, dir_x, dir_y, 45, false); + /*{ + // for debug only, draw the gyro spot that isn't calibrated with the accelerometer + gyroSensor.getRelativeInverseVectorGyroOnly(transformed_gyro_direction, gyro_direction); + gyroSensor.getRelativeInverseVectorGyroOnly(transformed_gyro_direction_up, gyro_direction_up); + p.setColor(Color.YELLOW); + angle_x = - (float)Math.asin(transformed_gyro_direction[1]); + angle_y = - (float)Math.asin(transformed_gyro_direction[0]); + distance_x = angle_scale_x * (float) Math.tan(angle_x); // angle_scale is already in pixels rather than dps + distance_y = angle_scale_y * (float) Math.tan(angle_y); // angle_scale is already in pixels rather 
than dps + dir_x = -transformed_gyro_direction_up[1]; + dir_y = -transformed_gyro_direction_up[0]; + drawGyroSpot(canvas, distance_x, distance_y, dir_x, dir_y, 45); + }*/ + } + + // show indicator for not being "upright", but only if tilt angle is within 20 degrees + if( gyroSensor.isUpright() != 0 && Math.abs(angle_x) <= 20.0f*0.0174532925199f ) { + //applicationInterface.drawTextWithBackground(canvas, p, "not upright", Color.WHITE, Color.BLACK, canvas.getWidth()/2, canvas.getHeight()/2, MyApplicationInterface.Alignment.ALIGNMENT_CENTRE, null, true); + canvas.save(); + canvas.rotate(ui_rotation, canvas.getWidth()/2.0f, canvas.getHeight()/2.0f); + final int icon_size = (int) (64 * scale_dp + 0.5f); // convert dps to pixels + final int cy_offset = (int) (80 * scale_dp + 0.5f); // convert dps to pixels + int cx = canvas.getWidth()/2, cy = canvas.getHeight()/2 - cy_offset; + icon_dest.set(cx - icon_size/2, cy - icon_size/2, cx + icon_size/2, cy + icon_size/2); + /*p.setStyle(Paint.Style.FILL); + p.setColor(Color.BLACK); + p.setAlpha(64); + canvas.drawRect(icon_dest, p); + p.setAlpha(255);*/ + canvas.drawBitmap(gyroSensor.isUpright() > 0 ? 
rotate_left_bitmap : rotate_right_bitmap, null, icon_dest, p); + canvas.restore(); + } + } + } + } + + if( time_ms > last_update_focus_seekbar_auto_time + 100 ) { + last_update_focus_seekbar_auto_time = time_ms; + + if( camera_controller != null && photoMode == MyApplicationInterface.PhotoMode.FocusBracketing && applicationInterface.isFocusBracketingSourceAutoPref() ) { + // not strictly related to drawing on the preview, but a convenient place to do this + // also need to wait some time after getSettingTargetFocusDistanceTime(), as when user stops changing target seekbar, it takes time to return to + // continuous focus + if( !main_activity.getPreview().isSettingTargetFocusDistance() && time_ms > main_activity.getPreview().getSettingTargetFocusDistanceTime() + 500 && + camera_controller.captureResultHasFocusDistance() ) { + main_activity.setManualFocusSeekbarProgress(false, camera_controller.captureResultFocusDistance()); + } + } + } + + /*if( MyDebug.LOG ) { + long time_taken = System.currentTimeMillis() - time_ms; + Log.d(TAG, "onDrawPreview time: " + time_taken); + }*/ + } + + private void setLastImageMatrix(Canvas canvas, Bitmap bitmap, int this_ui_rotation, boolean flip_front) { + Preview preview = main_activity.getPreview(); + CameraController camera_controller = preview.getCameraController(); + last_image_src_rect.left = 0; + last_image_src_rect.top = 0; + last_image_src_rect.right = bitmap.getWidth(); + last_image_src_rect.bottom = bitmap.getHeight(); + if( this_ui_rotation == 90 || this_ui_rotation == 270 ) { + last_image_src_rect.right = bitmap.getHeight(); + last_image_src_rect.bottom = bitmap.getWidth(); + } + last_image_dst_rect.left = 0; + last_image_dst_rect.top = 0; + last_image_dst_rect.right = canvas.getWidth(); + last_image_dst_rect.bottom = canvas.getHeight(); + /*if( MyDebug.LOG ) { + Log.d(TAG, "thumbnail: " + bitmap.getWidth() + " x " + bitmap.getHeight()); + Log.d(TAG, "canvas: " + canvas.getWidth() + " x " + canvas.getHeight()); + }*/ + 
last_image_matrix.setRectToRect(last_image_src_rect, last_image_dst_rect, Matrix.ScaleToFit.CENTER); // use CENTER to preserve aspect ratio + if( this_ui_rotation == 90 || this_ui_rotation == 270 ) { + // the rotation maps (0, 0) to (tw/2 - th/2, th/2 - tw/2), so we translate to undo this + float diff = bitmap.getHeight() - bitmap.getWidth(); + last_image_matrix.preTranslate(diff/2.0f, -diff/2.0f); + } + last_image_matrix.preRotate(this_ui_rotation, bitmap.getWidth()/2.0f, bitmap.getHeight()/2.0f); + if( flip_front ) { + boolean is_front_facing = camera_controller != null && (camera_controller.getFacing() == CameraController.Facing.FACING_FRONT); + if( is_front_facing && !sharedPreferences.getString(PreferenceKeys.FrontCameraMirrorKey, "preference_front_camera_mirror_no").equals("preference_front_camera_mirror_photo") ) { + last_image_matrix.preScale(-1.0f, 1.0f, bitmap.getWidth()/2.0f, 0.0f); + } + } + } + + private void drawGyroSpot(Canvas canvas, float distance_x, float distance_y, float dir_x, float dir_y, int radius_dp, boolean outline) { + if( outline ) { + p.setStyle(Paint.Style.STROKE); + p.setStrokeWidth(stroke_width); + p.setAlpha(255); + } + else { + p.setAlpha(127); + } + float radius = (radius_dp * scale_dp + 0.5f); // convert dps to pixels + float cx = canvas.getWidth()/2.0f + distance_x; + float cy = canvas.getHeight()/2.0f + distance_y; + + // if gyro spots would be outside the field of view, it's still better to show them on the + // border of the canvas, so the user knows which direction to move the device + cx = Math.max(cx, 0.0f); + cx = Math.min(cx, canvas.getWidth()); + cy = Math.max(cy, 0.0f); + cy = Math.min(cy, canvas.getHeight()); + + canvas.drawCircle(cx, cy, radius, p); + p.setAlpha(255); + p.setStyle(Paint.Style.FILL); // reset + + // draw crosshairs + //p.setColor(Color.WHITE); + /*p.setStrokeWidth(stroke_width); + canvas.drawLine(cx - radius*dir_x, cy - radius*dir_y, cx + radius*dir_x, cy + radius*dir_y, p); + canvas.drawLine(cx - 
radius*dir_y, cy + radius*dir_x, cx + radius*dir_y, cy - radius*dir_x, p);*/ + } + + /** + * A generic method to display up to two lines on the preview. + * Currently used by the Kraken underwater housing sensor to display + * temperature and depth. + * + * The two lines are displayed in the lower left corner of the screen. + * + * @param line1 First line to display + * @param line2 Second line to display + */ + public void onExtraOSDValuesChanged(String line1, String line2) { + OSDLine1 = line1; + OSDLine2 = line2; + } + + // for testing: + + public boolean getStoredHasStampPref() { + return this.has_stamp_pref; + } + + public boolean getStoredAutoStabilisePref() { + return this.auto_stabilise_pref; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ui/FolderChooserDialog.java b/app/src/main/java/net/sourceforge/opencamera/ui/FolderChooserDialog.java new file mode 100644 index 0000000..2a0ae19 --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ui/FolderChooserDialog.java @@ -0,0 +1,445 @@ +package net.sourceforge.opencamera.ui; + +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.R; +import net.sourceforge.opencamera.StorageUtils; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +import android.app.AlertDialog; +import android.app.Dialog; +import android.app.DialogFragment; +import android.content.DialogInterface; +import android.os.Bundle; +import android.os.Environment; +import androidx.annotation.NonNull; +import android.text.InputFilter; +import android.text.Spanned; +import android.util.Log; +import android.util.TypedValue; +import android.view.LayoutInflater; +import android.view.View; +import android.widget.AdapterView; +import android.widget.AdapterView.OnItemClickListener; +import android.widget.ArrayAdapter; +import android.widget.Button; +import android.widget.EditText; +import android.widget.ListView; +import 
android.widget.Toast; + +/** Dialog to pick a folder or file. Also allows creating new folders. Used when not + * using the Storage Access Framework. + */ +public class FolderChooserDialog extends DialogFragment { + private static final String TAG = "FolderChooserFragment"; + + private boolean show_new_folder_button = true; // whether to show a button for creating a new folder + private boolean show_dcim_shortcut = true; // whether to show a shortcut to the DCIM/ folder + private boolean mode_folder = true; // if true, the dialog is for selecting a folder; if false, the dialog is for selecting a file + private String extension; // if non-null, and mode_folder==false, only show files matching this file extension + + private File start_folder = new File(""); + private File current_folder; + private File max_parent; // if non-null, don't show the Parent option if viewing this folder (so the user can't go above that folder) + private AlertDialog folder_dialog; + private ListView list; + private String chosen_folder; + private String chosen_file; // only set if mode_folder==false + + private static class FileWrapper implements Comparable { + private final File file; + private final String override_name; // if non-null, use this as the display name instead + private final int sort_order; // items are sorted first by sort_order, then alphabetically + + FileWrapper(File file, String override_name, int sort_order) { + this.file = file; + this.override_name = override_name; + this.sort_order = sort_order; + } + + @NonNull + @Override + public String toString() { + if( override_name != null ) + return override_name; + if( file.isDirectory() ) + return file.getName() + File.separator; + return file.getName(); + } + + @Override + public int compareTo(@NonNull FileWrapper o) { + if( this.sort_order < o.sort_order ) + return -1; + else if( this.sort_order > o.sort_order ) + return 1; + return 
this.file.getName().toLowerCase(Locale.US).compareTo(o.getFile().getName().toLowerCase(Locale.US)); + } + + @Override + public boolean equals(Object o) { + // important to override equals(), since we're overriding compareTo() + if( !(o instanceof FileWrapper) ) + return false; + FileWrapper that = (FileWrapper)o; + if( this.sort_order != that.sort_order ) + return false; + return this.file.getName().toLowerCase(Locale.US).equals(that.getFile().getName().toLowerCase(Locale.US)); + } + + @Override + public int hashCode() { + // must override this, as we override equals() + return this.file.getName().toLowerCase(Locale.US).hashCode(); + } + + File getFile() { + return file; + } + } + + @Override + public Dialog onCreateDialog(Bundle savedInstanceState) { + if( MyDebug.LOG ) + Log.d(TAG, "onCreateDialog"); + if( MyDebug.LOG ) + Log.d(TAG, "start in folder: " + start_folder); + + list = new ListView(getActivity()); + list.setOnItemClickListener(new OnItemClickListener() { + @Override + public void onItemClick(AdapterView parent, View view, int position, long id) { + if( MyDebug.LOG ) + Log.d(TAG, "onItemClick: " + position); + FileWrapper file_wrapper = (FileWrapper) parent.getItemAtPosition(position); + if( MyDebug.LOG ) + Log.d(TAG, "clicked: " + file_wrapper.toString()); + File file = file_wrapper.getFile(); + if( MyDebug.LOG ) + Log.d(TAG, "file: " + file.toString()); + if( file.isDirectory() ) { + refreshList(file); + } + else if( !mode_folder && file.isFile() ) { + chosen_file = file.getAbsolutePath(); + folder_dialog.dismiss(); + } + } + }); + // good to use as short a text as possible for the icons, to reduce chance that the three buttons will have to appear on top of each other rather than in a row, in portrait mode + AlertDialog.Builder folder_dialog_builder = new AlertDialog.Builder(getActivity()) + //.setIcon(R.drawable.alert_dialog_icon) + .setView(list); + if( mode_folder ) { + folder_dialog_builder.setPositiveButton(android.R.string.ok, null); // we set 
the listener in onShowListener, so we can prevent the dialog from closing (if chosen folder isn't writable) + } + if( show_new_folder_button ) { + folder_dialog_builder.setNeutralButton(R.string.new_folder, null); // we set the listener in onShowListener, so we can prevent the dialog from closing + } + folder_dialog_builder.setNegativeButton(android.R.string.cancel, null); + folder_dialog = folder_dialog_builder.create(); + + folder_dialog.setOnShowListener(new DialogInterface.OnShowListener() { + @Override + public void onShow(DialogInterface dialog_interface) { + if( mode_folder ) { + Button b_positive = folder_dialog.getButton(AlertDialog.BUTTON_POSITIVE); + b_positive.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "choose folder: " + current_folder.toString()); + if( useFolder() ) { + folder_dialog.dismiss(); + } + } + }); + } + if( show_new_folder_button ) { + Button b_neutral = folder_dialog.getButton(AlertDialog.BUTTON_NEUTRAL); + b_neutral.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "new folder in: " + current_folder.toString()); + newFolder(); + } + }); + } + } + }); + + if( !start_folder.exists() ) { + if( MyDebug.LOG ) + Log.d(TAG, "create new folder" + start_folder); + if( !start_folder.mkdirs() ) { + if( MyDebug.LOG ) + Log.d(TAG, "failed to create new folder"); + // don't do anything yet, this is handled below + } + } + refreshList(start_folder); + if( !canWrite() ) { + // see testFolderChooserInvalid() + if( MyDebug.LOG ) + Log.d(TAG, "failed to read folder"); + + if( show_dcim_shortcut ) { + if( MyDebug.LOG ) + Log.d(TAG, "fall back to DCIM"); + // note that we reset to DCIM rather than DCIM/OpenCamera, just to increase likelihood of getting back to a valid state + refreshList(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM)); + if( current_folder == null ) { + 
if( MyDebug.LOG ) + Log.d(TAG, "can't even read DCIM?!"); + refreshList(new File("/")); + } + } + } + return folder_dialog; + } + + public void setStartFolder(File start_folder) { + this.start_folder = start_folder; + } + + public void setMaxParent(File max_parent) { + if( MyDebug.LOG ) + Log.d(TAG, "setMaxParent: " + max_parent); + this.max_parent = max_parent; + } + + public void setShowNewFolderButton(boolean show_new_folder_button) { + this.show_new_folder_button = show_new_folder_button; + } + + public void setShowDCIMShortcut(boolean show_dcim_shortcut) { + this.show_dcim_shortcut = show_dcim_shortcut; + } + + public void setModeFolder(boolean mode_folder) { + this.mode_folder = mode_folder; + } + + public void setExtension(String extension) { + this.extension = extension.toLowerCase(); + } + + private void refreshList(File new_folder) { + if( MyDebug.LOG ) + Log.d(TAG, "refreshList: " + new_folder); + if( new_folder == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "refreshList: null folder"); + return; + } + File [] files = null; + // try/catch just in case? 
+ try { + files = new_folder.listFiles(); + } + catch(Exception e) { + MyDebug.logStackTrace(TAG, "exception reading folder", e); + } + // n.b., files may be null if no files could be found in the folder (or we can't read) - but should still allow the user + // to view this folder (so the user can go to parent folders which might be readable again) + List listed_files = new ArrayList<>(); + if( new_folder.getParentFile() != null ) { + if( max_parent != null && max_parent.equals(new_folder) ) { + // don't show parent option + } + else { + listed_files.add(new FileWrapper(new_folder.getParentFile(), getResources().getString(R.string.parent_folder), 0)); + } + } + if( show_dcim_shortcut ) { + File default_folder = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM); + if( !default_folder.equals(new_folder) && !default_folder.equals(new_folder.getParentFile()) ) + listed_files.add(new FileWrapper(default_folder, null, 1)); + } + if( files != null ) { + for(File file : files) { + boolean accept = false; + if( file.isDirectory() ) + accept = true; + else if( !mode_folder && file.isFile() ) { + accept = true; + if( extension != null ) { + String name = file.getName(); + int index = name.lastIndexOf('.'); + if( index != -1 ) { + String ext = name.substring(index).toLowerCase(); + if( !ext.equals(extension) ) { + accept = false; + } + } + } + } + + if( accept ) { + int sort_order = file.isDirectory() ? 
2 : 3; + listed_files.add(new FileWrapper(file, null, sort_order)); + } + } + } + Collections.sort(listed_files); + + ArrayAdapter adapter = new ArrayAdapter<>(this.getActivity(), android.R.layout.simple_list_item_1, listed_files); + list.setAdapter(adapter); + + this.current_folder = new_folder; + //dialog.setTitle(current_folder.getName()); + folder_dialog.setTitle(current_folder.getAbsolutePath()); + } + + private boolean canWrite() { + try { + if( this.current_folder != null && this.current_folder.canWrite() ) + return true; + } + catch(Exception e) { + if( MyDebug.LOG ) + Log.d(TAG, "exception in canWrite()"); + } + return false; + } + + private boolean useFolder() { + if( MyDebug.LOG ) + Log.d(TAG, "useFolder"); + if( current_folder == null ) + return false; + if( canWrite() ) { + String new_save_location = current_folder.getAbsolutePath(); + if( this.show_dcim_shortcut ) { + File base_folder = StorageUtils.getBaseFolder(); + if( current_folder.getParentFile() != null && current_folder.getParentFile().equals(base_folder) ) { + if( MyDebug.LOG ) + Log.d(TAG, "parent folder is base folder"); + new_save_location = current_folder.getName(); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "new_save_location: " + new_save_location); + chosen_folder = new_save_location; + return true; + } + else { + Toast.makeText(getActivity(), R.string.cant_write_folder, Toast.LENGTH_SHORT).show(); + } + return false; + } + + /** Returns the folder selected by the user (or the folder containing the selected folder if + * mode_folder==false). Returns null if the dialog was cancelled. + */ + public String getChosenFolder() { + return this.chosen_folder; + } + + /** Returns the file selected by the user, if mode_folder==false. Returns null if the dialog was + * cancelled or mode_folder==true. 
+ */ + public String getChosenFile() { + return this.chosen_file; + } + + private static class NewFolderInputFilter implements InputFilter { + // whilst Android seems to allow any characters on internal memory, SD cards are typically formatted with FAT32 + private final static String disallowed = "|\\?*<\":>"; + + @Override + public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) { + for(int i=start;i test_ui_buttons = new Hashtable<>(); + public int test_saved_popup_width; + public int test_saved_popup_height; + public volatile int test_navigation_gap; + public volatile int test_navigation_gap_landscape; + public volatile int test_navigation_gap_reversed_landscape; + + public MainUI(MainActivity main_activity) { + if( MyDebug.LOG ) + Log.d(TAG, "MainUI"); + this.main_activity = main_activity; + + this.setSeekbarColors(); + } + + private void setSeekbarColors() { + if( MyDebug.LOG ) + Log.d(TAG, "setSeekbarColors"); + { + ColorStateList progress_color = ColorStateList.valueOf( Color.argb(255, 240, 240, 240) ); + ColorStateList thumb_color = ColorStateList.valueOf( Color.argb(255, 255, 255, 255) ); + + SeekBar seekBar = main_activity.findViewById(R.id.zoom_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = main_activity.findViewById(R.id.focus_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = main_activity.findViewById(R.id.focus_bracketing_target_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = main_activity.findViewById(R.id.exposure_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = main_activity.findViewById(R.id.iso_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = 
main_activity.findViewById(R.id.exposure_time_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + + seekBar = main_activity.findViewById(R.id.white_balance_seekbar); + seekBar.setProgressTintList(progress_color); + seekBar.setThumbTintList(thumb_color); + } + } + + /** Similar view.setRotation(ui_rotation), but achieves this via an animation. + */ + private void setViewRotation(View view, float ui_rotation) { + if( !view_rotate_animation ) { + view.setRotation(ui_rotation); + } + if( !MainActivity.lock_to_landscape ) { + float start_rotation = view_rotate_animation_start + ui_rotation; + if( start_rotation >= 360.0f ) + start_rotation -= 360.0f; + view.setRotation(start_rotation); + } + float rotate_by = ui_rotation - view.getRotation(); + if( rotate_by > 181.0f ) + rotate_by -= 360.0f; + else if( rotate_by < -181.0f ) + rotate_by += 360.0f; + // view.animate() modifies the view's rotation attribute, so it ends up equivalent to view.setRotation() + // we use rotationBy() instead of rotation(), so we get the minimal rotation for clockwise vs anti-clockwise + /*if( main_activity.is_test && Build.VERSION.SDK_INT <= Build.VERSION_CODES.JELLY_BEAN_MR2 ) { + // We randomly get a java.lang.ArrayIndexOutOfBoundsException crash when running MainTests suite + // on Android emulator with Android 4.3, from deep below ViewPropertyAnimator.start(). + // Unclear why this is - I haven't seen this on real devices and can't find out info about it. 
+ view.setRotation(ui_rotation); + } + else*/ { + view.animate().rotationBy(rotate_by).setDuration(view_rotate_animation_duration).setInterpolator(new AccelerateDecelerateInterpolator()).start(); + } + } + + public void layoutUI() { + layoutUI(false); + } + + public void layoutUIWithRotation(float view_rotate_animation_start) { + if( MyDebug.LOG ) + Log.d(TAG, "layoutUIWithRotation: " + view_rotate_animation_start); + this.view_rotate_animation = true; + this.view_rotate_animation_start = view_rotate_animation_start; + layoutUI(); + view_rotate_animation = false; + this.view_rotate_animation_start = 0.0f; + } + + private UIPlacement computeUIPlacement() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + String ui_placement_string = sharedPreferences.getString(PreferenceKeys.UIPlacementPreferenceKey, "ui_top"); + switch( ui_placement_string ) { + case "ui_left": + return UIPlacement.UIPLACEMENT_LEFT; + case "ui_top": + return UIPlacement.UIPLACEMENT_TOP; + default: + return UIPlacement.UIPLACEMENT_RIGHT; + } + } + + // stores with width and height of the last time we laid out the UI + public int layoutUI_display_w = -1; + public int layoutUI_display_h = -1; + + private void layoutUI(boolean popup_container_only) { + long debug_time = 0; + if( MyDebug.LOG ) { + Log.d(TAG, "layoutUI"); + debug_time = System.currentTimeMillis(); + } + + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + boolean system_orientation_portrait = system_orientation == MainActivity.SystemOrientation.PORTRAIT; + boolean system_orientation_reversed_landscape = system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE; + if( MyDebug.LOG ) { + Log.d(TAG, " system_orientation = " + system_orientation); + Log.d(TAG, " system_orientation_portrait? 
" + system_orientation_portrait); + } + + // we cache the preference_ui_placement to save having to check it in the draw() method + this.ui_placement = computeUIPlacement(); + if( MyDebug.LOG ) + Log.d(TAG, "ui_placement: " + ui_placement); + int relative_orientation; + if( MainActivity.lock_to_landscape ) { + // new code for orientation fixed to landscape + // the display orientation should be locked to landscape, but how many degrees is that? + int rotation = main_activity.getWindowManager().getDefaultDisplay().getRotation(); + int degrees = 0; + switch (rotation) { + case Surface.ROTATION_0: degrees = 0; break; + case Surface.ROTATION_90: degrees = 90; break; + case Surface.ROTATION_180: degrees = 180; break; + case Surface.ROTATION_270: degrees = 270; break; + default: + break; + } + // getRotation is anti-clockwise, but current_orientation is clockwise, so we add rather than subtract + // relative_orientation is clockwise from landscape-left + //int relative_orientation = (current_orientation + 360 - degrees) % 360; + relative_orientation = (current_orientation + degrees) % 360; + if( MyDebug.LOG ) { + Log.d(TAG, " current_orientation = " + current_orientation); + Log.d(TAG, " degrees = " + degrees); + Log.d(TAG, " relative_orientation = " + relative_orientation); + } + } + else { + relative_orientation = 0; + } + final int ui_rotation = (360 - relative_orientation) % 360; + main_activity.getPreview().setUIRotation(ui_rotation); + // naming convention for variables is for system_orientation==LANDSCAPE, right-handed UI + int align_left = system_orientation_portrait ? RelativeLayout.ALIGN_TOP : RelativeLayout.ALIGN_LEFT; + int align_right = system_orientation_portrait ? RelativeLayout.ALIGN_BOTTOM : RelativeLayout.ALIGN_RIGHT; + int align_top = system_orientation_portrait ? RelativeLayout.ALIGN_RIGHT : RelativeLayout.ALIGN_TOP; + int align_bottom = system_orientation_portrait ? 
RelativeLayout.ALIGN_LEFT : RelativeLayout.ALIGN_BOTTOM; + int left_of = system_orientation_portrait ? RelativeLayout.ABOVE : RelativeLayout.LEFT_OF; + int right_of = system_orientation_portrait ? RelativeLayout.BELOW : RelativeLayout.RIGHT_OF; + int above = system_orientation_portrait ? RelativeLayout.RIGHT_OF : RelativeLayout.ABOVE; + int below = system_orientation_portrait ? RelativeLayout.LEFT_OF : RelativeLayout.BELOW; + int ui_independent_left_of = left_of; + int ui_independent_right_of = right_of; + int ui_independent_above = above; + int ui_independent_below = below; + int align_parent_left = system_orientation_portrait ? RelativeLayout.ALIGN_PARENT_TOP : RelativeLayout.ALIGN_PARENT_LEFT; + int align_parent_right = system_orientation_portrait ? RelativeLayout.ALIGN_PARENT_BOTTOM : RelativeLayout.ALIGN_PARENT_RIGHT; + int align_parent_top = system_orientation_portrait ? RelativeLayout.ALIGN_PARENT_RIGHT : RelativeLayout.ALIGN_PARENT_TOP; + int align_parent_bottom = system_orientation_portrait ? RelativeLayout.ALIGN_PARENT_LEFT : RelativeLayout.ALIGN_PARENT_BOTTOM; + int center_horizontal = system_orientation_portrait ? RelativeLayout.CENTER_VERTICAL : RelativeLayout.CENTER_HORIZONTAL; + int center_vertical = system_orientation_portrait ? 
RelativeLayout.CENTER_HORIZONTAL : RelativeLayout.CENTER_VERTICAL; + + int iconpanel_left_of = left_of; + int iconpanel_right_of = right_of; + int iconpanel_above = above; + int iconpanel_below = below; + int iconpanel_align_parent_left = align_parent_left; + int iconpanel_align_parent_right = align_parent_right; + int iconpanel_align_parent_top = align_parent_top; + int iconpanel_align_parent_bottom = align_parent_bottom; + + if( system_orientation_reversed_landscape ) { + int temp = align_left; + align_left = align_right; + align_right = temp; + temp = align_top; + align_top = align_bottom; + align_bottom = temp; + temp = left_of; + left_of = right_of; + right_of = temp; + temp = above; + above = below; + below = temp; + + ui_independent_left_of = left_of; + ui_independent_right_of = right_of; + ui_independent_above = above; + ui_independent_below = below; + + temp = align_parent_left; + align_parent_left = align_parent_right; + align_parent_right = temp; + temp = align_parent_top; + align_parent_top = align_parent_bottom; + align_parent_bottom = temp; + + iconpanel_left_of = left_of; + iconpanel_right_of = right_of; + iconpanel_above = above; + iconpanel_below = below; + iconpanel_align_parent_left = align_parent_left; + iconpanel_align_parent_right = align_parent_right; + iconpanel_align_parent_top = align_parent_top; + iconpanel_align_parent_bottom = align_parent_bottom; + } + + if( ui_placement == UIPlacement.UIPLACEMENT_LEFT ) { + int temp = above; + above = below; + below = temp; + temp = align_parent_top; + align_parent_top = align_parent_bottom; + align_parent_bottom = temp; + iconpanel_align_parent_top = align_parent_top; + iconpanel_align_parent_bottom = align_parent_bottom; + } + else if( ui_placement == UIPlacement.UIPLACEMENT_TOP ) { + iconpanel_left_of = below; + iconpanel_right_of = above; + iconpanel_above = left_of; + iconpanel_below = right_of; + //noinspection SuspiciousNameCombination + iconpanel_align_parent_left = align_parent_bottom; + 
//noinspection SuspiciousNameCombination + iconpanel_align_parent_right = align_parent_top; + //noinspection SuspiciousNameCombination + iconpanel_align_parent_top = align_parent_left; + //noinspection SuspiciousNameCombination + iconpanel_align_parent_bottom = align_parent_right; + } + + Point display_size = new Point(); + main_activity.getApplicationInterface().getDisplaySize(display_size, true); + this.layoutUI_display_w = display_size.x; + this.layoutUI_display_h = display_size.y; + if( MyDebug.LOG ) { + Log.d(TAG, "layoutUI_display_w: " + layoutUI_display_w); + Log.d(TAG, "layoutUI_display_h: " + layoutUI_display_h); + } + final int display_height = Math.min(display_size.x, display_size.y); + + final float scale = main_activity.getResources().getDisplayMetrics().density; + if( MyDebug.LOG ) + Log.d(TAG, "scale: " + scale); + + /*int navigation_gap = 0; + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 ) { + final int display_width = Math.max(display_size.x, display_size.y); + Point real_display_size = new Point(); + display.getRealSize(real_display_size); + final int real_display_width = Math.max(real_display_size.x, real_display_size.y); + navigation_gap = real_display_width - display_width; + if( MyDebug.LOG ) { + Log.d(TAG, "display_width: " + display_width); + Log.d(TAG, "real_display_width: " + real_display_width); + Log.d(TAG, "navigation_gap: " + navigation_gap); + } + }*/ + int navigation_gap = main_activity.getNavigationGap(); + int navigation_gap_landscape = main_activity.getNavigationGapLandscape(); + int navigation_gap_reverse_landscape = main_activity.getNavigationGapReverseLandscape(); + // navigation gaps for UI elements that are aligned to align_parent_bottom (the landscape edge, or reversed landscape edge if left-handed): + this.navigation_gap_landscape_align_parent_bottom = navigation_gap_landscape; + this.navigation_gap_reverse_landscape_align_parent_bottom = navigation_gap_reverse_landscape; + if( ui_placement == 
UIPlacement.UIPLACEMENT_LEFT ) { + navigation_gap_landscape_align_parent_bottom = 0; + } + else { + navigation_gap_reverse_landscape_align_parent_bottom = 0; + } + int gallery_navigation_gap = navigation_gap; + + int gallery_top_gap = 0; + { + // Leave space for the Android 12+ camera privacy indicator, as gallery icon would + // otherwise overlap when in landscape orientation. + // In theory we should use WindowInsets.getPrivacyIndicatorBounds() for this, but it seems + // to give a much larger value when required (leaving to a much larger gap), as well as + // obviously changing depending on orientation - but whilst this is only an issue for + // landscape orientation, it looks better to keep the position consistent for any + // orientation (otherwise the icons jump about when changing orientation, which looks + // especially bad for UIPLACEMENT_RIGHT. + // Not needed for UIPLACEMENT_LEFT - although still adjust the right hand side margin + // for consistency. + // We do for all Android versions for consistency (avoids testing overhead due to + // different behaviour on different Android versions). + if( ui_placement != UIPlacement.UIPLACEMENT_LEFT ) { + // if we did want to do this for UIPLACEMENT_LEFT for consistency, it'd be the + // "bottom" margin we need to change. 
+ gallery_top_gap = (int) (privacy_indicator_gap_dp * scale + 0.5f); // convert dps to pixels + } + int privacy_indicator_gap = (int) (privacy_indicator_gap_dp * scale + 0.5f); // convert dps to pixels + gallery_navigation_gap += privacy_indicator_gap; + } + test_navigation_gap = navigation_gap; + test_navigation_gap_landscape = navigation_gap_landscape; + test_navigation_gap_reversed_landscape = navigation_gap_reverse_landscape; + if( MyDebug.LOG ) { + Log.d(TAG, "navigation_gap: " + navigation_gap); + Log.d(TAG, "gallery_navigation_gap: " + gallery_navigation_gap); + } + + if( !popup_container_only ) + { + // reset: + top_icon = null; + + // we use a dummy view, so that the GUI buttons keep their positioning even if the Settings button is hidden (visibility set to View.GONE) + View view = main_activity.findViewById(R.id.gui_anchor); + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(iconpanel_align_parent_left, 0); + layoutParams.addRule(iconpanel_align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(iconpanel_align_parent_top, RelativeLayout.TRUE); + layoutParams.addRule(iconpanel_align_parent_bottom, 0); + layoutParams.addRule(iconpanel_above, 0); + layoutParams.addRule(iconpanel_below, 0); + layoutParams.addRule(iconpanel_left_of, 0); + layoutParams.addRule(iconpanel_right_of, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + View previous_view = view; + + List buttons_permanent = new ArrayList<>(); + if( ui_placement == UIPlacement.UIPLACEMENT_TOP ) { + // not part of the icon panel in TOP mode + view = main_activity.findViewById(R.id.gallery); + layoutParams = (RelativeLayout.LayoutParams) view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_bottom, 0); + 
layoutParams.addRule(above, 0); + layoutParams.addRule(below, 0); + layoutParams.addRule(left_of, 0); + layoutParams.addRule(right_of, 0); + setMarginsForSystemUI(layoutParams, 0, gallery_top_gap, gallery_navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + } + else { + buttons_permanent.add(main_activity.findViewById(R.id.gallery)); + } + buttons_permanent.add(main_activity.findViewById(R.id.settings)); + buttons_permanent.add(main_activity.findViewById(R.id.popup)); + buttons_permanent.add(main_activity.findViewById(R.id.exposure)); + //buttons_permanent.add(main_activity.findViewById(R.id.switch_video)); + //buttons_permanent.add(main_activity.findViewById(R.id.switch_camera)); + buttons_permanent.add(main_activity.findViewById(R.id.exposure_lock)); + buttons_permanent.add(main_activity.findViewById(R.id.white_balance_lock)); + buttons_permanent.add(main_activity.findViewById(R.id.cycle_raw)); + buttons_permanent.add(main_activity.findViewById(R.id.store_location)); + buttons_permanent.add(main_activity.findViewById(R.id.text_stamp)); + buttons_permanent.add(main_activity.findViewById(R.id.stamp)); + buttons_permanent.add(main_activity.findViewById(R.id.focus_peaking)); + buttons_permanent.add(main_activity.findViewById(R.id.auto_level)); + buttons_permanent.add(main_activity.findViewById(R.id.cycle_flash)); + buttons_permanent.add(main_activity.findViewById(R.id.face_detection)); + buttons_permanent.add(main_activity.findViewById(R.id.audio_control)); + buttons_permanent.add(main_activity.findViewById(R.id.kraken_icon)); + + List buttons_all = new ArrayList<>(buttons_permanent); + // icons which only sometimes show on the icon panel: + buttons_all.add(main_activity.findViewById(R.id.trash)); + buttons_all.add(main_activity.findViewById(R.id.share)); + + for(View this_view : buttons_all) { + layoutParams = (RelativeLayout.LayoutParams)this_view.getLayoutParams(); + layoutParams.addRule(iconpanel_align_parent_left, 0); 
+ layoutParams.addRule(iconpanel_align_parent_right, 0); + layoutParams.addRule(iconpanel_align_parent_top, RelativeLayout.TRUE); + layoutParams.addRule(iconpanel_align_parent_bottom, 0); + layoutParams.addRule(iconpanel_above, 0); + layoutParams.addRule(iconpanel_below, 0); + layoutParams.addRule(iconpanel_left_of, previous_view.getId()); + layoutParams.addRule(iconpanel_right_of, 0); + this_view.setLayoutParams(layoutParams); + setViewRotation(this_view, ui_rotation); + previous_view = this_view; + } + + int button_size = main_activity.getResources().getDimensionPixelSize(R.dimen.onscreen_button_size); + if( ui_placement == UIPlacement.UIPLACEMENT_TOP ) { + // need to dynamically lay out the permanent icons + + int count = 0; + View first_visible_view = null; + View last_visible_view = null; + for(View this_view : buttons_permanent) { + if( this_view.getVisibility() == View.VISIBLE ) { + if( first_visible_view == null ) + first_visible_view = this_view; + last_visible_view = this_view; + count++; + } + } + //count = 10; // test + if( MyDebug.LOG ) { + Log.d(TAG, "count: " + count); + Log.d(TAG, "display_height: " + display_height); + } + if( count > 0 ) { + /*int button_size = display_height / count; + if( MyDebug.LOG ) + Log.d(TAG, "button_size: " + button_size); + for(View this_view : buttons) { + if( this_view.getVisibility() == View.VISIBLE ) { + layoutParams = (RelativeLayout.LayoutParams)this_view.getLayoutParams(); + layoutParams.width = button_size; + layoutParams.height = button_size; + this_view.setLayoutParams(layoutParams); + } + }*/ + int total_button_size = count*button_size; + int margin = 0; + if( total_button_size > display_height ) { + if( MyDebug.LOG ) + Log.d(TAG, "need to reduce button size"); + button_size = display_height / count; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "need to increase margin"); + if( count > 1 ) + margin = (display_height - total_button_size) / (count-1); + } + if( MyDebug.LOG ) { + Log.d(TAG, "button_size: " + 
button_size); + Log.d(TAG, "total_button_size: " + total_button_size); + Log.d(TAG, "margin: " + margin); + } + for(View this_view : buttons_permanent) { + if( this_view.getVisibility() == View.VISIBLE ) { + if( MyDebug.LOG ) { + Log.d(TAG, "set view layout for: " + this_view.getContentDescription()); + if( this_view==first_visible_view ) { + Log.d(TAG," first visible view"); + } + } + //this_view.setPadding(0, margin/2, 0, margin/2); + layoutParams = (RelativeLayout.LayoutParams)this_view.getLayoutParams(); + // be careful if we change how the margins are laid out: it looks nicer when only the settings icon + // is displayed (when taking a photo) if it is still shown left-most, rather than centred; also + // needed for "pause preview" trash/icons to be shown properly (test by rotating the phone to update + // the layout) + int margin_first = this_view==first_visible_view ? navigation_gap_reverse_landscape : margin/2; + int margin_last = this_view==last_visible_view ? navigation_gap_landscape : margin/2; + // avoid risk of privacy dot appearing on top of icon - in practice this is only a risk when in + // reverse landscape mode, but we apply in all orientations to avoid icons jumping about; + // similarly, as noted above we use a hardcoded dp rather than + // WindowInsets.getPrivacyIndicatorBounds(), as we want the icons to stay in the same location even as + // the device is rotated + final int privacy_gap_left = (int) (12 * scale + 0.5f); // convert dps to pixels + setMarginsForSystemUI(layoutParams, privacy_gap_left, margin_first, 0, margin_last); + layoutParams.width = button_size; + layoutParams.height = button_size; + this_view.setLayoutParams(layoutParams); + } + } + top_icon = first_visible_view; + } + } + else { + // need to reset size/margins to their default + // except for gallery, which still needs its margins set for navigation gap! 
(and we + // shouldn't change it's size, which isn't necessarily button_size) + // other icons still needs margins set for navigation_gap_landscape and navigation_gap_reverse_landscape + view = main_activity.findViewById(R.id.gallery); + layoutParams = (RelativeLayout.LayoutParams) view.getLayoutParams(); + setMarginsForSystemUI(layoutParams, 0, Math.max(gallery_top_gap, navigation_gap_reverse_landscape), gallery_navigation_gap, navigation_gap_landscape); + view.setLayoutParams(layoutParams); + for(View this_view : buttons_permanent) { + if( this_view != view ) { + layoutParams = (RelativeLayout.LayoutParams)this_view.getLayoutParams(); + setMarginsForSystemUI(layoutParams, 0, navigation_gap_reverse_landscape, 0, navigation_gap_landscape); + layoutParams.width = button_size; + layoutParams.height = button_size; + this_view.setLayoutParams(layoutParams); + } + } + } + + // end icon panel + + view = main_activity.findViewById(R.id.take_photo); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(center_vertical, RelativeLayout.TRUE); + layoutParams.addRule(center_horizontal, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.switch_camera); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(ui_independent_above, R.id.take_photo); + layoutParams.addRule(ui_independent_below, 0); + layoutParams.addRule(ui_independent_left_of, 0); + 
layoutParams.addRule(ui_independent_right_of, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.switch_multi_camera); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(ui_independent_above, 0); + layoutParams.addRule(ui_independent_below, 0); + layoutParams.addRule(ui_independent_left_of, R.id.switch_camera); + layoutParams.addRule(ui_independent_right_of, 0); + layoutParams.addRule(align_top, R.id.switch_camera); + layoutParams.addRule(align_bottom, R.id.switch_camera); + layoutParams.addRule(align_left, 0); + layoutParams.addRule(align_right, 0); + { + int margin = (int) (5 * scale + 0.5f); // convert dps to pixels + setMarginsForSystemUI(layoutParams, 0, 0, margin, 0); + } + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.pause_video); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(ui_independent_above, R.id.take_photo); + layoutParams.addRule(ui_independent_below, 0); + layoutParams.addRule(ui_independent_left_of, 0); + layoutParams.addRule(ui_independent_right_of, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.cancel_panorama); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(above, 
R.id.take_photo); + layoutParams.addRule(below, 0); + layoutParams.addRule(left_of, 0); + layoutParams.addRule(right_of, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.switch_video); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(ui_independent_above, 0); + layoutParams.addRule(ui_independent_below, R.id.take_photo); + layoutParams.addRule(ui_independent_left_of, 0); + layoutParams.addRule(ui_independent_right_of, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.take_photo_when_video_recording); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, 0); + layoutParams.addRule(ui_independent_above, 0); + layoutParams.addRule(ui_independent_below, R.id.take_photo); + layoutParams.addRule(ui_independent_left_of, 0); + layoutParams.addRule(ui_independent_right_of, 0); + setMarginsForSystemUI(layoutParams, 0, 0, navigation_gap, 0); + view.setLayoutParams(layoutParams); + setViewRotation(view, ui_rotation); + + view = main_activity.findViewById(R.id.zoom_seekbar); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + // align close to the edge of screen + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_top, 0); + 
layoutParams.addRule(align_parent_bottom, RelativeLayout.TRUE); + // margins set below in setFixedRotation() + // need to clear the others, in case we turn zoom controls on/off + layoutParams.addRule(above, 0); + layoutParams.addRule(below, 0); + layoutParams.addRule(left_of, 0); + layoutParams.addRule(right_of, 0); + view.setLayoutParams(layoutParams); + int margin = (int) (20 * scale + 0.5f); // convert dps to pixels + setFixedRotation(main_activity.findViewById(R.id.zoom_seekbar), 0, navigation_gap_reverse_landscape_align_parent_bottom, margin+navigation_gap, navigation_gap_landscape_align_parent_bottom); + + view = main_activity.findViewById(R.id.focus_seekbar); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(left_of, R.id.zoom_seekbar); + layoutParams.addRule(right_of, 0); + layoutParams.addRule(above, 0); + layoutParams.addRule(below, 0); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, RelativeLayout.TRUE); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, 0); + view.setLayoutParams(layoutParams); + + view = main_activity.findViewById(R.id.focus_bracketing_target_seekbar); + layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + layoutParams.addRule(left_of, R.id.zoom_seekbar); + layoutParams.addRule(right_of, 0); + layoutParams.addRule(above, R.id.focus_seekbar); + layoutParams.addRule(below, 0); + view.setLayoutParams(layoutParams); + + setFocusSeekbarsRotation(); + } + + if( !popup_container_only ) + { + // set seekbar info + int width_dp; + if( !system_orientation_portrait && (ui_rotation == 0 || ui_rotation == 180) ) { + // landscape + width_dp = 350; + } + else { + // portrait + width_dp = 250; + // prevent being too large on smaller devices (e.g., Galaxy Nexus or smaller) + int max_width_dp = getMaxHeightDp(true); + if( width_dp > max_width_dp ) + width_dp = max_width_dp; + } + if( MyDebug.LOG ) + Log.d(TAG, 
"width_dp: " + width_dp); + int height_dp = 50; + int width_pixels = (int) (width_dp * scale + 0.5f); // convert dps to pixels + int height_pixels = (int) (height_dp * scale + 0.5f); // convert dps to pixels + + View view = main_activity.findViewById(R.id.sliders_container); + setViewRotation(view, ui_rotation); + view.setTranslationX(0.0f); + view.setTranslationY(0.0f); + + if( system_orientation_portrait || ui_rotation == 90 || ui_rotation == 270 ) { + // portrait + if( system_orientation_portrait ) + view.setTranslationY(2*height_pixels); + else + view.setTranslationX(2*height_pixels); + } + else if( ui_rotation == 0 ) { + // landscape + view.setTranslationY(height_pixels); + } + else { + // upside-down landscape + view.setTranslationY(-1*height_pixels); + } + + /* + // align sliders_container + RelativeLayout.LayoutParams lp = (RelativeLayout.LayoutParams)view.getLayoutParams(); + if( system_orientation_portrait || ui_rotation == 90 || ui_rotation == 270 ) { + // portrait + if( system_orientation_portrait ) + view.setTranslationY(2*height_pixels); + else + view.setTranslationX(2*height_pixels); + lp.addRule(left_of, 0); + lp.addRule(right_of, 0); + lp.addRule(above, 0); + lp.addRule(below, 0); + lp.addRule(align_parent_top, 0); + lp.addRule(align_parent_bottom, 0); + } + else if( ui_rotation == (ui_placement == UIPlacement.UIPLACEMENT_LEFT ? 
180 : 0) ) { + // landscape (or upside-down landscape if ui-left) + view.setTranslationY(0); + lp.addRule(left_of, R.id.zoom_seekbar); + lp.addRule(right_of, 0); + + if( main_activity.showManualFocusSeekbar(true) ) { + lp.addRule(above, R.id.focus_bracketing_target_seekbar); + lp.addRule(below, 0); + lp.addRule(align_parent_top, 0); + lp.addRule(align_parent_bottom, 0); + } + else if( main_activity.showManualFocusSeekbar(false) ) { + lp.addRule(above, R.id.focus_seekbar); + lp.addRule(below, 0); + lp.addRule(align_parent_top, 0); + lp.addRule(align_parent_bottom, 0); + } + else { + lp.addRule(above, 0); + lp.addRule(below, 0); + lp.addRule(align_parent_top, 0); + lp.addRule(align_parent_bottom, RelativeLayout.TRUE); + } + } + else { + // upside-down landscape (or landscape if ui-left) + if( ui_rotation == 0 ) + view.setTranslationY(height_pixels); + else + view.setTranslationY(-1*height_pixels); + lp.addRule(left_of, 0); + lp.addRule(right_of, 0); + lp.addRule(above, 0); + lp.addRule(below, 0); + lp.addRule(align_parent_bottom, 0); + } + view.setLayoutParams(lp);*/ + + view = main_activity.findViewById(R.id.exposure_seekbar); + RelativeLayout.LayoutParams lp = (RelativeLayout.LayoutParams)view.getLayoutParams(); + lp.width = width_pixels; + lp.height = height_pixels; + view.setLayoutParams(lp); + + view = main_activity.findViewById(R.id.iso_seekbar); + lp = (RelativeLayout.LayoutParams)view.getLayoutParams(); + lp.width = width_pixels; + lp.height = height_pixels; + view.setLayoutParams(lp); + + view = main_activity.findViewById(R.id.exposure_time_seekbar); + lp = (RelativeLayout.LayoutParams)view.getLayoutParams(); + lp.width = width_pixels; + lp.height = height_pixels; + view.setLayoutParams(lp); + + view = main_activity.findViewById(R.id.white_balance_seekbar); + lp = (RelativeLayout.LayoutParams)view.getLayoutParams(); + lp.width = width_pixels; + lp.height = height_pixels; + view.setLayoutParams(lp); + } + + if( popupIsOpen() ) + { + final View view = 
main_activity.findViewById(R.id.popup_container); + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + if( ui_placement == UIPlacement.UIPLACEMENT_TOP ) { + layoutParams.addRule(align_right, 0); + layoutParams.addRule(align_bottom, 0); + layoutParams.addRule(align_left, 0); + layoutParams.addRule(align_top, 0); + layoutParams.addRule(above, 0); + layoutParams.addRule(below, 0); + layoutParams.addRule(left_of, 0); + layoutParams.addRule(right_of, R.id.popup); + layoutParams.addRule(align_parent_top, system_orientation_portrait ? 0 : RelativeLayout.TRUE); + layoutParams.addRule(align_parent_bottom, system_orientation_portrait ? 0 : RelativeLayout.TRUE); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, 0); + } + else { + layoutParams.addRule(align_right, R.id.popup); + layoutParams.addRule(align_bottom, 0); + layoutParams.addRule(align_left, 0); + layoutParams.addRule(align_top, 0); + layoutParams.addRule(above, 0); + layoutParams.addRule(below, R.id.popup); + layoutParams.addRule(left_of, 0); + layoutParams.addRule(right_of, 0); + layoutParams.addRule(align_parent_top, 0); + layoutParams.addRule(align_parent_bottom, system_orientation_portrait ? 0 : RelativeLayout.TRUE); + layoutParams.addRule(align_parent_left, 0); + layoutParams.addRule(align_parent_right, 0); + } + if( system_orientation_portrait ) { + // limit height so doesn't take up full height of screen + layoutParams.height = display_height; + } + view.setLayoutParams(layoutParams); + + //setPopupViewRotation(ui_rotation, display_height); + view.getViewTreeObserver().addOnGlobalLayoutListener( + new OnGlobalLayoutListener() { + @Override + public void onGlobalLayout() { + if( MyDebug.LOG ) + Log.d(TAG, "onGlobalLayout()"); + // We need to call setPopupViewRotation after the above layout param changes + // have taken effect, otherwise we can have problems due to popup_height being incorrect. 
+ // Example bugs: + // Left-handed UI, portrait: Restart and open popup, it doesn't appear until device is rotated. + // Top UI, reverse-portrait: Restart and open popup, it appears in wrong location. + // Top UI, reverse-landscape: Restart and open popup, it appears in wrong location. + setPopupViewRotation(ui_rotation, display_height); + + // stop listening - only want to call this once! + view.getViewTreeObserver().removeOnGlobalLayoutListener(this); + } + } + ); + } + + if( !popup_container_only ) { + setTakePhotoIcon(); + // no need to call setSwitchCameraContentDescription() + } + + if( MyDebug.LOG ) { + Log.d(TAG, "layoutUI: total time: " + (System.currentTimeMillis() - debug_time)); + } + } + + /** Wrapper for layoutParams.setMargins, but where the margins are supplied for landscape orientation, + * and if in portrait these are automatically rotated. + */ + void setMarginsForSystemUI(RelativeLayout.LayoutParams layoutParams, int left, int top, int right, int bottom) { + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + //noinspection SuspiciousNameCombination + layoutParams.setMargins(bottom, left, top, right); + } + else if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) { + layoutParams.setMargins(right, bottom, left, top); + } + else { + layoutParams.setMargins(left, top, right, bottom); + } + } + + /** Some views (e.g. seekbars and zoom controls) are ones where we want to have a fixed + * orientation as if in landscape mode, even if the system UI is portrait. So this method + * sets a rotation so that the view appears as if in landscape orentation, and also sets + * margins. + * Note that Android has poor support for a rotated seekbar - we use view.setRotation(), but + * this doesn't affect the bounds of the view! So as a hack, we modify the margins so the + * view is positioned correctly. 
For this to work, the view must have a specified width + * (which can be computed programmatically), rather than having both left and right sides being + * aligned to another view. + * The left/top/right/bottom margins should be supply for landscape orientation - these will + * be automatically rotated if we're actually in portrait orientation. + */ + private void setFixedRotation(View view, int left, int top, int right, int bottom) { + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + int rotation = (360 - MainActivity.getRotationFromSystemOrientation(system_orientation)) % 360; + view.setRotation(rotation); + // set margins due to rotation + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)view.getLayoutParams(); + if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + int diff = (layoutParams.width-layoutParams.height)/2; + if( MyDebug.LOG ) + Log.d(TAG, "diff: " + diff); + setMarginsForSystemUI(layoutParams, diff+left, -diff+top, diff+right, -diff+bottom); + } + else { + setMarginsForSystemUI(layoutParams, left, top, right, bottom); + } + view.setLayoutParams(layoutParams); + } + + void setFocusSeekbarsRotation() { + setFixedRotation(main_activity.findViewById(R.id.focus_seekbar), 0, navigation_gap_reverse_landscape_align_parent_bottom, 0, navigation_gap_landscape_align_parent_bottom); + // don't need to set margins for navigation gap landscape for focus_bracketing_target_seekbar, as it sits above the source focus_seekbar + setFixedRotation(main_activity.findViewById(R.id.focus_bracketing_target_seekbar), 0, 0, 0, 0); + } + + private void setPopupViewRotation(int ui_rotation, int display_height) { + if( MyDebug.LOG ) + Log.d(TAG, "setPopupViewRotation"); + View view = main_activity.findViewById(R.id.popup_container); + setViewRotation(view, ui_rotation); + // reset: + view.setTranslationX(0.0f); + view.setTranslationY(0.0f); + + int popup_width = view.getWidth(); + int 
popup_height = view.getHeight(); + test_saved_popup_width = popup_width; + test_saved_popup_height = popup_height; + if( MyDebug.LOG ) { + Log.d(TAG, "popup_width: " + popup_width); + Log.d(TAG, "popup_height: " + popup_height); + if( popup_view != null ) + Log.d(TAG, "popup total width: " + popup_view.getTotalWidth()); + } + if( popup_view != null && popup_width > popup_view.getTotalWidth()*1.2 ) { + // This is a workaround for the rare but annoying bug where the popup window is too large + // (and appears partially off-screen). Unfortunately have been unable to fix - and trying + // to force the popup container to have a particular width just means some of the contents + // (e.g., Timer) are missing. But at least stop caching it, so that reopening the popup + // should fix it, rather than having to restart or pause/resume Open Camera. + // Also note, normally we should expect popup_width == popup_view.getTotalWidth(), but + // have put a fudge factor of 1.2 just in case it's normally slightly larger on some + // devices. + Log.e(TAG, "### popup view is too big?!"); + force_destroy_popup = true; + /*popup_width = popup_view.getTotalWidth(); + ViewGroup.LayoutParams params = new RelativeLayout.LayoutParams( + popup_width, + RelativeLayout.LayoutParams.WRAP_CONTENT); + view.setLayoutParams(params);*/ + } + else { + force_destroy_popup = false; + } + + if( ui_rotation == 0 || ui_rotation == 180 ) { + view.setPivotX(popup_width/2.0f); + view.setPivotY(popup_height/2.0f); + } + else if( ui_placement == UIPlacement.UIPLACEMENT_TOP ) { + view.setPivotX(0.0f); + view.setPivotY(0.0f); + if( ui_rotation == 90 ) { + //noinspection SuspiciousNameCombination + view.setTranslationX(popup_height); + } + else if( ui_rotation == 270 ) { + view.setTranslationY(display_height); + } + } + else { + view.setPivotX(popup_width); + view.setPivotY(ui_placement == UIPlacement.UIPLACEMENT_RIGHT ? 
0.0f : popup_height); + if( ui_placement == UIPlacement.UIPLACEMENT_RIGHT ) { + if( ui_rotation == 90 ) { + //noinspection SuspiciousNameCombination + view.setTranslationY( popup_width ); + } + else if( ui_rotation == 270 ) + view.setTranslationX( - popup_height ); + } + else { + if( ui_rotation == 90 ) + view.setTranslationX( - popup_height ); + else if( ui_rotation == 270 ) + view.setTranslationY( - popup_width ); + } + } + } + + /** Set icons for taking photos vs videos. + * Also handles content descriptions for the take photo button and switch video button. + */ + public void setTakePhotoIcon() { + if( MyDebug.LOG ) + Log.d(TAG, "setTakePhotoIcon()"); + if( main_activity.getPreview() != null ) { + ImageButton view = main_activity.findViewById(R.id.take_photo); + int resource; + int content_description; + int switch_video_content_description; + if( main_activity.getPreview().isVideo() ) { + if( MyDebug.LOG ) + Log.d(TAG, "set icon to video"); + resource = main_activity.getPreview().isVideoRecording() ? R.drawable.take_video_recording : R.drawable.take_video_selector; + content_description = main_activity.getPreview().isVideoRecording() ? 
R.string.stop_video : R.string.start_video; + switch_video_content_description = R.string.switch_to_photo; + } + else if( main_activity.getApplicationInterface().getPhotoMode() == MyApplicationInterface.PhotoMode.Panorama && + main_activity.getApplicationInterface().getGyroSensor().isRecording() ) { + if( MyDebug.LOG ) + Log.d(TAG, "set icon to recording panorama"); + resource = R.drawable.baseline_check_white_48; + content_description = R.string.finish_panorama; + switch_video_content_description = R.string.switch_to_video; + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "set icon to photo"); + resource = R.drawable.take_photo_selector; + content_description = R.string.take_photo; + switch_video_content_description = R.string.switch_to_video; + } + view.setImageResource(resource); + view.setContentDescription( main_activity.getResources().getString(content_description) ); + view.setTag(resource); // for testing + + view = main_activity.findViewById(R.id.switch_video); + view.setContentDescription( main_activity.getResources().getString(switch_video_content_description) ); + resource = main_activity.getPreview().isVideo() ? R.drawable.take_photo : R.drawable.take_video; + view.setImageResource(resource); + view.setTag(resource); // for testing + } + } + + /** Set content description for switch camera button. 
+ */ + public void setSwitchCameraContentDescription() { + if( MyDebug.LOG ) + Log.d(TAG, "setSwitchCameraContentDescription()"); + if( main_activity.getPreview() != null && main_activity.getPreview().canSwitchCamera() ) { + ImageButton view = main_activity.findViewById(R.id.switch_camera); + int content_description; + int cameraId = main_activity.getNextCameraId(); + switch( main_activity.getPreview().getCameraControllerManager().getFacing( cameraId ) ) { + case FACING_FRONT: + content_description = R.string.switch_to_front_camera; + break; + case FACING_BACK: + content_description = R.string.switch_to_back_camera; + break; + case FACING_EXTERNAL: + content_description = R.string.switch_to_external_camera; + break; + default: + content_description = R.string.switch_to_unknown_camera; + break; + } + if( MyDebug.LOG ) + Log.d(TAG, "content_description: " + main_activity.getResources().getString(content_description)); + view.setContentDescription( main_activity.getResources().getString(content_description) ); + } + } + + /** Set content description for pause video button. 
+ */ + public void setPauseVideoContentDescription() { + if (MyDebug.LOG) + Log.d(TAG, "setPauseVideoContentDescription()"); + ImageButton pauseVideoButton = main_activity.findViewById(R.id.pause_video); + int content_description; + if( main_activity.getPreview().isVideoRecordingPaused() ) { + content_description = R.string.resume_video; + pauseVideoButton.setImageResource(R.drawable.ic_play_circle_outline_white_48dp); + } + else { + content_description = R.string.pause_video; + pauseVideoButton.setImageResource(R.drawable.ic_pause_circle_outline_white_48dp); + } + if( MyDebug.LOG ) + Log.d(TAG, "content_description: " + main_activity.getResources().getString(content_description)); + pauseVideoButton.setContentDescription(main_activity.getResources().getString(content_description)); + } + + UIPlacement getUIPlacement() { + return this.ui_placement; + } + + public void updateRemoteConnectionIcon() { + View remoteConnectedIcon = main_activity.findViewById(R.id.kraken_icon); + if (main_activity.getBluetoothRemoteControl().remoteConnected() ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote control connected"); + remoteConnectedIcon.setVisibility(View.VISIBLE); + } else { + if( MyDebug.LOG ) + Log.d(TAG, "Remote control DISconnected"); + remoteConnectedIcon.setVisibility(View.GONE); + } + + } + + // ParameterCanBeLocal warning suppressed as it's incorrect here! (Or + // possibly it's due to effect of MainActivity.lock_to_landscape always + // being false.) 
+ public void onOrientationChanged(@SuppressWarnings("ParameterCanBeLocal") int orientation) { + /*if( MyDebug.LOG ) { + Log.d(TAG, "onOrientationChanged()"); + Log.d(TAG, "orientation: " + orientation); + Log.d(TAG, "current_orientation: " + current_orientation); + }*/ + if( !MainActivity.lock_to_landscape ) + return; + // if locked to landscape, we need to handle the orientation change ourselves + if( orientation == OrientationEventListener.ORIENTATION_UNKNOWN ) + return; + int diff = Math.abs(orientation - current_orientation); + if( diff > 180 ) + diff = 360 - diff; + // only change orientation when sufficiently changed + if( diff > 60 ) { + orientation = (orientation + 45) / 90 * 90; + orientation = orientation % 360; + if( orientation != current_orientation ) { + this.current_orientation = orientation; + if( MyDebug.LOG ) { + Log.d(TAG, "current_orientation is now: " + current_orientation); + } + view_rotate_animation = true; + layoutUI(); + view_rotate_animation = false; + + // Call DrawPreview.updateSettings() so that we reset calculations that depend on + // getLocationOnScreen() - since the result is affected by a View's rotation, we need + // to recompute - this also means we need to delay slightly until after the rotation + // animation is complete. + // To reproduce issues, rotate from upside-down-landscape to portrait, and observe + // the info-text placement (when using icons-along-top), or with on-screen angle + // displayed when in 16:9 preview. + // Potentially we could use Animation.setAnimationListener(), but we set a separate + // animation for every icon. + // Note, this seems to be unneeded due to the fix in DrawPreview for + // "getRotation() == 180.0f", but good to clear the cached values (e.g., in case we + // compute them during when the icons are being rotated). 
+ final Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "onOrientationChanged->postDelayed()"); + + main_activity.getApplicationInterface().getDrawPreview().updateSettings(); + } + }, view_rotate_animation_duration+20); + } + } + } + + public boolean showExposureLockIcon() { + if( !main_activity.getPreview().supportsExposureLock() ) + return false; + if( main_activity.getApplicationInterface().isCameraExtensionPref() ) { + // not supported for camera extensions + return false; + } + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowExposureLockPreferenceKey, true); + } + + public boolean showWhiteBalanceLockIcon() { + if( !main_activity.getPreview().supportsWhiteBalanceLock() ) + return false; + if( main_activity.getApplicationInterface().isCameraExtensionPref() ) { + // not supported for camera extensions + return false; + } + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowWhiteBalanceLockPreferenceKey, false); + } + + public boolean showCycleRawIcon() { + if( !main_activity.getPreview().supportsRaw() ) + return false; + if( !main_activity.getApplicationInterface().isRawAllowed(main_activity.getApplicationInterface().getPhotoMode()) ) + return false; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowCycleRawPreferenceKey, false); + } + + public boolean showStoreLocationIcon() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowStoreLocationPreferenceKey, false); + } + + public boolean showTextStampIcon() { + SharedPreferences sharedPreferences = 
PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowTextStampPreferenceKey, false); + } + + public boolean showStampIcon() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowStampPreferenceKey, false); + } + + public boolean showFocusPeakingIcon() { + if( !main_activity.supportsPreviewBitmaps() ) + return false; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowFocusPeakingPreferenceKey, false); + } + + public boolean showAutoLevelIcon() { + if( !main_activity.supportsAutoStabilise() ) + return false; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowAutoLevelPreferenceKey, false); + } + + public boolean showCycleFlashIcon() { + if( !main_activity.getPreview().supportsFlash() ) + return false; + if( main_activity.getPreview().isVideo() ) + return false; // no point showing flash icon in video mode, as we only allow flash auto and flash torch, and we don't support torch on the on-screen cycle flash icon + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowCycleFlashPreferenceKey, false); + } + + public boolean showFaceDetectionIcon() { + if( !main_activity.getPreview().supportsFaceDetection() ) + return false; + if( main_activity.getApplicationInterface().isCameraExtensionPref() ) { + // not supported for camera extensions + return false; + } + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + return sharedPreferences.getBoolean(PreferenceKeys.ShowFaceDetectionPreferenceKey, false); + } + + public void 
setImmersiveMode(final boolean immersive_mode) { + if( MyDebug.LOG ) + Log.d(TAG, "setImmersiveMode: " + immersive_mode); + this.immersive_mode = immersive_mode; + main_activity.runOnUiThread(new Runnable() { + public void run() { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + // if going into immersive mode, the we should set GONE the ones that are set GONE in showGUI(false) + //final int visibility_gone = immersive_mode ? View.GONE : View.VISIBLE; + final int visibility = immersive_mode ? View.GONE : View.VISIBLE; + if( MyDebug.LOG ) + Log.d(TAG, "setImmersiveMode: set visibility: " + visibility); + // n.b., don't hide share and trash buttons, as they require immediate user input for us to continue + View switchCameraButton = main_activity.findViewById(R.id.switch_camera); + View switchMultiCameraButton = main_activity.findViewById(R.id.switch_multi_camera); + View switchVideoButton = main_activity.findViewById(R.id.switch_video); + View exposureButton = main_activity.findViewById(R.id.exposure); + View exposureLockButton = main_activity.findViewById(R.id.exposure_lock); + View whiteBalanceLockButton = main_activity.findViewById(R.id.white_balance_lock); + View cycleRawButton = main_activity.findViewById(R.id.cycle_raw); + View storeLocationButton = main_activity.findViewById(R.id.store_location); + View textStampButton = main_activity.findViewById(R.id.text_stamp); + View stampButton = main_activity.findViewById(R.id.stamp); + View focusPeakingButton = main_activity.findViewById(R.id.focus_peaking); + View autoLevelButton = main_activity.findViewById(R.id.auto_level); + View cycleFlashButton = main_activity.findViewById(R.id.cycle_flash); + View faceDetectionButton = main_activity.findViewById(R.id.face_detection); + View audioControlButton = main_activity.findViewById(R.id.audio_control); + View popupButton = main_activity.findViewById(R.id.popup); + View galleryButton = 
main_activity.findViewById(R.id.gallery); + View settingsButton = main_activity.findViewById(R.id.settings); + View zoomSeekBar = main_activity.findViewById(R.id.zoom_seekbar); + View focusSeekBar = main_activity.findViewById(R.id.focus_seekbar); + View focusBracketingTargetSeekBar = main_activity.findViewById(R.id.focus_bracketing_target_seekbar); + if( main_activity.getPreview().getCameraControllerManager().getNumberOfCameras() > 1 ) + switchCameraButton.setVisibility(visibility); + if( main_activity.showSwitchMultiCamIcon() ) + switchMultiCameraButton.setVisibility(visibility); + switchVideoButton.setVisibility(visibility); + if( main_activity.supportsExposureButton() ) + exposureButton.setVisibility(visibility); + if( showExposureLockIcon() ) + exposureLockButton.setVisibility(visibility); + if( showWhiteBalanceLockIcon() ) + whiteBalanceLockButton.setVisibility(visibility); + if( showCycleRawIcon() ) + cycleRawButton.setVisibility(visibility); + if( showStoreLocationIcon() ) + storeLocationButton.setVisibility(visibility); + if( showTextStampIcon() ) + textStampButton.setVisibility(visibility); + if( showStampIcon() ) + stampButton.setVisibility(visibility); + if( showFocusPeakingIcon() ) + focusPeakingButton.setVisibility(visibility); + if( showAutoLevelIcon() ) + autoLevelButton.setVisibility(visibility); + if( showCycleFlashIcon() ) + cycleFlashButton.setVisibility(visibility); + if( showFaceDetectionIcon() ) + faceDetectionButton.setVisibility(visibility); + if( main_activity.hasAudioControl() ) + audioControlButton.setVisibility(visibility); + popupButton.setVisibility(visibility); + galleryButton.setVisibility(visibility); + settingsButton.setVisibility(visibility); + if( MyDebug.LOG ) { + Log.d(TAG, "has_zoom: " + main_activity.getPreview().supportsZoom()); + } + if( main_activity.getPreview().supportsZoom() && sharedPreferences.getBoolean(PreferenceKeys.ShowZoomSliderControlsPreferenceKey, true) ) { + zoomSeekBar.setVisibility(visibility); + } + if( 
main_activity.showManualFocusSeekbar(false) ) + focusSeekBar.setVisibility(visibility); + if( main_activity.showManualFocusSeekbar(true) ) + focusBracketingTargetSeekBar.setVisibility(visibility); + String pref_immersive_mode = sharedPreferences.getString(PreferenceKeys.ImmersiveModePreferenceKey, "immersive_mode_off"); + if( pref_immersive_mode.equals("immersive_mode_everything") ) { + if( sharedPreferences.getBoolean(PreferenceKeys.ShowTakePhotoPreferenceKey, true) ) { + View takePhotoButton = main_activity.findViewById(R.id.take_photo); + takePhotoButton.setVisibility(visibility); + } + if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && main_activity.getPreview().isVideoRecording() ) { + View pauseVideoButton = main_activity.findViewById(R.id.pause_video); + pauseVideoButton.setVisibility(visibility); + } + if( main_activity.getPreview().supportsPhotoVideoRecording() && main_activity.getApplicationInterface().usePhotoVideoRecording() && main_activity.getPreview().isVideoRecording() ) { + View takePhotoVideoButton = main_activity.findViewById(R.id.take_photo_when_video_recording); + takePhotoVideoButton.setVisibility(visibility); + } + if( main_activity.getApplicationInterface().getGyroSensor().isRecording() ) { + View cancelPanoramaButton = main_activity.findViewById(R.id.cancel_panorama); + cancelPanoramaButton.setVisibility(visibility); + } + } + if( !immersive_mode ) { + // make sure the GUI is set up as expected + showGUI(); + } + } + }); + } + + public boolean inImmersiveMode() { + return immersive_mode; + } + + public void showGUI(final boolean show, final boolean is_video) { + if( MyDebug.LOG ) { + Log.d(TAG, "showGUI: " + show); + Log.d(TAG, "is_video: " + is_video); + } + if( is_video ) + this.show_gui_video = show; + else + this.show_gui_photo = show; + showGUI(); + } + + public void showGUI() { + if( MyDebug.LOG ) { + Log.d(TAG, "showGUI"); + Log.d(TAG, "show_gui_photo: " + show_gui_photo); + Log.d(TAG, "show_gui_video: " + show_gui_video); + } + 
if( inImmersiveMode() ) + return; + if( (show_gui_photo || show_gui_video) && main_activity.usingKitKatImmersiveMode() ) { + // call to reset the timer + main_activity.initImmersiveMode(); + } + main_activity.runOnUiThread(new Runnable() { + public void run() { + final boolean is_panorama_recording = main_activity.getApplicationInterface().getGyroSensor().isRecording(); + final int visibility = is_panorama_recording ? View.GONE : (show_gui_photo && show_gui_video) ? View.VISIBLE : View.GONE; // for UI that is hidden while taking photo or video + final int visibility_video = is_panorama_recording ? View.GONE : show_gui_photo ? View.VISIBLE : View.GONE; // for UI that is only hidden while taking photo + View settingsButton = main_activity.findViewById(R.id.settings); + View switchCameraButton = main_activity.findViewById(R.id.switch_camera); + View switchMultiCameraButton = main_activity.findViewById(R.id.switch_multi_camera); + View switchVideoButton = main_activity.findViewById(R.id.switch_video); + View exposureButton = main_activity.findViewById(R.id.exposure); + View exposureLockButton = main_activity.findViewById(R.id.exposure_lock); + View whiteBalanceLockButton = main_activity.findViewById(R.id.white_balance_lock); + View cycleRawButton = main_activity.findViewById(R.id.cycle_raw); + View storeLocationButton = main_activity.findViewById(R.id.store_location); + View textStampButton = main_activity.findViewById(R.id.text_stamp); + View stampButton = main_activity.findViewById(R.id.stamp); + View focusPeakingButton = main_activity.findViewById(R.id.focus_peaking); + View autoLevelButton = main_activity.findViewById(R.id.auto_level); + View cycleFlashButton = main_activity.findViewById(R.id.cycle_flash); + View faceDetectionButton = main_activity.findViewById(R.id.face_detection); + View audioControlButton = main_activity.findViewById(R.id.audio_control); + View popupButton = main_activity.findViewById(R.id.popup); + 
settingsButton.setVisibility(visibility_video); // still allow settings when recording video - arguably we shouldn't, but looks wierd given that the other default icons aren't hidden when recording video + if( main_activity.getPreview().getCameraControllerManager().getNumberOfCameras() > 1 ) + switchCameraButton.setVisibility(visibility); + if( main_activity.showSwitchMultiCamIcon() ) + switchMultiCameraButton.setVisibility(visibility); + switchVideoButton.setVisibility(visibility); + if( main_activity.supportsExposureButton() ) + exposureButton.setVisibility(visibility_video); // still allow exposure when recording video + if( showExposureLockIcon() ) + exposureLockButton.setVisibility(visibility_video); // still allow exposure lock when recording video + if( showWhiteBalanceLockIcon() ) + whiteBalanceLockButton.setVisibility(visibility_video); // still allow white balance lock when recording video + if( showCycleRawIcon() ) + cycleRawButton.setVisibility(visibility); + if( showStoreLocationIcon() ) + storeLocationButton.setVisibility(visibility); + if( showTextStampIcon() ) + textStampButton.setVisibility(visibility); + if( showStampIcon() ) + stampButton.setVisibility(visibility); + if( showFocusPeakingIcon() ) + focusPeakingButton.setVisibility(visibility); + if( showAutoLevelIcon() ) + autoLevelButton.setVisibility(visibility); + if( showCycleFlashIcon() ) + cycleFlashButton.setVisibility(visibility); + if( showFaceDetectionIcon() ) + faceDetectionButton.setVisibility(visibility); + if( main_activity.hasAudioControl() ) + audioControlButton.setVisibility(visibility); + if( !(show_gui_photo && show_gui_video) ) { + closePopup(); // we still allow the popup when recording video, but need to update the UI (so it only shows flash options), so easiest to just close + } + + View remoteConnectedIcon = main_activity.findViewById(R.id.kraken_icon); + if (main_activity.getBluetoothRemoteControl().remoteConnected() ) { + if( MyDebug.LOG ) + Log.d(TAG, "Remote control 
connected"); + remoteConnectedIcon.setVisibility(View.VISIBLE); + } else { + if( MyDebug.LOG ) + Log.d(TAG, "Remote control DISconnected"); + remoteConnectedIcon.setVisibility(View.GONE); + } + popupButton.setVisibility(main_activity.getPreview().supportsFlash() ? visibility_video : visibility); // still allow popup in order to change flash mode when recording video + + if( show_gui_photo && show_gui_video ) { + layoutUI(); // needed for "top" UIPlacement, to auto-arrange the buttons + } + } + }); + } + + public void updateExposureLockIcon() { + ImageButton view = main_activity.findViewById(R.id.exposure_lock); + boolean enabled = main_activity.getPreview().isExposureLocked(); + view.setImageResource(enabled ? R.drawable.exposure_locked : R.drawable.exposure_unlocked); + view.setContentDescription( main_activity.getResources().getString(enabled ? R.string.exposure_unlock : R.string.exposure_lock) ); + } + + public void updateWhiteBalanceLockIcon() { + ImageButton view = main_activity.findViewById(R.id.white_balance_lock); + boolean enabled = main_activity.getPreview().isWhiteBalanceLocked(); + view.setImageResource(enabled ? R.drawable.white_balance_locked : R.drawable.white_balance_unlocked); + view.setContentDescription( main_activity.getResources().getString(enabled ? 
R.string.white_balance_unlock : R.string.white_balance_lock) ); + } + + public void updateCycleRawIcon() { + ApplicationInterface.RawPref raw_pref = main_activity.getApplicationInterface().getRawPref(); + ImageButton view = main_activity.findViewById(R.id.cycle_raw); + if( raw_pref == ApplicationInterface.RawPref.RAWPREF_JPEG_DNG ) { + if( main_activity.getApplicationInterface().isRawOnly() ) { + // actually RAW only + view.setImageResource(R.drawable.raw_only_icon); + } + else { + view.setImageResource(R.drawable.raw_icon); + } + } + else { + view.setImageResource(R.drawable.raw_off_icon); + } + } + + public void updateStoreLocationIcon() { + ImageButton view = main_activity.findViewById(R.id.store_location); + boolean enabled = main_activity.getApplicationInterface().getGeotaggingPref(); + view.setImageResource(enabled ? R.drawable.ic_gps_fixed_red_48dp : R.drawable.ic_gps_fixed_white_48dp); + view.setContentDescription( main_activity.getResources().getString(enabled ? R.string.preference_location_disable : R.string.preference_location_enable) ); + } + + public void updateTextStampIcon() { + ImageButton view = main_activity.findViewById(R.id.text_stamp); + boolean enabled = !main_activity.getApplicationInterface().getTextStampPref().isEmpty(); + view.setImageResource(enabled ? R.drawable.baseline_text_fields_red_48 : R.drawable.baseline_text_fields_white_48); + } + + public void updateStampIcon() { + ImageButton view = main_activity.findViewById(R.id.stamp); + boolean enabled = main_activity.getApplicationInterface().getStampPref().equals("preference_stamp_yes"); + view.setImageResource(enabled ? R.drawable.ic_text_format_red_48dp : R.drawable.ic_text_format_white_48dp); + view.setContentDescription( main_activity.getResources().getString(enabled ? 
R.string.stamp_disable : R.string.stamp_enable) ); + } + + public void updateFocusPeakingIcon() { + ImageButton view = main_activity.findViewById(R.id.focus_peaking); + boolean enabled = main_activity.getApplicationInterface().getFocusPeakingPref(); + view.setImageResource(enabled ? R.drawable.key_visualizer_red : R.drawable.key_visualizer); + view.setContentDescription( main_activity.getResources().getString(enabled ? R.string.focus_peaking_disable : R.string.focus_peaking_enable) ); + } + + public void updateAutoLevelIcon() { + ImageButton view = main_activity.findViewById(R.id.auto_level); + boolean enabled = main_activity.getApplicationInterface().getAutoStabilisePref(); + view.setImageResource(enabled ? R.drawable.auto_stabilise_icon_red : R.drawable.auto_stabilise_icon); + view.setContentDescription( main_activity.getResources().getString(enabled ? R.string.auto_level_disable : R.string.auto_level_enable) ); + } + + public void updateCycleFlashIcon() { + // n.b., read from preview rather than saved application preference - so the icon updates correctly when in flash + // auto mode, but user switches to manual ISO where flash auto isn't supported + String flash_value = main_activity.getPreview().getCurrentFlashValue(); + if( flash_value != null ) { + ImageButton view = main_activity.findViewById(R.id.cycle_flash); + switch( flash_value ) { + case "flash_off": + view.setImageResource(R.drawable.flash_off); + break; + case "flash_auto": + case "flash_frontscreen_auto": + view.setImageResource(R.drawable.flash_auto); + break; + case "flash_on": + case "flash_frontscreen_on": + view.setImageResource(R.drawable.flash_on); + break; + case "flash_torch": + case "flash_frontscreen_torch": + view.setImageResource(R.drawable.baseline_highlight_white_48); + break; + case "flash_red_eye": + view.setImageResource(R.drawable.baseline_remove_red_eye_white_48); + break; + default: + // just in case?? 
+ Log.e(TAG, "unknown flash value " + flash_value); + view.setImageResource(R.drawable.flash_off); + break; + } + } + else { + ImageButton view = main_activity.findViewById(R.id.cycle_flash); + view.setImageResource(R.drawable.flash_off); + } + } + + public void updateFaceDetectionIcon() { + ImageButton view = main_activity.findViewById(R.id.face_detection); + boolean enabled = main_activity.getApplicationInterface().getFaceDetectionPref(); + view.setImageResource(enabled ? R.drawable.ic_face_red_48dp : R.drawable.ic_face_white_48dp); + view.setContentDescription( main_activity.getResources().getString(enabled ? R.string.face_detection_disable : R.string.face_detection_enable) ); + } + + public void updateOnScreenIcons() { + if( MyDebug.LOG ) + Log.d(TAG, "updateOnScreenIcons"); + this.updateExposureLockIcon(); + this.updateWhiteBalanceLockIcon(); + this.updateCycleRawIcon(); + this.updateStoreLocationIcon(); + this.updateTextStampIcon(); + this.updateStampIcon(); + this.updateFocusPeakingIcon(); + this.updateAutoLevelIcon(); + this.updateCycleFlashIcon(); + this.updateFaceDetectionIcon(); + } + + public void audioControlStarted() { + ImageButton view = main_activity.findViewById(R.id.audio_control); + view.setImageResource(R.drawable.ic_mic_red_48dp); + view.setContentDescription( main_activity.getResources().getString(R.string.audio_control_stop) ); + } + + public void audioControlStopped() { + ImageButton view = main_activity.findViewById(R.id.audio_control); + view.setImageResource(R.drawable.ic_mic_white_48dp); + view.setContentDescription( main_activity.getResources().getString(R.string.audio_control_start) ); + } + + public boolean isExposureUIOpen() { + View exposure_seek_bar = main_activity.findViewById(R.id.exposure_container); + int exposure_visibility = exposure_seek_bar.getVisibility(); + View manual_exposure_seek_bar = main_activity.findViewById(R.id.manual_exposure_container); + int manual_exposure_visibility = 
        manual_exposure_seek_bar.getVisibility();
        return exposure_visibility == View.VISIBLE || manual_exposure_visibility == View.VISIBLE;
    }

    /**
     * Opens or close the exposure settings (ISO, white balance, etc).
     * Closes any open popup first; only opens the exposure UI when the camera controller
     * exists, the preview isn't still starting, and the exposure button is supported.
     */
    public void toggleExposureUI() {
        if( MyDebug.LOG )
            Log.d(TAG, "toggleExposureUI");
        closePopup();
        mSelectingExposureUIElement = false;
        if( isExposureUIOpen() ) {
            closeExposureUI();
        }
        else if( main_activity.getPreview().getCameraController() != null && !main_activity.getPreview().isPreviewStarting() && main_activity.supportsExposureButton() ) {
            // make sure preview is not starting - risk here is if preview is currently opening on
            // background thread - don't want to open exposure UI that would allow being
            // able to change settings that would then require restarting the preview
            setupExposureUI();
            if (main_activity.getBluetoothRemoteControl().remoteEnabled()) {
                initRemoteControlForExposureUI();
            }
        }
    }

    /** Enters remote-control navigation mode for the exposure UI, highlighting the first line. */
    private void initRemoteControlForExposureUI() {
        if( MyDebug.LOG )
            Log.d(TAG, "initRemoteControlForExposureUI");
        if( isExposureUIOpen() ) { // just in case
            remote_control_mode = true;
            mExposureLine = 0;
            highlightExposureUILine(true);
        }
    }

    /** Leaves remote-control navigation mode for the exposure UI and clears any line highlighting. */
    private void clearRemoteControlForExposureUI() {
        if( MyDebug.LOG )
            Log.d(TAG, "clearRemoteControlForExposureUI");
        if( isExposureUIOpen() && remote_control_mode ) {
            remote_control_mode = false;
            resetExposureUIHighlights();
        }
    }

    /** Removes the remote-control highlight from every exposure UI line. */
    private void resetExposureUIHighlights() {
        if( MyDebug.LOG )
            Log.d(TAG, "resetExposureUIHighlights");
        ViewGroup iso_buttons_container = main_activity.findViewById(R.id.iso_buttons); // Shown when Camera API2 enabled
        View exposure_seek_bar = main_activity.findViewById(R.id.exposure_container);
        View shutter_seekbar = main_activity.findViewById(R.id.exposure_time_seekbar);
        View iso_seekbar = main_activity.findViewById(R.id.iso_seekbar);
        View wb_seekbar = main_activity.findViewById(R.id.white_balance_seekbar);
        // Set all lines back to transparent (i.e., no highlight)
        iso_buttons_container.setBackgroundColor(Color.TRANSPARENT);
        exposure_seek_bar.setBackgroundColor(Color.TRANSPARENT);
        shutter_seekbar.setBackgroundColor(Color.TRANSPARENT);
        iso_seekbar.setBackgroundColor(Color.TRANSPARENT);
        wb_seekbar.setBackgroundColor(Color.TRANSPARENT);
    }

    /**
     * Highlights the relevant line on the Exposure UI based on
     * the value of mExposureLine.
     * Line order: 0 = ISO buttons, 1 = ISO slider, 2 = shutter speed,
     * 3 = exposure compensation, 4 = white balance.
     * @param selectNext direction used when skipping lines that aren't currently shown
     *                   (true = forwards, false = backwards).
     */
    private void highlightExposureUILine(boolean selectNext) {
        if( MyDebug.LOG )
            Log.d(TAG, "highlightExposureUILine: " + selectNext);
        if (!isExposureUIOpen()) { // Safety check
            return;
        }
        ViewGroup iso_buttons_container = main_activity.findViewById(R.id.iso_buttons); // Shown when Camera API2 enabled
        View exposure_seek_bar = main_activity.findViewById(R.id.exposure_container);
        View shutter_seekbar = main_activity.findViewById(R.id.exposure_time_seekbar);
        View iso_seekbar = main_activity.findViewById(R.id.iso_seekbar);
        View wb_seekbar = main_activity.findViewById(R.id.white_balance_seekbar);
        // Our order for lines is:
        // - ISO buttons
        // - ISO slider
        // - Shutter speed
        // - exposure seek bar
        // - white balance seek bar
        if( MyDebug.LOG )
            Log.d(TAG, "mExposureLine: " + mExposureLine);
        // wrap into [0,5) - Java's % can return negative values
        mExposureLine = ( mExposureLine + 5 ) % 5;
        if( MyDebug.LOG )
            Log.d(TAG, "mExposureLine modulo: " + mExposureLine);
        // skip over lines that aren't currently visible, stepping in the travel direction
        if (selectNext) {
            if (mExposureLine == 0 && !iso_buttons_container.isShown())
                mExposureLine++;
            if (mExposureLine == 1 && !iso_seekbar.isShown())
                mExposureLine++;
            if (mExposureLine == 2 && !shutter_seekbar.isShown())
                mExposureLine++;
            if ((mExposureLine == 3) && !exposure_seek_bar.isShown())
                mExposureLine++;
            if ((mExposureLine == 4) && !wb_seekbar.isShown())
                mExposureLine++;
        } else {
            // Select previous
            if (mExposureLine == 4 && !wb_seekbar.isShown())
                mExposureLine--;
            if (mExposureLine == 3 && !exposure_seek_bar.isShown())
                mExposureLine--;
            if (mExposureLine == 2 && !shutter_seekbar.isShown())
                mExposureLine--;
            if (mExposureLine == 1 && !iso_seekbar.isShown())
                mExposureLine--;
            if (mExposureLine == 0 && !iso_buttons_container.isShown())
                mExposureLine--;
        }
        if( MyDebug.LOG )
            Log.d(TAG, "after skipping: mExposureLine: " + mExposureLine);
        // skipping may have stepped outside [0,5); wrap again
        mExposureLine = ( mExposureLine + 5 ) % 5;
        if( MyDebug.LOG )
            Log.d(TAG, "after skipping: mExposureLine modulo: " + mExposureLine);
        resetExposureUIHighlights();

        if (mExposureLine == 0) {
            iso_buttons_container.setBackgroundColor(highlightColor);
            //iso_buttons_container.setAlpha(0.5f);
        } else if (mExposureLine == 1) {
            iso_seekbar.setBackgroundColor(highlightColor);
            //iso_seekbar.setAlpha(0.5f);
        } else if (mExposureLine == 2) {
            shutter_seekbar.setBackgroundColor(highlightColor);
            //shutter_seekbar.setAlpha(0.5f);
        } else if (mExposureLine == 3) {
            exposure_seek_bar.setBackgroundColor(highlightColor);
            //exposure_seek_bar.setAlpha(0.5f);
        } else if (mExposureLine == 4) {
            wb_seekbar.setBackgroundColor(highlightColor);
            //wb_seekbar.setAlpha(0.5f);
        }
    }

    /** Moves the remote-control selection to the next exposure UI line. */
    private void nextExposureUILine() {
        mExposureLine++;
        highlightExposureUILine(true);
    }

    /** Moves the remote-control selection to the previous exposure UI line. */
    private void previousExposureUILine() {
        mExposureLine--;
        highlightExposureUILine(false);
    }

    /**
     * Increases the value of the currently selected exposure UI element.
     * Our order for lines is:
     * -0: ISO buttons
     * -1: ISO slider
     * -2: Shutter speed
     * -3: exposure seek bar
     * -4: white balance seek bar
     */
    private void nextExposureUIItem() {
        if( MyDebug.LOG )
            Log.d(TAG, "nextExposureUIItem");
        switch (mExposureLine) {
            case 0:
                nextIsoItem(false);
                break;
            case 1:
                changeSeekbar(R.id.iso_seekbar, 10);
                break;
            case 2:
                changeSeekbar(R.id.exposure_time_seekbar, 5);
                break;
            case 3:
                //changeSeekbar(R.id.exposure_seekbar, 1);
                // call via MainActivity.changeExposure(), to handle repeated zeroes
                main_activity.changeExposure(1);
                break;
            case 4:
                changeSeekbar(R.id.white_balance_seekbar, 3);
                break;
        }
    }

    /** Decreases the value of the currently selected exposure UI element (see nextExposureUIItem). */
    private void previousExposureUIItem() {
        if( MyDebug.LOG )
            Log.d(TAG, "previousExposureUIItem");
switch (mExposureLine) { + case 0: + nextIsoItem(true); + break; + case 1: + changeSeekbar(R.id.iso_seekbar, -10); + break; + case 2: + changeSeekbar(R.id.exposure_time_seekbar, -5); + break; + case 3: + //changeSeekbar(R.id.exposure_seekbar, -1); + // call via MainActivity.changeExposure(), to handle repeated zeroes + main_activity.changeExposure(-1); + break; + case 4: + changeSeekbar(R.id.white_balance_seekbar, -3); + break; + } + } + + private void nextIsoItem(boolean previous) { + if( MyDebug.LOG ) + Log.d(TAG, "nextIsoItem: " + previous); + // Find current ISO + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + String current_iso = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT); + int count = iso_buttons.size(); + int step = previous ? -1 : 1; + boolean found = false; + for(int i = 0; i < count; i++) { + Button button = (Button)iso_buttons.get(i); + String button_text = String.valueOf(button.getText()); + if( ISOTextEquals(button_text, current_iso) ) { + found = true; + // Select next one, unless it's "Manual", which we skip since + // it's not practical in remote mode. + Button nextButton = (Button) iso_buttons.get((i + count + step)%count); + String nextButton_text = String.valueOf(nextButton.getText()); + if( nextButton_text.contains("m") ) { + nextButton = (Button) iso_buttons.get((i+count+ 2*step)%count); + } + nextButton.callOnClick(); + break; + } + } + if( !found ) { + // For instance, we are in ISO manual mode and "M" is selected. default + // back to "Auto" to avoid being stuck since we're with a remote control + iso_buttons.get(0).callOnClick(); + } + } + + /** + * Select element on exposure UI. 
Based on the value of mExposureLine + * // Our order for lines is: + * // - ISO buttons + * // - ISO slider + * // - Shutter speed + * // - exposure seek bar + */ + private void selectExposureUILine() { + if( MyDebug.LOG ) + Log.d(TAG, "selectExposureUILine"); + if( !isExposureUIOpen() ) { // Safety check + return; + } + + if( mExposureLine == 0 ) { // ISO presets + ViewGroup iso_buttons_container = main_activity.findViewById(R.id.iso_buttons); + iso_buttons_container.setBackgroundColor(highlightColorExposureUIElement); + //iso_buttons_container.setAlpha(1f); + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + String current_iso = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT); + // if the manual ISO value isn't one of the "preset" values, then instead highlight the manual ISO icon + boolean found = false; + Button manualButton = null; + for(View view : iso_buttons) { + Button button = (Button)view; + String button_text = String.valueOf(button.getText()); + if( ISOTextEquals(button_text, current_iso) ) { + PopupView.setButtonSelected(button, true); + //button.setBackgroundColor(highlightColorExposureUIElement); + //button.setAlpha(0.3f); + found = true; + } + else { + if( button_text.contains("m") ) { + manualButton = button; + } + PopupView.setButtonSelected(button, false); + button.setBackgroundColor(Color.TRANSPARENT); + } + } + if( !found && manualButton != null ) { + // We are in manual ISO, highlight the "M" button + PopupView.setButtonSelected(manualButton, true); + manualButton.setBackgroundColor(highlightColorExposureUIElement); + //manualButton.setAlpha(0.3f); + } + mSelectingExposureUIElement = true; + } + else if( mExposureLine == 1 ) { + // ISO seek bar - change color + View seek_bar = main_activity.findViewById(R.id.iso_seekbar); + //seek_bar.setAlpha(0.1f); + seek_bar.setBackgroundColor(highlightColorExposureUIElement); + mSelectingExposureUIElement 
= true; + } + else if( mExposureLine == 2 ) { + // ISO seek bar - change color + View seek_bar = main_activity.findViewById(R.id.exposure_time_seekbar); + //seek_bar.setAlpha(0.1f); + seek_bar.setBackgroundColor(highlightColorExposureUIElement); + mSelectingExposureUIElement = true; + } + else if ( mExposureLine == 3 ) { + // Exposure compensation + View container = main_activity.findViewById(R.id.exposure_container); + //container.setAlpha(0.1f); + container.setBackgroundColor(highlightColorExposureUIElement); + mSelectingExposureUIElement = true; + } + else if( mExposureLine == 4 ) { + // Manual white balance + View container = main_activity.findViewById(R.id.white_balance_seekbar); + //container.setAlpha(0.1f); + container.setBackgroundColor(highlightColorExposureUIElement); + mSelectingExposureUIElement = true; + } + } + + /** Returns the height of the device in dp (or width in portrait mode), allowing for space for the + * on-screen UI icons. + * @param centred If true, then find the max height for a view that will be centred. + */ + int getMaxHeightDp(boolean centred) { + // ensure we have display for landscape orientation (even if we ever allow Open Camera + Point display_size = new Point(); + main_activity.getApplicationInterface().getDisplaySize(display_size, true); + + // normally we should always have heightPixels < widthPixels, but good not to assume we're running in landscape orientation + int smaller_dim = Math.min(display_size.x, display_size.y); + // the smaller dimension should limit the width, due to when held in portrait + final float scale = main_activity.getResources().getDisplayMetrics().density; + int dpHeight = (int)(smaller_dim / scale); + if( MyDebug.LOG ) { + Log.d(TAG, "display size: " + display_size.x + " x " + display_size.y); + Log.d(TAG, "dpHeight: " + dpHeight); + } + // allow space for the icons at top/right of screen + int margin = centred ? 
120 : 50; + dpHeight -= margin; + return dpHeight; + } + + public boolean isSelectingExposureUIElement() { + if( MyDebug.LOG ) + Log.d(TAG, "isSelectingExposureUIElement returns:" + mSelectingExposureUIElement); + return mSelectingExposureUIElement; + } + + + /** + * Process a press to the "Up" button on a remote. Called from MainActivity. + * @return true if an action was taken + */ + public boolean processRemoteUpButton() { + if( MyDebug.LOG ) + Log.d(TAG, "processRemoteUpButton"); + boolean didProcess = false; + if (popupIsOpen()) { + didProcess = true; + if (selectingIcons()) { + previousPopupIcon(); + } else if (selectingLines()) { + previousPopupLine(); + } + } else if (isExposureUIOpen()) { + didProcess = true; + if (isSelectingExposureUIElement()) { + nextExposureUIItem(); + } else { + previousExposureUILine(); + } + } + return didProcess; + } + + /** + * Process a press to the "Down" button on a remote. Called from MainActivity. + * @return true if an action was taken + */ + public boolean processRemoteDownButton() { + if( MyDebug.LOG ) + Log.d(TAG, "processRemoteDownButton"); + boolean didProcess = false; + if (popupIsOpen()) { + if (selectingIcons()) { + nextPopupIcon(); + } else if (selectingLines()) { + nextPopupLine(); + } + didProcess = true; + } else if (isExposureUIOpen()) { + if (isSelectingExposureUIElement()) { + previousExposureUIItem(); + } else { + nextExposureUILine(); + } + didProcess = true; + } + return didProcess; + } + + private List iso_buttons; + private int iso_button_manual_index = -1; + private final static String manual_iso_value = "m"; + + /** Opens the exposure UI if not already open, and sets up or updates the UI. 
     */
    public void setupExposureUI() {
        if( MyDebug.LOG )
            Log.d(TAG, "setupExposureUI");
        test_ui_buttons.clear();
        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
        final Preview preview = main_activity.getPreview();
        // switch the exposure icon to its "open" (red) state
        ImageButton view = main_activity.findViewById(R.id.exposure);
        view.setImageResource(R.drawable.ic_exposure_red_48dp);
        View sliders_container = main_activity.findViewById(R.id.sliders_container);
        sliders_container.setVisibility(View.VISIBLE);
        Animation animation = AnimationUtils.loadAnimation(main_activity, R.anim.fade_in);
        sliders_container.startAnimation(animation);
        ViewGroup iso_buttons_container = main_activity.findViewById(R.id.iso_buttons);
        iso_buttons_container.removeAllViews();
        // Build the list of ISO options to offer as buttons.
        // NOTE(review): generic type parameters appear lost in extraction here (presumably
        // List<String>); confirm against upstream source.
        List supported_isos;
        if( preview.isVideoRecording() ) {
            // no ISO buttons while recording video
            supported_isos = null;
        }
        else if( preview.supportsISORange() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "supports ISO range");
            int min_iso = preview.getMinimumISO();
            int max_iso = preview.getMaximumISO();
            List values = new ArrayList<>();
            values.add(CameraController.ISO_DEFAULT);
            values.add(manual_iso_value);
            iso_button_manual_index = 1; // must match where we place the manual button!
            // preset values, clipped to the camera's supported [min_iso, max_iso] range
            int [] iso_values = {50, 100, 200, 400, 800, 1600, 3200, 6400};
            values.add(ISOToButtonText(min_iso));
            for(int iso_value : iso_values) {
                if( iso_value > min_iso && iso_value < max_iso ) {
                    values.add(ISOToButtonText(iso_value));
                }
            }
            values.add(ISOToButtonText(max_iso));
            supported_isos = values;
        }
        else {
            supported_isos = preview.getSupportedISOs();
            iso_button_manual_index = -1;
        }
        String current_iso = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT);
        // if the manual ISO value isn't one of the "preset" values, then instead highlight the manual ISO icon
        if( !current_iso.equals(CameraController.ISO_DEFAULT) && supported_isos != null && supported_isos.contains(manual_iso_value) && !supported_isos.contains(current_iso) )
            current_iso = manual_iso_value;


        // width of the ISO button row, capped by the available screen space
        int total_width_dp = 280;
        int max_width_dp = getMaxHeightDp(true);
        if( total_width_dp > max_width_dp )
            total_width_dp = max_width_dp;
        if( MyDebug.LOG )
            Log.d(TAG, "total_width_dp: " + total_width_dp);

        // n.b., we hardcode the string "ISO" as this isn't a user displayed string, rather it's used to filter out "ISO" included in old Camera API parameters
        iso_buttons = PopupView.createButtonOptions(iso_buttons_container, main_activity, total_width_dp, test_ui_buttons, supported_isos, -1, -1, "ISO", false, current_iso, 0, "TEST_ISO", new PopupView.ButtonOptionsPopupListener() {
            @Override
            public void onClick(String option) {
                if( MyDebug.LOG )
                    Log.d(TAG, "clicked iso: " + option);
                SharedPreferences.Editor editor = sharedPreferences.edit();
                String old_iso = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT);
                if( MyDebug.LOG )
                    Log.d(TAG, "old_iso: " + old_iso);
                editor.putString(PreferenceKeys.ISOPreferenceKey, option);
                String toast_option = option;

                if( preview.supportsISORange() ) {
                    if( option.equals(CameraController.ISO_DEFAULT) ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "switched from manual to auto iso");
                        // also reset exposure time when changing from manual to auto from the popup menu:
                        editor.putLong(PreferenceKeys.ExposureTimePreferenceKey, CameraController.EXPOSURE_TIME_DEFAULT);
                        editor.apply();
                        preview.showToast(null, "ISO: " + toast_option, 0, true); // supply offset_y_dp to be consistent with preview.setExposure(), preview.setISO()
                        main_activity.updateForSettings(true, ""); // already showed the toast, so block from showing again
                    }
                    else if( old_iso.equals(CameraController.ISO_DEFAULT) ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "switched from auto to manual iso");
                        if( option.equals("m") ) {
                            // if we used the generic "manual", then instead try to preserve the current iso if it exists
                            if( preview.getCameraController() != null && preview.getCameraController().captureResultHasIso() ) {
                                int iso = preview.getCameraController().captureResultIso();
                                if( MyDebug.LOG )
                                    Log.d(TAG, "apply existing iso of " + iso);
                                editor.putString(PreferenceKeys.ISOPreferenceKey, String.valueOf(iso));
                                toast_option = String.valueOf(iso);
                            }
                            else {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "no existing iso available");
                                // use a default
                                final int iso = 800;
                                editor.putString(PreferenceKeys.ISOPreferenceKey, "" + iso);
                                toast_option = "" + iso;
                            }
                        }

                        // if changing from auto to manual, preserve the current exposure time if it exists
                        if( preview.getCameraController() != null && preview.getCameraController().captureResultHasExposureTime() ) {
                            long exposure_time = preview.getCameraController().captureResultExposureTime();
                            if( MyDebug.LOG )
                                Log.d(TAG, "apply existing exposure time of " + exposure_time);
                            editor.putLong(PreferenceKeys.ExposureTimePreferenceKey, exposure_time);
                        }
                        else {
                            if( MyDebug.LOG )
                                Log.d(TAG, "no existing exposure time available");
                        }

                        editor.apply();
                        preview.showToast(null, "ISO: " + toast_option, 0, true); // supply offset_y_dp to be consistent with preview.setExposure(), preview.setISO()
                        main_activity.updateForSettings(true, ""); // already showed the toast, so block from showing again
                    }
                    else {
                        if( MyDebug.LOG )
                            Log.d(TAG, "changed manual iso");
                        if( option.equals("m") ) {
                            // if user selected the generic "manual", then just keep the previous non-ISO option
                            if( MyDebug.LOG )
                                Log.d(TAG, "keep existing iso of " + old_iso);
                            editor.putString(PreferenceKeys.ISOPreferenceKey, old_iso);
                        }

                        editor.apply();
                        int iso = preview.parseManualISOValue(option);
                        if( iso >= 0 ) {
                            // if changing between manual ISOs, no need to call updateForSettings, just change the ISO directly (as with changing the ISO via manual slider)
                            //preview.setISO(iso);
                            //updateSelectedISOButton();
                            // rather than set ISO directly, we move the seekbar, and the ISO will be changed via the seekbar listener
                            SeekBar iso_seek_bar = main_activity.findViewById(R.id.iso_seekbar);
                            main_activity.getManualSeekbars().setISOProgressBarToClosest(iso_seek_bar, iso);
                        }
                    }
                }
                else {
                    // old API: no preference bookkeeping beyond the ISO key itself
                    editor.apply();
                    if( preview.getCameraController() != null ) {
                        preview.getCameraController().setISO(option);
                    }
                }

                // rebuild the exposure UI so button selection/visibility reflects the new ISO
                setupExposureUI();
            }
        });
        if( supported_isos != null ) {
            View iso_container_view = main_activity.findViewById(R.id.iso_container);
            iso_container_view.setVisibility(View.VISIBLE);
        }

        View exposure_seek_bar = main_activity.findViewById(R.id.exposure_container);
        View manual_exposure_seek_bar = main_activity.findViewById(R.id.manual_exposure_container);
        String iso_value = main_activity.getApplicationInterface().getISOPref();
        if( main_activity.getPreview().usingCamera2API() && !iso_value.equals(CameraController.ISO_DEFAULT) ) {
            exposure_seek_bar.setVisibility(View.GONE);

            // with Camera2 API, when using manual ISO we instead show sliders for ISO range and exposure time
            if( main_activity.getPreview().supportsISORange() ) {
                manual_exposure_seek_bar.setVisibility(View.VISIBLE);
                SeekBar exposure_time_seek_bar = main_activity.findViewById(R.id.exposure_time_seekbar);
                if( main_activity.getPreview().supportsExposureTime() ) {
                    exposure_time_seek_bar.setVisibility(View.VISIBLE);
                }
                else {
                    exposure_time_seek_bar.setVisibility(View.GONE);
                }
            }
            else {
                manual_exposure_seek_bar.setVisibility(View.GONE);
            }
        }
        else {
            manual_exposure_seek_bar.setVisibility(View.GONE);

            if( main_activity.getPreview().supportsExposures() ) {
                exposure_seek_bar.setVisibility(View.VISIBLE);
            }
            else {
                exposure_seek_bar.setVisibility(View.GONE);
            }
        }

        View manual_white_balance_seek_bar = main_activity.findViewById(R.id.manual_white_balance_container);
        if( main_activity.getPreview().supportsWhiteBalanceTemperature()) {
            // we also show slider for manual white balance, if in that mode
            String white_balance_value = main_activity.getApplicationInterface().getWhiteBalancePref();
            if( main_activity.getPreview().usingCamera2API() && white_balance_value.equals("manual") ) {
                manual_white_balance_seek_bar.setVisibility(View.VISIBLE);
            }
            else {
                manual_white_balance_seek_bar.setVisibility(View.GONE);
            }
        }
        else {
            manual_white_balance_seek_bar.setVisibility(View.GONE);
        }

        //layoutUI(); // needed to update alignment of exposure UI
    }

    /** Returns whether the ISO button with the supplied text is a match for the supplied iso.
     * Should only be used for Preview.supportsISORange()==true (i.e., full manual ISO).
     */
    public static boolean ISOTextEquals(String button_text, String iso) {
        // Can't use equals(), due to the \n that Popupview.getButtonOptionString() inserts, and
        // also good to make this general in case in future we support other text formats.
        // We really want to check that iso is the last word in button_text.
        if( button_text.endsWith(iso) ) {
            return button_text.length()==iso.length() || Character.isWhitespace( button_text.charAt(button_text.length()-iso.length()-1) );
        }
        return false;
    }

    /** Returns the ISO button text for the supplied iso.
     * Should only be used for Preview.supportsISORange()==true (i.e., full manual ISO).
     */
    public static String ISOToButtonText(int iso) {
        // n.b., if we change how the ISO is converted to a string for the button, will also need
        // to update updateSelectedISOButton()
        return String.valueOf(iso);
    }

    /** If the exposure panel is open, updates the selected ISO button to match the current ISO value,
     * if a continuous range of ISO values are supported by the camera.
     */
    public void updateSelectedISOButton() {
        if( MyDebug.LOG )
            Log.d(TAG, "updateSelectedISOButton");
        Preview preview = main_activity.getPreview();
        if( preview.supportsISORange() && isExposureUIOpen() ) {
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
            String current_iso = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT);
            // if the manual ISO value isn't one of the "preset" values, then instead highlight the manual ISO icon
            if( MyDebug.LOG )
                Log.d(TAG, "current_iso: " + current_iso);
            boolean found = false;
            for(View view : iso_buttons) {
                Button button = (Button)view;
                if( MyDebug.LOG )
                    Log.d(TAG, "button: " + button.getText());
                String button_text = String.valueOf(button.getText());
                if( ISOTextEquals(button_text, current_iso) ) {
                    PopupView.setButtonSelected(button, true);
                    found = true;
                }
                else {
                    PopupView.setButtonSelected(button, false);
                }
            }
            if( !found && !current_iso.equals(CameraController.ISO_DEFAULT) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "must be manual");
                if( iso_button_manual_index >= 0 && iso_button_manual_index < iso_buttons.size() ) {
                    Button button = (Button)iso_buttons.get(iso_button_manual_index);
PopupView.setButtonSelected(button, true); + } + } + } + } + + public void setSeekbarZoom(int new_zoom) { + if( MyDebug.LOG ) + Log.d(TAG, "setSeekbarZoom: " + new_zoom); + SeekBar zoomSeekBar = main_activity.findViewById(R.id.zoom_seekbar); + if( MyDebug.LOG ) + Log.d(TAG, "progress was: " + zoomSeekBar.getProgress()); + zoomSeekBar.setProgress(main_activity.getPreview().getMaxZoom()-new_zoom); + if( MyDebug.LOG ) + Log.d(TAG, "progress is now: " + zoomSeekBar.getProgress()); + } + + public void changeSeekbar(int seekBarId, int change) { + if( MyDebug.LOG ) + Log.d(TAG, "changeSeekbar: " + change); + SeekBar seekBar = main_activity.findViewById(seekBarId); + int value = seekBar.getProgress(); + int new_value = value + change; + if( new_value < 0 ) + new_value = 0; + else if( new_value > seekBar.getMax() ) + new_value = seekBar.getMax(); + if( MyDebug.LOG ) { + Log.d(TAG, "value: " + value); + Log.d(TAG, "new_value: " + new_value); + Log.d(TAG, "max: " + seekBar.getMax()); + } + if( new_value != value ) { + seekBar.setProgress(new_value); + } + } + + /** Closes the exposure UI. 
+ */ + public void closeExposureUI() { + ImageButton image_button = main_activity.findViewById(R.id.exposure); + image_button.setImageResource(R.drawable.ic_exposure_white_48dp); + + clearRemoteControlForExposureUI(); // must be called before we actually close the exposure panel + View view = main_activity.findViewById(R.id.sliders_container); + view.setVisibility(View.GONE); + view = main_activity.findViewById(R.id.iso_container); + view.setVisibility(View.GONE); + view = main_activity.findViewById(R.id.exposure_container); + view.setVisibility(View.GONE); + view = main_activity.findViewById(R.id.manual_exposure_container); + view.setVisibility(View.GONE); + view = main_activity.findViewById(R.id.manual_white_balance_container); + view.setVisibility(View.GONE); + } + + public void setPopupIcon() { + if( MyDebug.LOG ) + Log.d(TAG, "setPopupIcon"); + ImageButton popup = main_activity.findViewById(R.id.popup); + String flash_value = main_activity.getPreview().getCurrentFlashValue(); + if( MyDebug.LOG ) + Log.d(TAG, "flash_value: " + flash_value); + if( main_activity.getMainUI().showCycleFlashIcon() ) { + popup.setImageResource(R.drawable.popup); + } + else if( flash_value != null && flash_value.equals("flash_off") ) { + popup.setImageResource(R.drawable.popup_flash_off); + } + else if( flash_value != null && ( flash_value.equals("flash_torch") || flash_value.equals("flash_frontscreen_torch") ) ) { + popup.setImageResource(R.drawable.popup_flash_torch); + } + else if( flash_value != null && ( flash_value.equals("flash_auto") || flash_value.equals("flash_frontscreen_auto") ) ) { + popup.setImageResource(R.drawable.popup_flash_auto); + } + else if( flash_value != null && ( flash_value.equals("flash_on") || flash_value.equals("flash_frontscreen_on") ) ) { + popup.setImageResource(R.drawable.popup_flash_on); + } + else if( flash_value != null && flash_value.equals("flash_red_eye") ) { + popup.setImageResource(R.drawable.popup_flash_red_eye); + } + else { + 
popup.setImageResource(R.drawable.popup); + } + } + + public void closePopup() { + if( MyDebug.LOG ) + Log.d(TAG, "close popup"); + + main_activity.enablePopupOnBackPressedCallback(false); + + if( popupIsOpen() ) { + clearRemoteControlForPopup(); // must be called before we set popup_view_is_open to false; and before clearSelectionState() so we know which highlighting to disable + clearSelectionState(); + + popup_view_is_open = false; + /* Not destroying the popup doesn't really gain any performance. + * Also there are still outstanding bugs to fix if we wanted to do this: + * - Not resetting the popup menu when switching between photo and video mode. See test testVideoPopup(). + * - When changing options like flash/focus, the new option isn't selected when reopening the popup menu. See test + * testPopup(). + * - Changing settings potentially means we have to recreate the popup, so the natural place to do this is in + * MainActivity.updateForSettings(), but doing so makes the popup close when checking photo or video resolutions! + * See test testSwitchResolution(). 
             */
            if( cache_popup && !force_destroy_popup ) {
                popup_view.setVisibility(View.GONE);
            }
            else {
                destroyPopup();
            }
            main_activity.initImmersiveMode(); // to reset the timer when closing the popup
        }
    }

    /** Whether the settings popup is currently open. */
    public boolean popupIsOpen() {
        return popup_view_is_open;
    }

    /** Whether remote-control navigation is currently selecting icons within a popup line. */
    public boolean selectingIcons() {
        return mSelectingIcons;
    }

    /** Whether remote-control navigation is currently selecting lines of the popup. */
    public boolean selectingLines() {
        return mSelectingLines;
    }

    /** Closes the popup if open, then removes and discards the cached popup view. */
    public void destroyPopup() {
        if( MyDebug.LOG )
            Log.d(TAG, "destroyPopup");
        // clear the flag first: closePopup() below re-checks it when deciding whether to hide or destroy
        force_destroy_popup = false;
        if( popupIsOpen() ) {
            closePopup();
        }
        ViewGroup popup_container = main_activity.findViewById(R.id.popup_container);
        popup_container.removeAllViews();
        popup_view = null;
    }

    /**
     * Highlights (or un-highlights) the popup line indexed by mPopupLine, scrolling it into
     * view, and skipping non-LinearLayout / hidden rows in the direction given by goUp.
     * Checks that the popup is open in case it is wrongly called, so that it doesn't crash the app.
     * @param highlight true to highlight the line, false to clear its highlight.
     * @param goUp direction to step when skipping unusable rows.
     */
    private void highlightPopupLine(boolean highlight, boolean goUp) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "highlightPopupLine");
            Log.d(TAG, "highlight: " + highlight);
            Log.d(TAG, "goUp: " + goUp);
        }
        if (!popupIsOpen()) { // Safety check
            clearSelectionState();
            return;
        }
        final ViewGroup popup_container = main_activity.findViewById(R.id.popup_container);
        Rect scrollBounds = new Rect();
        popup_container.getDrawingRect(scrollBounds);
        final LinearLayout inside = (LinearLayout) popup_container.getChildAt(0);
        if (inside == null)
            return; // Safety check
        int count = inside.getChildCount();
        boolean foundLine = false;
        // NOTE(review): if no child is a shown LinearLayout, this loop never terminates -
        // presumably the popup always contains at least one such row; confirm.
        while (!foundLine) {
            // Ensure we stay within our bounds:
            mPopupLine = (mPopupLine + count ) % count;
            View v = inside.getChildAt(mPopupLine);
            if( MyDebug.LOG )
                Log.d(TAG, "line: " + mPopupLine + " view: " + v);
            // to test example with HorizontalScrollView, see popup menu on Nokia 8 with Camera2 API, the flash icons row uses a HorizontalScrollView
            if( v instanceof HorizontalScrollView && ((HorizontalScrollView) v).getChildCount() > 0 )
                v = ((HorizontalScrollView) v).getChildAt(0);
            if (v.isShown() && v instanceof LinearLayout ) {
                if (highlight) {
                    v.setBackgroundColor(highlightColor);
                    //v.setAlpha(0.3f);
                    // scroll the highlighted line into view if it's off-screen
                    if (v.getBottom() > scrollBounds.bottom || v.getTop() < scrollBounds.top)
                        popup_container.scrollTo(0, v.getTop());
                    mHighlightedLine = (LinearLayout) v;
                } else {
                    v.setBackgroundColor(Color.TRANSPARENT);
                    v.setAlpha(1f);
                }
                foundLine = true;
                if( MyDebug.LOG )
                    Log.d(TAG, "found at line: " + foundLine);
            } else {
                mPopupLine += goUp ? -1 : 1;
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG,"Current line: " + mPopupLine);
    }

    /**
     * Highlights an icon on a horizontal line, such as flash mode,
     * focus mode, etc. Checks that the popup is open in case it is
     * wrongly called, so that it doesn't crash the app.
     * @param highlight true to highlight the icon, false to clear its highlight.
     * @param goLeft direction to step when skipping non-button children.
     */
    private void highlightPopupIcon(boolean highlight, boolean goLeft) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "highlightPopupIcon");
            Log.d(TAG, "highlight: " + highlight);
            Log.d(TAG, "goLeft: " + goLeft);
        }
        if (!popupIsOpen()) { // Safety check
            clearSelectionState();
            return;
        }
        // un-highlight the current line before highlighting an icon on it
        highlightPopupLine(false, false);
        // NOTE(review): mHighlightedLine may still be null here if no line was ever highlighted
        // (highlightPopupLine only sets it when highlight==true) - confirm callers always
        // highlight a line before entering icon selection.
        int count = mHighlightedLine.getChildCount();
        boolean foundIcon = false;
        while (!foundIcon) {
            // Ensure we stay within our bounds:
            // (careful, modulo in Java will allow negative numbers, hence the line below:
            mPopupIcon= (mPopupIcon + count ) % count;
            View v = mHighlightedLine.getChildAt(mPopupIcon);
            if( MyDebug.LOG )
                Log.d(TAG, "row: " + mPopupIcon + " view: " + v);
            if (v instanceof ImageButton || v instanceof Button ) {
                if (highlight) {
                    v.setBackgroundColor(highlightColor);
                    //v.setAlpha(0.5f);
                    mHighlightedIcon = v;
                    mSelectingIcons = true;
                } else {
                    v.setBackgroundColor(Color.TRANSPARENT);
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "found icon at row: " + mPopupIcon);
                foundIcon = true;
            } else {
                mPopupIcon+= goLeft ? -1 : 1;
            }
        }
    }

    /**
     * Select the next line on the settings popup. Called by MainActivity
     * when receiving a remote control command.
     */
    private void nextPopupLine() {
        highlightPopupLine(false, false);
        mPopupLine++;
        highlightPopupLine(true, false);
    }

    /** Select the previous line on the settings popup. */
    private void previousPopupLine() {
        highlightPopupLine(false, true);
        mPopupLine--;
        highlightPopupLine(true, true);
    }

    /** Select the next icon on the currently highlighted popup line. */
    private void nextPopupIcon() {
        highlightPopupIcon(false, false);
        mPopupIcon++;
        highlightPopupIcon(true, false);
    }

    /** Select the previous icon on the currently highlighted popup line. */
    private void previousPopupIcon() {
        highlightPopupIcon(false, true);
        mPopupIcon--;
        highlightPopupIcon(true, true);
    }

    /**
     * Simulates a press on the currently selected icon
     */
    private void clickSelectedIcon() {
        if( MyDebug.LOG )
            Log.d(TAG, "clickSelectedIcon: " + mHighlightedIcon);
        if (mHighlightedIcon != null) {
            mHighlightedIcon.callOnClick();
        }
    }

    /**
     * Ensure all our selection tracking variables are cleared when we
     * exit menu selection (used in remote control mode)
     */
    private void clearSelectionState() {
        if( MyDebug.LOG )
            Log.d(TAG, "clearSelectionState");
        mPopupLine = 0;
        mPopupIcon = 0;
        mSelectingIcons = false;
        mSelectingLines = false;
        mHighlightedIcon= null;
        mHighlightedLine = null;
    }

    /**
     * Opens or closes the settings popup on the camera preview.
The popup that + * differs depending whether we're in photo or video mode + */ + public void togglePopupSettings() { + final ViewGroup popup_container = main_activity.findViewById(R.id.popup_container); + if( popupIsOpen() ) { + closePopup(); + return; + } + if( main_activity.getPreview().getCameraController() == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "camera not opened!"); + return; + } + else if( main_activity.getPreview().isPreviewStarting() ) { + if( MyDebug.LOG ) + Log.d(TAG, "preview is starting!"); + // risk if preview is currently opening on background thread - don't want to open popup menu that would allow being + // able to change settings that would then require restarting the preview + // still allow opening popup if preview is not started (but not starting in background) - in practice this means the + // preview failed to start, so can be useful to allow opening the popup to change modes (in case failing to start is + // due to a specific photo mode or other setting) + return; + } + + if( MyDebug.LOG ) + Log.d(TAG, "open popup"); + + main_activity.enablePopupOnBackPressedCallback(true); // so that back button will close the popup instead of exiting the application + + closeExposureUI(); + main_activity.getPreview().cancelTimer(); // best to cancel any timer, in case we take a photo while settings window is open, or when changing settings + main_activity.stopAudioListeners(); + + final long time_s = System.currentTimeMillis(); + + { + // prevent popup being transparent + popup_container.setBackgroundColor(Color.BLACK); + popup_container.setAlpha(0.9f); + } + + if( popup_view == null ) { + if( MyDebug.LOG ) + Log.d(TAG, "create new popup_view"); + test_ui_buttons.clear(); + popup_view = new PopupView(main_activity); + popup_container.addView(popup_view); + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "use cached popup_view"); + popup_view.setVisibility(View.VISIBLE); + } + popup_view_is_open = true; + + if 
(main_activity.getBluetoothRemoteControl().remoteEnabled()) { + initRemoteControlForPopup(); + } + + // need to call layoutUI to make sure the new popup is oriented correctly + // but need to do after the layout has been done, so we have a valid width/height to use + // n.b., even though we only need the portion of layoutUI for the popup container, there + // doesn't seem to be any performance benefit in only calling that part + popup_container.getViewTreeObserver().addOnGlobalLayoutListener( + new OnGlobalLayoutListener() { + @Override + public void onGlobalLayout() { + if( MyDebug.LOG ) + Log.d(TAG, "onGlobalLayout()"); + if( MyDebug.LOG ) + Log.d(TAG, "time after global layout: " + (System.currentTimeMillis() - time_s)); + layoutUI(true); + if( MyDebug.LOG ) + Log.d(TAG, "time after layoutUI: " + (System.currentTimeMillis() - time_s)); + // stop listening - only want to call this once! + popup_container.getViewTreeObserver().removeOnGlobalLayoutListener(this); + + UIPlacement ui_placement = computeUIPlacement(); + MainActivity.SystemOrientation system_orientation = main_activity.getSystemOrientation(); + float pivot_x; + float pivot_y; + switch( ui_placement ) { + case UIPLACEMENT_TOP: + if( main_activity.getPreview().getUIRotation() == 270 ) { + // portrait (when not locked) + pivot_x = 0.0f; + pivot_y = 1.0f; + } + else if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) { + pivot_x = 1.0f; + pivot_y = 1.0f; + } + else { + pivot_x = 0.0f; + pivot_y = 0.0f; + } + break; + case UIPLACEMENT_LEFT: + if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + pivot_x = 0.0f; + pivot_y = 1.0f; + } + else if( system_orientation == MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) { + pivot_x = 0.0f; + pivot_y = 0.0f; + } + else { + pivot_x = 1.0f; + pivot_y = 1.0f; + } + break; + default: + if( system_orientation == MainActivity.SystemOrientation.PORTRAIT ) { + pivot_x = 1.0f; + pivot_y = 1.0f; + } + else if( system_orientation 
== MainActivity.SystemOrientation.REVERSE_LANDSCAPE ) { + pivot_x = 0.0f; + pivot_y = 1.0f; + } + else { + pivot_x = 1.0f; + pivot_y = 0.0f; + } + break; + } + ScaleAnimation animation = new ScaleAnimation(0.0f, 1.0f, 0.0f, 1.0f, Animation.RELATIVE_TO_SELF, pivot_x, Animation.RELATIVE_TO_SELF, pivot_y); + animation.setDuration(200); + //popup_container.setAnimation(animation); + AlphaAnimation fade_animation = new AlphaAnimation(0.0f, 1.0f); + fade_animation.setDuration(200); + AnimationSet animation_set = new AnimationSet(false); + animation_set.addAnimation(animation); + animation_set.addAnimation(fade_animation); + popup_container.setAnimation(animation_set); + } + } + ); + + if( MyDebug.LOG ) + Log.d(TAG, "time to create popup: " + (System.currentTimeMillis() - time_s)); + } + + private void initRemoteControlForPopup() { + if( MyDebug.LOG ) + Log.d(TAG, "initRemoteControlForPopup"); + if( popupIsOpen() ) { // just in case + // For remote control, we want to highlight lines and icons on the popup view + // so that we can control those just with the up/down buttons and "OK" + clearSelectionState(); + remote_control_mode = true; + mSelectingLines = true; + highlightPopupLine(true, false); + } + } + + private void clearRemoteControlForPopup() { + if( MyDebug.LOG ) + Log.d(TAG, "clearRemoteControlForPopup"); + if( popupIsOpen() && remote_control_mode ) { + remote_control_mode = false; + + // reset highlighting + final ViewGroup popup_container = main_activity.findViewById(R.id.popup_container); + Rect scrollBounds = new Rect(); + popup_container.getDrawingRect(scrollBounds); + final LinearLayout inside = (LinearLayout) popup_container.getChildAt(0); + if( inside == null ) + return; // Safety check + View v = inside.getChildAt(mPopupLine); + if( v.isShown() && v instanceof LinearLayout ) { + if( MyDebug.LOG ) + Log.d(TAG, "reset " + mPopupLine + "th view: " + v); + v.setBackgroundColor(Color.TRANSPARENT); + v.setAlpha(1f); + } + if( mHighlightedLine != null ) { + v = 
mHighlightedLine.getChildAt(mPopupIcon); + if( v instanceof ImageButton || v instanceof Button ) { + v.setBackgroundColor(Color.TRANSPARENT); + } + } + /*for(int i=0;i= Build.VERSION_CODES.N && main_activity.getPreview().isVideoRecording() ) { + done = true; + main_activity.pauseVideo(); + } + if( !done ) { + main_activity.takePicture(false); + } + return true; + case "volume_focus": + if(keydown_volume_up && keydown_volume_down) { + if (MyDebug.LOG) + Log.d(TAG, "take photo rather than focus, as both volume keys are down"); + main_activity.takePicture(false); + } + else if (main_activity.getPreview().getCurrentFocusValue() != null && main_activity.getPreview().getCurrentFocusValue().equals("focus_mode_manual2")) { + if(keyCode == KeyEvent.KEYCODE_VOLUME_UP) + main_activity.changeFocusDistance(-1, false); + else + main_activity.changeFocusDistance(1, false); + } + else { + // important not to repeatedly request focus, even though main_activity.getPreview().requestAutoFocus() will cancel, as causes problem if key is held down (e.g., flash gets stuck on) + // also check DownTime vs EventTime to prevent repeated focusing whilst the key is held down + if(event.getDownTime() == event.getEventTime() && !main_activity.getPreview().isFocusWaiting()) { + if(MyDebug.LOG) + Log.d(TAG, "request focus due to volume key"); + main_activity.getPreview().requestAutoFocus(); + } + } + return true; + case "volume_zoom": + if(keyCode == KeyEvent.KEYCODE_VOLUME_UP) + main_activity.zoomIn(); + else + main_activity.zoomOut(); + return true; + case "volume_exposure": + if(main_activity.getPreview().getCameraController() != null) { + String value = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT); + boolean manual_iso = !value.equals(CameraController.ISO_DEFAULT); + if(keyCode == KeyEvent.KEYCODE_VOLUME_UP) { + if(manual_iso) { + main_activity.changeISO(1); + } + else + main_activity.changeExposure(1); + } + else { + if(manual_iso) { + 
main_activity.changeISO(-1); + } + else + main_activity.changeExposure(-1); + } + } + return true; + case "volume_auto_stabilise": + if( main_activity.supportsAutoStabilise() ) { + boolean auto_stabilise = sharedPreferences.getBoolean(PreferenceKeys.AutoStabilisePreferenceKey, false); + auto_stabilise = !auto_stabilise; + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.AutoStabilisePreferenceKey, auto_stabilise); + editor.apply(); + String message = main_activity.getResources().getString(R.string.preference_auto_stabilise) + ": " + main_activity.getResources().getString(auto_stabilise ? R.string.on : R.string.off); + main_activity.getPreview().showToast(main_activity.getChangedAutoStabiliseToastBoxer(), message, true); + main_activity.getApplicationInterface().getDrawPreview().updateSettings(); // because we cache the auto-stabilise setting + this.destroyPopup(); // need to recreate popup in order to update the auto-level checkbox + } + else if( !main_activity.deviceSupportsAutoStabilise() ) { + // n.b., need to check deviceSupportsAutoStabilise() - if we're in e.g. 
Panorama mode, we shouldn't display a toast (as then supportsAutoStabilise() returns false even if auto-level is supported on the device) + main_activity.getPreview().showToast(main_activity.getChangedAutoStabiliseToastBoxer(), R.string.auto_stabilise_not_supported); + } + return true; + case "volume_really_nothing": + // do nothing, but still return true so we don't change volume either + return true; + } + // else do nothing here, but still allow changing of volume (i.e., the default behaviour) + break; + } + case KeyEvent.KEYCODE_MENU: + { + // needed to support hardware menu button + // tested successfully on Samsung S3 (via RTL) + // see http://stackoverflow.com/questions/8264611/how-to-detect-when-user-presses-menu-key-on-their-android-device + main_activity.openSettings(); + return true; + } + case KeyEvent.KEYCODE_CAMERA: + { + if( event.getRepeatCount() == 0 ) { + main_activity.takePicture(false); + return true; + } + } + case KeyEvent.KEYCODE_FOCUS: + { + // important not to repeatedly request focus, even though main_activity.getPreview().requestAutoFocus() will cancel - causes problem with hardware camera key where a half-press means to focus + // also check DownTime vs EventTime to prevent repeated focusing whilst the key is held down - see https://sourceforge.net/p/opencamera/tickets/174/ , + // or same issue above for volume key focus + if( event.getDownTime() == event.getEventTime() && !main_activity.getPreview().isFocusWaiting() ) { + if( MyDebug.LOG ) + Log.d(TAG, "request focus due to focus key"); + main_activity.getPreview().requestAutoFocus(); + } + return true; + } + case KeyEvent.KEYCODE_ZOOM_IN: + case KeyEvent.KEYCODE_PLUS: + case KeyEvent.KEYCODE_NUMPAD_ADD: + { + main_activity.zoomIn(); + return true; + } + case KeyEvent.KEYCODE_ZOOM_OUT: + case KeyEvent.KEYCODE_MINUS: + case KeyEvent.KEYCODE_NUMPAD_SUBTRACT: + { + main_activity.zoomOut(); + return true; + } + case KeyEvent.KEYCODE_SPACE: + case KeyEvent.KEYCODE_NUMPAD_5: + { + if( 
isExposureUIOpen() && remote_control_mode ) { + commandMenuExposure(); + return true; + } + else if( popupIsOpen() && remote_control_mode ) { + commandMenuPopup(); + return true; + } + else if( event.getRepeatCount() == 0 ) { + main_activity.takePicture(false); + return true; + } + break; + } + case KeyEvent.KEYCODE_DPAD_UP: + case KeyEvent.KEYCODE_NUMPAD_8: + //case KeyEvent.KEYCODE_VOLUME_UP: // test + if( !remote_control_mode ) { + if( popupIsOpen() ) { + initRemoteControlForPopup(); + return true; + } + else if( isExposureUIOpen() ) { + initRemoteControlForExposureUI(); + return true; + } + } + else if( processRemoteUpButton() ) + return true; + break; + case KeyEvent.KEYCODE_DPAD_DOWN: + case KeyEvent.KEYCODE_NUMPAD_2: + //case KeyEvent.KEYCODE_VOLUME_DOWN: // test + if( !remote_control_mode ) { + if( popupIsOpen() ) { + initRemoteControlForPopup(); + return true; + } + else if( isExposureUIOpen() ) { + initRemoteControlForExposureUI(); + return true; + } + } + else if( processRemoteDownButton() ) + return true; + break; + case KeyEvent.KEYCODE_FUNCTION: + case KeyEvent.KEYCODE_NUMPAD_MULTIPLY: + togglePopupSettings(); + break; + case KeyEvent.KEYCODE_SLASH: + case KeyEvent.KEYCODE_NUMPAD_DIVIDE: + toggleExposureUI(); + break; + } + return false; + } + + public void onKeyUp(int keyCode, KeyEvent event) { + if( MyDebug.LOG ) + Log.d(TAG, "onKeyUp: " + keyCode); + if( keyCode == KeyEvent.KEYCODE_VOLUME_UP ) + keydown_volume_up = false; + else if( keyCode == KeyEvent.KEYCODE_VOLUME_DOWN ) + keydown_volume_down = false; + } + + /** If the exposure menu is open, selects a current line or option. Else does nothing. 
+ */ + public void commandMenuExposure() { + if( MyDebug.LOG ) + Log.d(TAG, "commandMenuExposure"); + if( isExposureUIOpen() ) { + if( isSelectingExposureUIElement() ) { + // Close Exposure UI if new press on MENU + // while already selecting + toggleExposureUI(); + } + else { + // Select current element in Exposure UI + selectExposureUILine(); + } + } + } + + /** If the popup menu is open, selects a current line or option. Else does nothing. + */ + public void commandMenuPopup() { + if( MyDebug.LOG ) + Log.d(TAG, "commandMenuPopup"); + if( popupIsOpen() ) { + if( selectingIcons() ) { + clickSelectedIcon(); + } + else { + highlightPopupIcon(true, false); + } + } + } + + /** Shows an information dialog, with a button to request not to show again. + * Note it's up to the caller to check whether the info_preference_key (to not show again) was + * already set. + * @param title_id Resource id for title string. + * @param info_id Resource id for dialog text string. + * @param info_preference_key Preference key to set in SharedPreferences if the user selects to + * not show the dialog again. + * @return The AlertDialog that was created. + */ + public AlertDialog showInfoDialog(int title_id, int info_id, final String info_preference_key) { + AlertDialog.Builder alertDialog = new AlertDialog.Builder(main_activity); + alertDialog.setTitle(title_id); + if( info_id != 0 ) + alertDialog.setMessage(info_id); + else { + // Setting an empty string here is still needed to allow setting a message later after it's been created. 
+ // Needed for MagneticSensor's dialog + alertDialog.setMessage(""); + } + alertDialog.setPositiveButton(android.R.string.ok, null); + alertDialog.setNegativeButton(R.string.dont_show_again, new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + if( MyDebug.LOG ) + Log.d(TAG, "user clicked dont_show_again for info dialog"); + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(info_preference_key, true); + editor.apply(); + } + }); + + //main_activity.showPreview(false); + //main_activity.setWindowFlagsForSettings(false); // set set_lock_protect to false, otherwise if screen is locked, user will need to unlock to see the info dialog! + + AlertDialog alert = alertDialog.create(); + // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead + alert.setOnDismissListener(new DialogInterface.OnDismissListener() { + @Override + public void onDismiss(DialogInterface arg0) { + if( MyDebug.LOG ) + Log.d(TAG, "info dialog dismissed"); + //main_activity.setWindowFlagsForCamera(); + //main_activity.showPreview(true); + } + }); + //main_activity.showAlert(alert); + alert.show(); + return alert; + } + + /** Returns a (possibly translated) user readable string for a white balance preference value. + * If the value is not recognised (this can happen for the old Camera API, some devices can + * have device-specific options), then the received value is returned. 
+ */ + public String getEntryForWhiteBalance(String value) { + int id = -1; + switch( value ) { + case CameraController.WHITE_BALANCE_DEFAULT: + id = R.string.white_balance_auto; + break; + case "cloudy-daylight": + id = R.string.white_balance_cloudy; + break; + case "daylight": + id = R.string.white_balance_daylight; + break; + case "fluorescent": + id = R.string.white_balance_fluorescent; + break; + case "incandescent": + id = R.string.white_balance_incandescent; + break; + case "shade": + id = R.string.white_balance_shade; + break; + case "twilight": + id = R.string.white_balance_twilight; + break; + case "warm-fluorescent": + id = R.string.white_balance_warm; + break; + case "manual": + id = R.string.white_balance_manual; + break; + default: + break; + } + String entry; + if( id != -1 ) { + entry = main_activity.getResources().getString(id); + } + else { + entry = value; + } + return entry; + } + + /** Returns a (possibly translated) user readable string for a scene mode preference value. + * If the value is not recognised (this can happen for the old Camera API, some devices can + * have device-specific options), then the received value is returned. 
+ */ + public String getEntryForSceneMode(String value) { + int id = -1; + switch( value ) { + case "action": + id = R.string.scene_mode_action; + break; + case "barcode": + id = R.string.scene_mode_barcode; + break; + case "beach": + id = R.string.scene_mode_beach; + break; + case "candlelight": + id = R.string.scene_mode_candlelight; + break; + case CameraController.SCENE_MODE_DEFAULT: + id = R.string.scene_mode_auto; + break; + case "fireworks": + id = R.string.scene_mode_fireworks; + break; + case "landscape": + id = R.string.scene_mode_landscape; + break; + case "night": + id = R.string.scene_mode_night; + break; + case "night-portrait": + id = R.string.scene_mode_night_portrait; + break; + case "party": + id = R.string.scene_mode_party; + break; + case "portrait": + id = R.string.scene_mode_portrait; + break; + case "snow": + id = R.string.scene_mode_snow; + break; + case "sports": + id = R.string.scene_mode_sports; + break; + case "steadyphoto": + id = R.string.scene_mode_steady_photo; + break; + case "sunset": + id = R.string.scene_mode_sunset; + break; + case "theatre": + id = R.string.scene_mode_theatre; + break; + default: + break; + } + String entry; + if( id != -1 ) { + entry = main_activity.getResources().getString(id); + } + else { + entry = value; + } + return entry; + } + + /** Returns a (possibly translated) user readable string for a color effect preference value. + * If the value is not recognised (this can happen for the old Camera API, some devices can + * have device-specific options), then the received value is returned. 
+ */ + public String getEntryForColorEffect(String value) { + int id = -1; + switch( value ) { + case "aqua": + id = R.string.color_effect_aqua; + break; + case "blackboard": + id = R.string.color_effect_blackboard; + break; + case "mono": + id = R.string.color_effect_mono; + break; + case "negative": + id = R.string.color_effect_negative; + break; + case CameraController.COLOR_EFFECT_DEFAULT: + id = R.string.color_effect_none; + break; + case "posterize": + id = R.string.color_effect_posterize; + break; + case "sepia": + id = R.string.color_effect_sepia; + break; + case "solarize": + id = R.string.color_effect_solarize; + break; + case "whiteboard": + id = R.string.color_effect_whiteboard; + break; + default: + break; + } + String entry; + if( id != -1 ) { + entry = main_activity.getResources().getString(id); + } + else { + entry = value; + } + return entry; + } + + /** Returns a (possibly translated) user readable string for an antibanding preference value. + * If the value is not recognised, then the received value is returned. + */ + public String getEntryForAntiBanding(String value) { + int id = -1; + switch( value ) { + case CameraController.ANTIBANDING_DEFAULT: + id = R.string.anti_banding_auto; + break; + case "50hz": + id = R.string.anti_banding_50hz; + break; + case "60hz": + id = R.string.anti_banding_60hz; + break; + case "off": + id = R.string.anti_banding_off; + break; + default: + break; + } + String entry; + if( id != -1 ) { + entry = main_activity.getResources().getString(id); + } + else { + entry = value; + } + return entry; + } + + /** Returns a (possibly translated) user readable string for an noise reduction mode preference value. + * If the value is not recognised, then the received value is returned. + * Also used for edge mode. 
+ */ + public String getEntryForNoiseReductionMode(String value) { + int id = -1; + switch( value ) { + case CameraController.NOISE_REDUCTION_MODE_DEFAULT: + id = R.string.noise_reduction_mode_default; + break; + case "off": + id = R.string.noise_reduction_mode_off; + break; + case "minimal": + id = R.string.noise_reduction_mode_minimal; + break; + case "fast": + id = R.string.noise_reduction_mode_fast; + break; + case "high_quality": + id = R.string.noise_reduction_mode_high_quality; + break; + default: + break; + } + String entry; + if( id != -1 ) { + entry = main_activity.getResources().getString(id); + } + else { + entry = value; + } + return entry; + } + + View getTopIcon() { + return this.top_icon; + } + + // for testing + public View getUIButton(String key) { + if( MyDebug.LOG ) { + Log.d(TAG, "getPopupButton(" + key + "): " + test_ui_buttons.get(key)); + Log.d(TAG, "this: " + this); + Log.d(TAG, "popup_buttons: " + test_ui_buttons); + } + return test_ui_buttons.get(key); + } + + Map getTestUIButtonsMap() { + return test_ui_buttons; + } + + public PopupView getPopupView() { + return popup_view; + } + + public boolean testGetRemoteControlMode() { + return remote_control_mode; + } + + public int testGetPopupLine() { + return mPopupLine; + } + + public int testGetPopupIcon() { + return mPopupIcon; + } + + public int testGetExposureLine() { + return mExposureLine; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ui/ManualSeekbars.java b/app/src/main/java/net/sourceforge/opencamera/ui/ManualSeekbars.java new file mode 100644 index 0000000..4016bff --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ui/ManualSeekbars.java @@ -0,0 +1,256 @@ +package net.sourceforge.opencamera.ui; + +import android.util.Log; +import android.widget.SeekBar; + +import net.sourceforge.opencamera.MyDebug; + +import java.util.ArrayList; +import java.util.List; + +/** This contains functionality related to the seekbars for manual controls. 
+ */ +public class ManualSeekbars { + private static final String TAG = "ManualSeekbars"; + + private static final int manual_n = 1000; // the number of values on the seekbar used for manual focus distance + + public static double seekbarScaling(double frac) { + // For various seekbars, we want to use a non-linear scaling, so user has more control over smaller values + return (Math.pow(100.0, frac) - 1.0) / 99.0; + } + + private static double seekbarScalingInverse(double scaling) { + return Math.log(99.0*scaling + 1.0) / Math.log(100.0); + } + + public static void setProgressSeekbarScaled(SeekBar seekBar, double min_value, double max_value, double value) { + seekBar.setMax(manual_n); + double scaling = (value - min_value)/(max_value - min_value); + double frac = seekbarScalingInverse(scaling); + int new_value = (int)(frac*manual_n + 0.5); // add 0.5 for rounding + if( new_value < 0 ) + new_value = 0; + else if( new_value > manual_n ) + new_value = manual_n; + seekBar.setProgress(new_value); + } + + /*public static long exponentialScaling(double frac, double min, double max) { + // We use S(frac) = A * e^(s * frac) + // We want S(0) = min, S(1) = max + // So A = min + // and Ae^s = max + // => s = ln(max/min) + double s = Math.log(max / min); + return (long)(min * Math.exp(s * frac) + 0.5f); // add 0.5f so we round to nearest + } + + private static double exponentialScalingInverse(double value, double min, double max) { + double s = Math.log(max / min); + return Math.log(value / min) / s; + } + + public void setProgressSeekbarExponential(SeekBar seekBar, double min_value, double max_value, double value) { + seekBar.setMax(manual_n); + double frac = exponentialScalingInverse(value, min_value, max_value); + int new_value = (int)(frac*manual_n + 0.5); // add 0.5 for rounding + if( new_value < 0 ) + new_value = 0; + else if( new_value > manual_n ) + new_value = manual_n; + seekBar.setProgress(new_value); + }*/ + + private List seekbar_values_white_balance; + private 
List seekbar_values_iso; + private List seekbar_values_shutter_speed; + + public int getWhiteBalanceTemperature(int progress) { + return seekbar_values_white_balance.get(progress).intValue(); + } + + public int getISO(int progress) { + return seekbar_values_iso.get(progress).intValue(); + } + + public long getExposureTime(int progress) { + return seekbar_values_shutter_speed.get(progress); + } + + static private void setProgressBarToClosest(SeekBar seekBar, List seekbar_values, long current_value) { + if( MyDebug.LOG ) + Log.d(TAG, "setProgressBarToClosest"); + int closest_indx = -1; + long min_dist = 0; + for(int i=0;i(); + List seekbar_values = seekbar_values_white_balance; + + // min to max, per 100 + for(long i=min_white_balance;i(); + List seekbar_values = seekbar_values_iso; + + seekbar_values.add(min_iso); + + // 1 to 99, per 1 + for(long i=1;i<100;i++) { + if( i > min_iso && i < max_iso ) + seekbar_values.add(i); + } + + // 100 to 500, per 5 + for(long i=100;i<500;i+=5) { + if( i > min_iso && i < max_iso ) + seekbar_values.add(i); + } + + // 500 to 1000, per 10 + for(long i=500;i<1000;i+=10) { + if( i > min_iso && i < max_iso ) + seekbar_values.add(i); + } + + // 1000 to 5000, per 50 + for(long i=1000;i<5000;i+=50) { + if( i > min_iso && i < max_iso ) + seekbar_values.add(i); + } + + // 5000 to 10000, per 100 + for(long i=5000;i<10000;i+=100) { + if( i > min_iso && i < max_iso ) + seekbar_values.add(i); + } + + seekbar_values.add(max_iso); + + seekBar.setMax(seekbar_values.size()-1); + + setProgressBarToClosest(seekBar, seekbar_values, current_iso); + } + + public void setProgressSeekbarShutterSpeed(SeekBar seekBar, long min_exposure_time, long max_exposure_time, long current_exposure_time) { + if( MyDebug.LOG ) + Log.d(TAG, "setProgressSeekbarShutterSpeed"); + seekbar_values_shutter_speed = new ArrayList<>(); + List seekbar_values = seekbar_values_shutter_speed; + + seekbar_values.add(min_exposure_time); + + // 1/10,000 to 1/1,000 + for(int i=10;i>=1;i--) 
{ + long exposure = 1000000000L/(i* 1000L); + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 1/900 to 1/100 + for(int i=9;i>=1;i--) { + long exposure = 1000000000L/(i* 100L); + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 1/90 to 1/60 (steps of 10) + for(int i=9;i>=6;i--) { + long exposure = 1000000000L/(i* 10L); + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 1/50 to 1/15 (steps of 5) + for(int i=50;i>=15;i-=5) { + long exposure = 1000000000L/i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 0.1 to 1.9, per 1.0s + for(int i=1;i<20;i++) { + long exposure = (1000000000L/10)*i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 2 to 19, per 1s + for(int i=2;i<20;i++) { + long exposure = 1000000000L*i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 20 to 60, per 5s + for(int i=20;i<60;i+=5) { + long exposure = 1000000000L*i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // n.b., very long exposure times are not widely supported, but requested at https://sourceforge.net/p/opencamera/code/merge-requests/49/ + + // 60 to 180, per 15s + for(int i=60;i<180;i+=15) { + long exposure = 1000000000L*i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 180 to 600, per 60s + for(int i=180;i<600;i+=60) { + long exposure = 1000000000L*i; + if( exposure > min_exposure_time && exposure < max_exposure_time ) + seekbar_values.add(exposure); + } + + // 600 to 1200, per 120s + for(int i=600;i<=1200;i+=120) { + long exposure = 1000000000L*i; + if( exposure > min_exposure_time && exposure < 
max_exposure_time ) + seekbar_values.add(exposure); + } + + seekbar_values.add(max_exposure_time); + + seekBar.setMax(seekbar_values.size()-1); + + setProgressBarToClosest(seekBar, seekbar_values, current_exposure_time); + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ui/MyEditTextPreference.java b/app/src/main/java/net/sourceforge/opencamera/ui/MyEditTextPreference.java new file mode 100644 index 0000000..49d466f --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ui/MyEditTextPreference.java @@ -0,0 +1,157 @@ +package net.sourceforge.opencamera.ui; + +import android.content.Context; +import android.content.res.TypedArray; +import android.os.Parcel; +import android.os.Parcelable; +import android.preference.DialogPreference; +import android.text.TextUtils; +import android.util.AttributeSet; +import android.view.View; +import android.view.inputmethod.EditorInfo; +import android.widget.EditText; +import android.widget.TextView; + +import net.sourceforge.opencamera.R; + +/** This contains a custom preference for an EditTextPreference. We do all this to fix the problem + * that Android's EditTextPreference doesn't satisfy Google's own emoji policy, due to the + * programmatically allocated EditText (which means AppCompat can't update it to support emoji + * properly). This is fixed with AndroidX (androidx.preference.*), but switching to that is a major + * change. + * Once we have switched to AndroidX's preference libraries, we can switch back to + * EditTextPreference (but check that the emoji strings still work on Android 10 or earlier!) 
+ */ +public class MyEditTextPreference extends DialogPreference { + //private static final String TAG = "MyEditTextPreference"; + + private EditText edittext; + + private String dialogMessage = ""; + private final int inputType; + + private String value; // current saved value of this preference (note that this is intentionally not updated when the seekbar changes, as we don't save until the user clicks ok) + private boolean value_set; + + public MyEditTextPreference(Context context, AttributeSet attrs) { + super(context, attrs); + + String namespace = "http://schemas.android.com/apk/res/android"; + + // can't get both strings and resources to work - only support resources + int id = attrs.getAttributeResourceValue(namespace, "dialogMessage", 0); + if( id > 0 ) + this.dialogMessage = context.getString(id); + + this.inputType = attrs.getAttributeIntValue(namespace, "inputType", EditorInfo.TYPE_NULL); + + setDialogLayoutResource(R.layout.myedittextpreference); + } + + @Override + protected void onBindDialogView(View view) { + super.onBindDialogView(view); + + this.edittext = view.findViewById(R.id.myedittextpreference_edittext); + this.edittext.setInputType(inputType); + + TextView textView = view.findViewById(R.id.myedittextpreference_summary); + textView.setText(dialogMessage); + + if( value != null ) { + this.edittext.setText(value); + } + } + + @Override + protected void onDialogClosed(boolean positiveResult) { + super.onDialogClosed(positiveResult); + + if( positiveResult ) { + String new_value = edittext.getText().toString(); + if( callChangeListener(new_value) ) { + setValue(new_value); + } + } + } + + public String getText() { + return value; + } + + private void setValue(String value) { + final boolean changed = !TextUtils.equals(this.value, value); + if( changed || !value_set ) { + this.value = value; + value_set = true; + persistString(value); + if( changed ) { + notifyChanged(); + } + } + } + + @Override + protected Object onGetDefaultValue(TypedArray a, 
int index) { + return a.getString(index); + } + + @Override + protected void onSetInitialValue(boolean restoreValue, Object defaultValue) { + setValue(restoreValue ? getPersistedString(value) : (String) defaultValue); + } + + @Override + protected Parcelable onSaveInstanceState() { + final Parcelable superState = super.onSaveInstanceState(); + if( isPersistent() ) { + return superState; + } + + final SavedState state = new SavedState(superState); + state.value = value; + return state; + } + + @Override + protected void onRestoreInstanceState(Parcelable state) { + if( state == null || !state.getClass().equals(SavedState.class) ) { + super.onRestoreInstanceState(state); + return; + } + + SavedState myState = (SavedState)state; + super.onRestoreInstanceState(myState.getSuperState()); + setValue(myState.value); + } + + private static class SavedState extends BaseSavedState { + String value; + + SavedState(Parcel source) { + super(source); + value = source.readString(); + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + super.writeToParcel(dest, flags); + dest.writeString(value); + } + + SavedState(Parcelable superState) { + super(superState); + } + + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator<>() { + public SavedState createFromParcel(Parcel in) { + return new SavedState(in); + } + + public SavedState[] newArray(int size) { + return new SavedState[size]; + } + }; + } +} diff --git a/app/src/main/java/net/sourceforge/opencamera/ui/PopupView.java b/app/src/main/java/net/sourceforge/opencamera/ui/PopupView.java new file mode 100644 index 0000000..362eefc --- /dev/null +++ b/app/src/main/java/net/sourceforge/opencamera/ui/PopupView.java @@ -0,0 +1,1884 @@ +package net.sourceforge.opencamera.ui; + +import net.sourceforge.opencamera.MainActivity; +import net.sourceforge.opencamera.MyApplicationInterface; +import net.sourceforge.opencamera.MyDebug; +import net.sourceforge.opencamera.PreferenceKeys; +import 
net.sourceforge.opencamera.R; +import net.sourceforge.opencamera.cameracontroller.CameraController; +import net.sourceforge.opencamera.preview.Preview; + +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.content.SharedPreferences; +import android.graphics.Bitmap; +import android.graphics.Color; +import android.graphics.Typeface; +import android.hardware.camera2.CameraExtensionCharacteristics; +import android.os.Handler; +import android.preference.PreferenceManager; +import android.util.Log; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.view.ViewTreeObserver.OnGlobalLayoutListener; +import android.widget.Button; +import android.widget.CheckBox; +import android.widget.CompoundButton; +import android.widget.HorizontalScrollView; +import android.widget.ImageButton; +import android.widget.LinearLayout; +import android.widget.RadioButton; +import android.widget.RadioGroup; +import android.widget.ScrollView; +import android.widget.TextView; +import android.widget.ImageView.ScaleType; + +import androidx.appcompat.widget.SwitchCompat; + +/** This defines the UI for the "popup" button, that provides quick access to a + * range of options. 
+ */ +public class PopupView extends LinearLayout { + private static final String TAG = "PopupView"; + public static final float ALPHA_BUTTON_SELECTED = 1.0f; + public static final float ALPHA_BUTTON = 0.6f; // 0.4f tends to be hard to see in bright light + + private static final float button_text_size_dip = 12.0f; + private static final float title_text_size_dip = 17.0f; + private static final float standard_text_size_dip = 16.0f; + private static final float arrow_text_size_dip = 16.0f; + private static final float arrow_button_w_dp = 60.0f; + private static final float arrow_button_h_dp = 48.0f; // should be at least 48.0 (Google Play's prelaunch warnings) + private final int arrow_button_w; + private final int arrow_button_h; + + private int total_width_dp; + + private int picture_size_index = -1; + private int nr_mode_index = -1; + private int burst_n_images_index = -1; + private int video_size_index = -1; + private int video_capture_rate_index = -1; + private int timer_index = -1; + private int repeat_mode_index = -1; + private int grid_index = -1; + + @SuppressWarnings("FieldCanBeLocal") + private final DecimalFormat decimal_format_1dp_force0 = new DecimalFormat("0.0"); + + public PopupView(Context context) { + super(context); + if( MyDebug.LOG ) + Log.d(TAG, "new PopupView: " + this); + + final long debug_time = System.nanoTime(); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 1: " + (System.nanoTime() - debug_time)); + this.setOrientation(LinearLayout.VERTICAL); + + final float scale = getResources().getDisplayMetrics().density; + + arrow_button_w = (int) (arrow_button_w_dp * scale + 0.5f); // convert dps to pixels + arrow_button_h = (int) (arrow_button_h_dp * scale + 0.5f); // convert dps to pixels + + final MainActivity main_activity = (MainActivity)this.getContext(); + + boolean small_screen = false; + total_width_dp = 280; + int max_width_dp = main_activity.getMainUI().getMaxHeightDp(false); + if( total_width_dp > max_width_dp ) { + total_width_dp = 
max_width_dp; + small_screen = true; + } + if( MyDebug.LOG ) { + Log.d(TAG, "max_width_dp: " + max_width_dp); + Log.d(TAG, "total_width_dp: " + total_width_dp); + Log.d(TAG, "small_screen: " + small_screen); + } + + /*{ + int total_width = (int) (total_width_dp * scale + 0.5f); // convert dps to pixels; + if( MyDebug.LOG ) + Log.d(TAG, "total_width: " + total_width); + ViewGroup.LayoutParams params = new LayoutParams( + total_width, + LayoutParams.WRAP_CONTENT); + this.setLayoutParams(params); + }*/ + + final Preview preview = main_activity.getPreview(); + boolean is_camera_extension = main_activity.getApplicationInterface().isCameraExtensionPref(); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 2: " + (System.nanoTime() - debug_time)); + + if( !main_activity.getMainUI().showCycleFlashIcon() ) + { + List supported_flash_values = preview.getSupportedFlashValues(); + if( preview.isVideo() && supported_flash_values != null ) { + // filter flash modes we don't want to show + List filter = new ArrayList<>(); + for(String flash_value : supported_flash_values) { + if( Preview.isFlashSupportedForVideo(flash_value) ) + filter.add(flash_value); + } + supported_flash_values = filter; + } + if( supported_flash_values != null && supported_flash_values.size() > 1 ) { // no point showing flash options if only one available! 
+ addButtonOptionsToPopup(supported_flash_values, R.array.flash_icons, R.array.flash_values, getResources().getString(R.string.flash_mode), preview.getCurrentFlashValue(), 0, "TEST_FLASH", new ButtonOptionsPopupListener() { + @Override + public void onClick(String option) { + if( MyDebug.LOG ) + Log.d(TAG, "clicked flash: " + option); + preview.updateFlash(option); + main_activity.getMainUI().setPopupIcon(); + main_activity.getMainUI().destroyPopup(); // need to recreate popup for new selection + } + }); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 3: " + (System.nanoTime() - debug_time)); + + //if( preview.isVideo() && preview.isTakingPhoto() ) { + if( preview.isVideo() && preview.isVideoRecording() ) { + // don't add any more options + } + else { + // make a copy of getSupportedFocusValues() so we can modify it + List supported_focus_values = preview.getSupportedFocusValues(); + MyApplicationInterface.PhotoMode photo_mode = main_activity.getApplicationInterface().getPhotoMode(); + if( !preview.isVideo() && photo_mode == MyApplicationInterface.PhotoMode.FocusBracketing ) { + // don't show focus modes in focus bracketing mode (as we'll always run in manual focus mode) + supported_focus_values = null; + } + if( supported_focus_values != null ) { + supported_focus_values = new ArrayList<>(supported_focus_values); + // only show appropriate continuous focus mode + if( preview.isVideo() ) { + supported_focus_values.remove("focus_mode_continuous_picture"); + } + else { + supported_focus_values.remove("focus_mode_continuous_video"); + } + } + addButtonOptionsToPopup(supported_focus_values, R.array.focus_mode_icons, R.array.focus_mode_values, getResources().getString(R.string.focus_mode), preview.getCurrentFocusValue(), 0, "TEST_FOCUS", new ButtonOptionsPopupListener() { + @Override + public void onClick(String option) { + if( MyDebug.LOG ) + Log.d(TAG, "clicked focus: " + option); + preview.updateFocus(option, false, true); + 
main_activity.getMainUI().destroyPopup(); // need to recreate popup for new selection + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 4: " + (System.nanoTime() - debug_time)); + + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + + //final boolean use_expanded_menu = true; + final boolean use_expanded_menu = false; + final List photo_modes = new ArrayList<>(); + final List photo_mode_values = new ArrayList<>(); + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_standard_full : R.string.photo_mode_standard) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.Standard ); + if( main_activity.supportsNoiseReduction() ) { + photo_modes.add(getResources().getString(use_expanded_menu ? R.string.photo_mode_noise_reduction_full : R.string.photo_mode_noise_reduction)); + photo_mode_values.add(MyApplicationInterface.PhotoMode.NoiseReduction); + } + if( main_activity.supportsDRO() ) { + photo_modes.add( getResources().getString(R.string.photo_mode_dro) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.DRO ); + } + if( main_activity.supportsHDR() ) { + photo_modes.add( getResources().getString(R.string.photo_mode_hdr) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.HDR ); + } + if( main_activity.supportsPanorama() ) { + photo_modes.add(getResources().getString(use_expanded_menu ? R.string.photo_mode_panorama_full : R.string.photo_mode_panorama)); + photo_mode_values.add(MyApplicationInterface.PhotoMode.Panorama); + } + if( main_activity.supportsFastBurst() ) { + photo_modes.add(getResources().getString(use_expanded_menu ? R.string.photo_mode_fast_burst_full : R.string.photo_mode_fast_burst)); + photo_mode_values.add(MyApplicationInterface.PhotoMode.FastBurst); + } + if( main_activity.supportsExpoBracketing() ) { + photo_modes.add( getResources().getString(use_expanded_menu ? 
R.string.photo_mode_expo_bracketing_full : R.string.photo_mode_expo_bracketing) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.ExpoBracketing ); + } + if( main_activity.supportsFocusBracketing() ) { + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_focus_bracketing_full : R.string.photo_mode_focus_bracketing) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.FocusBracketing ); + } + if( main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_AUTOMATIC) ) { + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_x_auto_full : R.string.photo_mode_x_auto) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.X_Auto ); + } + if( main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_HDR) ) { + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_x_hdr_full : R.string.photo_mode_x_hdr) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.X_HDR ); + } + if( main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_NIGHT) ) { + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_x_night_full : R.string.photo_mode_x_night) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.X_Night ); + } + if( main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BOKEH) ) { + photo_modes.add( getResources().getString(use_expanded_menu ? R.string.photo_mode_x_bokeh_full : R.string.photo_mode_x_bokeh) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.X_Bokeh ); + } + if( main_activity.supportsCameraExtension(CameraExtensionCharacteristics.EXTENSION_BEAUTY) ) { + photo_modes.add( getResources().getString(use_expanded_menu ? 
R.string.photo_mode_x_beauty_full : R.string.photo_mode_x_beauty) ); + photo_mode_values.add( MyApplicationInterface.PhotoMode.X_Beauty ); + } + if( preview.isVideo() ) { + // only show photo modes when in photo mode, not video mode! + // (photo modes not supported for photo snapshot whilst recording video) + } + else if( photo_modes.size() > 1 ) { + String current_mode = null; + for(int i=0;i 0 ) { + nr_mode_index--; + update(); + return nr_mode_index; + } + return -1; + } + @Override + public int onClickNext() { + if( nr_mode_index != -1 && nr_mode_index < nr_mode_values.length-1 ) { + nr_mode_index++; + update(); + return nr_mode_index; + } + return -1; + } + }); + } + + if( main_activity.supportsAutoStabilise() && !main_activity.getMainUI().showAutoLevelIcon() ) { + // don't show auto-stabilise checkbox on popup if there's an on-screen icon + CheckBox checkBox = new CheckBox(main_activity); + checkBox.setText(getResources().getString(R.string.preference_auto_stabilise)); + checkBox.setTextSize(TypedValue.COMPLEX_UNIT_SP, standard_text_size_dip); + checkBox.setTextColor(Color.WHITE); + { + // align the checkbox a bit better + LayoutParams params = new LayoutParams( + LayoutParams.MATCH_PARENT, + LayoutParams.MATCH_PARENT + ); + final int left_padding = (int) (10 * scale + 0.5f); // convert dps to pixels + params.setMargins(left_padding, 0, 0, 0); + checkBox.setLayoutParams(params); + } + + boolean auto_stabilise = sharedPreferences.getBoolean(PreferenceKeys.AutoStabilisePreferenceKey, false); + if( auto_stabilise ) + checkBox.setChecked(auto_stabilise); + checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { + public void onCheckedChanged(CompoundButton buttonView, + boolean isChecked) { + main_activity.clickedAutoLevel(); + } + }); + + this.addView(checkBox); + } + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 8: " + (System.nanoTime() - debug_time)); + + if( !preview.isVideo() && photo_mode != 
MyApplicationInterface.PhotoMode.Panorama ) { + // Only show photo resolutions in photo mode - even if photo snapshots whilst recording video is supported, the + // resolutions for that won't match what the user has requested for photo mode resolutions. + // And Panorama mode chooses its own resolution. + final List picture_sizes = new ArrayList<>(preview.getSupportedPictureSizes(true)); + // take a copy so that we can reorder + // picture_sizes is sorted high to low, but we want to order low to high + Collections.reverse(picture_sizes); + picture_size_index = -1; + CameraController.Size current_picture_size = preview.getCurrentPictureSize(); + final List picture_size_strings = new ArrayList<>(); + for(int i=0;i 0 ) { + picture_size_index--; + update(); + return picture_size_index; + } + return -1; + } + + @Override + public int onClickNext() { + if( picture_size_index != -1 && picture_size_index < picture_sizes.size() - 1 ) { + picture_size_index++; + update(); + return picture_size_index; + } + return -1; + } + }); + } + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 9: " + (System.nanoTime() - debug_time)); + + if( preview.isVideo() ) { + // only show video resolutions in video mode + //final List video_sizes = preview.getVideoQualityHander().getSupportedVideoQuality(); + //video_size_index = preview.getVideoQualityHander().getCurrentVideoQualityIndex(); + List video_sizes = preview.getSupportedVideoQuality(main_activity.getApplicationInterface().getVideoFPSPref()); + if( video_sizes.isEmpty() ) { + Log.e(TAG, "can't find any supported video sizes for current fps!"); + // fall back to unfiltered list + video_sizes = preview.getVideoQualityHander().getSupportedVideoQuality(); + } + // take a copy so that we can reorder + video_sizes = new ArrayList<>(video_sizes); + // video_sizes is sorted high to low, but we want to order low to high + Collections.reverse(video_sizes); + + final List video_sizes_f = video_sizes; + video_size_index = video_sizes.size()-1; // 
default to largest (just in case current size not found??) + for(int i=0;i video_size_strings = new ArrayList<>(); + for(String video_size : video_sizes) { + String quality_string = preview.getCamcorderProfileDescriptionShort(video_size); + video_size_strings.add(quality_string); + } + addArrayOptionsToPopup(video_size_strings, getResources().getString(R.string.video_quality), false, false, video_size_index, false, "VIDEO_RESOLUTIONS", new ArrayOptionsPopupListener() { + final Handler handler = new Handler(); + final Runnable update_runnable = new Runnable() { + @Override + public void run() { + if( MyDebug.LOG ) + Log.d(TAG, "update settings due to video resolution change"); + main_activity.updateForSettings(true, "", true, false); // keep the popupview open + } + }; + + private void update() { + if( video_size_index == -1 ) + return; + String quality = video_sizes_f.get(video_size_index); + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.getVideoQualityPreferenceKey(preview.getCameraId(), main_activity.getApplicationInterface().getCameraIdSPhysicalPref(), main_activity.getApplicationInterface().fpsIsHighSpeed()), quality); + editor.apply(); + + // make it easier to scroll through the list of resolutions without a pause each time + handler.removeCallbacks(update_runnable); + handler.postDelayed(update_runnable, 400); + } + + @Override + public int onClickPrev() { + if( video_size_index != -1 && video_size_index > 0 ) { + video_size_index--; + update(); + return video_size_index; + } + return -1; + } + + @Override + public int onClickNext() { + if( video_size_index != -1 && video_size_index < video_sizes_f.size() - 1 ) { + video_size_index++; + update(); + return video_size_index; + } + return -1; + } + }); + } + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 10: " + (System.nanoTime() - debug_time)); + + // apertures 
probably not supported for camera extensions anyway + if( preview.getSupportedApertures() != null && !is_camera_extension ) { + if( MyDebug.LOG ) + Log.d(TAG, "add apertures"); + + addTitleToPopup(getResources().getString(R.string.aperture)); + + final List apertures = new ArrayList<>(); + final List apertures_strings = new ArrayList<>(); + float current_aperture = main_activity.getApplicationInterface().getAperturePref(); + String prefix = "F/"; + + boolean found_default = false; + String current_aperture_s = ""; + for(float aperture : preview.getSupportedApertures()) { + apertures.add(aperture); + String aperture_string = prefix + decimal_format_1dp_force0.format(aperture); + apertures_strings.add(aperture_string); + if( current_aperture == aperture ) { + found_default = true; + current_aperture_s = aperture_string; + } + } + + if( !found_default ) { + // read from Camera API + if( preview.getCameraController() != null && preview.getCameraController().captureResultHasAperture() ) { + current_aperture = preview.getCameraController().captureResultAperture(); + current_aperture_s = prefix + decimal_format_1dp_force0.format(current_aperture); + } + } + + addButtonOptionsToPopup(apertures_strings, -1, -1, "", current_aperture_s, 0, "TEST_APERTURE", new ButtonOptionsPopupListener() { + @Override + public void onClick(String option) { + if( MyDebug.LOG ) + Log.d(TAG, "clicked aperture: " + option); + int index = apertures_strings.indexOf(option); + if( index != -1 ) { + float new_aperture = apertures.get(index); + if( MyDebug.LOG ) + Log.d(TAG, "new_aperture: " + new_aperture); + preview.showToast(null, getResources().getString(R.string.aperture) + ": " + option, true); + main_activity.getApplicationInterface().setAperture(new_aperture); + if( preview.getCameraController() != null ) { + preview.getCameraController().setAperture(new_aperture); + } + } + else { + Log.e(TAG, "unknown aperture: " + option); + } + main_activity.getMainUI().destroyPopup(); // need to recreate 
popup for new selection + } + }); + } + + if( !preview.isVideo() && photo_mode == MyApplicationInterface.PhotoMode.FastBurst ) { + if( MyDebug.LOG ) + Log.d(TAG, "add fast burst options"); + + final String [] all_burst_mode_values = getResources().getStringArray(R.array.preference_fast_burst_n_images_values); + String [] all_burst_mode_entries = getResources().getStringArray(R.array.preference_fast_burst_n_images_entries); + + //String [] burst_mode_values = new String[all_burst_mode_values.length]; + //String [] burst_mode_entries = new String[all_burst_mode_entries.length]; + if( all_burst_mode_values.length != all_burst_mode_entries.length ) { + Log.e(TAG, "preference_fast_burst_n_images_values and preference_fast_burst_n_images_entries are different lengths"); + throw new RuntimeException(); + } + + int max_burst_images = main_activity.getApplicationInterface().getImageSaver().getQueueSize()+1; + max_burst_images = Math.max(2, max_burst_images); // make sure we at least allow the minimum of 2 burst images! 
+ if( MyDebug.LOG ) + Log.d(TAG, "max_burst_images: " + max_burst_images); + + // filter number of burst images - don't allow more than max_burst_images + List burst_mode_values_l = new ArrayList<>(); + List burst_mode_entries_l = new ArrayList<>(); + for(int i=0;i max_burst_images ) { + if( MyDebug.LOG ) + Log.d(TAG, "n_images " + n_images + " is more than max_burst_images: " + max_burst_images); + continue; + } + if( MyDebug.LOG ) + Log.d(TAG, "n_images " + n_images); + burst_mode_values_l.add( all_burst_mode_values[i] ); + burst_mode_entries_l.add( all_burst_mode_entries[i] ); + } + final String [] burst_mode_values = burst_mode_values_l.toArray(new String[0]); + final String [] burst_mode_entries = burst_mode_entries_l.toArray(new String[0]); + + String burst_mode_value = sharedPreferences.getString(PreferenceKeys.FastBurstNImagesPreferenceKey, "5"); + burst_n_images_index = Arrays.asList(burst_mode_values).indexOf(burst_mode_value); + if( burst_n_images_index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find burst_mode_value " + burst_mode_value + " in burst_mode_values!"); + burst_n_images_index = 0; + } + addArrayOptionsToPopup(Arrays.asList(burst_mode_entries), getResources().getString(R.string.preference_fast_burst_n_images), true, false, burst_n_images_index, false, "FAST_BURST_N_IMAGES", new ArrayOptionsPopupListener() { + private void update() { + if( burst_n_images_index == -1 ) + return; + String new_burst_mode_value = burst_mode_values[burst_n_images_index]; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.FastBurstNImagesPreferenceKey, new_burst_mode_value); + editor.apply(); + if( preview.getCameraController() != null ) { + preview.getCameraController().setBurstNImages(main_activity.getApplicationInterface().getBurstNImages()); + } + } + @Override + public int onClickPrev() { + if( 
burst_n_images_index != -1 && burst_n_images_index > 0 ) { + burst_n_images_index--; + update(); + return burst_n_images_index; + } + return -1; + } + @Override + public int onClickNext() { + if( burst_n_images_index != -1 && burst_n_images_index < burst_mode_values.length-1 ) { + burst_n_images_index++; + update(); + return burst_n_images_index; + } + return -1; + } + }); + } + else if( !preview.isVideo() && photo_mode == MyApplicationInterface.PhotoMode.FocusBracketing ) { + if( MyDebug.LOG ) + Log.d(TAG, "add focus bracketing options"); + + final String [] burst_mode_values = getResources().getStringArray(R.array.preference_focus_bracketing_n_images_values); + String [] burst_mode_entries = getResources().getStringArray(R.array.preference_focus_bracketing_n_images_entries); + + if( burst_mode_values.length != burst_mode_entries.length ) { + Log.e(TAG, "preference_focus_bracketing_n_images_values and preference_focus_bracketing_n_images_entries are different lengths"); + throw new RuntimeException(); + } + + String burst_mode_value = sharedPreferences.getString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, "3"); + burst_n_images_index = Arrays.asList(burst_mode_values).indexOf(burst_mode_value); + if( burst_n_images_index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find burst_mode_value " + burst_mode_value + " in burst_mode_values!"); + burst_n_images_index = 0; + } + addArrayOptionsToPopup(Arrays.asList(burst_mode_entries), getResources().getString(R.string.preference_focus_bracketing_n_images), true, false, burst_n_images_index, false, "FOCUS_BRACKETING_N_IMAGES", new ArrayOptionsPopupListener() { + private void update() { + if( burst_n_images_index == -1 ) + return; + String new_burst_mode_value = burst_mode_values[burst_n_images_index]; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + 
editor.putString(PreferenceKeys.FocusBracketingNImagesPreferenceKey, new_burst_mode_value); + editor.apply(); + if( preview.getCameraController() != null ) { + preview.getCameraController().setFocusBracketingNImages(main_activity.getApplicationInterface().getFocusBracketingNImagesPref()); + } + } + @Override + public int onClickPrev() { + if( burst_n_images_index != -1 && burst_n_images_index > 0 ) { + burst_n_images_index--; + update(); + return burst_n_images_index; + } + return -1; + } + @Override + public int onClickNext() { + if( burst_n_images_index != -1 && burst_n_images_index < burst_mode_values.length-1 ) { + burst_n_images_index++; + update(); + return burst_n_images_index; + } + return -1; + } + }); + + addCheckBox(context, scale, getResources().getString(R.string.focus_bracketing_add_infinity), sharedPreferences.getBoolean(PreferenceKeys.FocusBracketingAddInfinityPreferenceKey, false), new CompoundButton.OnCheckedChangeListener() { + public void onCheckedChanged(CompoundButton buttonView, + boolean isChecked) { + final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean(PreferenceKeys.FocusBracketingAddInfinityPreferenceKey, isChecked); + editor.apply(); + if( preview.getCameraController() != null ) { + preview.getCameraController().setFocusBracketingAddInfinity(main_activity.getApplicationInterface().getFocusBracketingAddInfinityPref()); + } + } + }); + + if( main_activity.supportsFocusBracketingSourceAuto() ) { + addCheckBox(context, scale, getResources().getString(R.string.focus_bracketing_auto_source_distance), sharedPreferences.getBoolean(PreferenceKeys.FocusBracketingAutoSourceDistancePreferenceKey, false), new CompoundButton.OnCheckedChangeListener() { + public void onCheckedChanged(CompoundButton buttonView, + boolean isChecked) { + main_activity.getApplicationInterface().setFocusBracketingSourceAutoPref(isChecked); 
+ if( !isChecked ) { + preview.setFocusDistance(main_activity.getPreview().getCameraController().captureResultFocusDistance(), false, false); + } + } + }); + } + } + + if( preview.isVideo() ) { + final List capture_rate_values = main_activity.getApplicationInterface().getSupportedVideoCaptureRates(); + if( capture_rate_values.size() > 1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "add slow motion / timelapse video options"); + float capture_rate_value = sharedPreferences.getFloat(PreferenceKeys.getVideoCaptureRatePreferenceKey(preview.getCameraId(), main_activity.getApplicationInterface().getCameraIdSPhysicalPref()), 1.0f); + final List capture_rate_str = new ArrayList<>(); + int capture_rate_std_index = -1; + for(int i=0;i 0 ) { + video_capture_rate_index--; + update(); + return video_capture_rate_index; + } + return -1; + } + @Override + public int onClickNext() { + if( video_capture_rate_index != -1 && video_capture_rate_index < capture_rate_values.size()-1 ) { + video_capture_rate_index++; + update(); + return video_capture_rate_index; + } + return -1; + } + }); + } + } + + if( photo_mode != MyApplicationInterface.PhotoMode.Panorama ) { + // timer not supported with panorama + + final String [] timer_values = getResources().getStringArray(R.array.preference_timer_values); + String [] timer_entries = getResources().getStringArray(R.array.preference_timer_entries); + String timer_value = sharedPreferences.getString(PreferenceKeys.TimerPreferenceKey, "0"); + timer_index = Arrays.asList(timer_values).indexOf(timer_value); + if( timer_index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find timer_value " + timer_value + " in timer_values!"); + timer_index = 0; + } + // title_in_options should be false for small screens: e.g., problems with pt-rBR or pt-rPT on 4.5" screens or less, see https://sourceforge.net/p/opencamera/discussion/photography/thread/3aa940c636/ + addArrayOptionsToPopup(Arrays.asList(timer_entries), getResources().getString(R.string.preference_timer), 
!small_screen, false, timer_index, false, "TIMER", new ArrayOptionsPopupListener() { + private void update() { + if( timer_index == -1 ) + return; + String new_timer_value = timer_values[timer_index]; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.TimerPreferenceKey, new_timer_value); + editor.apply(); + } + @Override + public int onClickPrev() { + if( timer_index != -1 && timer_index > 0 ) { + timer_index--; + update(); + return timer_index; + } + return -1; + } + @Override + public int onClickNext() { + if( timer_index != -1 && timer_index < timer_values.length-1 ) { + timer_index++; + update(); + return timer_index; + } + return -1; + } + }); + } + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 11: " + (System.nanoTime() - debug_time)); + + if( photo_mode != MyApplicationInterface.PhotoMode.Panorama ) { + // auto-repeat not supported with panorama + + final String [] repeat_mode_values = getResources().getStringArray(R.array.preference_burst_mode_values); + String [] repeat_mode_entries = getResources().getStringArray(R.array.preference_burst_mode_entries); + String repeat_mode_value = sharedPreferences.getString(PreferenceKeys.RepeatModePreferenceKey, "1"); + repeat_mode_index = Arrays.asList(repeat_mode_values).indexOf(repeat_mode_value); + if( repeat_mode_index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find repeat_mode_value " + repeat_mode_value + " in repeat_mode_values!"); + repeat_mode_index = 0; + } + // title_in_options should be false for small screens: e.g., problems with pt-rBR or pt-rPT on 4.5" screens or less, see https://sourceforge.net/p/opencamera/discussion/photography/thread/3aa940c636/ + // set title_in_options_first_only to true, as displaying "Repeat: Unlimited" can be too long in some languages, e.g., Vietnamese (vi) + addArrayOptionsToPopup(Arrays.asList(repeat_mode_entries), 
getResources().getString(R.string.preference_burst_mode), !small_screen, true, repeat_mode_index, false, "REPEAT_MODE", new ArrayOptionsPopupListener() { + private void update() { + if( repeat_mode_index == -1 ) + return; + String new_repeat_mode_value = repeat_mode_values[repeat_mode_index]; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putString(PreferenceKeys.RepeatModePreferenceKey, new_repeat_mode_value); + editor.apply(); + } + @Override + public int onClickPrev() { + if( repeat_mode_index != -1 && repeat_mode_index > 0 ) { + repeat_mode_index--; + update(); + return repeat_mode_index; + } + return -1; + } + @Override + public int onClickNext() { + if( repeat_mode_index != -1 && repeat_mode_index < repeat_mode_values.length-1 ) { + repeat_mode_index++; + update(); + return repeat_mode_index; + } + return -1; + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 12: " + (System.nanoTime() - debug_time)); + } + + final String [] grid_values = getResources().getStringArray(R.array.preference_grid_values); + String [] grid_entries = getResources().getStringArray(R.array.preference_grid_entries); + String grid_value = sharedPreferences.getString(PreferenceKeys.ShowGridPreferenceKey, "preference_grid_none"); + grid_index = Arrays.asList(grid_values).indexOf(grid_value); + if( grid_index == -1 ) { + if( MyDebug.LOG ) + Log.d(TAG, "can't find grid_value " + grid_value + " in grid_values!"); + grid_index = 0; + } + addArrayOptionsToPopup(Arrays.asList(grid_entries), getResources().getString(R.string.grid), true, true, grid_index, true, "GRID", new ArrayOptionsPopupListener() { + private void update() { + if( grid_index == -1 ) + return; + String new_grid_value = grid_values[grid_index]; + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity); + SharedPreferences.Editor editor = 
sharedPreferences.edit(); + editor.putString(PreferenceKeys.ShowGridPreferenceKey, new_grid_value); + editor.apply(); + main_activity.getApplicationInterface().getDrawPreview().updateSettings(); // because we cache the grid + } + @Override + public int onClickPrev() { + if( grid_index != -1 ) { + grid_index--; + if( grid_index < 0 ) + grid_index += grid_values.length; + update(); + return grid_index; + } + return -1; + } + @Override + public int onClickNext() { + if( grid_index != -1 ) { + grid_index++; + if( grid_index >= grid_values.length ) + grid_index -= grid_values.length; + update(); + return grid_index; + } + return -1; + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 13: " + (System.nanoTime() - debug_time)); + + // white balance modes, scene modes, color effects + // all of these are only supported when not using extension mode + // popup should only be opened if we have a camera controller, but check just to be safe + if( preview.getCameraController() != null && !is_camera_extension ) { + List supported_white_balances = preview.getSupportedWhiteBalances(); + List supported_white_balances_entries = null; + if( supported_white_balances != null ) { + supported_white_balances_entries = new ArrayList<>(); + for(String value : supported_white_balances) { + String entry = main_activity.getMainUI().getEntryForWhiteBalance(value); + supported_white_balances_entries.add(entry); + } + } + addRadioOptionsToPopup(sharedPreferences, supported_white_balances_entries, supported_white_balances, getResources().getString(R.string.white_balance), PreferenceKeys.WhiteBalancePreferenceKey, CameraController.WHITE_BALANCE_DEFAULT, null, "TEST_WHITE_BALANCE", new RadioOptionsListener() { + @Override + public void onClick(String selected_value) { + switchToWhiteBalance(selected_value); + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 14: " + (System.nanoTime() - debug_time)); + + List supported_scene_modes = preview.getSupportedSceneModes(); + List 
supported_scene_modes_entries = null; + if( supported_scene_modes != null ) { + supported_scene_modes_entries = new ArrayList<>(); + for(String value : supported_scene_modes) { + String entry = main_activity.getMainUI().getEntryForSceneMode(value); + supported_scene_modes_entries.add(entry); + } + } + addRadioOptionsToPopup(sharedPreferences, supported_scene_modes_entries, supported_scene_modes, getResources().getString(R.string.scene_mode), PreferenceKeys.SceneModePreferenceKey, CameraController.SCENE_MODE_DEFAULT, null, "TEST_SCENE_MODE", new RadioOptionsListener() { + @Override + public void onClick(String selected_value) { + if( preview.getCameraController() != null ) { + if( preview.getCameraController().sceneModeAffectsFunctionality() ) { + // need to call updateForSettings() and close the popup, as changing scene mode can change available camera features + main_activity.updateForSettings(true, getResources().getString(R.string.scene_mode) + ": " + main_activity.getMainUI().getEntryForSceneMode(selected_value)); + main_activity.closePopup(); + } + else { + preview.getCameraController().setSceneMode(selected_value); + // keep popup open + } + } + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 15: " + (System.nanoTime() - debug_time)); + + List supported_color_effects = preview.getSupportedColorEffects(); + List supported_color_effects_entries = null; + if( supported_color_effects != null ) { + supported_color_effects_entries = new ArrayList<>(); + for(String value : supported_color_effects) { + String entry = main_activity.getMainUI().getEntryForColorEffect(value); + supported_color_effects_entries.add(entry); + } + } + addRadioOptionsToPopup(sharedPreferences, supported_color_effects_entries, supported_color_effects, getResources().getString(R.string.color_effect), PreferenceKeys.ColorEffectPreferenceKey, CameraController.COLOR_EFFECT_DEFAULT, null, "TEST_COLOR_EFFECT", new RadioOptionsListener() { + @Override + public void onClick(String 
selected_value) { + if( preview.getCameraController() != null ) { + preview.getCameraController().setColorEffect(selected_value); + } + // keep popup open + } + }); + if( MyDebug.LOG ) + Log.d(TAG, "PopupView time 16: " + (System.nanoTime() - debug_time)); + } + + } + + if( MyDebug.LOG ) + Log.d(TAG, "Overall PopupView time: " + (System.nanoTime() - debug_time)); + } + + int getTotalWidth() { + final float scale = getResources().getDisplayMetrics().density; + return (int) (total_width_dp * scale + 0.5f); // convert dps to pixels; + } + + private void changePhotoMode(List photo_modes, List photo_mode_values, String option) { + if( MyDebug.LOG ) + Log.d(TAG, "changePhotoMode: " + option); + + final MainActivity main_activity = (MainActivity)this.getContext(); + int option_id = -1; + for(int i=0;i 0 ) { + preview.getCameraController().setWhiteBalanceTemperature(temperature); + // also need to update the slider! + main_activity.setManualWBSeekbar(); + } + } + // keep popup open, unless switching to manual + if( close_popup ) { + main_activity.closePopup(); + } + //main_activity.updateForSettings(getResources().getString(R.string.white_balance) + ": " + selected_value); + //main_activity.closePopup(); + } + + static abstract class ButtonOptionsPopupListener { + public abstract void onClick(String option); + } + + private void addCheckBox(Context context, float scale, CharSequence text, boolean checked, CompoundButton.OnCheckedChangeListener listener) { + @SuppressLint("InflateParams") + final View switch_view = LayoutInflater.from(context).inflate(R.layout.popupview_switch, null); + final SwitchCompat checkBox = switch_view.findViewById(R.id.popupview_switch); + checkBox.setText(text); + { + // align the checkbox a bit better + checkBox.setGravity(Gravity.RIGHT); + LayoutParams params = new LayoutParams( + LayoutParams.MATCH_PARENT, + LayoutParams.MATCH_PARENT + ); + final int right_padding = (int) (20 * scale + 0.5f); // convert dps to pixels + params.setMargins(0, 0, 
right_padding, 0); + checkBox.setLayoutParams(params); + } + if( checked ) + checkBox.setChecked(checked); + checkBox.setOnCheckedChangeListener(listener); + this.addView(checkBox); + } + + /** Creates UI for selecting an option for multiple possibilites, by placing buttons in one or + * more rows. + * @param max_buttons_per_row If 0, then all buttons will be placed on the same row. Otherwise, + * this is the number of buttons per row, multiple rows will be + * created if necessary. + */ + private void addButtonOptionsToPopup(List supported_options, int icons_id, int values_id, String prefix_string, String current_value, int max_buttons_per_row, String test_key, final ButtonOptionsPopupListener listener) { + if(MyDebug.LOG) + Log.d(TAG, "addButtonOptionsToPopup"); + MainActivity main_activity = (MainActivity)this.getContext(); + createButtonOptions(this, this.getContext(), total_width_dp, main_activity.getMainUI().getTestUIButtonsMap(), supported_options, icons_id, values_id, prefix_string, true, current_value, max_buttons_per_row, test_key, listener); + } + + public static String getButtonOptionString(boolean include_prefix, String prefix_string, String supported_option) { + return (include_prefix ? 
prefix_string : "") + "\n" + supported_option; + } + + static List createButtonOptions(ViewGroup parent, Context context, int total_width_dp, Map test_ui_buttons, List supported_options, int icons_id, int values_id, String prefix_string, boolean include_prefix, String current_value, int max_buttons_per_row, String test_key, final ButtonOptionsPopupListener listener) { + if( MyDebug.LOG ) + Log.d(TAG, "createButtonOptions"); + final List buttons = new ArrayList<>(); + if( supported_options != null ) { + final long debug_time = System.nanoTime(); + LinearLayout ll2 = new LinearLayout(context); + ll2.setOrientation(LinearLayout.HORIZONTAL); + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 1: " + (System.nanoTime() - debug_time)); + String [] icons = icons_id != -1 ? context.getResources().getStringArray(icons_id) : null; + String [] values = values_id != -1 ? context.getResources().getStringArray(values_id) : null; + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2: " + (System.nanoTime() - debug_time)); + + final float scale = context.getResources().getDisplayMetrics().density; + final float scale_font = context.getResources().getDisplayMetrics().scaledDensity; + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.04: " + (System.nanoTime() - debug_time)); + int actual_max_per_row = supported_options.size(); + if( max_buttons_per_row > 0 ) + actual_max_per_row = Math.min(actual_max_per_row, max_buttons_per_row); + int button_width_dp = total_width_dp/actual_max_per_row; + boolean use_scrollview = false; + final int min_button_width_dp = 48; // needs to be at least 48dp to avoid Google Play pre-launch accessibility report warnings + if( button_width_dp < min_button_width_dp && max_buttons_per_row == 0 ) { + button_width_dp = min_button_width_dp; + use_scrollview = true; + } + int button_width = (int)(button_width_dp * scale + 0.5f); // convert dps to pixels + if( MyDebug.LOG ) { + Log.d(TAG, "actual_max_per_row: " + 
actual_max_per_row); + Log.d(TAG, "button_width_dp: " + button_width_dp); + Log.d(TAG, "button_width: " + button_width); + Log.d(TAG, "use_scrollview: " + use_scrollview); + } + + View.OnClickListener on_click_listener = new View.OnClickListener() { + @Override + public void onClick(View v) { + String supported_option = (String)v.getTag(); + if( MyDebug.LOG ) + Log.d(TAG, "clicked: " + supported_option); + listener.onClick(supported_option); + } + }; + View current_view = null; + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.05: " + (System.nanoTime() - debug_time)); + + for(int button_indx=0;button_indx= 4 && supported_option.substring(0, 4).equalsIgnoreCase("ISO_") ) { + button_string = getButtonOptionString(include_prefix, prefix_string, supported_option.substring(4)); + } + else if( prefix_string.equalsIgnoreCase("ISO") && supported_option.length() >= 3 && supported_option.substring(0, 3).equalsIgnoreCase("ISO") ) { + button_string = getButtonOptionString(include_prefix, prefix_string, supported_option.substring(3)); + } + else { + button_string = getButtonOptionString(include_prefix, prefix_string, supported_option); + } + if( MyDebug.LOG ) + Log.d(TAG, "button_string: " + button_string); + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.105: " + (System.nanoTime() - debug_time)); + View view; + if( resource != -1 ) { + ImageButton image_button = new ImageButton(context); + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.11: " + (System.nanoTime() - debug_time)); + view = image_button; + buttons.add(view); + ll2.addView(view); + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.12: " + (System.nanoTime() - debug_time)); + + //image_button.setImageResource(resource); + final MainActivity main_activity = (MainActivity)context; + Bitmap bm = main_activity.getPreloadedBitmap(resource); + if( bm != null ) + image_button.setImageBitmap(bm); + else { + if( MyDebug.LOG ) + Log.d(TAG, "failed to find bitmap 
for resource " + resource + "!"); + } + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.13: " + (System.nanoTime() - debug_time)); + image_button.setScaleType(ScaleType.FIT_CENTER); + image_button.setBackgroundColor(Color.TRANSPARENT); + final int padding = (int) (10 * scale + 0.5f); // convert dps to pixels + view.setPadding(padding, padding, padding, padding); + } + else { + @SuppressLint("InflateParams") + final View button_view = LayoutInflater.from(context).inflate(R.layout.popupview_button, null); + final Button button = button_view.findViewById(R.id.button); + + button.setBackgroundColor(Color.TRANSPARENT); // workaround for Android 6 crash! Also looks nicer anyway... + view = button; + buttons.add(view); + ll2.addView(view); + + button.setText(button_string); + button.setTextSize(TypedValue.COMPLEX_UNIT_SP, button_text_size_dip); + button.setTextColor(Color.WHITE); + // need 0 padding so we have enough room to display text for ISO buttons, when there are 6 ISO settings + final int padding = (int) (0 * scale + 0.5f); // convert dps to pixels + view.setPadding(padding, padding, padding, padding); + } + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.2: " + (System.nanoTime() - debug_time)); + + ViewGroup.LayoutParams params = view.getLayoutParams(); + params.width = button_width; + // be careful of making the height too smaller, as harder to touch buttons; remember that this also affects the + // ISO buttons on exposure panel, and not just the main popup! + params.height = (int) (55 * ((resource != -1) ? 
scale : scale_font) + 0.5f); // convert dps to pixels + view.setLayoutParams(params); + + view.setContentDescription(button_string); + if( supported_option.equals(current_value) ) { + setButtonSelected(view, true); + current_view = view; + } + else { + setButtonSelected(view, false); + } + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.3: " + (System.nanoTime() - debug_time)); + view.setTag(supported_option); + view.setOnClickListener(on_click_listener); + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 2.35: " + (System.nanoTime() - debug_time)); + if( test_ui_buttons != null ) + test_ui_buttons.put(test_key + "_" + supported_option, view); + if( MyDebug.LOG ) { + Log.d(TAG, "addButtonOptionsToPopup time 2.4: " + (System.nanoTime() - debug_time)); + Log.d(TAG, "added to popup_buttons: " + test_key + "_" + supported_option + " view: " + view); + if( test_ui_buttons != null ) + Log.d(TAG, "test_ui_buttons is now: " + test_ui_buttons); + } + } + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 3: " + (System.nanoTime() - debug_time)); + if( use_scrollview ) { + if( MyDebug.LOG ) + Log.d(TAG, "using scrollview"); + final int total_width = (int) (total_width_dp * scale + 0.5f); // convert dps to pixels; + final HorizontalScrollView scroll = new HorizontalScrollView(context); + scroll.addView(ll2); + { + ViewGroup.LayoutParams params = new LayoutParams( + total_width, + LayoutParams.WRAP_CONTENT); + scroll.setLayoutParams(params); + } + parent.addView(scroll); + if( current_view != null ) { + // scroll to the selected button + final View final_current_view = current_view; + final int final_button_width = button_width; + parent.getViewTreeObserver().addOnGlobalLayoutListener( + new OnGlobalLayoutListener() { + @Override + public void onGlobalLayout() { + // scroll so selected button is centred + int jump_x = final_current_view.getLeft() - (total_width-final_button_width)/2; + // scrollTo should automatically clamp to the bounds of 
the view, but just in case + jump_x = Math.min(jump_x, total_width-1); + if( jump_x > 0 ) { + scroll.scrollTo(jump_x, 0); + } + } + } + ); + } + } + else { + if( MyDebug.LOG ) + Log.d(TAG, "not using scrollview"); + parent.addView(ll2); + } + if( MyDebug.LOG ) + Log.d(TAG, "addButtonOptionsToPopup time 4: " + (System.nanoTime() - debug_time)); + } + return buttons; + } + + static void setButtonSelected(View view, boolean selected) { + view.setAlpha(selected ? ALPHA_BUTTON_SELECTED : ALPHA_BUTTON); + } + + private void addTitleToPopup(final String title) { + final long debug_time = System.nanoTime(); + + @SuppressLint("InflateParams") + final View view = LayoutInflater.from(this.getContext()).inflate(R.layout.popupview_textview, null); + final TextView text_view = view.findViewById(R.id.text_view); + + text_view.setText(title + ":"); + text_view.setTextSize(TypedValue.COMPLEX_UNIT_SP, title_text_size_dip); + text_view.setTypeface(null, Typeface.BOLD); + //text_view.setBackgroundColor(Color.GRAY); // debug + this.addView(text_view); + if( MyDebug.LOG ) + Log.d(TAG, "addTitleToPopup time: " + (System.nanoTime() - debug_time)); + } + + private abstract static class RadioOptionsListener { + /** Called when a radio option is selected. + * @param selected_value The entry in the supplied supported_options_values list (received + * by addRadioOptionsToPopup) that corresponds to the selected radio + * option. + */ + protected abstract void onClick(String selected_value); + } + + /** Adds a set of radio options to the popup menu. + * @param sharedPreferences The SharedPreferences. + * @param supported_options_entries The strings to display on the radio options. + * @param supported_options_values A corresponding array of values. These aren't shown to the + * user, but are the values that will be set in the + * sharedPreferences, and passed to the listener. + * @param title The text to display as a title for this radio group. 
+ * @param preference_key The preference key to use for the values in the + * sharedPreferences. May be null, in which case it's up to + * the user to save the new preference via a listener. + * @param default_value The default value for the preference_key in the + * sharedPreferences. Only needed if preference_key is + * non-null. + * @param current_option_value If preference_key is null, this should be the currently + * selected value. Otherwise, this is ignored. + * @param test_key Used for testing, a tag to identify the RadioGroup that's + * created. + * @param listener If null, selecting an option will call + * MainActivity.updateForSettings() and close the popup. If + * not null, instead selecting an option will call the + * listener. + */ + private void addRadioOptionsToPopup(final SharedPreferences sharedPreferences, final List supported_options_entries, final List supported_options_values, final String title, final String preference_key, final String default_value, final String current_option_value, final String test_key, final RadioOptionsListener listener) { + if( MyDebug.LOG ) + Log.d(TAG, "addRadioOptionsToPopup: " + title); + if( supported_options_entries != null ) { + final MainActivity main_activity = (MainActivity)this.getContext(); + final long debug_time = System.nanoTime(); + + @SuppressLint("InflateParams") + final View button_view = LayoutInflater.from(this.getContext()).inflate(R.layout.popupview_button, null); + final Button button = button_view.findViewById(R.id.button); + + button.setBackgroundColor(Color.TRANSPARENT); // workaround for Android 6 crash! 
+ button.setText(title + "..."); + button.setAllCaps(false); + button.setTextSize(TypedValue.COMPLEX_UNIT_SP, title_text_size_dip); + this.addView(button); + if( MyDebug.LOG ) + Log.d(TAG, "addRadioOptionsToPopup time 1: " + (System.nanoTime() - debug_time)); + + final RadioGroup rg = new RadioGroup(this.getContext()); + rg.setOrientation(RadioGroup.VERTICAL); + rg.setVisibility(View.GONE); + main_activity.getMainUI().getTestUIButtonsMap().put(test_key, rg); + if( MyDebug.LOG ) + Log.d(TAG, "addRadioOptionsToPopup time 2: " + (System.nanoTime() - debug_time)); + + button.setOnClickListener(new OnClickListener() { + private boolean opened = false; + private boolean created = false; + + @Override + public void onClick(View view) { + if( MyDebug.LOG ) + Log.d(TAG, "clicked to open radio buttons menu: " + title); + if( opened ) { + //rg.removeAllViews(); + rg.setVisibility(View.GONE); + final ScrollView popup_container = main_activity.findViewById(R.id.popup_container); + // need to invalidate/requestLayout so that the scrollview's scroll positions update - otherwise scrollBy below doesn't work properly, when the user reopens the radio buttons + popup_container.invalidate(); + popup_container.requestLayout(); + } + else { + if( !created ) { + addRadioOptionsToGroup(rg, sharedPreferences, supported_options_entries, supported_options_values, title, preference_key, default_value, current_option_value, test_key, listener); + created = true; + } + rg.setVisibility(View.VISIBLE); + final ScrollView popup_container = main_activity.findViewById(R.id.popup_container); + popup_container.getViewTreeObserver().addOnGlobalLayoutListener( + new OnGlobalLayoutListener() { + @Override + public void onGlobalLayout() { + if( MyDebug.LOG ) + Log.d(TAG, "onGlobalLayout()"); + // stop listening - only want to call this once! 
+ popup_container.getViewTreeObserver().removeOnGlobalLayoutListener(this); + + // so that the user sees the options appear, if the button is at the bottom of the current scrollview position + if( rg.getChildCount() > 0 ) { + int id = rg.getCheckedRadioButtonId(); + if( id >= 0 && id < rg.getChildCount() ) { + popup_container.smoothScrollBy(0, rg.getChildAt(id).getBottom()); + } + } + } + } + ); + } + opened = !opened; + } + }); + + this.addView(rg); + if( MyDebug.LOG ) + Log.d(TAG, "addRadioOptionsToPopup time 5: " + (System.nanoTime() - debug_time)); + } + } + + private void addRadioOptionsToGroup(final RadioGroup rg, SharedPreferences sharedPreferences, List supported_options_entries, List supported_options_values, final String title, final String preference_key, final String default_value, String current_option_value, final String test_key, final RadioOptionsListener listener) { + if( MyDebug.LOG ) + Log.d(TAG, "addRadioOptionsToGroup: " + title); + if( preference_key != null ) + current_option_value = sharedPreferences.getString(preference_key, default_value); + final long debug_time = System.nanoTime(); + final MainActivity main_activity = (MainActivity)this.getContext(); + int count = 0; + for(int i=0;i supported_options, String title, TextView textView, boolean title_in_options, boolean title_in_options_first_only, int current_index) { + if( title_in_options && !( current_index != 0 && title_in_options_first_only ) ) + textView.setText(title + ": " + supported_options.get(current_index)); + else + textView.setText(supported_options.get(current_index)); + } + + /** Adds a set of options to the popup menu, where there user can select one option out of an array of values, using previous or + * next buttons to switch between them. + * @param supported_options The strings for the array of values to choose from. + * @param title Title to display. + * @param title_in_options Prepend the title to each of the values, rather than above the values. 
+ * @param title_in_options_first_only If title_in_options is true, only prepend to the first option. + * @param current_index Index in the supported_options array of the currently selected option. + * @param cyclic Whether the user can cycle beyond the start/end, to wrap around. + * @param test_key Used to keep track of the UI elements created, for testing. + * @param listener Listener called when previous/next buttons are clicked (and hence the option + * changed). + */ + private void addArrayOptionsToPopup(final List supported_options, final String title, final boolean title_in_options, final boolean title_in_options_first_only, final int current_index, final boolean cyclic, final String test_key, final ArrayOptionsPopupListener listener) { + if( supported_options != null && current_index != -1 ) { + if( !title_in_options ) { + addTitleToPopup(title); + } + + final MainActivity main_activity = (MainActivity)this.getContext(); + + final long debug_time = System.nanoTime(); + + @SuppressLint("InflateParams") + final View ll2 = LayoutInflater.from(this.getContext()).inflate(R.layout.popupview_arrayoptions, null); + final TextView text_view = ll2.findViewById(R.id.text_view); + final Button prev_button = ll2.findViewById(R.id.button_left); + final Button next_button = ll2.findViewById(R.id.button_right); + + setArrayOptionsText(supported_options, title, text_view, title_in_options, title_in_options_first_only, current_index); + //text_view.setBackgroundColor(Color.GRAY); // debug + text_view.setTextSize(TypedValue.COMPLEX_UNIT_SP, standard_text_size_dip); + text_view.setSingleLine(true); // if text too long for the button, we'd rather not have wordwrap, even if it means cutting some text off + LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT, 1.0f); + // Yuck! 
We want the arrow_button_w to be fairly large so that users can touch the arrow buttons easily, but if + // the text is too much for the button size, we'd rather it extend into the arrow buttons (which the user won't see + // anyway, since the button backgrounds are transparent). + // Needed for OnePlus 3T and Nokia 8, for camera resolution + params.setMargins(-arrow_button_w/2, 0, -arrow_button_w/2, 0); + text_view.setLayoutParams(params); + + final float scale = getResources().getDisplayMetrics().density; + final int padding = (int) (0 * scale + 0.5f); // convert dps to pixels + prev_button.setBackgroundColor(Color.TRANSPARENT); // workaround for Android 6 crash! + //ll2.addView(prev_button); + prev_button.setText("<"); + prev_button.setTextSize(TypedValue.COMPLEX_UNIT_SP, arrow_text_size_dip); + prev_button.setTypeface(null, Typeface.BOLD); + prev_button.setPadding(padding, padding, padding, padding); + ViewGroup.LayoutParams vg_params = prev_button.getLayoutParams(); + vg_params.width = arrow_button_w; + vg_params.height = arrow_button_h; + prev_button.setLayoutParams(vg_params); + prev_button.setVisibility( (cyclic || current_index > 0) ? View.VISIBLE : View.INVISIBLE); + prev_button.setContentDescription( getResources().getString(R.string.previous) + " " + title); + main_activity.getMainUI().getTestUIButtonsMap().put(test_key + "_PREV", prev_button); + + //ll2.addView(text_view); + main_activity.getMainUI().getTestUIButtonsMap().put(test_key, text_view); + + next_button.setBackgroundColor(Color.TRANSPARENT); // workaround for Android 6 crash! 
+ //ll2.addView(next_button); + next_button.setText(">"); + next_button.setTextSize(TypedValue.COMPLEX_UNIT_SP, arrow_text_size_dip); + next_button.setTypeface(null, Typeface.BOLD); + next_button.setPadding(padding, padding, padding, padding); + vg_params = next_button.getLayoutParams(); + vg_params.width = arrow_button_w; + vg_params.height = arrow_button_h; + next_button.setLayoutParams(vg_params); + next_button.setVisibility( (cyclic || current_index < supported_options.size()-1) ? View.VISIBLE : View.INVISIBLE); + next_button.setContentDescription( getResources().getString(R.string.next) + " " + title); + main_activity.getMainUI().getTestUIButtonsMap().put(test_key + "_NEXT", next_button); + + // test: + /*prev_button.setText(prev_button.getContentDescription()); + prev_button.setAllCaps(false); + next_button.setText(next_button.getContentDescription()); + next_button.setAllCaps(false);*/ + + prev_button.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + int new_index = listener.onClickPrev(); + if( new_index != -1 ) { + setArrayOptionsText(supported_options, title, text_view, title_in_options, title_in_options_first_only, new_index); + prev_button.setVisibility( (cyclic || new_index > 0) ? View.VISIBLE : View.INVISIBLE); + next_button.setVisibility( (cyclic || new_index < supported_options.size()-1) ? View.VISIBLE : View.INVISIBLE); + } + } + }); + next_button.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + int new_index = listener.onClickNext(); + if( new_index != -1 ) { + setArrayOptionsText(supported_options, title, text_view, title_in_options, title_in_options_first_only, new_index); + prev_button.setVisibility( (cyclic || new_index > 0) ? View.VISIBLE : View.INVISIBLE); + next_button.setVisibility( (cyclic || new_index < supported_options.size()-1) ? 
View.VISIBLE : View.INVISIBLE); + } + } + }); + + this.addView(ll2); + + if( MyDebug.LOG ) + Log.d(TAG, "addArrayOptionsToPopup time: " + (System.nanoTime() - debug_time)); + } + } +} diff --git a/app/src/main/res/anim/fade_in.xml b/app/src/main/res/anim/fade_in.xml new file mode 100644 index 0000000..b432ece --- /dev/null +++ b/app/src/main/res/anim/fade_in.xml @@ -0,0 +1,5 @@ + + + + diff --git a/app/src/main/res/anim/slide_in_bottom.xml b/app/src/main/res/anim/slide_in_bottom.xml new file mode 100644 index 0000000..4a69b5a --- /dev/null +++ b/app/src/main/res/anim/slide_in_bottom.xml @@ -0,0 +1,7 @@ + + + + + diff --git a/app/src/main/res/anim/slide_out_bottom.xml b/app/src/main/res/anim/slide_out_bottom.xml new file mode 100644 index 0000000..e961652 --- /dev/null +++ b/app/src/main/res/anim/slide_out_bottom.xml @@ -0,0 +1,7 @@ + + + + + diff --git a/app/src/main/res/animator/button_animation.xml b/app/src/main/res/animator/button_animation.xml new file mode 100644 index 0000000..a30cc62 --- /dev/null +++ b/app/src/main/res/animator/button_animation.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-hdpi/baseline_add_a_photo_white_48.png b/app/src/main/res/drawable-hdpi/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..be070f73b6842e4b4d35fe3ef341291d9642601c GIT binary patch literal 583 zcmV-N0=WH&P)Z;cw4J+4GCAF+u7#-BfQ5f)Y_o zDf1$VolVeKcMd1YY(+7*33}4qkSe;m4{d@1-NjT9>3(1mB#PyIPbeMhpcPfbx(`f( z@ZWv1XPcnD?nbD}dxM}zcP<%a_M(Vwg4Wev7GNxj)F$YRt4ogqx1>0_4ZtP{CZgzb zrpS>%6brkc6J9Rqz%^SsqUdt53xY*%?=dIKCo{#(ZPs8J1d-(6G9qThl9(Zmd9z92X;!IdlUh{>SFAa!O zDrXIqP)B)1$c1#4C2QMpE&h zm5=OhM)9DD4+L>naUjJU!CX@u$P?6Y`Nsgk9hGmiQ6ab%`NkwJlJbjlxY(pYeld@m z7WqUDSI6WNMO?MZAF^!V>Z<%<1b1WdhdJE6kuPMZ;_eqo`9U|~^vVxL38x@GC=kvn zDfz%6VGYO!z7WIp_(6wkkYODQzv+<;=CQHPh%7LSo#(VFDzf~+&N71}6b(gel{l{` z=)~SEgQOJ}3;3`~o^B4x9XYn}rACQ++~5+YIHI7KAP)Bx6m3-S?JG?Rjz0X_;)2|e 
z$EQ1TMT%*Bc}zm?XrzP>Z%Hd0S}9@eHO&f-My9d!gtVd|MII|-G{^#dREX*uy|P0a z6GZWdW?3W00%5%1q1A{zv?&J^wPjV!~=Q)MSLX1GqX;zNcmZc*SJWtM>&Wj-;> jLxwrc0oB#j?NfgNK7cDJjeDXN00000NkvXXu0mjfx)ly< literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_bluetooth_white_48.png b/app/src/main/res/drawable-hdpi/baseline_bluetooth_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..4e402e71768b2906be672de24467d2a465a65a5f GIT binary patch literal 422 zcmV;X0a^ZuP)T1PJJH0!rGg3@lg(^$HPU5dcB|xWF-egz4*8_YVy4 zV|RDXJ-6hq)oMZQG8+gk(twhYAQ_3+c`{G}5+ox*G7>*GFMk)OEa)`IL5iGDU{ECR zkRpdjlAzb=Ns1Fm3UtUjv9oDGx5O$*gG%8jlK7gGM8_aV0#r#GKS_aV9P{BPDNv1* zeL)!22uo7&b8(kND;*Di1S3{*ht~IV=?0Fmzs-oPXxx+ zk>erh*6fv-kEA(SNGr{;kpibLIy-6596w3VmVQ#?G?4!z^M9%RKl}WDjM-lu{+FwP z^BA6%di=;gpxH)P4~hQQL%w6Gre~Gt7=-5NgW}!zplIL-kT~5NIC9yrv2YTsV|cJG zto$oI|I+h~*nErE!jZHzEzbQ{3+?hHa`Io`wJT1=Y1ia_jxKQ0*J`!C0cHsU9UO-f Q*8l(j07*qoM6N<$g7n+a literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_check_white_48.png b/app/src/main/res/drawable-hdpi/baseline_check_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..c9cdfb1f8c4b93fd5561b280bb58f180729ee298 GIT binary patch literal 238 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXhawwtBiahEy=Vy~N1Y6e!~I@S4Da z71tzK1Qt%++O3i$KUqHIL6c8&vDk;2XLm&ff#B)oUEP~624B}ZcWGz$=8{zpC04e7 z{%dAY@ZdlrbNcmTmH%6r-K+0z(S2~^v48F?3!@;$;~s&1%NOtYUE^)_;%jl=^3`iH zmsZZT$b5Fh(^%bamSy3kGo6#u{N{)}uh?~>`h{unC+)LyOjb5LUc%KkdCp}=tp^e- m869zSqpAwH4BTKkuUfrw6B#c>c!6xxJ>IN6{k0&Z} z@*OymY0by&S-@~_(h-IYYCwUjjeR}3o(1#XupFOgb9}Omy6d9^i+NXy;^sZ}oF}YM z*{2@q*N|e#IO!>iN0+d|T~&35Pcr_W=GdGpw$VIq_&fEU^?{En=Pxc{I6j$a&O}28 zqu1vQ9g1GhQ{eRLd2-Fxsp$1P&v}#8CHvGX{pO`u&U<26Iq7MOhfQ5E%gaX&w!xyVbL4mI*Gy4)z4*}Q$iB}0o_AV literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_face_retouching_natural_white_48.png b/app/src/main/res/drawable-hdpi/baseline_face_retouching_natural_white_48.png new file mode 100644 index 
0000000000000000000000000000000000000000..a5ee6b45a25ab42fa5f38c6bc54eb160b3db6dc6 GIT binary patch literal 1098 zcmV-Q1hxB#P)72;zt;549cvO%4|wO1i!LMs4yT>2_?{lc9E4*ik;vn8ZrI>LdZgAQyPXs zfsuokGhz%opaCqz58*#?xa#J zX-Twj_VdEo!+Z98PCygDgnP4_$Z?Jmvs9P|;2$c?@(V*8VtwGaoz#FDJKdpdC(jHk zgvm>O=OkM+qsQtRV{Tx2DN{o#uNe12(Zy2zwx6eiw!#r1Xo-=H@N(}JK0OkQF+2FIVgR#bDsj|cvJIr7@xM~1%hkn|c{LW

X@E1Nv2aE>_KXB)8}~7jBv**EF;0?ahz_I8O8%A^9pzv@w9AnnVn8tA zdx6edZm>VLrzRutJ5|0D2ay?$#)@SkuRNSeag2B34gA(R+S4c4jAv^>lM}p^u99^= zBnqLY8ruCV3y}_3WsLL+O#el@GNOmaBB1NdIW=EI1C=8UzhPgKhq*4gaegQ8CBH>y zgRAoG@{ByrzmX%a`o>)0HCw}h^74h(WZ5Iy@SwU%#e37uGVoJ4&`bo*pZ+N28^4tT zk3z(MFCU6|IH;K7J4zc~W1s;7=qMigU{>;p8ihbD3!# zP~inorNS+~ssH+8!|tJA6X!#4Cg^Zj)ve=U!}951wNe1gYkqcIX7(%Ae+^c2D!#H z2K~vri4N!M(Wz;i zuQF=pDoHci)Zn>q?QXSC3kwuvF^*0O9rBKaf4CF z^VAcj$a943+65*=f(+O-FmJAx<-!{^qvZQHgPqjnqAic{Nh9jk7f+O~q)_*MHYfA8*@ zNpEJWC%#-v_MG`#04}LB-+SC zDVkA>d>-#TMDxJ2?$x9zWymNt$Vewvaf0_GdcsboQ%;ib4$WIhLOG_foo6I^&q-F& z8U4U0zysR zfO3fiaw!M&N8d6UdJgI?qEfntn=}-AG$sKr%S5O3R|zz>PgP&w?Pio7jJ3raWq5|a z{$hy!_rHkbGzg;K%4NV4UKm+;Di7&Gq3#i&R+{gd`002ovPDHLkV1nEtj9~x( literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_folder_open_white_48.png b/app/src/main/res/drawable-hdpi/baseline_folder_open_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..0d3a253c0b22bb129e7e58d3d29f371916505390 GIT binary patch literal 248 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXhaw_ItWGhEy=Vy*2dOTkc%Dw>xmbwYG?f1&r2G^~b}wdA!R0ZmN9F;&1D>bXt&?W?Dtmv8|lvXBI!0 zeJ#;aQnW!*h?~J#Ma#hT5z8`n(|DKumSLUOCsA6#H9f*hbLkb~md3_!&u;9R1@sDor>mdKI;Vst09IpVFaQ7m literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_highlight_white_48.png b/app/src/main/res/drawable-hdpi/baseline_highlight_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..50e585cad506d480a899e54aa4761c127a7f20c6 GIT binary patch literal 256 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXhawj(fT|hEy=Vy|Pe{DNukVkcG)n z!$U<w3S zByUfSkaN1h^b&)otDnm{r-UW|X=_># literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_panorama_horizontal_white_48.png b/app/src/main/res/drawable-hdpi/baseline_panorama_horizontal_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..15bbfebcacc9b773315c48c76a1f135300bd67d6 GIT binary patch literal 533 
zcmV+w0_y#VP)j#p$E*wI;&eOZsP9|%hO}^pFVr-_X~MZU zm;L7Hs+bSl9TM=0eA=33FOVeY+}0YSn_B+%xp%$cd5^ir&2Dy&$2{*1@A}+t%4npQ z;TGHBxJ(2kcdZ5WutKJ+(BqPW!a-hCR9|bP%W8cU^-?$}Nzieg@U;p$8E2DAx78>u z{Og5qT#}%3@UZv%rHa;u8gHg0R@rE;JvLfxiCM#h@4zgJMt&ia{|b2E`z^y5Cb?@wU%>=54Qf$^&lAQjptxsDhSy$uZ4xyPV4w za}Cx;RsVR$ZD|hjnu6MyVz*@WS)#v&a(gYMATKJSk5$rXl|G7iF+q@JGHF?SjM)j= zZix|kYpkr_9pA$4J-US-J-&tiR!cWyt;9e&cmYR7B{=b|R&s_yBK1>RBLJnNJU*N`ZRq9{55 X+MsBaDt9BV!;49drQ&zy!LMSqw%1=~4x`UBQDtAMYnF70%J1$CQ|+^k~TMu+9Vv6EX=$jSH+?Qj<`0u(T_o7-DH8p?Jbl zE}<~yK_0X`K`0*NK_28mhCIlFJP11;bi`pwL07D>!YGxXdor10jfX0O`W&;!JKIvA z#cNeTw=`*TL)o4vbgKz^W>e3S^A2FeVcT3{u}y zGh&+Q@2TJ*D@zvSK^~N2N{LHf-~DM-Ir1|$K_xleS!?6Fyy`0 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_portrait_white_48.png b/app/src/main/res/drawable-hdpi/baseline_portrait_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..8e6aeda802f437a86decd9c306e0acffc732cdb9 GIT binary patch literal 361 zcmV-v0ha!WP)b;@5QSkHd=(MBuQtGP|SjUTs;Q z*4~}f%w(b{ilT&sj1@;ZEYfjgMK**pb z!a(0*vV(`dUm$@566o3UB^3t_RFug<vNK0{trx83k*0v|ItW(z0Vs@q3%a z1jL*zx8BlmW6L}mh(%ySt7TyhVoE)z;ZifT1=WLlK(!%=VpLn|%?|W23iNIV+Kd8i zUI(PoA&@|S1adc8a5sE$H=gn~z;id^{8}YXLtsTwlvmCtSzes(;Ddjy00000NkvXX Hu0mjf?-!cP literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/baseline_remove_red_eye_white_48.png b/app/src/main/res/drawable-hdpi/baseline_remove_red_eye_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..e6c0c76f7ebe905e4bdbb94ae95c69376c59650f GIT binary patch literal 744 zcmVP)>w0&QeJ{ZX4dhQQ2EL_GE3kn!CgY_E+r%|wBikxz4W0X zxk*WCa#Ml6?8Wki)`2W}vv7OIeDX;o$j3b1<2IY5{!Q`CxEW)C%&f(2vmZ+Vt8g15 zK43I%tHd{q#I29`h(5TD6dSE^8zz2W7;e_^rlc_(*Gf3lU^)kQ&T|eho$3+}t8sZx zNr@U6xPi+DvWh}s_7UDb3X4KkKHzeL3=$Pq<2G9qO7WP;Jf^fL%nopHQwK{nmCnU| zqH>?yDw!2aRf&j1oWt#aC~U`NyC@vQ?JS8UJi22UqS6hpG*s!KSh`DCB;_uax+=c} z%VL$Ti{&m!MWX|@A}W6t%Q=-Uf~|vS9K)7g<=6Ivuuy zqR|OkVU<6J<-A@%v2_xS6x_v9PvsZ;HK^-hxl0NOiyl}8s&suU4ODs{mL3uwNw|R9 
zeo@%!$HKTDw~Hi^h^U4ot4e2gZXMZGG7FZffo`rdMWIBLn`;SCm=*BJk&Il&B zk1V`1jvr|yYLp7#fHuu}KwE1c(1JU{4i`T#+&;pZ7=>GZ@e%!T8zsJBc}T87V|*XX z1`@LoxAo>lalL;RMFYvqlkoeXuF`WzD*mDkZ?Nq9U2I7GyV%g*r(#2!*k6u`-32QH z<=GiKN_q-SIZZH5(=0ZXh1-+pf7Pn1#&F@>Xf1psG9Nk4>i^SPFpxavn=c-Z$K&yM a4Br5P$Lz7&lMCel0000zwbv&amu?<6sLu+s>e^Lbvg`Kn|{fVIs z*p1F?>`M8^Ufim<*qmOmL53V&qYkGM4v}Bwm@}Nm72p zB*oxFmg4lPykmQcAZ1d!ctM7vsKAu5ue{;|Of3g1F7jv|FUar+PJhcAcBT~5LkW?? zzwv?$T``sHBrkXpQ_n$4f*gKJ8gwL1FUbprVmc}%z6T{lcg!$v^N-U7N&}xOAuhnw zvy0r~M$9~NjnB~emw!_pZ z^Vk|wt<2+KOkvc1>S=O|j+lED0Pe>0l3Tou`8Du~@dvrZ6_{C4=9i9bsluEnpE%#+ z;%exG`CUHI4Kq|;a3`jg{pACPhP~tw599QKykc95 zH{@I;k2sPFOd;FLJD&2La|+9RZ}1H{jP#Xr2^E1Hr45I0y&B*=won?_itlKsghLfH zZl;cjoRG%5nd1IB#x<^?7Jxeb=9nb^&0uJR3o9<52B!ry;XXMexzc+|LE|hch(ivI zdg;lbjP?}NkKGjxa(ID4S3zYwsc^9)A5)Q5&=S7lKs5$-=XpjZ2`Z!wSCg;C#s1vF qd$g4U|MLjPlcP;mR#sM4R@O@IeL2KpWd}F_00006hPtoY}>Z6c}8s8wr$(CZQFLTjyEyKo<4DIrMp^HT{9JY%jRkRlKapDv=?^) zTDmRBT}j-~&}~8PNdlK8fy)=kO0_2+sjt5~omybbK9Q9Az=Lo9CVU=J?veuW}t`w(+bu zg*V&q{6Pl^HFR`Z0Iq9`ATJv_`_jN-`|WawDRV`7Rzq_IWp zp(#ce`7y{?HlIfWv(}^R3cxC3DgW836cXbJSSGs8KW9OM*B%TB&PV7G*m0G zh8KxNEekUJ810**Scy7k@i#~ljJ^mMG$iPdoQ5hTRW#K`E$b+z=Fn`os-EN8I+MUX7|4@t^%-2^T{m{>L~P)g{x14j$qps+g5^ zst1)-Y$6Lr@7SGwEEk(tfZ8rr&sY2UM3Z_Fk-*iL|zkXn1E3Y8;i{} zR8O&rw}|&z23dqIgW`BWtfM7H7x~q8Qa!s%>D?G5lEYTgZXObQ_>35g(rF#)y4QPY zKnXq(`zV07<-9Hd@hJ7UL^>tJ2{gr9665(!Oz{MjIKmBSQ;tPr|DBk}J5xhlpx-y2P?BO~W*~TpT(|~VCAa@f<_?^BSC$!5r$W0K*&nydBPI3tko$+^({AQG=D98|2v!CmnX9MG@6|l(jbMbbT zlM{bicuEuz{Kg`J6|<)7EmJgG@Rw1-!$VxaZX;<$@i|*?vL2mEh6AH`RupO3h}}`r zNH`de-5&1IG@8ML^1gn@r=VhJNu(zCum`(|;vGL>cZ|oy!VsJdvnF#Z#vF9F;c<>( zS4g}el3f@+-ZEp{!)bzD;UOI@iNF0kq50eRFb)wZUXUNV3RaOLN#*pgtJ*q}hO-W< zO4||2W0%ESud@eZJ5ic81^SC(CjJhP$a&XN9j(K88^#iGk1w$MQM0c=eP=D~OW0-5 z^d({mW`4(y*u5!k(FbFVDAs{GjBaa`z*r=TMTD|S6l*Zri(8z>D5%MI7+plM98?T1 z>DSqb%UAk61u?dXJG_bEBe`a?u=_x>=ePkt2dkGnN?c~>cctJ4#^d4!|{3cZV-!+M3XBx z`GZ%)0m(RylUVCD)3_mS;KN905(n#u5(kvT$z>f&h~X1AIE#^1lT(CJNgNQxUV`nm 
zo-yOBxWRsm7c^N-C|7w>{8*EmjbMwdb!i;%zICPZ-1aUI#YO@R);vANd~t)G81=;y z1_2q(>E|cE=JhaodOtcAhzEQCzz(Z_nuEXlg0|fX;UBJZiOYh37Nv-O^%{2THA{{! zjE;!2fd?QN2rZVE1KQr>B-UgEMsZCFW1JJW=oP%ZvyD)?TJ6V4j2fCfMGT5V)->-S(%i#pjB%Ppu?r`E^1LXASLvU`JEk~ZHY8>|3ZGSE1wtt& z;&|CG>2_boe8)VXsWpukak8JULR`mO;2{YQdGNQDr^LcYR(T|R zbaT4WGf2oVP6BY1{Vb&?S?t%n4p(~J+tUji+4dFlBR%Lvrh{ABngbt2e4YV(MM zz@yY+KfyX_Ik%2lIEiIFZT&9J?~>#Pc4$lokmj9(VyeQg{;}f4i}x2e9)ZBp2cob500000#LT=By}Z;C1rt33 zJ>#Bd(*uBpuJ&|s45^5FJHauF#ZchLGxy*B%bz$(GwqTndD;AyE12o7%kH#!L0+0m zrvQn_KQ(suQ)X83OHWlXexJB_Qpz*ioRCRp_O$nUO`7?AL!hV8XE`%1&$N4o-BeGW zslKs7W#&)&jr>L@@3;Tunf#~r#(4uAOz}0hHP6pG+BV|}_qDfE($6P3`+Q=3U3O}7 z`bI^ypG~FKPj7C%Auze-;I6r!-bChfcsR?r$Lxqo{D!>cHq~O%Nt5&=wg=C% z?dCN+i7+nt%{0$>?Y*WakKfRpY{M^|J*j_Vv|6Q{*)byEZRIl=RoC3XM+ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/flash_auto.png b/app/src/main/res/drawable-hdpi/flash_auto.png new file mode 100644 index 0000000000000000000000000000000000000000..3d815ec7baaef1c39d952a44151be6e93cd39dac GIT binary patch literal 613 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY1|&n@Zgyv2VAAn)aSW+od^?LVMLAH!wRwSt zLWGEbj@ZGZzy%Q^N0R~%Fg3A?Fp98>u<~>*Xjq`pwLoJ*gOWzmB8@Hu3)Tbk=V!+S z8yoLsKQJT4{ZHlhvtN1)kIl$)wmqYr_Q!SJUOg_WRWm-$vH8DSEhEq{UcN%{yz_OI zUhQYUd!JQ@v+QNE7X9pdaZZ`*g~+2hyAMduTCT?t#e82Y&06iLVzJuW?Xee3CcK?B zg)?A6(Zw>y#a%nu4$PV&e?U=F-(mmSre}>YkMDFayh%B??C>@&E3PNIZkL>34qxJP zTO&j8gvpXQwS^7q8}4r7lwvpX^@yC-!=N41H(Sqxr%md?#3_rO?1~P`yQT6%XF|G* zCA;eNf2YqxH>+`V@we^Txqs?k;l{oG`9FB2d43-Vy60{G_T6oT43QH>2fj&|MP9I( zu{5FDM@mh@PNM6j#i>Oro5T*v)ZehPpRJK0bH?zp|9X*+Cf5107Q9HXIN@{jGJo0j z=lte>tk@5iJh*%~@q=7pmIRM*LuJi=Uy)oj|VZ`_+&3sTSN9+L6zB xXFJQgXr}MFjlZWIsP%q;MOfj&_8!A$vZc4RA8r!i5d|h622WQ%mvv4FO#ncp4we7_ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/flash_off.png b/app/src/main/res/drawable-hdpi/flash_off.png new file mode 100644 index 0000000000000000000000000000000000000000..40c691ea3e57fba74d61dc2d68d7753302a75d0e GIT binary patch literal 436 
zcmV;l0ZaagP)+TuVb{c!JZ8EWKVVs27_V!#Kkbh2M!41f8Zr8 z{up=n(thjs`NM2guSM|Bvn3EZcl}fCAbhIKz0&p^6mW9>JMXM|zd^ z2u>Yo`cC|{=XaD;kuIOrk^JoeN!tTby*(gldqAqU2PAC|NcHxBr0oHz-X4&&Js{QF z1Cq7}qPUZl8`!~2bK*OwWx*69BX^zn^r&2>>$ew-O`gEe(&;=bNl1l+v@kr#Nrtbd^DNI@@4bvui+W~ z7v?Rnb*ygs#`2c)mhcvJ`~t@>SBPAAQ+t1ELmGd!%7W{);!LyduASI0@B3WN7uE8= z*IoB;Xg3wQaC@J(f7s^VK-TGf#f)e3{u&;;Clv5Tm+>f&daKL$6-d1YQx_8dhJOp& zu)S%o=$o|y)!jFukK9xGCbn^V$6nVrx=Hzh5TQrgzl$+^;B>k=(ecev5ow^`89ZJ6 KT-G@yGywpLn2I6* literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_burst_mode_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_burst_mode_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..b1b848adaff8ce1fdbecd3a4e881e3afc9f4c16a GIT binary patch literal 325 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXoKNz{un2;uuoF`1XdX(+fv|hL4qT z^SDKxFzv{GBC^BOsEAqVoM=I|g^SX=cL5vP54Z0=UBB_?QQ!DkH)8|+oivwD37S*1 z`m=tD!TIEw>dpH6+@~IFOtsO;n0MUl{{{u_j0;;$JerY_nPz32qvoFF&wjE@{p`mg zee5jz*5W{U`xNFmAqada1_-s}&Rbi}+TyoS1rIyHN&D z^iJh{7jr`|NbM`)x7eKH8?fh&@Wowk?-xw?>3{zDf8CsKk7Ga{IW literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_colorize_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_colorize_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..aa06077775a20907a4a4e02481175f112c7f1fc3 GIT binary patch literal 438 zcmV;n0ZIOeP)}zu1)y9>{ zq&IZ)VyBB_l5Q?-y|aYH08XZwUH0a2KPQQt$O)fw%Q>IEIH`g&7J{=5l6#I29V9=T zR?;(jo;a-}J13;W5kd$qoLriMbEo8_6b}06&m6NPBOG%iQdR>wSz})+nfm1Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D14c_oM?S+J~^7Y7HG>(kTFb9AOyF56R$#+hx~d%csBwWnKKZTiR;OB_g5FWrreSATbR zzx?ZVCutWe9#*TD)6M4V>iYUu+T}qi=x}*?*6#IMT;=qEB&$?vJlo#>NxKGkKwA$O zR@~(P+@p~6k(wT)f)I14#vaSEUQpG8?-2VwKVK_WD#xY=d)&seZ?(f_yI;g$REuGHy9&s|aE{#O}*c zg{7dlHC2N)$NZ8^XgL0QNFMhMDE4$xjumU4jBg0hwav=yX-9H6BjUE~1m1nDFPXeCHD zIY1jhI?4fByfBL*NBnO!vGJV+73>6E!^jtA3OxsMaFc#TCQg}51?f7h++-%m1UVoR zL8iz7sS7em4oEeNMUfMbt%By!Wb<^l*-h&#rqFX_*(zv~5uf>;qnzDR5oFr1a+A0q 
zljVR!1(_}fBqoT19FT}0E^uqw*}%C)4L3 zPQ%JgEP}Yo0V50IEC-Avh`Su1EGR=cvh0SUoULRiWieBGj!G7bEC@@AXw^*Qg&`Uc z^r|G1pjxq9KBje4?1{Xf4hLfCG&R{XH8*!A3m4KLEqc}-Rrzys^NngAq=K;M9!sXF z$rjw>RkyoFO%GB*_(cLNy5}qj_cpz-(3Gv22fjht&C$wAy)-kkiKWvQt=1#c1NZGu zQ;3*@VTGO(BnN~Fa)az9HnC6drcB`c{e2<{8b_4BYK#lHpb&CFA>@JrDHQ$#Ze(QC TGT-L=00000NkvXXu0mjfie0Cw literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_exposure_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_exposure_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..711d8b1e8596f1d549c83397ae21a40380c95acf GIT binary patch literal 357 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXoKNz^LQt;uuoF`1Z1I)*%KF*N5g^ z)hn`jcC~D`tzYmetmVO*l2y&w-UnnfEX$eiKe={k?b@}521O6fEw3&}JKt6n`*D}9 z__R02zR6}hICLpi(kSCd#&k#a9zV&F;~A$GJk6SS+0j>X!I_!-tL~e3D>oXsGo|*i zoRX1u3AWQHesbIVX|s_#n<bc<4jk7*E&QxxOkVfu|spsNn@dFK>VrjHO6Hb15 zI~z$EQ1NHW%Fm87lR^6UdzSH7C6}C8VC$C5mb+tP$$cKlker=*XMQfa*Vx5t?dMao nt5aE-d8*^TunSzj`WMKbZBf&Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz1^G!tK~#8N?VL+%97hy|^=o?Tk!8!0td+bm z){hzolK2hw!rNdwB#V#)67qnM;Ell>k6S64p- zJ@8SZJ9Tf}x?QiXDtl%444>gMe1^~P89t$(>H7NBSG&8vApLK0;>0`z=H0t+;b){* z@b?&G^Z9(G4Wuv?tOzSK+upuC+t~Prl}!O{(I&n(9xN%W3!SF(sKg6as7X-96lr6D zlEPT%B%N&|FfOLd_f zHA!J8bYQZ&`nh$icdV{{hO}0Yq%Z^)JUxB-t3wX;km=gm7f7RnB!xa$;PlLy@A8iH z8RKhIkfg8-7IYk1jtg4Smq;rINeYW#0lgk`d_;^%Qdk6xb7s(mA0Ni5Zbe9;0*14? 
zQ#lrM&&(7oe{Ta*esC+2LKhlLv`h11;%Pu3a#fcQZ6dVCsy7e zL0wG>^T1G+=-ukQL-?LeMx(pV@2!G5@g>*Wili_PjMQ#DY8}GA!5y2YBfPCh3N0{D zyY)c-H>C1OQ4Yqlt*tjUf1AL!=SWLikrY~>oMJ%rJ|*Pg4EfN}Yl~9QcSsBUltKcf=CuQV6B2%DEK=y5EtqOEwZ5 z1q|)~z0)osmD2=()*^Fuv`skT-IOUW@j||!0HPU1qhBR9R>4B8w8b8I*NpLK9)8PNHhj* zZQWElF36kZpSa7S1q3{O=1F0PV6j;^)q^wvdbzW6N9njAZ&^3Bc)Kg}I68`;ZjV9w zp3t?V2@qG4MDA_571Y~cbCGtNFjkz>7W+# z%Fz`VW3H<@5OmejNk zH`rA{d3f)qvmmi-;Jqplsr<{2CSs1YzVCd#6=1RmPp~i5-A*5B83A>q;SA8U-=)Vy~!nK SqFb8)0000%OL1jvIjBFga_YzBfv;!%$B`r;*)uge{MvXF?tdnJiBkV%hjm?YFOj4kRPL;c4cs~4~LoBh4sX94^ z!wWjhJ!}*>7<|wkF5|}{yMqim#uNOiFdj@$j5&gGmZ;yLKJoD?vt_& z>XSJECnaoxV&nMZ4Q6X&}bP1gX zd;eYc6@GkU8gv-F?0+kFFpnR@W4PRw`>D-?@dXz78?PqxK?O}tKcelt%$NLy z7c07;I5p)DD~>RnIPdX;an=?))j^}mTe%zIaiZ+wG#6OqcPdnA@RUdV%J+Q2I7Orp zbqxt*L`Hk;V`q&Pzxe>Yy@G zJk+2*q>?(QhNRv-6Wm~llSF*1*@09w3yLb6RBdiPLfGH5sINvq#6JmKTb! zjQoFkkM)`>NK@*dBS?QD6mNHe9M?Hc55jUy7bT=O)j_+cA?-zkH2Y1zehBU&oeM2! z4k?e&1!a&PhxGVJP(eDS4@y%<`XH2`DWn=neUKoB^oZUN9;-#9tB7_%!#I5&c+f0P zgYAO^1*AH!2AcCa+lm`n6Lb)#2Rs{CPCpNk8XPbS5-j4BqdSnC9&X~afG`c(O$Dc~ z1IX!O0jDx4^B}=ExRZSTEA^1Y?WosZdY;2A=i^@L=O%7v5MG0#+{WzzuUYP_hq&D$ z;yp-^;6843&XDlfUc+bBG(~92N%}c_NpOc|k2%kNx->g# zkU1(eyG7jZ@l=$vv{vFfGK?`mk{B_P3@}E9E0k#M3nIZB;EhwkuQEr2JmO6AE%q8L zkP7|~c$ige|ktQ?7#|G01HrSN2$|jBy!~|M3`d!{qM;X_w?uI zM*O+&zeYm{A%qY@2nuY_`IlAgXyA@qvI7M)p=pAmENAnzmM2puhn=ODjDPaK+TlvIrC?=`GH( z3RKctoMjj2r?)uEGSF9Vah7$UkKW=e`#|sB;;c`acID@6MgI7+6wCfk-A}V@uYE>c z`^@Ls7u1Kb@cJ;;syb>`u6t_y*!zqhcPyv@ZQ(PZoiMbw_&_-| z^?4{pbPnBAQEP*~w-vKk$*6Tq=Ch6|sD)MGwXmw&N>2zOgb+fA7f<&R53Cak8WR8j N002ovPDHLkV1me}r}h8< literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_gps_fixed_red_48dp.png b/app/src/main/res/drawable-hdpi/ic_gps_fixed_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..fa422e24bdbb57d056e73732ac2d866834278b3e GIT binary patch literal 1730 zcmV;z20i(SP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 
za%BK;VQFr3E^cLXAT%y9E;VFFSW*B01~W-SK~#8N?VHPw6-5-r=dZb2CQM)kW)?tr zEQbH-V(_s;SQ8D#XrgY^rJ5L6Xk2se3=>QY(MXgKh@#u;_uZ+wmdCGN{kjDDCcjkA z={k=(U0qdO-LoG;#so15CMh9f zf|vx8l#tgqL1_Gh2>|cAWGHx!btcNj5J+Jku#wBgZf?Hu;L4TH=4Z}4!`~_?|ATc8 zFJJx?$JfB136sJuVPn{^`QF|UN|}IKVCw-&Cad>ENnsZ-_}Qsb@6_~^T9&rAw%*2Z zZ;6t^HZWLlZN9tvn{Pp8eGa zyDre1en#2ZKrV$fFc5Hu!PSB>>hGhgSMMHPy!a84dlTcyTZG4y9qsSmV;!Yi0Ap&M zBq=O~0bttxFbFCO8vi?Pg&R07y2`A>y3l(UJfV$Zj$><;Bq=O`jzMwHanpROu1{Us zs9Z8|vDw(C6qW#|Y^)0CKP;zWEDmwpy1RR53pTIMcXpnKz7jNkWRb!ga5zDC&ko3_ z^Bv005@Vrl49ee!J{73*2xX%^(Ui`hrhBtI2o*s&6$Zi~$Gi(#aL_P(8;&pShmohJ~uwq%a1bt*q0K zCSwlk0}l9#YFpVc3F|DS8$${eFwU_j`rzr#6*Y+U!1VCKg*&+Fe|Z}goO^L_{``A5 zE>0HfrDiu=#HT2W6EB4d7)_5on97x&1$CKd4w%|b1T^{^lx0c662R&kipL@;w7}pb zwK#L-oLV|8uJr8#al#WFTLtrd9dx7MY;CWNOOB(wwoz0{$1V*Ax++I!638!+a z*A-j>D4!nGX*~|D4p2F^V2i%D;$~N9%z`>x-A30rZE=Hs^aAl5#y&o%N`2tpyIEE? zLt{Llj6})Z$EZ_nvjNFoaCqs`$3iE^%I3-tg{D##4uW1^fjVnX5PHxpp_8MV1m&J< zP-pE4a_`NQUWze68M;Z(c#CCt-3ICUX?}nIp4V5P4yXCn&tEZSJ3D`(9A=u2;S`kd zqJ^T{&yo6VIXHLjw({vgooMWiWbY$V7y=`&>Z1lw&v&}rvAg>V%Dm5^wmWFd7`J4dp$<39xY?!ji$QOE2{+7m zHI6@fQ;zDkTw8o1Q_^z?>ma5dL){RZo1x6wAyTM-!Q7_(b){!PT@L!(?9z53IGv(K z-9LHqO{;Gx>ODjX6)>8d&D^V&vId|Ym~sh_+Nb6^hf_I@dAk(bcDJFlSvq@pq%cPA zxdyiYU)mKrC#(;c07bPe+&({`l<_U;>%}2~gY0aCmGk;uJ#TM+8OCBM9m7JTFa;cx ze7wMQ(_l%Wub1NE1p+5UZz+cAN%WEHIizNg50SzgbcF88uSsPvo+e{b4YZvda{%i6 zgtAc_Qdj~UauAgPg)C>c<9FqqC5pBCiPUSr3>rscgIF@!Lo0}h?Y}Iq6um%PIqMK?tFit(< zt{=)<%XJpuv6H1XyK=PZIUXsjg@J%68k8Nl`^F8(t#0KIDQpUZDp>;bMboJ~g%q}d z0fIsQu?0PsE9B04O^6hB0fXgpPxZiB0c<~^4;*)%7>^Wo34;fmn)%tAY-la8#r>RK zdW=O1`+$vcjhb>lxTa^Zr%(sSOMTFLVmwkf1Z<>hY-)+%-&+lq5Q`KJiA#V=4r%;| z&=pcRBus)yO30WXCcz{nWK0l~V3HCtCWuKeNeLMfge0t{f5F;jFOL+C{EHNhcorM~ Y0N8S1P({r%jQ{`u07*qoM6N<$fh-k6B>5Z+iNmzXE@|-8<9D1JTB)#bJdM=;y4IiGI^ZfXp z4gW)3$Tqkh&Obmm(7FlK4HP4b&p4CFCK<$1Qn+T7zBSu-x z*V?*igzL6}Vk{z@(q0`4+Le??Sg;M0MX0ewGk2=kSSGF1Zl#W}!!D4IFrhjB6SnSX zs;3b0c7etbE@>8e6I)HqdK*J1*aezIIH;VxNEo9^{SZRg9LOZ2l#~eLo>D)6iB+JK 
z@`Lk)F{jk`xCK(5I1R$6EA`2B7D)ZeUU&X4>*S;^+D~e`Wq(LHdx0>{+bw(h8{=i= z>^7R%`j>P&Zj3&{ea-p1*t)H$et>Y=F3_%39iV)?E`sK5M~TLi!^s= zYF5^5J~8xB{zUjI+-J~PWFwfy^)kvm=BQI=jw0tt8^@kQHXJTcl9veY*y;SCn5BmB z-2OvR{~})@(v*9;n#c1uAewJGzr6OLPV4OFkqc*IL5LAgU zm$J+vd(UB)mflr@Ru~~}9cClvs1fv?Yt!=IP)2-!BicR4xU?-DBIwW_mTlQRxVhRw3mX;LkZsIfi^aSxdtVF_ zv^o?LbnIZ_C{YG!24yiAm_!I+9u#yOg?Fd;K4bHY-{<-LI!N>W`~xp!<~RI)O0)lA zL=)Eut~(~6Ca4K&Ws+W`!9t$l4j=M0(PkOtHs=Zaf?D|;sK$^oJkQ5$(x?r_xR{0& z&!H|U#~{CAt;`@fchDq245^EClC{Py0yofgstri_#3Y#EOgfN~_C1rfx#0q`kt$4a zm#cKqMnINM@?2wx5*6xopTOlD>N|q;8f{3-+8JPhe>XvB2iZy1U1o{Hjk|z^eh!>S zAw_nn?VbzhB>*RqaE>yy^``}$VH-S;2z7wn*q(7rOk!w7ZCB(lFWs8|W*vtB7Y(FWPiGD6THVGwCdGnLurbnY3>ropJ+Z5YMEX z8z_r-CWUUGPQ){*-3^pSJd^S^P;|3s*AUO79<*<4=g=tH5aOBi9@@AKbQ`UNcqWz5 z-nN0xqg8msE2)Dj+EX@Ah7Gg<#3Si-v_A=KAYlw`0r5zBoZtAT4-hNpBGE4YN2KTM zpnx*k5}_lhuMX*#WY?g$&YB*NrGkfsMj8I4AaY5PLU&|oo;#4KcmGBq`SZf>on>Q zMtC;<^j3~LNY_n)2TT)fhWB}cCyx9<`IMIC|Lu1wP?FKW!Btm55ENw8m7_V0@(G_-Cl6Y)b&=rRmNhA7jEs literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_help_outline_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_help_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e9585a04413371c3e5cd1fa505a32dd82f99b664 GIT binary patch literal 1174 zcmV;H1Zn$;P)yRG19otp z3EX-pQU0jp3Gy`+1}X61pc z43q9YA?1n-r0XK8JW<9S(w$RoI7+&U6v_h|XrhNPQl$8s9=_odd0++Yr2A6Av6k03 zU6cpbbBkH#o8<=UMN{$!&Z_>8|Wv#gT~r!mu1Dicm(5=7)e z6Xq=&H4mFeVjAT_2@{wT%7lJQt31$yX_g0m#QY`?T)_0n9}!ZR?ef4FW{W(q4f9+c z*oB#*NPeipjLQ=#OjsT$!X)K^0^*p1@=)g26Pt;=mkSAi8 z%ksk@rcQa{Hl|IUsK*S*52KiB<%JWNIX24^HJB0kA%Q7Z9{3igpX7rIOk93QVM59g zrTl`^5XJIA7?YGAQkal(LJfc5^g9*uMHrKm9}<{y<$#U6#Ho)mWk3ZcEOtV#4;Gitn$qc4fL9h>}Vak*d4$6pCJj2YfN5OCnr)~;c1~|wU1zf=?L8XG@2&#JEKqZ$Ge7i-99dvQP;QYw=LU z&*X28D^zN+QOg7J(aT;f7537_G(n}f%^oc^Dmg(PNivzDm*Z6E-JytsoZ&J9j1VVD ok~kv_&`u+t^P!e4TlSv%A3Ll`2waD!UH||907*qoM6N<$f>w4B{r~^~ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_info_outline_white_48dp.png 
b/app/src/main/res/drawable-hdpi/ic_info_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c41a5fcffa8c4dc3684252fbfd61623837e19d50 GIT binary patch literal 953 zcmV;q14jIbP)+=u1!Si^=c~o6pU56D8MW!jS z$u@O7JnC$-Nr`E0Z~~QViq1F4UDojk)8rj4KiUtv${KCrYEfp?UeGn(lf*h#%mwiZO?K>;SqC6p{w@`EooS#EOw+c}UBbluHuSO9kD3{Dexd zGgQ&NCewM)0=iXlsD$fh4c%;~LATKTP7f-@j@01>+c-g20r3E3BuF*`h>_@^ zHN+KB7e-Jm@Z>j$y%;(Bn$f*7_b_YN7RCYJVeH7P%ol+c!LiAkL*xA)X9%M!t}A%e=*t&auG}M zUL7k|gPtKKgM)4$K3fdZU#`Cd2c5t}oVOJe{qaNg4l!>nXc19Cg$22YJsc}RIU0!D zQ6KVKhxdR-j0Rnxtv}Fl zg03K-O24t7lYB&c=W=+E>@fn$hQ5wD$_oT6qv8ggqJe-vjQr`djDR|Y=pflR=t-%+ z(kr36+3A;_^XSU-C(b#^3v@H6PJ=SMLRaN%qMQqSME8wcL$0HkK4xj7 z`v)iS)q*bxcDYYB<~wVOFW}3`r@4H|@fyE97P!cfkP~%@MH=|M#7TQSm0^y9)p&}_ z7{lc0-Cv$z#&CH?jf4HhF*6r<<1|RC&P|h7oP+#Da^G2|VEz(#l(!^NVZ`1w`3S4D zh|A+Sm+k)}L4iBGp-Gr;RJbh%++L9EIM5D%liv*A40h bb%Xu~a~|hNXxfgr00000NkvXXu0mjfV)nVh literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_launcher_take_photo.png b/app/src/main/res/drawable-hdpi/ic_launcher_take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..63d0b40fb2be3de64cc470754936e349b8518f20 GIT binary patch literal 2811 zcmVPx#1ZP1_K>z@;j|==^1poj532;bRa{vG&=l}pD=mCOb1snhX02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz3T#P4K~#8N?VNjTQ}-3e9Vd`MAPEEKRd7g% z6Z>A@>kKN9ibvT4)QLywvf|y!uVrr|Y$>>lSj0ORD z0Rc)#On9hJ#+y5lY~ZhU{g^E>D3-}Bmz?f(`Z zme2`bJJS=l6NyB>0-G)QNrz+5hP=GYKNS=V2e`&-xQ8-Cl2)fr3a1N_^vnGGv|S!) 
z!iTc_i$+;~yGfSokNbQ#iLXxheD_cKdLY)Dp1L9FnBxBQd{33dDk#v{mqdiDZA<<$B03%e%QCV*%<- zd$DamtZK-;XNeVVsiev0yUGO?OHhngc6vNx5wjY4^ewSUl4iuxkO~K_b}f~nJRPyC zDMZhbszmt*)Zo9lKw=q+`Cg^h`vm0>HIV{6CmzMo35XP>o@C$Q_C69%!>Thd+_t;j zCCEul&AKK0F+blK4->LkFmZ2GS2xRBfP`F|?HjZ+uH+huXJ`ws2e~0fHPxP3(gCmc z=UfAQfjX~u8FExp?Y`N|T`o^wn0Gn@^U+__V@Z#CQF4PM?cf~uB^v1LM!DiiK)1}k zQxG2G9QHM6gxvqY?VgOB)znQGCa*@H?*Qj8Qk>myRo zguS2A#=+K<*|!xHP2wCy3W(Ek<1?kLt<4lIH*Q2#B=n+f9?t~iu1e6g^{ybD*sQbzK^~%+C>kM33vxeH7$;M(9tky3&o!QVpm4dg#N)9GC?{+-+@D?%uuI z0WRr?(bA$^dHEGKCbLwF;!mJ~Dg|%J)YW!zmCbNw`t%1Qjz;V4+sf&w)7aR|a8#Bn zpn)nyM@#r&R+i(qEVpr$&2VYOiU%f+&c)@++1Si*T=w0blA8KWXrW4CH2W6X4{#OD z!U@DB5ls*S7n@ni3JbpjEmTR2mQ*7MtGG&L2^EM-Ae37dFDn1syjk%-_ncBUV@3d6 z>N zl=Mi2CIMwMOOR7iGOTV8%?Na%tjw4^iE(n(D#Lv9#f1yXiIpom2zBZG_YJ-{tO`V( zq=nS!u#(vqIvk^LCgCcXC9FWy1L~$tGl+AJCS$fNasI8h3ti@^WLH_uga6=|n=H`NM{}pouDtYIeLB?ICvh_16u_ zUaP5L%LRin!FAw+E2m>sAR1!-c_clZ)~pbLiUthGp{;gQC+Q3CzH3PK($=kPxnNKx zxDI@9<#enHgh@KZX3K;osx+$E2ia^6zwG;*t7MjN0^K-#*pO`Fk|k`pU>ZwH4Rzpy zE2m>sAZ?i+22BFWXoe+i$Pr08&Q&r?IDuMQT9n#}6Wf#BD@n@rLx&n>Mv445~{?l*`+;1;o?K z;2L|L>ak-DWl=ZS7l&1W=sda=T4dfy_&0J7Bd31+`6UIO`U2;9PFPmMLD)? 
zSpZyz-pBr_^~*0=pTSra2pW(U9gZ5X0v)7OEsh!>sMt@AcMjmCeu{fdbI6;aX*-;-}TDX56^IgbZrNk5~Llmnzq) zs!SbFgRvqID!%K$U_weB_Z?DG2}IoP^`-+_`Mfy~)|9o?>GQ zN0lJ#K<=6bomya+{nqJxl5-N4;$r3I&Yf)Fy}@VOxASp@0cX$`hY!aWJi8oTy5Zy3 z+ykO?lyer2Q)9*`mshV=ZdO$EDwkh+NjWuoG#gVm>b%|~l(SRIJ*;5{PygMB5%Wk{ z)?r@)y=S%4ITtx=xtlUfR3ocIvCn-sF_daS+)KIQ@kW=EbEpM+(Hw_kij|FHtV6lA z&CbZcm#4bbdvwEi9R7~WwT^S%7eH>5<3n1m-8RhTgQ%vvM(`fw8i;3*6LLe2TCP1Z z>w^yxjhOvrAkp=WFERCn(hGF$<>ivja%fa%z)0wr4v?x1!PQ4_3$^{fl9HqtO z#~Imk5VID0)Wp#Z4d76hH!d}Gai4lUzILLEfNQ*ld;3K1 zDU?N>CE3{v$p_9*n0(Yy%s^a)YAG9-sd6p3PJAZhf5VDK95p$y7C06}cF{{l}bnEh?u8wCIW N002ovPDHLkV1h4PNd^D_ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_mic_off_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_mic_off_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..7a15a9ea9e9125e04739214c0fad7c0226d5eca2 GIT binary patch literal 713 zcmV;)0yh1LP)HNE`WQNVAQwxt5?=Ldi4JNv?e8W`ZH^40M<}s4YU)Gn(=?Nztv4@ z0;Coj$nh5G7XHoNx=EY)2mrjoSwR<(Dr~{OYpwem*R)7FZwIaBE7EfcHTUfl8KX>v zHzW$_3#;rP$0b1OPk+ZLq_emXpcUSq!CnI?*t=}E_i3(Bu#;>c$2orQ8I5+54dl4t zk>kmH0-iVD(6@j;H`BuH>SCP*g@vMdHj v-!T&5!~yB+=7Dewpg7AxIVcC^przg~#hg&q9iy!t)x7$D3zfgF*C13FE6!3!9>qM z&$#E=^Z*71=98W-jv*CsZ)aKO3pzox`K<83pZ0$ao^3Ohkv{|cc!Y`w=n#d$+eF*?`-!O$Eoj5njTwrGZjdL z?(_=}_jmjIPkoE<(Teov%C{I__^3^unyPSgS>mC^YuB3Z6jZ$M-SpPd87fEB5*MwQ zReZfc!XT|@Tj}H_H$;34mKN1a3-k=i;ZiAoq!u%=an^3lB{x_+dZJf zCF;46bArhJ-jqwC8EdrtxX-iN-@Ctm>ZSckmlkYpZnj>OpZH&5i3GFw z*QOaoWhGw}me#wb9D08IIKS)tL;nmt4B7Nvu^Ogcc^|)Eu6AZduN2E#<6HOsJ^HHs zM!s7l`N5(%0X?I}H<`Rw?Po3ef6>{yOI=L!Sc9we;<)$i;vSg}SN7J0dd!kGtaHEp zPtmvH-Ver!7oR=z-tsE=*MWSv-v0g1KTQuqF_sB2*Dn0D zugViPT_(^YxoGNChP@Ntx-OC67JJWAy?1_`L-2=8d-v!m0YR#wp-YUfYj3YVSCHK7 z-XJ61EfVbT;ribLG8OSbGbAKZ{IgeV{5e@>P2VHSYuDaA`&*lE!PHcbRdZQ`gB8b< z>GE@&GdmyuU9>ws=-12i+AFgruh}HNN>1r$h~M?=>whsmTX1OKLSD7mOwS+9QN6!N z_(+Lsudi#@N(BYEJ^9a@{!Bmgb{f-jjoV==-AkwOyL5#nI7Dtev4HblecQGm(?Xu( zNqlnuis~jE4T#?AwNZ;>%?F=?gEPNMgHeAh9fPw?+dVe& 
zN$BvMF0T~wZ8W{NcOTGIqcm;3zzU;3<_Ocxw z8+|uDS?ajpTJU1F;QPU+(6Bk-(EZ(62ZP?ti(Qg&X2K#v=dNQsno`Ed9Lvd5?_X&?yZ1=h SX9Zx!XYh3Ob6Mw<&;$VfsOUrh literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_mic_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_mic_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..b0389382e4ef99837d898958cc880a77151a8bdb GIT binary patch literal 606 zcmV-k0-^nhP)Td@cQEd@i=gHX_eU=eBQLA@z{1pgou6|Y9nYVqbNCrf(}#V=q% z3QCDND2Srcq+cM4EtR^Pt$92RyE98N#4+0l@9(|sybBK{(1BG#h~7qyX%<!2JJ+F8juXd0KvY=ahYna?(84VP-R zK@D7jY=dxQl_0{ zYx^DAb-$pa#Kw%1Je#z#&Zx0lAa>XnzU- zw%3eVTZf3yZukf7WgYDmxzzb2^Hd24Smg<4Qs;fVLHofT{~*O}VntMk^N`pr6#hZ% z<}0x)9p%gr`$E5Oke+cOVnuQ&_t3|EViDv2@}KKvl9m{8zHxwM==?AHcfSSL;vPdS zcZZoHAn8#zt{-!l0-ke`VPj`Mmv})4?i~Lh#Z@BG>b&J8OH}wq+BVm^c|ki)l}^++ z(d`@K07bTNgJ0ZbAe$@f5VO>2r_KZRW&3Gi7iTH4N|U4}RZ5(rKl^VTdIm`v^#45N sKS@f0BuIiJNP>Jvf+R?SB&f&c4>-j2s;86O-2eap07*qoM6N<$f(C;Wla+ig?7wkn?VR;;ayc7ARXr!Ecup#bG_k4NywT7wAl;17=EsNp zg=e#vGRlniuZ(sRKJ?ju`9ivh;+}5jWFRk0Cdzo$gcBDceK~>)UrfE&Vj$AKt4FP6 z)>8xH_i?|LsCAT?n--tXYFhD9GBUFA;?(t4t0w~OQ1RT=rtgt=r(W%Rwh+*r44$rj JF6*2UngBq{QPuze literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_pause_circle_outline_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_pause_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..1b84d36cc3f547aeaa57319944e7ce5e28c86f9a GIT binary patch literal 1822 zcmV+(2jTdMP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz29HTZK~#8N?VH_8WmOo)=ccmHtI7znkDF=o zQi!Oi?89L!3=*n8!JCr0DTaxtKcHaxunbJc6VydiOj=No6-GpKt7r^)j*K%7PKFoS zIgve&EcLkdv({dFd(K%_`+*ppA0?|b%MpYJ(Ts8FFog$fl4)#m2rmEb5i4#vQb z;5V=Wss{c|8~?{^cx?~30rMy#s=~L_#FHN z8m$^Q4ekT#-VH$-8^D8L8njA$GVce^4 zexQ8Jj5KECGVl%vQmt0oTwPr~zqGXU@yyK3@u{h)Cx?cH?i?5xINaIUc@yE^wDEtu zhS%aV%lvExy#X!-%I9FDF$33tZ$PT5)#^&QTpk`98|%ere@h<5$HyP7R4VVGt&67F 
zG<^lG0?PLwq%n}|z%Rfn1alTo!HMqf?!zkRJg}|V+1Z!S_L8}cPd@|ts$xh*8Ux^y z<4q2nuP!VsoNQ!bY0joVCO6~@0LZ(1Wim#95YSW+5ndWl>={rcr=p`I?40Ofe>OQsI9H- zX1rz>Of$9~0V)UH1o12?A@1%e2SSLMAOhb%G0oU|5vUw^6T}0%gt!H&ZXQDH1aagw z&DeS$s2q3`#8b_LxKFHZ9zyH{O;1lhZJM$515i2eCWuGB32`%B-8_WY2^tz#?raIBTa*)bjSV*<8vGIj*PiYjW7!ydN16HuRhDsJt zm6WrshBIb-VW*5+O5DZ?u6mV553FF9G9`GW*d=&K4vV9FdK{?OB+{6|o4_f)ZZY~K z_Bko974aw3fZuqd0w*-pXX8jF{j zS)RwXInTkp(teNj6JV`@ip7gG2EvZcn*d?CI-ugSJAd>hFmWu$Ge>H zA&nW}lf(-4R3Vs)EJ5M614kMaxDS42a&oelt8%pE98E3pwaWG6Y$J^sVV}gG9t8^l zUSJ^sw?Mg1%*}M3IPh48@Ne4qKVHL~Kz_!2JXEiN%zciHH0Fd;fY5KYnc_Sr(@^e1 z8utOWM|@T8%@mgpoFV7_>61bl8vu6zSclvyap9R-(whfqY#?-+rM8)VOBpC`c5y!^ zXwjJ$X>0;~>{(69f=Ql(=M^{D`LuAGH22V*1!-(DJV9VhAj{skk9jD*k<$4Kdcb5NBZxT@&Et; M07*qoM6N<$g2M()G5`Po literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_photo_camera_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_photo_camera_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c8e69dcebb98d43695027fcc7e39a339c84dda51 GIT binary patch literal 666 zcmV;L0%iS)P)y zDuP#FBS9M!QcYr?x(Xj5^OKhEOrZ0;%Xt?M^PS1$oUvkJVg?bG2*VOq|FP&lUkJx< zKARBwK%20%c&{z=j$iacgSVPOZ>ZBRKY6Vs^pYxn!v)X(MaU+{KDTId#}NxSK|*6x zFv$sHenK`COmmFwEtDe)3f@BdM8ToA&@EBW@D_@SSjd>rJ=jul4hPAn$ZYd zQ(^?GM-r@1H!ZY@i{+CPWphGpzG8()mbM9@HXpGBsG>9Ms~`YLf6Qp{EBmWamEC4dDr!XN=Tg=UR*LRnyXqu_l$^Po#V$T zj`&?_>IiL%W{Dt*qP?plloe0b6+u)P>h$wELg`L#hA>QZ`WYP|H+WzkLrdxi*`3}7 zVOSR<9(Dx^Vxd?l7V1BL0REY1s|}3R@&Et;07*qoM6N<$f_%9k Avj6}9 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_photo_size_select_large_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_photo_size_select_large_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..2f6ca2c7ad22b9a3b85d2e7f098e585a822189d7 GIT binary patch literal 420 zcmV;V0bBlwP)38Kjg zWC{6VMt1E%6Pn&#v;2R5)7;u$KITr`ltdztNHRj50TD_YJvP>3ZfNQ&v&Vo=c9F&P z9)St!fHx5I(x5tc1wk_j8h8hVNl@e+G}b`nm^lXtNI+Vg0<_FINI(LzJ#sJ#NI(`+ 
zz&Vs70uqpb%De3aB%p}Xa)BBs+U42yXDAd^M(#n^m0`$eM_TdJv09^fATO+o=?N3s#?l9qr z>K{dWwn2RsjF($%W>j6;1U<9z@2Vw+wm=_jw^gf=5ms6N O0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz2Lnk&K~#8N?VH_8WmOo)=ccmHtI7znkBg?s zOCh49aw>>vX?uHnXMKJBx0RKZ4`yd)k55faJux^qcvo+4@4?pA)|&`l)5iDs z8GaVuInVEU=rwRDP(CL{8a=oMdi2~RY3Wk2x&~nb>K(f6dJRPr{IN-j*f$>(P>~=b8~YqqU;q@jY~fO`l{Lxi!>&H zPmVJ11ZzaV6xgKLq92}Ek2JS?gTxk@@jVBx^ zr=6|(`}+?odzP5lE$SrUE1=qX5z=VEBMT|6bChFAz}DffnVFeB^_5Ox&SE(JS zHt!;h7P2qnmszf55xB9jQJMLcmzUpdZ*RX{eI^Uo{o(657jfok(E(iniFy?jKu zeNYZ1g#oGx4fuIrV4y>NE(_-8=U*~a+2VS@xebxV06ZoQ2@P7Ubqf&;5QQ@T%&W_B zOM>#FHn_)tYI_A~48W7Rkhm*Xj`&~)h|06eEiNt&n`&%L0oC>j(rDoAL+F5=ji}|w zqKXPoRe5x|iHV7$rg&Rh;Bug1;0zG2WCDX#o786s2Df{#H-(sc$ls#_r*6rRIOcZXlUqe zQsD32}`WMN_9WmAH2lE47v*`=zOr{}zJ+ZJav)#hEK(SldBQcPY_P9?!y z9Olxcf|N?7w~V_=_W;%AU8GSUZ#VGEZ1Z|oITpu$Ts@DEk3R|@qpqHvgL4&W6fj9! z3J#oMqJVO+p7iwew8A&JpM?Ge_X5?{i;%_$n4~SmRFg(Ze_3P1gdf6?0&-)Gk&m>#w{ft<0Q^{l|~25V3*>I#30o>1rNbta+FJt0o68% zG{*2ch!ne9J;o%CIWgUedKIWPMIwzcFqM`W?2+QYopyFhgPEM&Yxp`PA#i$8wLj3 zhgVQ`)CxH$2FJOba3PHz@JV6@d#uoyHKw5O*nu;RYPbu2XL53~i@S1^6*-$)Vz)~5 z|(EtDd07*qoM6N<$f*sh2KmY&$ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_power_settings_new_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_power_settings_new_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..95a2b7e73a45a5ab7486f1dd5553232a904c4fda GIT binary patch literal 787 zcmV+u1MK{XP)v(hAmMNB?bz06PQX23KDeF%a9<{ZG@FC z*q>3zXhuntT6SeqTIP+Kq0-CDP<^}T;`xn&{O0@~1L1tm{qRN4>+^&H>H>j408_~I zaQ*>;pde^h2Gy$s)$0U#Tm_QOK|R1_JoljMe3kSWBslm_5NBP2j*u_m zE9exa&m8s}^gHXACivM`(67v3dc$x2f>N07@tyB~S&T=RZu$v2hUqCkIi1wM@DfvV zp+QkzA>Q$)^JV`x8N>@p3JW@ksZ-^~cp6iCAwfl?5t9_D1Vwm)n8mXX+KcIbwP#ra zrvL1Nh7k``AFk7g0o$N3tBB*OL2Za-JiDNO5E;H#4Jspt_{%P+3vo;ReJF)U*aZzE z&Zq~SM-18p-9;Qw4?2jrV;8i5_(MIY5;1QV^cL}hdQcf+!!GC(BCH-1L433e${|YB zgQAG6UC=5brXCbWtk?z3BP!K{st|K_L1TzRgp`j)M9MDc8seOKP!iE+7u1OuRSz03 
z{LxX1$nc|TPK+F)+AhfBHR6P7PzPek{snCa@lZ8r3ej&Lv`l{T)>6iO@@5E zZZk%Y8*Dbse#_h7V>TOfpTb5MW~)isw;5r@=wOPih6rm6&OI(7M3&nmX{3rcQKH1D zqLC!y;f@;Tonf|eRgMy$SCXo3#U&Y*@0w|JqSu;kW8`%L1_GX^wikkMku z+!nN<`{#6z$_n(rC9(4bD+|yj&iNr#+MqpM!jbj0K{JA|g)V3yf=0TarU+{5f*K+y z*9B$&4b&1r9bM2+1dVk;a}l)E1wrp9=N7a<6~#f$6D?x75&I%ZEatS z3r4ib$>}g=S;qtI3$1o(pck}Sr-AO#YLW)JV1q`lWa+;+-xWC5gj7%<=Z9mdnhJw*IY47>;cMBGO`W2MlT%T*dBMBZ-J=YZi-4&p~rJrZV?7V-qUN*>sQmogTvBA=~1M^fZp;26L)q&nLFpu;!Jl|U%xL{hEw zD}$c_@D1}N5K1`((?^UJS7=rKmIV?BWn7Kv0{1)1^aLdVTb36vO%fM8fcn99vgi^| z>_vpShZ2{%Y}!vO0K^jqVwxgxxC-?S{o(=v6U74q`4Dw!d^Z@Owu=>i=nxOoqIzP^ z8jYxr=_6L4yvpR~sBL1yJ*Yz@2Fh#cI0AKt{a8k*-8=(1FCCLmKUjJP7NOpjSY+W? zwcka}lw9aWo#`LQed)LW)#mnV+x-K%FC8_gQzaK3Ld}s_Sj0Ab>MjWd<+*h1gnI3- z3PkR^j-jZJ!UytRI;v3L|BnN;|Ff2CyyV2rs8|09$9^H|F1Ny1ogh%NR||5g+tqPS z-hswSEG|T~x=rM}loarCeGfIm)?WUI+RHmoqc~v->U(P&$X%!-Jp+vsM;wK^U2LdF zeM-NAw6FvD0<~3a7)OMv6D#|+bQ*NTD6!*8)cXu@2sGaF>GdPj#S({Em<|d4pm;E* z$r6uyQ4#wDeUzR=59%&TBpx#`Jxz%O!Vq2tdtr&IqhB#o0->DiF`Xl>7|SQY+4YNhz$AZ7;p(sM93lIn0tiX}Y5UXlkU@fxXapj?t+G2a0{atH%G z=jKQvQa#8ZNrpvyhi=9O`a9O-Ha;ZXLkte{TEX-g$1s^v%K@LtNqj-3>)mI^wb-i< zxPW>Vih-Fla}l4A-I0__2(0C!#4qqZWQByoD9%k3=xjzyXiVmgyaL_LBq&q9iy!t)x7$D3zfgF*C13FE6!3!9>qc z&$w6RZ#n}5bGfIBV@O5Z+nJ}kgdIiNKAY^Fo7fhkHrZTaM))j7eT}Xcht|GdAT7Q~ zPc*i|(_Kcuk>z8PMaQn*|DIN{u3zVu1jUBRei5k&`LWyos^xP3(9%hLX1TYvWL{q8 zI~&aSZ6AH_{eP+VZ+vpObZ7f5pKG=-$?I5kwfNm9Q{KFbOEA-1n4~qKC8wz3$HqxB zm|}i6?7OP9C`n7;Pv_hTs&fo(hXfe1a@fr7aXUXZCB#A0*7<&vXdqCiYVNDZ3t;Ao zdAC&rMRrc_oyeqhjLk8>Kri7;NJcHK1@@~=NEsO+9i-{B=KjyETq7zrvPT3sNTC;Wx;|cci)Mcm20e!V6xD^e{26v#@1ys zly4a=PU3h_()Dz!0*D#>V1vxt{J9E?8h74X&>!R)lEXD8HqtgiEdG37e|%mOFr0N3 zg$RnyW^jL+QUAswzNz=O-ml*I+ipDh`_HyoXLi!L2ca%U)efB8wfFdM&+fIx)4V?b zqh9#Kl=Xtg(-YQLwBGxz*{yqQL3}jJ%-J_h8@u)$@QjR`x<6M(%05x@u;k_?Gpt`2 zEncKw@a{_44Yt{D%D?o^oo+aR<0o(DIcC3^bM-^MJbm-xgIJ-+406S?fw$DU;19U?|cnBdeAM1KX!>>~n U-<-+-=28X*Pgg&ebxsLQ0K9;}(EtDd literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_text_format_white_48dp.png 
b/app/src/main/res/drawable-hdpi/ic_text_format_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..427f3d52de690ac28cd635ca24ea4c19af38c011 GIT binary patch literal 505 zcmV@a3OKQfeQyDvxCzvUl%Tv?BYC9a~aD11TKE`6>;E5 zp{do1^7(GoQ|;61RogWC*xv8=<@I=--akI|yzIS{F~%5Us3NMQ=A9JtPKM8jPpk?b zbP_7_iBiS@1Sk;5Ic2u z7ox{jzd_d#%Om@55Z8PL<@k=MjGSCSw8{AkI*VALF!px$5oi1by+%BqIBOx6eFg1D zactuBIEsCKf*vB;WF}5m(?K+S1nuBAqW0GtcMyHHyAQfa5;t52t))W}9oD!Fx=0)ImLQ6;xz^_(fsr-AzP@0Yx`KHBk5edAkAXwu_)V zT~K=y{lRp}FFxopsJDrKk>y1PrTGZz;!NLEKz$&+*r3Cpez0z)Z`Sh@)S(1H0mp(Y v$buZOAPce}mtzm31zC^EJ${fq&u(8Rp%p9Zi(@7mxv=QZ?X8M@MrkFf;ILa%VA9R2lydbCo z*Jl6z?Ih!nMfPIMhdc#H-MB%=m3I&$)y2zkqk za}wK;)DR`8nhe?^Z(v0j+qsW+n^^du@6djgvmbiYa|LZ6Y|vq}Au4@->3{sgAN)*; z7ME8m8Af|QOwcxc-{BmV?+`G@hg3Oc=%jU;g9rVhPZpJdgTx~5I{uZeBH*W>K?n5@ z4tVXj`BC2>Xy5??+Jgn%MdSm&v)nD{6T=Q{#8i-=0|+>QRZacsmcgF*0t%w-SaIknzFU5G4}(V!WbO9f959o|9xh%v)K@Vd-q9C6$` zXcUn$9`uRKrWbMEJ7^BkX*}qn%%&SL;~lh&XfYmiPiE7KSn>|aA)1T_t;uW>h*j?( z8xb=eWUB?T$Ztl_7v_T;r_68;w%wN=Vh^~M+UlP<<__zHiB5gPaKJ~aU zjs*MYxMVm-e{@_%9PcjOZ;vMgxI&8#lgE@Nnu{Y@Q!{QENz+jGE zpba51#aE0+5o?}H{I3bCQ!N8?h?L(VzkoPmDCc{1{AUQ8gx}Yo>xjq14dnc;u9A9I z5ZC;_m$f6{q<)n&Vcpl;=bT2soBl!in}^MI^;&}82xFe4I({bweL)8a+OPjYz1ctw z=Ll+u8of!#L&OIAf^Nr*A>ay?j{T9p@bf2c>7%G(0s$9-?+)DO;8562&q1`SG+;#> zjZC0@8-5oy#x1l5?1+@JlZR+G*%D>rynzKYn^VL?ZU~>IfHqG&@}mVu5@hKMxQ!L( zb5_8SlPKlkO9QuYT4RtsRCpd$!Cr>Q;dGODsgI{(oWE$v{ zarX^d%sjyBVVzRtIArn>XEPUxZi5RX%|8TgXNo8?v~AvF@-`+Y5R}a|_HF)uBuJ8G zl03ycCqqiV!OaKhQA-EMInNABtdb|sDoe~T!ZF^KWru+vqoW*D4k`!z53YsJ82mBP QQ~&?~07*qoM6N<$f~#mBn*aa+ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_timer_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_timer_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..bb6f9a63b5b54a9c6070d229f99df1fa66be1537 GIT binary patch literal 901 zcmV;01A6?4P)H}E!95p8+ywss z9jI^1Ok`BqvZ^m5bY^o?VQccx*4iXczJj2?lX$7;o_ksX=JR_DfiLal=HByD{}UtF 
zP%fizo8b={1&xCI&nRfvf;3YUxk8l(wCSyobS2@qi?AyyA%`vXgVkfIy5!Z{$59q@f@Q*eDLGMtF4J$5LWG;%7)7=i^o8U^4Vt0fJzhjboE#1M zy@hfMnqULdEmA0lIlvO8bzX1_stx2kYw>KPLT8+BG2L(pI*aKZDU@@N;s6ay(@}#0 zenV`^wc{A1ILsE}A<1Y#3z!P1*g=Z-FwI2?(lilu_IU{!<9kE{Z69Z3^+Ae4c(P6*WpcGxi5xbxwqJr`oq^Keac0pGV z7rh6S5M{fdD&my)pgiImyPyY%N$){f#6!EFHX`jkD1+Fr3+f?~-h%?f_OJzYhwWds zI&2qp(=O<1#A%f0$s;OuK_4M5c@HWf7VU!0BW`*Rsv!#QKd!zeO%IW?3(~X@vtEPd z5l_%A4`_G12Hi!JT_0gjc*}Vm(^1!l{AI?xc? zqJwxuGTy6ngLJH%3@ezgFi6n6Z7ul^H{jf9H||CLg%>)Avp;j z+aS${gy~YHM4l`e0s=B*$y1_6k1+51Tu+^0ojBHb!}q>yf-0^m9NL`)-rElAsmDz3 z=0-7Ko}aLFpLa;^#ftM4KISPqc|w_^=)GM6ALd;asnDcFmoBR`sjx_a9C}#STqCiuz3C40riVz-R7on(@@r|3PyO@`HkYu%Bj18sqpx(LORt$)OxODcLs4DH$c@ zSdx;!I7)9+60heG{B{Kmisn#wH>V&EDQl>nv2@`NW~|)d4CY8y&K1n%tfb?ZlX8cwC+ zTJl6UZIDl0z9VMeU#E4)|*LFOT26O5vC z9x*PVC`#`!6R6}zka@yH$5BN0l*taGi02}8{0f4jz6JfEfnPz;Rq@e*B`l&h4azX6 zjpAY!f&Jn*#ay~k?H_{zswf_2V9alBaFFrD^-S?Fju02lDIPlU1{V%UekIM9{7M=q z`IXd8@+m2#s^nABa~Us^&P#qJ9hCe^S|ItAG)VF*slDV=Qb;w)r=$lmUL>uQTqV6G py6aSzE41Qpgs700Y^Ih<@E2tB&Vm18Pcr}j002ovPDHLkV1lAE9qs@C literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/ic_videocam_white_48dp.png b/app/src/main/res/drawable-hdpi/ic_videocam_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..44c28e2f2830f927973beaa3a143ddfe439f20ed GIT binary patch literal 234 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY0wn)GsXhawHhQ`^hEy=Vyh zQBv~(_A|Uaeg>M$g-&EJ3;(E>F0WfsuqEq@i4qW8@}AzdO6rx>m9XRe^SrNqJ^A{6 z_JW14-f=$P%=XJsxKGL9nM=c&PKKGnj7I8ADSj*`=EzL26jUe%O0+bb;bi0#P;hAY zVbJ!!)MTYN+qP{pwr$(_iEZ1qZ96-5PuJ~1MYHO+QMS^|($dl>N@s#Yarwv9 zgt}IWNn3F}1)0MRITalju!24H z<{N4eRYSg^Cwp4WrHYCMq*7x=!^5Q7)3W0zMv`jdIYw6eh7bS9hI3eo4=XrF7F5NT znsVb_qS(lAO7b%=^C~}4hG9f=uUzPXT`O+TUbuyp*!7YN^RZdYhl+vUv05${K4m>_ zYr`{}Z?V|GS90SK=Hl|39LSE#0v@xEW4#E|TL$zd%s?)Z9mi0T*zS=5_Yhk-j#X5= z$4Y_>krAU!&B2ae2$D-ilpx5zGO<_8_hrPV1SzO^QHmgs$%rQjQdaSzEJ2=>5l;}L zwBkiUf_yF`J|ajy#f$$4QcOnVO6t0w4_Qr+(K2EvLDuj|JS)=*#CDeqxSQB2a;)sQ 
zh=GX@9rPhge=d*>kFfxkta9L2T;}qK-1v?SSbVDxJit2K*7K=cSccW_&gS4YUoP~* zt|d1oIEC#d7aopllK<-F@H1r?9`Wtl$TzaMCvRTyK4D&xZ`?wds}8J-1VxpWmX>C<36o)sW-vyoq5uE@07*qo IM6N<$f_6$-d;kCd literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-hdpi/share.png b/app/src/main/res/drawable-hdpi/share.png new file mode 100644 index 0000000000000000000000000000000000000000..a36464b8be84a614d4028bc961946d97dd034153 GIT binary patch literal 513 zcmV+c0{;DpP)kD}CM1E~S9+A?v^KPd>Kxvry zq{o0E>6?oso=gDc{MqtZnBRxFh}(8zmvvjtO+FMa^eX`_%U(vyD6c(n&_%q6Wig-tEji(P!B500000NkvXXu0mjf DWWe0) literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/auto_stabilise_icon.png b/app/src/main/res/drawable-mdpi/auto_stabilise_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..38909aa1b5dc09bc9d9c2c3e4dd830c58ed93e24 GIT binary patch literal 291 zcmeAS@N?(olHy`uVBq!ia0vp^3LwnE1|*BCs=fdz#^NA%Cx&(BWL^R}Ea{HEjtmSN z`?>!lvI6;>1s;*b3=BerAk6srPE{XJaEGUhV@QPi+e@}wOojpu4_(%N=118ljSO#77LoDlvqB@h+;jvQd?VYPmRr<9g|MWY%|$> iZC>7XFU_S|>1RY&&3Wv%(Igh+YX(nOKbLh*2~7YYzI1v3 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/auto_stabilise_icon_red.png b/app/src/main/res/drawable-mdpi/auto_stabilise_icon_red.png new file mode 100644 index 0000000000000000000000000000000000000000..1b424dc5d31a180d0fc58936db9c810e6e08ff1c GIT binary patch literal 375 zcmV--0f_#IP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}90Srk*K~z{r?b5xj z0YMZ8@R`{UOQO&z6pDLgX-IS)%ah3ND~L@Ln_L=&VhRdcO1YOp2(MswjO5s?ubai9 zIlpF3{>dc&oEd@;Lg?JITCKtO77WgS*zJb0#CTGv)P)c##4khP3{1*p0VAP_#FqHJ zW0)ojinj@7>f!l3ra2|f7Z6`~S%ENS7(YrT(~!@@Aqre8(1tN~aw|(cm$M3;&L2bwA%s4SkUgbV VPUdYQCOH5A002ovPDHLkV1mihn+X5_ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_add_a_photo_white_48.png b/app/src/main/res/drawable-mdpi/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..b9cb5d4d4fa669143cb3b249f2440a2a686b0435 GIT binary patch literal 422 
zcmV;X0a^ZuP)MAFdPmM!Vtm`APhqY5CQ;%Ll^=C7(xgk3`2kbVHk$PAsoKr z{5^P`^xoZM`T+d?d_CvX|AM#%u7UG#!pWO%*@2W+xdBKB%MHLczSBqMTw5mGmK4zT zlE@3-Zc=t&TXujy4Hz(`Ea?;A;Sn-mU2I^1Z`=fI&I*txWu%UxLR3Uxs5E@qJW^E} zk!xVC_a}DxO>2Dpij7O)fG6wNSo*RAE`gbBO-w1FtZVx~N47o3e8@Vm57e{eo;+T* zht>gEy5wnPyXP8sAy1g?b?X3k+0rJ@O|}R2fmXIui8|H=0p-4M3u%bwBW}rbzQDW-KM1vIxu43Nt)P=bE1%BF?8-n$L Qvj6}907*qoM6N<$f;d*QO8@`> literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_bedtime_white_48.png b/app/src/main/res/drawable-mdpi/baseline_bedtime_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..de0eac8c8a9306cff28058a6f54f6535461276ed GIT binary patch literal 428 zcmV;d0aN~oP)d+3s4A%h4)NrqOiLn2;+B#^<17hyoq zpWb^dVI zy3#_4F8}NETxlT7E`xNG2F5sKu$uhwkOQKWpFWKFS>xl6B;cCMI6U6(^Y3kShXATr(~=gqSJI4IyTJ zF)bGam}<)fr~(p^sLW6x#Kk3> zJd+9PxaxDvXO@{~mJvlEPk^s0o+uhC_p zHW}HV#7|89U_tiCQ70tHZ??$G6$NTs66G5ocq(5!ph$%lCwvEbobi=C$`o~PDEtL< WPh=Xaw~9Uh0000JHdX=6dh`@*t2!Mb9fqI}2>ETvgeaWmju8I9Sxa@m?^P{zU&upx$_;id0 z;1CD|6Ljr^fM5cbL@?pZx(Cn+rrdx;)dSSD?S=tYnx3AZZNWn#SaSD-2h0Kp5B!^0 z=71R|+Fs0t5l@=#AHoC7slVg5HY_SnxMz@$*NN<3f1QxqmZ(cL^Kg0caS7vcnLGNwz{&cZ#`2Kjl zhkLx7-r~pC@BPvKAwO}?Pouzjmf=BH1xw}Y4)SU&;oJ9f{`^9Btt-d&9O0WFwO2v= zUXkOnwEt-wd)xLzS$q&u504XDw?{{&vUBr?KoQ^V{8uVw V>-KN=dk=IkgQu&X%Q~loCII_MRu2FG literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_close_white_48.png b/app/src/main/res/drawable-mdpi/baseline_close_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..4419c31ab3d5fc616f466698316a1b724dff7173 GIT binary patch literal 251 zcmV!36gH;NVP*#! 
zMT~F2&a4aGB#kvLV5N~fb-}_(QduwJE6K?c;VblbU!lkAib_&h{CN3=Wx21ApU+oV z@oJ@~!i*PsH90tiCiE4FFh(rJ*N@i(O&0WjBE60d4*!#-PSDG=f1~vu75=d6KdyT% zoDM6~So+OS@Y0)!Vw^43;q_tu=jr%hirX?641T{&TNwT|%`pG~002ovPDHLkV1hQ< BY%u@; literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_delete_white_48.png b/app/src/main/res/drawable-mdpi/baseline_delete_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..ad2fa12f66498700c7434afaa8ce7a2d017b693e GIT binary patch literal 151 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DI8PVHkch)?FI?n3puod?@y*1< znQDsbP0l+XkKri?R| literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_face_retouching_natural_white_48.png b/app/src/main/res/drawable-mdpi/baseline_face_retouching_natural_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..a7035beeb2e094b4a1f98c2e89d49f4b931c3ea7 GIT binary patch literal 723 zcmV;^0xbQBP)`_Mj32J(RY15D!8im&Sj662Fgcf&1lwq;9l0 z6J*F!q0TV?Exz)BC)^&gTcl5)#nHf1lxU%nBc5@|wcQ3_TOWLeG8SsFc*iZ*dX6pt zUFP)QS2zs-|3#7O0y$ekC>C2GL=j^%0Q`{0Tox*)N?)?+Q&mc3g`cj+l@gWPIVPO!<=e zQmQNKtS`8M1>BJ@<$d5kDcC%_ahE(FY0w0E9P-F841*E?Pi6Cp1pnGE`j8Qs;5Pv8 z<)elN{0H|Vy=nLC`*r!KBe403D-j>fC8xCIBc}&maLuDnvd%h5$=+8_!Um+XicA}&qHeT3on$JLyl122 z)uW;(gJ~COlAeu{2^w}AksWN<2u&t~#`OvDNbqq^Y=mOigHwp)px{n|RM?YgjlPDH zb;B#`Jo;_YBf+O>J1xen-rCa8cR^a1#j9b)gNMDXM-*8%de{s%7gtA%LU2V9ry%Si zyTo)N@@_zFOz(shCZ{RH4mGBAIUQc#d+cM9W1cv1=E-5Q?{=+Cj#l*A9}uscvJ$0n z!oE;Wp32Nb|Ck1U0$Y@nT|34%_Lw>q@??DWw+s0X40DwpqotZKsJ@$85rNA3Ig-q>7EcN*&u?Yn~W0=bPCi`=9IDyPo;|K_v{m zX{mUjG+Xe)3l=ziUEJ}gq##D&(m(|ZQF8|(GrDN@+5|F&S+Z>(nJ;-victyuWWro<6n)}akjIHhOS!2r*efW zST4yG60s!66`B#tqnX@cFF~i|67_iJ@2u${w;1J%hDCCZ1$cb7T=BsqGVn-K{Lq%( bDp@`Q=C;}L@Dp2m00000NkvXXu0mjfNbdo> literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_folder_open_white_48.png b/app/src/main/res/drawable-mdpi/baseline_folder_open_white_48.png new file mode 100644 index 
0000000000000000000000000000000000000000..4c6f6a4709362775ae2957f1d2aa6033aefd067a GIT binary patch literal 198 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DUQZXtkP61P*BIHD0!3OMMq5iv zxtAC1u|n76mj4Gelg&-5>^Tmb%H6AFJGZ@2p0BU-giPnWiK_0ao!yUd`V<`M_9mdKI;Vst00A0FBme*a literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_highlight_white_48.png b/app/src/main/res/drawable-mdpi/baseline_highlight_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..cd7e95e442394f942d575063abed384f81075f70 GIT binary patch literal 203 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DNuDl_Ar*{o&o2~ZG8ABY$idRY z^rG8on}EwHhX9sGUJfya9S84t-AfI4vT^a`|K7J^LUTk7EDvXEt5(mlqje;H5w+*I-G&;6D+>WxfX8VizN vopt~IdEN&Y68LOqccp~&2bV@d*DCWG$AG-wyT058x{krq)z4*}Q$iB}j>u96 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_panorama_horizontal_white_48.png b/app/src/main/res/drawable-mdpi/baseline_panorama_horizontal_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..54748cf385a40cf0870e8d722c31d78fafbd783a GIT binary patch literal 391 zcmV;20eJq2P)iZ2W;*vTb|GHDxztU-7f8bB(X=@p*$#C=|tMKkU<6 zfglZ(H{J<9os7E$@WMh(Q<_m&Ycm}81Mnr`0Px-k3-nZ9$sk_xsi?W0CR*WwFJk(d zXdq7Pp5u1fV4Ve)+vJ=FzKJX8K-wi8sA;tIo=Ul^=BSfoAox!e-ORJibsr_>jiZ(t zruzSTNeALmS}UClGR{1+jW<|lZPim*@gPzf0!a%4VIT~IfiRHU1B$7nt`0i?A2_rA z51eMoq#>Z1p_Vx6jl{fj!&dWjQ#IKEbNklf_5wXlAW#of$PIdc(lZ2t5CY(qRDe+8Z)k8EI8CRB=9>rj znR9R?d1X`>u_e})5f#xxI7B_%E3P-V=H zSUZMPMU;SD3dAGFL=R-o4JdnW4=8)Dlb4m80j7TAW}D8;od5s;07*qoM6N<$g5g4X A>;M1& literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_portrait_white_48.png b/app/src/main/res/drawable-mdpi/baseline_portrait_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..10e3f08b8a3bb4be84bfba1ddb6080b5e0bbde7c GIT binary patch literal 266 zcmV+l0rmcgP)ST5CBmMI(j5#fF=uM8N>jT>9dIC1?ZT9Hgy!JsM1hgnkZ6)FcJ1gLHet0pJf00 z|D#Az0z-ynU@HTb%zgZbFQY?&B?NX^&<7Fp%Yql1UvO=|3jSX}PDMp7A|S!M1JRI# 
z7nqM&H-QBdvvexL3d)(F+?hc7io_rJ2Vp9nX!_YX7jWRjg_bK1;6}@tV;})B^+K(_ zQxkg%Na>sfz9aP$G^;&C<0qI_1(ToP3p5Y*B@l0Z#oKr30YrR+Qj|?R0R~kuam9oY Q@c;k-07*qoM6N<$g5g+Z}t2(8wlIz3c@nB&1TImwz2xH=J1`**>}xv`yBi^^GLri zY}ha+vw=h0rI^?J<1cS0<}QcX$Q0=sQrJlqv6QomWa$i&DBuG=K2SiCbPTJh0sK|7 zR(z4kIXdGUnbJPSP=@Otr`gPWa>->rn>o!tTxE=r*jUUbT-O*UQ5eTnT%TAX%D@c1 z;o2`I;@FGpE3?GL1U}%}D>1MS*Lx<4i&QFbT_aBO6!NJ7poV->f@g6JS2?K?jib2! zF9Y+0KB#_~(kENssufCt#JHUAa!53;d6kN{R;o9T6lW@1|v{!W&6 z`%SJOPC{6L<)o(ZJSN&>;x#>qWx3yrHbK)-ln}0jQJPG!Uo>k}l;gS)HY)PaQ9)kV zsJMZvf>d!ak@w!SRB^y>*ePc5)qkE6#~%N2tyto{a3*rif8qG9%z5OJ!@Sm&S=*_V zsZ0MNopDx|z7N)Pu(s5&wjUI)&Q!+=t&UwTzlVe6_g`!Iy`$xK*sx*!K@&6!SEh73 R2+IHf002ovPDHLkV1kfT{NMlp literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_rotate_left_white_48.png b/app/src/main/res/drawable-mdpi/baseline_rotate_left_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..d71ca9b0f2e30f8a05ba093a0934c268d96194da GIT binary patch literal 481 zcmV<70UrK|P)gh1 zLs4QrMBF?mDl7+ph?|Fqn+JuDbr`dYskA3m;ULbM|Id9UlM_bsTM71v{?Uxzn3EKS zy93aNjXZ>3E>aR|7y629X7dU#m&lbr;+aH(`gs<8Sent zN1{6_h~o(6h396o2=j;xpRl8d$C&wYBZ=pj(Y|rbaLg-GzLMZ zVF_kag`#kkTsVj6D*2-e=A5EIZOm0ggKW%uMS~2?3q^zKn9Kf~+(q(7bId8Zu&hGI zbP;BzTxf&2N3uMTzzs|zxsV(j6+sw=c}1e!n2&i#s@yE>%shEkcn81%lH^1j$1raw zkRPT1yNLH{eiH`9$`|oWChpe;<|_c{>~=qyuC8|QoM~k0A0*O^tL{hNfo+(V9AJ=S z=)*?thVX4+8WMu;WGq5fK3ij{erof+l$fm&%hi&Ib+@K}A&IqP<PRuDn*Hban29&qp`XMuDoO!VU@reSgeN|0%MbR@Pn0l(QzW5w z#&PiyC1g|6C3+0xXNd>=W;S7o1(le^*CG%2z$z+BJP@W6vpFtGIKUtpN2KE2|002ovPDHLkV1j2W)Ykw2 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_shutter_speed_white_48.png b/app/src/main/res/drawable-mdpi/baseline_shutter_speed_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..83cdc61c0e879af23bad9012808a260c7328e571 GIT binary patch literal 835 zcmV-J1HAl+P)^D?3N4YIK=5jTz=%CC3<>$uQp0Ds`GYjp^w;g{Mv1 zU802@>SnLu4GX86X*{iDfi%OG3TGRgnCL()N}e$=ahSb*FEQHL3Ol4Rzg9R~kHoLN 
zuF`^uzkyD3C3-j$(B%(l#8rUD3{Mh0+^F1*iC*pibXhtv++%=SQlFU>{*m~%r4v8Z zuH=u26_s@Pcj6xkQ>Gpl0*+6N^_zz>W2LiH42Pn8}`{0Q(sYS9iDQrs6j zZ)D0C=QZWJ5<>uOsl}^h3ACwnSdAJ7UM)-D)y7rySFPqRtYgO%a6B37k$D){2+hU|+N9U%hQ{uAV7;3>&X|4NLQkc2(8ZaTR~|9fJ(=oylw+uQ4j`kG_u7 ztYh@e2VqLrYUB!2eOw2SHYb}h9|fbW;Qa0*REsCv=6ov{nXh~7-| literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/baseline_text_fields_red_48.png b/app/src/main/res/drawable-mdpi/baseline_text_fields_red_48.png new file mode 100644 index 0000000000000000000000000000000000000000..168c28d7ad20c6f4527ed9927b05fc1267cf8c42 GIT binary patch literal 423 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA1|-9oezpTC#^NA%Cx&(BWL^R}Ea{HEjtmSN z`?>!lvI6;>1s;*b3=DjSL74G){)!Z!phSslL`iUdT1k0gQ7S`0VrE{6US4X6f{C7i zo^j8!=>b4PB|KdmLo%G-_8rV*b>um^ZSnnjSxt?HjWe7DB-pHPHnUn~?#Z8@8aXZX z=h^GeKmQdsy58UGdm_f(%uKQP?~NNRXTByU3qu6&{7Zg{L=U?gv!#E=$eRhBWUnq( zp8xu=o6^sO{JbZ{wKqIerr*d_IiLJ^VocKZi8lSc?nRw<-9Fiv$(>X!lRo+RMg^4> j?9n;BN$2$G%6amNM@(uzoSnk~3`Yh}S3j3^P6fe86CCvq5QbAW383f~9Z3DW!eu2#JboFyt=akR{0P&O@ AdH?_b literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/dro_icon.png b/app/src/main/res/drawable-mdpi/dro_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..45ec59abb07ead9efcff7007aaeae27a8e632183 GIT binary patch literal 597 zcmV-b0;>IqP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmasU7lasggtTjKx#0qRLaK~z{r?UO%9 zl~EXnzt5zoL5hfy3=*Lsk|=1&$uXN6MJI7{a0rJQg#JJcMFd5YlhEuK2$p|Vhmaf^ z3{6E)0?80DdwbqVYq{{%uIzuFn5kJh}W@gOekCqCAQ&kTn& z&U}x{z#EPDOc!p4v=)EH4^9#WRO> zEBcq=UsXCtEo`LE@EN|s#n733#?zWf2Y8^~zm|=VnE5Z_+56bq0^Y}mxZ(l4@R_=> zxg2Ng7w+IEe2%9y;xj|&j4JZ%NsR2J`ja@0dmf4{yoZha?SF@tq>EO8?8M=hE4zdj j@rdF~CX>ljQ53%cVQdnZ>y;Mw00000NkvXXu0mjfp9mKF literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/expo_icon.png b/app/src/main/res/drawable-mdpi/expo_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..1ac5d28f3bbba70c884dbd6f4b44de60a49c18bd GIT binary patch literal 582 
zcmV-M0=fN(P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}90ozGLK~z{r?UYSx zQ$Z9!-zyp_C15R*jifCY+z5ga>Q>Z+;6@Scvbrd?3pb(*sbKv9Zu$#Sg5W}s5JX+L z5VA3WNKpy2P=YCig2jGFG|@iKow<{|VCY(K4jk^xy*HD2Gsz1)9*@WK-xF#Kq9{5< zcz_2@!cxS;cpjgGVfcl z4PU9(@HGC$y!OrGp61V@@Ci$H7`@iM6t3eAzQHmIFQon#6x;^xYN`XdoGnEcQ5Jlv zdAEVvI$yNYXcQOhFv?0zVC>eJ&{Qy>F;13-Db>{8@P&tT9*0;6l$7^pMV^15#?%fT9b9mbvT&2{yUoLdjgNy^)VdBAClk$ zR|%)^J)U5GL-H|x;b**#HzfhPn6bkSV;UP8yRc~A-vILjF5?m=aRXOS^6rVIwY>wm zye*l{%X}CsxQRzhQyh}VQE20L{D${%7KO{&H)E$J?ze$7)=kx3X$zY;sV#>1dqDon zS1|q_sNx8wxQ^$Npik=ZRC0`R?3b`ay-w_b-XX#TOru=qOZwJ`9*@W4`Bw;nKfGbe U#A|WUMgRZ+07*qoM6N<$g7548)Bpeg literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/exposure_locked.png b/app/src/main/res/drawable-mdpi/exposure_locked.png new file mode 100644 index 0000000000000000000000000000000000000000..5783ac1c438a84a337452dd64f2f7c0c14aa96db GIT binary patch literal 2053 zcmV+g2>SPlP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D2bxJlK~#8N?VW#2 zQ&$wnfwr`rC<85O9k`ZO%0QW!Kc<+)?H_;QkAKw7InhnC5T^-oIyKoJOVrH}|1rT# zV+>g|o6~4EHS>?TIAtm>Q9x)#s3=m93L+wHc`e;L&FO+t+SlIq`d;b%Bu&pb<-ON? 
zzVF`q?tQP91PX;h5syTCr?A{xvZS~F@#9hG3PN&b=F6P(=Tp#)9RVzh7w=Ahk8OqjqZv$HKqy}m@defu>G2SrE$*fYPmzoq4Euq7|R{Z*@s z^08x2IO^(Vc%DBe`i`>HRIfTO?_YOk=UU^bQ};0(5TOK68Vn6n&Ya2d?_bgaG!+;B z#5N%xyE{5wX<|r$CMsF0J(*om@fn7O@VWql6BP(9g87pVb|3+g@LhD^tZ!Jc;z{4_ z+n@5Cd;s>@cjwMJI0hUG!-6t6`NJWJ8t|~Rbeh9#zRB2ZDhvsVLaTMEb8}};K6tPO zLq7Oyf&wL9k~QF7QIXzZvD}DO@&Py&9J68NN&|*?h0B@mM|1%aWwPr`YpWXFkR**B zoscqjZf8ni;RE({2VYGPgU`6)JEqrLi7s!1lYBtbCdj&J)A^TGRZGwf3fqagl{(#@ z3XNu0=H9)xG5iWGd-hCc?DqZrj~;!4`d%dzV^-K%RiEj8|T4LK;SzJ zIGrcZjfilYoxzAA05lbjyLa=^4G23B^Q09O%^$aV^{*HTn~7VuUP)cF=mXdnh5`cL zrGEbW@6ZhklH>!T2;lDO`jH*sqBj&6y^dYCZrQ}r(yJJX8144$|G>UrpBNefd>2_t zN-?^eAx%CIbph(;&6~_#0=|cCK-778S2H$jxQH$>urF0^?qBEzgz4yb3r$W8mwagF z{Vxm!CF%kwv$DQs+udQ03+=wUv-1mddBb)8{^#=X;~8{^;B~n;Uy$%|=KG1d0PIyw z0h$3(n@m-qCe0!t_wwatl}>jB-H72c`9SphfTW~Mv;yL`+y6$BcbF$nuA>dYx?#chneQj+0(d>1Vds2QMq?+Mf}t`P z!ajyj$p@k?fWvGa{?1lXQZHJjBwz+y4emK znB%fq_p<|@T!+P?33b;?>cA~awaH{>$0%l{UcXDbV+YZE((CY+9P8%IxruW5rM{Y) z%%Fp<7zUoYcw^eM7O&gAK!4IA^<#JGvIXIWo$MjhDJ0Bt=5&KQsswxpE?k{ z^Z|;A6Mv7FiKuR@vRGFFw}T@dT#aGZ1oUJ zo-`>>HhT07XJg|uA=a4)2I*rgF(ZI1EzPUc>wj_7*6z%|b?Xs^U*Yb|nOW+oQ-5$- zt*brHo<(JnFfIguUpQ(uZYWgkX4o(t9nEM;mYC>-M>3KZK#`H*TWd2C6ywKFMpLp#UI2M| z`YbfVMxK^78%@a~c>$b_jqh6)F4UpR8Q2E4MOU&&UI4eF(NDo2+yunlaBu4IXc zPS|O&EU;HpEJHUW@VBrHx{@X)=7Ip6jQiP9Tl-Cj$++)bR_hu%87C0F>1d;|*3;8- zgB>IIejuTkFd>IdN5?5~F&jFPp#q2u6+mRD03t&L5E&|f$WQ@9h6*4uQ~;5o0*DM1 zKxBvofc13Hj63)!JFx(2lc@*IxI>+jLtJ(~nxH0pk3SL6 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/exposure_unlocked.png b/app/src/main/res/drawable-mdpi/exposure_unlocked.png new file mode 100644 index 0000000000000000000000000000000000000000..674a21681075c2dd971333bfd490c5e5ca4603a6 GIT binary patch literal 1534 zcmVPx#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D1(QicK~#8N?VLTV z6-5-s?}bE2#1D)nlJKEKV`J2cM6}R`1TmpAHpGHhSXpTeA=)diBHBC~LkPseL}@?^ ziCTz83!+g(3?^DA8iWwJ{?E*x%X_e&^RYAY_WY86W_NaW=IorE*?D{RzKNnJijoDh 
z+3Zs&HYeGfVahanVE9HPJD_KvXCc{N$}i9-&4}{47PN)cwPMbS$3CWmIN1x>xE6W?x-15JB6S>q*0TCqOzAl6TUx(qELULP zF6f-GG%Ql*b8My(TkHnRu7$867`sqFacuuL9P5bU@cx@atF<|-sh(q+>{8z{dU z`VDGBY%tFQyX#D2=L;;CjRO{}IgR`QZRaVy!MR(Z^HNa@3oP$SYv&6ro8AGiL8gi; z#+rIAKo9T~XHdsx*42dAa-ZGoO{dNmSUw8}$duB=CFn60_eSbsolU&Sv=5y-U*I`u z9f0;%K{B;8@j8nK19iS?tFGmDrd_G*d>%6}YYwxMJK#xcMlIr;*7r7jL7l6i@1=nN z)ZY0#ZeZ3NW-rU{vt|y3yiRUn0n6me+ztln{13@NyXQsiI=={T101pUK`oE6XmC*X z!?p=$TBn^VSgdjc3=Z1=)|yd?1XZGfHeBDdcA9poV6jREq?m&4XIjM%Ok?sR(>_F- zc2YRt9;Q`XU>cLYZM1eij?ZD1fmy|beFp(Ez#%&kbfWiN&+`n-n#1hmQ&1^5O*{6U zw=k;_bwD+PEj33sKVb$qgydzrmxiUILr1{y$ahQQMMi*m64tVU+k0}`VSNQ^olG3tQCr~?wC4oFOK4w%hm zo1oXAlhB`(Y)hWRqsIj2-c$nk^+b;A5sPe8eCRof1R3on zA#;O35QU#WL_|hZR7O-(ASx>2A}S*C1NaFDen2E5vLX^u5m6aYQ5k`#tc-}LKt>=c z5QtZiE2VeP0q%CDB8y>W-_Ccp+`a?2#J>u5i@jELUuCY{0G3+SW0i?q0FGLX$5WMi zcL4~snulN68(Tomkyf)kuU+jUwgs%UiZMS)k}D=8$qzd~$t$fQP?=jU)Z5Gkur(@P zzCS!iYA+ibfUoH%EPoouXOiTJ-cZ2a(1-Q5RP9;}1uPAHN|MZ~4_-7kvK!!NsHZBE z@ku>7NwVO02p}}nt5$8$A7Bg^z(cG0wLf5OXnmF0G#uNKlYRhWz-k^kYaG0&AHWtF`ioqRi!^{Y^yUfd@89&x@eU;MCF=B4O;oje0#Rz(Fj7D`vL9 cR;K)Eo(6vW0kTC==>Px#07*qoM6N<$f?scpGXMYp literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/flash_on.png b/app/src/main/res/drawable-mdpi/flash_on.png new file mode 100644 index 0000000000000000000000000000000000000000..0689eb27699eb4b1090a317f550a5f05b1837648 GIT binary patch literal 243 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DU7jwEArXh)UfamqY#`F|aIvTR zc@{~0wRKNq_ZAk4zGD9()m$ z$i$c2M)+xlrrTmd82wha3c5P z4sq*5w=(72iyIuyuiK+4r*mPGb?GFJ2Y$a*7@J*~zTZ-3e4!$A;-;Or!@X}i8QeNq pingC;ztGKd!7krn^(6bVocfp76&1+TXajx0;OXk;vd$@?2>@>=VcP%z literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/focus_mode_auto.png b/app/src/main/res/drawable-mdpi/focus_mode_auto.png new file mode 100644 index 0000000000000000000000000000000000000000..d4a27da69de6ebd36017e13a6759a03874e8ea36 GIT binary patch literal 1084 zcmeAS@N?(olHy`uVBq!ia0vp^4Is?H1|$#LC7uRSjKx9jP7LeL$-D$|SkfJR9T^xl 
z_H+M9WCij$3p^r=85sC>fiR=!ezQvq49pijT^vIy;@;jqyV^HU;`qnvR+n|#MJhGf ze0U5D6!n)*G5RF-d^6XKW=yJ&AC;JSE1w4E1iU>MQdHu`wTXFvP zH=D2RO`l(Ue&e-|f3IBL{B3Xc{Mhuhf8Sic%nY=g5U^Tzy?7fNJNtJHUA<{-yjpSJ zzI}_+6kC1y`MJo>1&)&tA%YTM&) zt~;FhR3GODmtXxt{pgnc^%A$0=1$rB@!O0qKhvl3iFljZY5x-4FS=iZxiRNs>_+$7f0p%p zT*kPnYEs#S@Z64H^L4{k7oXf+(i_9ixN%e6#p0X$t$0-QcXKY-yTL!xePhVuiH|Za zuGt*+TKBV=+lh@#Hy*T~i{*Pgu zOV^TC9%DZLtmFBnsZsS)w?8ae&9E?ezvZ)=S_hN!+X7gi^(* z?JJdTs3!WT^D&>cRo7q6DWLuT`<}?O?ZwVkhK$Rq{Nluf9aKwOFUww*+%=yOX_8%YiKTO&#-g`J=ds0IsQSI9*Y85Yn!qiG z%kI0#{?#uNy>LZ4P4o0a0idJSW<4`VIR9t27oYHg*z~v7TiwHJb5F;8d!gm&&%k@Z zZE4xw7)#H<(xN~{D;+b3b`fJOmz)Qy?n%Ad#W}$wv|)SMGtq#5iYHcF`5&|~>VnLB zGoSkIxh!(u^!b1Y}NC&%tLnq zH%Xf-E_|EvfAUT@RZGS#-)}P<(fj{u-ape9vz~7{dn<0KvzNK&R;D%KH%`w_?cZYF zU;15N&5W@p_x=7UuUI;!KKMLO`|^>U>qV-4Z0>#*us*&)eqK)sf5kqrv+pLp+31&g z^WD|+VLo9u#P6?n+!Fs}i_84%si_MOsqLRD{^!DTw)6Z~%J)tB`hSt#w+rlqG7wOH dfvc@;JwxDxU0>hi-r51u;OXk;vd$@?2>^D0=P>{P literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/focus_mode_continuous_picture.png b/app/src/main/res/drawable-mdpi/focus_mode_continuous_picture.png new file mode 100644 index 0000000000000000000000000000000000000000..2979dafd62c8c915fc0373b220d1e0dc5f9514d1 GIT binary patch literal 936 zcmV;Z16TZsP)16$3PNzn!6C3N zxxt1A1K<5I>%PzP-1*e+d3f%Dd%ZL?G?XVyWrl&u3*!fs6~-?rCyd`zMi_snd@#DG zY%m;^3r2*>1S3Y}fssIEfssh%fYIANd%R|Yhs-gdL4=XsHY;4wAi?-|JKSYdg8<`y zby#LxMZ*Yjz)dPD7DkvY8Y&h>m_u%;SQw$2R8%C4L=uxsah=<=Xw%O+H5CaXfgEO7 zr9&^zsi{a9F^ZwiA|FV$PDMq+h)^O%c)}4$nkp8Cqn?;%mn8El7DgBK!5Evw9nw&- zF#b?qR9Pc#i;A+wJf_qze$$y)BW^+2qQ*PGlp4k_IulhkiTcF2vcxS;m{P;|K`AiC zE>X+M4s||q%9I)gN`+~T@H&htD=g5>lp3a#SRv}3vcgNeC1s8g_VIR<4H_KdJx~r< zBXf6 z;cX~KwD2A&6fL~_$`N<)o+%VB@UALHT*G^%P`t&vq#QAYx2I6N$D34+xQKV4P<+8V zuN-j>@2f)b74MvK#Cg0g3dI55MdgS|y!Q&l9^RC4#3j793dJkDYswK<@m?qt&+zUj zN8HD2DHMazd3h-mF3~!7<*9a>6X$H|h$#3w*|VtSoSus72*~2YAOclnr+9_8C#;Si*a$tZZx$_k@&@Q#>P zDu5|9j8h+}D?2O`wab`N;3rdR7$WjZj#iGF>ahr^(54xF(#XL!NnO0ADree_~$q`Q&Q6i#DMWVtw$v&`1 
zT`|O&ibRd)^wMFK895{|6^R<_^wXxrZLTxLq(m}Pk*Lrll&M(U;E*t;V$ooWFs5Qr z;U))!Fcpn)mg(@nOoM16$3PNzn!6C3N zxxt1A1K<5I>%PzP-1*e+d3f%Dd%ZL?G?XVyWrl&u3*!fs6~-?rCyd`zMi_snd@#DG zY%m;^3r2*>1S3Y}fssIEfssh%fYIANd%R|Yhs-gdL4=XsHY;4wAi?-|JKSYdg8<`y zby#LxMZ*Yjz)dPD7DkvY8Y&h>m_u%;SQw$2R8%C4L=uxsah=<=Xw%O+H5CaXfgEO7 zr9&^zsi{a9F^ZwiA|FV$PDMq+h)^O%c)}4$nkp8Cqn?;%mn8El7DgBK!5Evw9nw&- zF#b?qR9Pc#i;A+wJf_qze$$y)BW^+2qQ*PGlp4k_IulhkiTcF2vcxS;m{P;|K`AiC zE>X+M4s||q%9I)gN`+~T@H&htD=g5>lp3a#SRv}3vcgNeC1s8g_VIR<4H_KdJx~r< zBXf6 z;cX~KwD2A&6fL~_$`N<)o+%VB@UALHT*G^%P`t&vq#QAYx2I6N$D34+xQKV4P<+8V zuN-j>@2f)b74MvK#Cg0g3dI55MdgS|y!Q&l9^RC4#3j793dJkDYswK<@m?qt&+zUj zN8HD2DHMazd3h-mF3~!7<*9a>6X$H|h$#3w*|VtSoSus72*~2YAOclnr+9_8C#;Si*a$tZZx$_k@&@Q#>P zDu5|9j8h+}D?2O`wab`N;3rdR7$WjZj#iGF>ahr^(54xF(#XL!NnO0ADree_~$q`Q&Q6i#DMWVtw$v&`1 zT`|O&ibRd)^wMFK895{|6^R<_^wXxrZLTxLq(m}Pk*Lrll&M(U;E*t;V$ooWFs5Qr z;U))!Fcpn)mg(@nOoM9)Y@ohtwXDps8XWFR#OwZ zGNQ2sl^0c2sfbvjVk?4LqRM+O{S)Rp=ezfO=YGH3bMErU`G$?8*hMh_03c~+Yw0S; zDL)7X3gV!7z^EVz#k$%c0Zs2A>jEKi+rrTT0BFq@KM4>O=(Bfiy<-6YsW(3;G;+i+ z0sw$aJ4*|9+^sEkVz!3|1a$YWT)G`e$k!kIa=73SU1;E-26C~lMiiBm1vFFlPoDKI8Ft2}KiF-KWYrIy%j)IB@Yp82tfO_am0;6rA(vhIX@3 z$FDf2C1;a@CBmQ%2>K98B;?ZUgD=dFnlKnGd4tXxUF7|i`I3k>Gu%@qH1RZjGb(2@ za|8%enufl7K?BjyTr*!Z*2`1LK$?|Z+p0qTDEAaRSfS(R?CgeB!At!dzQ`#1eEK#>k1-Qpl^e$7;W z+t1b7@v*Qy0*0250rIo)d8-amk2W7TemnhIe(K>BdIVg49nRI_UgiFr;oCqL+O;y; zYL`=d#BUuOo1%8hSE5%}!cH>pV());6ODA#Y@iowL#dulBLa@ z*|_wh)>V2)o-g~9*?D#Ly4q>z7;*=VGC`>43yEcoEhuZY&!TH-HNiBN!_F$K?+&ILg`S_g=jzPzUecUhTK?;ixWch9`|p3s0LtU zhmR~@G5Dfhz-L{jSO{w33q~s6Js8CCXyR)}!J5?}`@3Ra)xK&|H`dB|;=O)GpaOUC z_cyO6-)p41c-9n62pPW6$Ro#3)<(M-+^q+k`3tQ}G zM^v)3q)((_D5MxZTQ~VUz|j7#xVtn6UsNZUi(c(pK{&wfDW=NBR)bf8|2wr9Ck=U! 
zkWLi$PL*1FSP!xiq+19|~4^2n-Q87JoS^HBuJe!r2wWtCMcZ zx1$@~Ksy5cp5F`0n3HV=GlNmMQ zjT#09K}bg?X6E*E6w*_MzpT?$2iIGV5MfwWAS*f@X{=euzWUE1pDE4uC;v6*)tv&vD+z5){z(^qB@WTyq~r$%D@vgAxF>;41tM z?wzT|FInu!UBP}R@C%V!$hw)!ln-y=8Xw?YFJ*FPMTh1xubkN~jOQ?8+lh_RkvFT# z(~qq8YKNB|?|L;wJ@M}_<0~8dxIW#b=O|jJ*@l$^ed?)xb3xCPIOXMQXM~Pa!)IA-4Y-QKP6s2pRb4Po(3O^w6WVRtXob&M+?5uLcrr9V(H7d z2;2@|r%>d7T5+(LG95L2%I_L&iuIkMWOi9mHH`S8N47URKoc@Cx`>`lZjH(DP~3Vg z*LP@!qTTnuM#<{;WwBe#7V7J>{~b0vfD%X2>M~}u3}*C{;O_zKtZrB~A^p?-4<(6y ALI3~& literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/focus_mode_fixed.png b/app/src/main/res/drawable-mdpi/focus_mode_fixed.png new file mode 100644 index 0000000000000000000000000000000000000000..5d03fd580f60a4b160b8ba412007a6a0c7bf50d8 GIT binary patch literal 686 zcmeAS@N?(olHy`uVBq!ia0vp^4Is?H1|$#LC7uRSjKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sC>gD~UqA6Lv67?_+rT^vIy;@;lg+jS*CgyBJ_LWfeu>WDSH z37N_fS-F}wZvPaTCKh(c?r2%oLVqPLrKZ;U9UHp2y6qo#?leC0qUW7YQEJ|&+5!tt zHOr$<3X0Wd_5kg|1{(ga^V+HL_~Vb5C8y8cI>onYRn?Sf&t2{sPh4k`t^Hf7RDMZn zQRn0m>qTq3SKbN9*?qO<%BiIx>m=NR^iFP_a{Go?<+ZC*ZbyWaFP?gX!96(dam1<3 zMOW`;sG2#5+by}X^}D9{;vJp|G3oMdyY8(yTRH2~_nAMoDZD?H@;2>>Wnj7brQWpM zb4%X!ces94tJHs`XEjgnDsRB~ZDKp7mqmKZ?7uwa=i#zjOQ*M0aVlJ7U|!D1=Eux4 zhfTtQ*Px&_tKss;$!}|pT)VIIc6V2BVNBnyHOl+mq<&}Qd+n!Xs=iHsH-AAydGN|b zJRgM$6rZkpx@z`S0qF-7r(#csny*$gJ8*o;?rE#@!hj|;4;x{hZ_zT`R>H(G2l`RKf$V zoQCOw@w1Mu{B`fo``clm_pkrIqT{!>J0rS$v8eTD+5Y3{%5ytztY7u&?fPEXj$L21 qAE_(9GOx8-&5bQWfnpaVdh3`RTm;h+ZI7M>squ96b6Mw<&;$Ta&mQIg literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/focus_mode_infinity.png b/app/src/main/res/drawable-mdpi/focus_mode_infinity.png new file mode 100644 index 0000000000000000000000000000000000000000..3068e51e7070c3506218e3df76df3759dda4dfef GIT binary patch literal 1092 zcmV-K1iSl*P)0LS4k325ClOG1VL~R^gKauun;T+!NMO7f?y#C78ZgaSXc-a78VwQgFghp z!a)!OL9j5e5DWwZpL_PX=bqb{e$~v>Jp!q((< z6j@#gyOj}F_?S#c>nO@>2pj7cs)Tuz6bWf2{NZygBf6N~<0um9_yqYB^??V%nu-jE 
z2Y&JRDgzvbnDrC`UBdeMz&k-x#Y7))`62^SJY(Y1RxCKI2%1ZxN?1j)aYa!EWSC*% z(NJtO3CoKCK?|J?d6t;CRFnV?8-l)x2|>3y6DqivEGZ}<9tj$W9|YBP7BuiMnUPgu zTod#n-U!O;47kR}o=pOGbu$tnklF7smY8E zK1mI66cG;3B;|3b{b=Bk#EoKNKoW11br?lf#N*OX0$j2oo+()!M~W^RqI%#$36bTI zs8+NU9T^5}In3vgvQ9*qCwvZb>5x)fq`2gPcXn-=(&0j9LY59ww(MFlpsG{gP@+zo z9wYknX;C4izkn2Fn)K+?=NnC`LW0MRh@c4)+EIDaKDax$KG!wlQe*R2@IZ`fQLAjDK#z#qgo4Q$r$7IG z7J4If+TR~DW*R>~{oZrloSAct)6eaDfAj84ww}+i|0a z_=Dd&_`)Bq)_C_o|A+Ft@jcGBIm7<>`O{S|n-BOOK3^d5{e#J>b8BwAt^XqN z?!f#HYJASW{k!j;sTY{?v?5S&l|#Pb@4_dMytkh}^xM-M`a$(@$X(?<7||BGIDYXD?i$rqeiCn+e}6FJTa>KZy{auC z=NNm}8HVQtfg#m-H_933g*Y?6h?u=NDrncI#4m#TV)VX!l#SCBS8rXWvQJ0+_l1om zKFSikQZJh`rtA|9RpZUwBbm!vWVTA8sc>(Pr~LG!&)kc*ol^V#!`4iFsdm0&@!}ex zy|q_b^=`1Ov`TwjqRJS$yk+~MHO>1M)l6D-e$KUzqKQd!8je^nb`~-TJ!Vl^HuX2- zoMP>Z-G#h$O}jSQ@$Wt&ze4)g{l|rSf<%rwpW37NW7p9iit`S{1jJu`{wF{2lS)bV zM~;8ZI~VL*b)Nfiriq*Me8$-yl4~++bosA@e~qlopJFDoAoIRV{Ha%;rxk6ql;Y>n zPxjT^6Spq)xUOBt{f?(@8On^mt~qU+(BPK**jq8cO2A82nG)qC+*@|TDl>+V(2+!t&19r?VXdG!v)_H(PU zqOSzrjp|q?t-O5u0_BP=8eR3LuP*P*b^qA@k=;mPIZKUmgL~hJ6`yl=UaKvd#XHxz zApgiG>%7^5%t=K7O1Ll22VC1XG`%)MUI}kuCYNOrX?G_xdh>Kh7Hs zuQ@*$`#Wo1Tl`AOG2oSxc=)apvx3BvEa$RvZe9AaO?XBAhFFJ>a#>bR_xD6MZM9v$ z?0i#^x5MinCM8|KKu7T=BsMaY8O;};nRx8?JvXK={o8h4s|c)I$z JtaD0e0svQF6B_^k literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/focus_mode_manual.png b/app/src/main/res/drawable-mdpi/focus_mode_manual.png new file mode 100644 index 0000000000000000000000000000000000000000..4efce75eb1dfe3f9c66aadfdd2874e24356fa82e GIT binary patch literal 1193 zcmeAS@N?(olHy`uVBq!ia0vp^4Is?H1|$#LC7uRSjKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sEXfH33KJw={CHFlmZjv*CsZ*NDh2nm%r{;|003{S|EMH9GK zIXP847fpYm`0?P1bmKjjqWgbca(@3)>5I~jh{|136BQT58ag!z=r3prDpSl)l$8A# zG_US|z1{J4(xXQoK781?!P2m5?(r#N8fn|h?v%L}hOGabeWNG%cdM6w%i49lul`i; z?J2AZo*xtwyk)bu;o;d|T;6Wqel8?8WaF9D(_h?}7Phvt@`isqP)%9O_hr}Q@-LZg ztyP_~=$iDsB`(LmwQ31}>Y2ap{kso$SokZ}ZT8B45TsxJI`w?1+!vYPIrFLvFBji8 
zUCCVXSoOSY@bbuoDX+WLcD&g6KIjF*a+~zE-|qC9&fDBqJ&&hMk2`vWTv*NeAATR# zK5BTReKfsGP?w2cQ`x)xg}RxgTh8a0EvIH*3%Xk^ay@$Or)^bwzYffv_Tt9$g@dl+`xikBL5-MA}1dv|qQz`yt<7Z~e;Zxlpri7Jm>&b>+8HvQ(&?dSh8>$dV4 zEW07uWm)#|dTWwt=_L25^LV2z^Q+`CR@QJye2lqr(nKKo&)q9dZ=Ch{x2RY(=v{m7 zb-!ip9MN5ecE!SY}cyJ zS*;g*nyv4@?B}b3ZV~R=t=C;A-kWo`YsS&Iqp#lZ?)I8iJvDmE`+Ef|o|LXnDxKCI ztGd^Q{l%3u`%vX2CQLKRZN+mx%n^*^~>R*4w z&iJ&-yp|PJXZ|DWotmQT%7qqcxAjkM?(6?wZ{7mrLa3y0x(^eV6xXKMXiIx9(5> zhMAu$R~NQ@3)N-PmKS;%aaHL<==w=hMBEuRpDw=07TfWB>hrd38m^p;`Qig=B`BHM1g`aXHTn|CFP!Nztc*SBr|XU}5~ zn{4Hv?|=W*vc_O3&0Dpf)AyT6Wq9^8?ApOT)vsmkid{$LWe*gey4A~6D%+rWpjdd$ z$2qC>TpQD0z1_3+hoVR^Uq*|?tEXDeMV>nwK2JYy_|4~kl*YZJ|7w1p#E<#@uKsb? z>$~|7hNgacS+_b?@`sQTlLed7H#{k85#ZC>G_;n*X*tYO8>6jS-bW3 m^jF8N1PNtRpuEG83m58@CNFllZt|N0q~6oj&t;ucLK6VGIVo-c literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_burst_mode_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_burst_mode_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e29329892fef34ec7b2649f2096727f727ce1259 GIT binary patch literal 239 zcmVH1v3UKn>|3CjvA^_@eC)xst@uXWpyai-};by_81)~;>S}uK}s*&8LP12miBx z#Tfsu1@gB4XF{pnS5s=ilmFsyQSSfi{|litANc;?qQruq|IG=uQ1t$PBj18lq8dR- pBc`aJyx~RrrsFUHqhJ(_0sw2oLAdY?PTBwf002ovPDHLkV1ii_Y+nEX literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_colorize_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_colorize_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c6c995bbfec6c425bcb27b6b1e52fa30f386aa8f GIT binary patch literal 324 zcmV-K0lWT*P) zF;2rk6h+Y|yp9CIrc8n;BS94@YeDn|7i@%<9R$cCXwZn8!oiF~CW*&?B#LdQF0{(3HV8DtG1nl^|0ZTqFFLE0R W+yzEX+->dv0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D0sTotK~!i%?U}n$ z!ax*;11&FL!`M+-P-Ev?P^pZywVl}c6kaN3d`pOeligPTST~nzhTp0+OHYzR*L=z918i*<$G%;W!9tC-# z1Pay!bLtu#vz59Atgbw*TTtBRK~w5(le1LUP%Py9M(P-do;E7B23Uy)HU?aZH*2DD z&YM*ICB{=?1%^t`(+%8D6BxK08R;6E!n25@-fcpm+<&(FDYk 
zwli}R(P|)vYGFW)_BN0Kv!Wfa&_J8`b4A=TlH>>frzKz+;g$0M0000((yI*=W6KiU0S#pOtH`UAYz2!Q``Y z=TEMj(8j-9S8f*6nKHI*W_Bnqo->`{x$1#Rb_Y3z@7a&fPx#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz1GPy+K~!i%?U~DN(?Ar4`!wD*m#X)DyXiBf zh>HYWb-@!L5Nv=}AOvg>8$c|Rps++BkcwDQ*5UunOqdz}9%pPPQl&)tH1_eCbD5iG zoVC?&^;`X#|Aw8;waMY(`DkjZc3v~6 zCKwyF6OqCgbTr!9dS(M?3)nnInVf_aDrg)G#(R4|tkbrE?e8d)dP53BXmHr=-nWhx zPN#DZOl#gNzoSe0BI7d+1}p6`A>I{F00#$joq z$@-nn^*jcj!~`(uTpbq%6Y4W4rAt!v;rmvo4-$t^!~t6})Mv{oU6QJ=5(a4rsI}KZ z=b?T@0!jM$*NZ?OBv0a>DCIacW7yY^4WR^mJj?1VqkNI^Wf869>N5bgA~6BRC-I#6 z^a;E;DqmzobsLhp9qKI_1bY(3QMr{MYBPtfyg*snvTZ|OCBvtBGY6&OQc8Ng8=>x^ z;L=Q z2$XrYmFEJ;dFE|B3EDKRT=W^)NEQSZRzQpMTV>H;%Sq5yS@4L#J9CFfp#>H7ga+v9 z4=5WaK^u`_L*&r0NTCIlGjsYDpiL8^j1P=xRj(0000< KMNUMnLSTX?D&=ng literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_face_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_face_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..430be5e84a40161d15ad3bfa3791525991e034ca GIT binary patch literal 694 zcmV;n0!jUeP)e4JbCbSY?&;Y~_`TZ+Jx_8t&FwY+gHuEqAjc9Vs?@1drNk0b zWQbzIChQM-nPrzCJ3OYxJ&`&`9K#%>?{NYda0ug zVvsq{2W)s;K0YuTa6JA*c*qgqX3znm?9%L=zII&WEq^)L77-m7pxH7>%&~KWXY6y5 zejUiE!--)Co&VWBx`~>ls8b_?nJ1{nNGq*rd~HJsW&Yrx35yy%=Rwwy{`$gP72P4B~)xjN_c+ z5YeXtQFajboda3KCJ`MlJVAV;&1w4t>xgL#IzSI~{{$T-hp2H$2W0YyBW||5{cURM zA*BK-s)#kZTH3yw4Mdra;DBL>hR@h1x+$t>cvCKQWKy(jtlhg8%>k07*qoM6N<$f(@=SW&i*H literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_fast_forward_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_fast_forward_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..ec53881d9ae04d92db9c1fc56dd1d96e17997f61 GIT binary patch literal 272 zcmV+r0q_2aP) zu?@mN3`HX(3Mx8ETBxCC0XAR)m>}68ED$zef-pcBfI4j`>1fhKiXIb?*ghgs_}9;R z(w)x#qR8X%ct#km%zwdrABHEpaHeY*TFy+vnC@YK2kQ`~UjQ{HJ0lSQ+*sHcsQ_N= zEsR_MaABrpbOI>Z>KLs68WJXQMlS%kvy?J60lYcLn2`z!!4VbKBWj2RmmFI!xn!eb 
zbb?Iq&@$^o8+cXZf@POmmtD=#&PWPp6~gq@VJfK1)LNO9p!6$y=@0#F@pwF*KjZ`2 WePFy;IbR0=0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y9E;VFFSW*B01Jg-FK~!i%?U>7I6hRn;^E9)~Tu4GLQr}i&k&*}@KK%9|E+4O ztEzkM;_qMm8_ds-1N!6fcy886aavexQ2a#)Qb@5WD6+*u!A)1R4>jIvDc`0@u?{SV zg?-xIzCB!C{)y|b@B*|MZEfAc^-MY=#T=M?&~9HIuC9Kw;_5P7Svh75TsQ8B6w6>T z&>ya?9a$mMf>@u?Hu^=Rn1Trg-R^x$Jaf9;duVHQLW&9oK3H0MT1Y5>&9B3qoqdez z-k{gJLU>J$_mqzhZ1&M+cS4FG3|Lh>9?s`zOQ!+v(FV3J=#v7uo}kU@gcN-ksBGF` zbl;(^HGgSKANam!3`wVR7j0!sDf%D?i<3zXdc6l|vkMQQuZtn$gUZcMiUtC-FiLv0 z>2&U(ZCt#;7%og7%j8 zls)LX`vo5Z_uinD9ifI1|6sNTe7^EtXm3c7z=??p!5q=NgjR2VNUZnJvU3_zBs|-M zf!tDANX-8mZEm6y{2w`97VN-ibMuC8=OaZINFB`7=%+?4hpc!wGyOm-r%@yj7)L?e zE*g(m$(*mkJrhw(P;-Dkos{Sb#Z zgF=9kWt2-{!8RB8zD7I!G8zKo^R|(FM_cHq5ebI-^id3S@+K9Hm^&5}Y%38jEc_qE zzAD&((Z(HnT#fh|sFK@pI{8evInN@c zKVpA0TLV5{crUa!q-dP!3(dm9L$pz@#xFDk0XQoB=oAM&n^(#a+R_|S^g#fQTojzl zFMPP&kqxYefCb7V*{2PWehjg5xiZDe6pOg1URz;*4y4Jp=v$w8l= zKHS#>S**c>8P7JjZZzT{Qfvwfa)m|j6Hc2+(LPd~7FTH4fyzy(2~wOE78?|Q(Sa0F ooE8*o&w|fP=e{AuzlZVM9~Z=^6U9(3g#Z8m07*qoM6N<$g4U_?2mk;8 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_gps_fixed_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_gps_fixed_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..ffab865d9585ebbc77c392f282e2316ff9f7271a GIT binary patch literal 687 zcmV;g0#N;lP)5_dO{AfRR*eT+@b9gN#p1y~2!aQ%;zc~_NpA}JJya4TNWADLCzuWFR`;6~QoL%q#f;ygiJlyjth)6$c9C1aR3Wu!H z`y7H+mMCH4f<;=NBADYBaa5UooS>b3T-hVptRT%L7!;?hGfo#iK4~V{pn$?ana*Ye zX=>E@!ybJNs|Lt%FIv^%Dro0Y4Eu~1kx?;~5(y`P=>1pT;Kd}xF?BL7f;sW{Hav&4 z3B^pDpw(>5YW(a?Ql?SgX^9prnWY^G-5DXv4R;TzkzqJ=_lsHXg=j&^?CjvO=Ln+6 zl5Dtg#VA3)8QvFoe33>J0*XNtdh7?gU=7W;z||Z!0=vdcW5q5wLbD#|y&V-;i%m2K zcEJ^zad9n66uY8w5>1(~fG}O6;hj5yCaHQQJHu_g0vV`BZ{h1i%cqL zCd9Q2QEZFKDeEGe9HQ9}*M?EpP<#@VTWIoj!77>pp3_F%3DljTS+)y$&CCYG)g%QP zaT+*ahRnZ;9=pKf0!LV*0h`m)N&oVu)Uzz`+z~lu9{sKIN V8gkk7uaf`(002ovPDHLkV1fY7Hg5m` literal 0 HcmV?d00001 diff --git 
a/app/src/main/res/drawable-mdpi/ic_gps_off_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_gps_off_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..4a885ca23611896bf01e5b706fd4767c2c0a28a0 GIT binary patch literal 602 zcmV-g0;TI1=bTnWKF|8{JBRsm_)qwM1wcq|kW-!kGcGI&eVy~EQ@;YZ zb7IGAJ}{$;dnKV}%cWQ@ZK>q|7x0uY?nZSwHqwDb_kSZ^8AxzoG!2LmSHAbTWfO*s zXj-wWr2xQw`aX}~v-OpG}Lu%80F4_U;plCVAq zsFzpn`ZN6&eCY=T%45WcAl8O` zX(@c|t=PgDRCV1;gYvVrzMb|W9SDYOYAYfqZ~0I;_ulXae)DQFQ1*4(j?3BN*0(-> o`sS3xx#|D%zszT#?7s^53-7hUFSiu;@&Et;07*qoM6N<$g7e!3Q2+n{ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_hdr_on_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_hdr_on_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..ec6f1959414044afaab67ea5614ff11bb9bc75b9 GIT binary patch literal 191 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DHcuDFkch)?FK*;)Fc4^cn9VZx zfUnH4*q8%xTSQ`xdFr^nT{Caisly%@1OzJMr#@z~S5$IwaoNv&*vv8df80rfyDRF%{sc~kbK&5QJGH->WyX4vX#Wkir4NGsk pV{fp~DQD-@nG400los8;$zUd|9Dj&4vIXcO22WQ%mvv4FO#nN}Mt}eS literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_help_outline_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_help_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e10918b867eac66e7e915b29021235e5f6ea0215 GIT binary patch literal 801 zcmV++1K#|JP)UxlvJ=kh90sVRSXVS@JCQ0i_@Oh-)egQaC{v|MnFVffiakX1 zv)I8|J`m9MO6Z;}Twx zMAaFR+-J=qCrBs&Nyb=W%{&RUM~nx!zVa7QwLpvsR&m`Us&=@9Yl)re0K*h<&8jty z;(AI-9*i=_A`TA!l4DpN4Dt-uVTEHGWy}&O)sf&bjlMI-X4NrB5mRET!f+Aum7S_1 z!6Sk^WV7lRp@KQDVC6y!%4)33G|;eX6A zH6Vp4$&V@~t~NNtGUg6lYCt!}ksnn|TrDw;>pltj(2a5AM;SAq7C4RTDp55dg(=C8 z1x#9@808ygR)NT13i9I?W?Z4Tf_Y3tfyiR=^5Ycdx{SSEVkYFr9?UXHg}}k| zC=k7@V0O!o2>)Tmv_WJsZ|PDq&R~{^X#^^8IJP$M6PDBwD%*2v*1GNKN!i#NDt z)DBVRajh~*Of3*+iVCi~Y*Kq9m}ku^#_3T2ddc#dHFrsI82F7edM6lkFA`iLOT`C(Wi}KJOA*GFmIV=NI#1RX(q{2poD`%i2`{h f*iDyy`vv>}3M)dB_9^(R00000NkvXXu0mjfHaK6Q literal 0 HcmV?d00001 diff --git 
a/app/src/main/res/drawable-mdpi/ic_info_outline_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_info_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c571b2e3e776762bb90733f664f9d66e2c7f321c GIT binary patch literal 655 zcmV;A0&x9_P)22NX4g$Gh;r2I3UU zC}4AjSPu(ixIwI-i~@=>;)?%*fd#|^BPhd-@r0NU2I`1i6z1DU)PjK~qBKyTf@lN- zCy2s8fg++640I7Wl<`GYIEcGoz(-^U3JfE>zYypm@_+0U8C+z9*+p(5%4UK1zv(Js zZ=k^ch-xrUL_9KT9C*VMq7V#ZxJHzW0~N$&*uzfo7172v3S>D)tf9gJQ}~FgQJ{|K z@irKcxri5LQU~VfAvRG_fjnJAhY3`QO>%*_Wjs2dSVB8ak(i`~wix%~<)U?%MJ3oA z7ie3kxPZkW+6y&o6w%94r-ydHP_ma_Nr*Gbj78j570v p!{d$?4XPC6k!Bt+*lWOGzX87wDYF(rDzN|n002ovPDHLkV1ir08p{9x literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_launcher_take_photo.png b/app/src/main/res/drawable-mdpi/ic_launcher_take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..cd2ada19bffc0db76e4640d637bf3171faf42e3b GIT binary patch literal 1681 zcmV;C25$L@P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vG&=l}pD=mCOb1snhX02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz1_DV$K~!i%?U`Fl6lWC2XTil)P(nb-0v16o z%kEsXHT4o(HIY8~(!__RZF))CCbh+;NmFAVYWmcq2@f{L1XByGv`JG@QR4-y2v$Tv zWU&TYY7+v5aw%<5NZM+q=Re=cZx-o)8#jlkc+#Rb z-0r1ccs!{PjxlD=1^gC-8NlD<=VxA1lvi3*wX99kuK87MuwB*uy{&0Fgky}|(zK?V zs(Ki{h(WBGW6sU7pU!Hds=jnfRWG-z>car6Rkdr{L&P>w4&*{kk$QH`@p5*yi_YL~ zzpD0dU^5Xl(|v<-mgMCvMh#Ia_7->E>wU6KRogg-SctBO|02~uEm1N?wv`qjUF!Gf zO#RIjj-^oJ2Gv4MvwU=hTZR^11y^h~g_@{sV$95tpheM+aRuf=H&yLhb6wasT*CP{ z^WmaLbHz+K^2n+iRF@a4WPGP&UvX;=J z($6KDPZ86ttrixwqfwUM<`OIfT~(B~z$j1|+r45%QdDo)wQKd>@(TUWd-quNmX{ko zY^(**KCAo1^z=j%H^LkZvivF+Xh{Utcl4+}IyyR~;1f*~U<9i`o@jV%HgG}Vy0dYk z-gDr9K5+baP`}sLv%s8&$hW_$Dk$VaPOetCzz9}>9AXDoug<1zucck(!0FTa*x1+v zaiDh8UtP`C3Ktl`YGMb?QRnl$#szS?-`5vYcxa@jhpoj4tY8-C!s41_xtw$66lj10 zfkp-g^)5Q29i^p)aEvj|Hy}F=G%3pGU>4}Yob<|W=A1d*@9kwFpzgD0^|~!v^ct^s zf-r_TE(WsGK#QW@0JA_Rb4$`v`rI4loH>n5(g4nMjn5Z^{%-wxLxI0*Wm)e!b&8F( z(*V8H1_*nJpeNgsoSYK+4LhB<2L7Vk>e7Y{;%sl;taqO~r;k2(U?}veT-vlr9~l^k 
zh_};#pSBixlC<2!m!y)+%q-4vGI0(3Qdt>1)4$f$Ma*L?Fc07Gc{>e2FQlc>jbqxI z85yfMr^&=M(6)E40aw3$d&FEom$z**eEj?NMa(g5b* z1K;pDTWf%Bp=3!)G4YL~kFmf!e8cDMG(aE!gM>Z42FMbx zD@rTp%!zB@kFGBL;<|MP;P*Rs>O*(#1m|#!G0edSzM-*@od%j@^(Hanx1^vgrng%q z=gf&~0O~9&3!dd#MbR5~?J|Vpz!Zlq3Gm+H>zz9}>Jkb&=3ku%lg2FX)?*1v#$*a8cfz$Or< zHr!vSU*{Ek!X=vzfdxz^n#bQv3}6vR_RU2J3eOf5RdWSqQ>cmBU=S!XHy7=ia=*;WFP)2pjT)#$b#qbM zAc%qC5@zmkyPt2A9n1fG@ bpe5-)z?^jyzLaha00000NkvXXu0mjf!v_=h literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_mic_off_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_mic_off_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..bb7915f33df69c09fbc22cb39e913205a6f06004 GIT binary patch literal 484 zcmV5}42RexTey86(fe-f{4kyh~{+(!Bv2JQ==%JP-)<&$CY}%iXF4O^C zeZ85VwQV|>k=Fb`*B6Xuh!wdk&kyt=L#)eXFh9^U&Jt6(OkzNU7|13DM2L<-JrE|U zfjChQ#EE(!PSgW&q8^A7aiEfj1C>M^h!A@@A`Arjn;cOO`~-K?yi0~SmDcZrUvXd? zJWcaEazyGguoVYZ!OJv1ph&y|Z{olcFmd|te9T6HSnvZpjssT#kJ6L#M&t<{L$DYI z(&hPG`tP7+*}%la?hJ9pd#^R(fNS=^n&UHTi?%awaU}?(J_oGogoxBA(6$En>Uup< z(iaQ?ymd=At#sY?i3rgs^`sZ}XUE8f9VvMt1f<^dO+JkCA1Vl>o^a0x2Qzd>ThCUR zXheI_4FF5h`6v(xh&O*#UM>;!K#-^eGDLp6>wyB%KNKLH?$6F00+5Klv++Hs?-B5S a0e=92ZRRk`JirnF0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz0+LBYK~!i%?U%ia)j$-*-(Qn<*?s%*_M_N^ z{lC(PAB|uwmWyH`+6c>9TZ&-b+6WdF79z?Dw(*>G@|fJ5Br{1A0u%T#^UmCvd+ucN zNEYczSGuOXioLyi#lgYNhpnyBXT4h8a18ydEX&1INO1@>wmv+(@=YsT%w$DKaR_dA zrRfICa0Vk|Uc%!QB&AX1ElW{kNI zQUOGYvCxb$7eXq4NHG?gG3G)@1rRC5LNmr(2&n)f#aL*@m-;9OOH~+sD zpmwm8_0iFHw0(E*-xh#l^sp9#&qN#3%FfOmfA0GD_>n)3&(q&%Cw&Go?AqH}lGW;$ zpM-sUR$lrs3e0pybmcVuyErpIKwqFyc6m;-9Gr(dB*thh<)S7 zIf9N4k)i^NX6uua_a+gK!!wTASipjP8ZpIk`5bL+J>+=DF+a8oDYk)y8+U;e`};4^ z1}9}K=DhOP@Ce?0BE>eac!nK+Y7*Xpo%@8gvC}>rV+>+Hb54Wp?F+QQ8$qPl1rxX( zZyEr2*uPz$o<2t3KR)^O+1V5K_bKNUySp#YMwus4?1PEa`5lT|E80AfVhBtKa{PyX zt&GIt=5`*tKBO1}ljBG|aPLRbsD-ik@4t`!VB-~190JXNoaY}q`=v5lLCt$MMjNk? 
z;#i!?S^?Bl>{z5Y7MeBYQvq#AaV#`z%%=j{km6Wq)|j7N0ljadQz6AUA;mc%#W^9x fIU&XQP%i!e1$Z)RdNB0s00000NkvXXu0mjf%b07p literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_mic_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_mic_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..9f44db5d21785d6d92316645fe5bf23ae994b77e GIT binary patch literal 436 zcmV;l0ZaagP)Dr;YEmosql-|Yvp75WH@G-;5;rMmp=!acAZ~6h{u2&vP8JI)D(y`~u{sNd zx>y7uB-Q5QQr;mP%S}8N4BY3P-uHXpfy+5Bw=q4`Xj#$K*Vl1ZOQT$sJm#D&0X}Kx z6trx?h@zQKup-5se1fhNMLt1aie5f}G*t_RQ!m)BTJU@71;45m>`lF3P_>}{9}D)t zO@}YL-N={0puAuQJcz!K;$`Fy!FGAU3-B!Z+fpn>z6RFH3+{uSlLvj%N23&u9Hesw zV70v9BH&(3=6w*L<7{NBfD7Z?qIK}Yd`y~_ZR+XSu;ggW&-)3Uk9SQk8-h1xG!Cg* zn_%eDxbNUGVBP7zo|*Fs@M!#p*76qc)s^IltF{5J9h>AOpV0yE&JAtx-7P8rY&bRP zsyN}P0N;J~#jXHP%>LUhy6&4a?6{U=*&Vm!y-~VunaQwCquLe#ENEx_eUAbpvszFK eCa+ph3;qD8LA)J0pe6_a0000g>0a31^o^RPE+G9`{VTIRAfx@&pf^uY0z3cXXWa&|5gqv)tXJ{u|Iv22WQ% Jmvv4FO#tqKJtzPG literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_pause_circle_outline_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_pause_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..2ba93e8208c80aabfbe56227470f8479fced58e9 GIT binary patch literal 1248 zcmV<61Rwi}P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz1W`#uK~!i%?U+kz6hRQjeIE2CiU&18G-@Ol zHE0xa3F4A3An3tc4mku9JjBOAQNh<$k%dI^0eli8_%UQRhmcL;10~=yWF^sD*8e|L z4^ut0yR*A9!b1M=gPoe~uIibd?&?~%rZugpag|D?4d4{G23~+k@B^#}{7oD0T?Hor z+pujzn8q4t1rNX%P^&(Jd!PkaeaFHy#$YEH0erfR>MzKG9l-Kc5z|;5jte65!V+ek*N^tb9udnZj6-XoW_xB$~-#Kv;);VCswu)&KxK#cEf%xRp2;L!s z@ZZFDupL-2RvZA=`9SD+DamP5j?vN441E0&w@#0N6=T@}aEDc!)b#Z94a;E~;KRNp zZk@Qo$RShP68PNn-%;{Zg7ZT?Q&% zbs@0m6bRdSk!M)TL71cmkjZ4)ja1?|Q1Pk@fyo?;PJ!Y~qofC5OrMV+Rx(HKgFtu^ zGygN4Zfk4X51;SFz0(~Ki8W#nSngB%0jmd=vowIO+v3)VC$wE45^KZ|f)#;4cp}TJ z9#{^OWNd7#7ry)>y-vwAR%&4g)eCQM^9VH%r<(z<_tCVTo5g+{CZ0000< KMNUMnLSTX?0Xba& literal 0 
HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_photo_camera_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_photo_camera_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..be9fb226a53ce5ee4008cfafa0754f42284d51b3 GIT binary patch literal 446 zcmV;v0YUzWP)S3bFt5nS?tU*5U+ zuRv$a3fXL!qPq!nmZ-`S-6wEv!Im}!G@?dn#;*=rm4ZHT$3Hy5 zK#ky=d(UOmFsQV^o<2gJx=}eIwnA3W7xau83lN!8vECcV2)4w<^v@2~67MD11sAjh ztGUt}LA${P1=0J3E1eNGc`3L+Gf5t=WKEt5E{OV^X;qWQ4p$&rl*20-lc(iB^4yia zijQ0lF6anWaiv8;dol|xA!y6No&?ck78<-*{Fa&B9E#TSq3mYYuttQ*QZ*_=*n0M$ zV!JjJ9`z`_U5gAzIwsc`sys4xbSaS{W>lIoJ$6TM=qna%_I>f1qB5iXUVh&=cfY<* oSSx7G$evyZ)YnOH5*(G_7e~OF#v^w+0000007*qoM6N<$g3E)#tN;K2 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_photo_size_select_large_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_photo_size_select_large_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..4577f75b14927661f4ea8a0ce5d726442e31849e GIT binary patch literal 304 zcmV-00nh%4P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmbN~PnbOGLGA9w%&02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz1h7d&K~!i%?U>JN6;~9;ZEY9rPAXkg4aG*Q zWTOU6g=~WO@E<7Dg}W@W2u*O2Ru?TQwAyVO2_%x0KR~M`O~L;lTX{)K$kUg$vmk=3Tezi2lxv74)W?Za2f0Ys_!7AF@U$hIN;N5RxiL9*a?)c zhDhTEoB@wPtHm@s4V2GJq%nin!8IVYy1M#gd3pKH?Ck9D)YMdIaB%QIS69~_GBh-F zkT%}Kf4;?kYt~GOZ{szfVn`v4f;Yi^kP?Eal*{FhdwP2I;d_2Jgx=oX{TvM1KD2I< z`T@|Vip4`36Y$BWf`{{G^Yincb#!#>RY8Wpwv|fd0{U99ZsX!0HhdG(m|y~U;*%R2 z8#|$bD#G~q_(}9xweI6$I;ofZnFd!gXNzG!ZpxY+FisaR76gXKXaJdsN1Y*VY%`2U|qM@ReA z{SfF2eLOPvPG5pltSJLvk>%AkoadDz1qVPOfTuGvGehb>A#j~o@901M3SI$TyxxVu zsxlG+d6H+5as!~&*4FAvOG`J3q*PyD-}~_WuXW{g40!Q+7Y0k>NGuPOgDm0$a4MQo z@Xhi&u1|p%uXka1jrb~6=UJ!$P(@N|Zf@?ZHFvrVym-9}gBy=XEW%XhS!4}>%9~Qt z)6=J{sS_7EFJAA$U|Ac9Ri)}Yi$()*3Qehzk&!NI;`Ar*;`J^JZ~kXFtUAvkWB^o1 zN_BU4b3KZS>wy=qcVYaW186us7af3?Lfnj~&a((ipsLhb&DuebXnZs#-_yrPq$ z(#p_)dJamc#=e0bgKzsmsQzlNSR5}3P$x`Ad z@Z$9@3>I@dbc&QGjobi=q)Z%j+zN(=hmXL=&*skQ53mEM7#j|N@T}4k9F*HJ2n&AJ 
z+{E=EP_eBejS0L|qWC6wdY}R+f~8IJ$y46|6;p&XCgAZNkNG^o;R;U=LT4uhXB&R7 z=qs7?)c0UJP%%YFqkspksX6*exW&4y3jA&ncr51Yq*trlmSh99g#8Ux^2 zC6D{K!Dv>@f6nunQ)MHhF$cQ=Pt17elvk{6SRTj~HWXaauG1uq2Pqh?xOaN=EEc6$Bv0q0%p#*bh?_`QCSX?H800MTUaxquh)5ky_(3 zs3R+>2crIfqzFIgvg)E93X&Z22c$)|W_u6VkYw7=zC}a@+N=Q`>g<~JfEke-yWC_> zB|WVNY>Q;p2{0$=9X;SiBz;bReo5cz0k0xyZ~`<*`b`hWi=@H{P+4kmJ>Xp=F(-g0 z>AW8BCXyy6Ktj^F;6v_r+988dozY8nMKb3ESdi+i;EQ(R1SpM}lwQ)N#%{Z{eW#3y zA}xNh3qh<~12zqYT_nPU8h71R_PuJKG?aTJ7W*=NbMwMj!&w^g{bKnxs^q)5JtjY>((4lF}O?9;JMBJt; z8n~hyMNXM#fCL^M2?kkE*Q)gPaofJGW)0I=HU2q2AR{~lnm-zKj?cXq=Ui)^CK=fG uMwnKn*yfrXdGh4Qutmyu=7bT(AI2wuPj3$EJQURc0000M1L^E>wD z_G`*BEKt%ZU=9!xlW_2kcwqhg@J~+L1PJQf&&tujU~xgSfq^Aq(NdMD@Qa448^br9 zJv*meXvT?xG}dmujKXP)SB+{1>_Hs!K&aj_S29PB2q7`#^j3EY3P>-2UD~XDRtfmsf8DLbgnnn^HBYA;g z4=`RcQo><0vEH(o33L^M&WvX>?})XA6bX(bwxF&to(yq8CgZt|0URYGT*Nj$lS8t2 zAeB76#6+Y7c{rx{NT#g>Kqqctu*(0{!sf8`O=HeuFw(OyP33GOIdsHr)GHc_4XaU~ zX)7VoiLa>T_OG@QmE+P1t;G`ss7jiP8|I*{k?a}(rQ(4!ZlPwmFKLfaW5orAcwjQ> zK8eo8zNj~(dj)`fc_Wh#s9w&-64XXKZJ- zy{hPqIxk`2rrgo15$a|90fwVgRmE=-Q0!cjtdEE*sBVD+T6nzVbIzvysEKiv=`_?X zXHzL^OMrkJv0w)(&%RW=Bhx3KnOKm;M^sO@#qJSmQsl*MD(W7IUd#0@QY`^R;(>JT zpk~-MoC=rnGn}iab-AptK15cczS3SoqAL}sC1OJZUI4I;LPx#1ZP1_K>z@;j|==^1poj532;bRa{vGmZU6uhZUM{ jaZ12joQK~zXfm6l&@ zOHmxh+uP3Cj!Mk`EJKzos7OToof0GQw2(+_3&PuYSa^^aiAV?`Btj$}Jb4ihL_#9L z#6&O;mNDHd9-}#;2t=(2YiA2IfvmwcR zm=1C1hc0M^M|Qh?nD+mL;WB7}5J)iyLI8T8A5!Xr{o!zULh|(t<*a~*m(pB?YIqAb z;3*7%W-mEFt62$KVGeZiZi8VMz4X({aKHnbXs)XZ7vOHPUr2Jc8TP^uHtKNlL^q_g z18yv%c?iGY0Q5mIS_r2h5s$}LI~!x)Zy2rX{ z`QSVRVzF3FNlD4D>44cEqB#l2;T~w&Q^9~Ka01F zqT`c;neY?x<@+s&z(Ht+J)lk3s&!_pq;Tq;N;&wKLZBI|7Nt}LDh=PvD{JZ0C*&Xq zO2K^2WK64jY+jA&IcTFR&8u9=`6Tw>y}^&NVj3vltyBj(6PKU`j)Fp@jA;Zb$+0SN zkJ3~JOS7*5D@_KS&c)CUZ7>hc!ECV7TK!=AzC99&l)Bw+RX~MdSF*hW6Cqp5G{sgS zHLJilX=As5&cQYK0Y1ay@hFyOK?g%sQ4X{!D=CZ$SGI9UCH*%jjWvNYtYE}^kbTgm zsKhRUeV`NT0xLuLGz2T)6!zMK!C-AgMa5{^0M)OW5-TSvbSi!UjTfQipu;y1jYe0w zTrRWRXUq`Knt{r&7cPUoW~z{dR0;~qPWXxqeQ-K-L&^yO)wPP)U7@S2Qkl?6*2+GC 
zHd1>eqxwwxL1|o$;Rn3C+v#+wOcWLmJE0z4K}umz85R)@pg43uV<;3-XmeC>6M4}A zOIVRAuD({vAchhu5jvo){F~_VdcCPX16i`!{sJqpsjVgBEw%sv002ovPDHLkV1m`6 BmOcOg literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_text_format_red_48dp.png b/app/src/main/res/drawable-mdpi/ic_text_format_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..21710324aacedb9a75c4a3c7b6d537380d7b696b GIT binary patch literal 516 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA1|-9oezpTC#^NA%Cx&(BWL^R}Ea{HEjtmSN z`?>!lvI6;>1s;*b3=E3qAk1hSF~15ZC{f}XQ4*Y=R#Ki=l*&+$n3-3imzP?iV4`QJ zXWXmuHyvnbsi%u$NQU#p5$>@ZM=v52ZVsnmEqh(^C54q|voP zA?`(wOsY|=2k*@{V$YhH6VJancQS2qN?W2tn_$z`gb%8%5C0wzJvwtAqg?Y`JF7qI z6PEX${+t|NAy%#7Ep9%4|BpkPZ#YNJTGMv8IqHE;y^`%|!*!08RXaBAnVk6YXzZ?| zkCvE)P1~;F&o4OzXx5J;Ft9%<4pgy;N^sLTb(0+K9{p>3cO7e5>SJ z^CM*Q&1_v6S@nqe3x>NUncA?2#r!L_DUq!W;!ixe{IcFj^jT4&E0h7p4SQ+q%MA5_3{gE znEemCxm&ugZ;)PiOWHR6-_B;|`ioon7_!s9)Kty9zTwd3t9vx$65sNiz4@XvL9$Jd z>F=Rdv0G*B>peXd=9d4M)gl7rtObTtq6FvU)cc|JZ&%d%PSFNN5`(9!pUXO@geCw* C)ZSeH literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_text_format_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_text_format_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e2f3eaf3aec5a8628cc7a9e6104e8a1c7431acc7 GIT binary patch literal 353 zcmV-n0iOPeP)9nGfh`&~6(2c@G!)4xHkL3KTw)d#7aiPV z=9tIa(V>_a$GnlJi+9YJV&WLHuNn?8M~a7jyiwIKj@PGH*ulJzQ4LvMG24oRJSEJ! 
z>e#@P$SVd`@CH=J5Z;QCBf}%+T1~jcJkY76n8aI96PEENloY3Y=S0acL@R?zfV zDjYpjFl8JLhr{7;I2?{Lrb4$uv4&UBC)mSVRT#27;|=Q*jNmuJi?N@TL@k zbG#XSfmysWrT!WMsa*t8y9lIq5lE?2>Yw@mds2ZFt5Aby00000NkvXXu0mjfZ%~y$ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_timelapse_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_timelapse_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..46ab13fa468b8c920008c3bf6e9e1d793d36f9b6 GIT binary patch literal 790 zcmV+x1L^#UP)%mw?oFQQ{~$w- zIAg`SnwFNDbt{&X3`t@UNt<%sUEQ?lQ~R?6iY{r1si|Tj3*eZ)^sAcL!2o$*3c|L_ z%BGDgD3~y(1z9yJ4v-J_JaS4-(j)Tb?FtCFIN%Ea zH@qf^hl8pBDlxz@0KeFmYWLWme)fS^Jw0#N0leJ{Q1mz8Mi=`8z!S?32)|SX+&A7G za0&3RM{^KV_m0pgyMT+`08?6kQxb6iun;l}fTowy0q4VD<^}+8G)z<#a4ronu-0{jvFaKKh4pam#o0z7PIhXKv3fDfgs9tYF_6B+FJ zJ>H`DA%CR1zKDK@;A z4oFl0kGvkf9aZnqoVP;VAq~I_j zfRH#~(N8utHEj67yeWxb?^ggbv5Qv)K-EEscpP#Yp*=A`&MJUi3-Ue93zjqhd}mL8 zm*0RuRoPgo`IwV#L9id`_xhewfuJ2%%_%7wHms;*&NX!ix?s;>50F{+^{abkvOJ;2 zUDOm)!{_#9e*{iCXG1sLan=i&AAuo;9JXlLnk`LDTh=T)Z91&941f&r43IJY16GOx U-xkr)_y7O^07*qoM6N<$f)Ghy3;+NC literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_timer_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_timer_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..9d239966f326e91665e364fe5b214c3151938c4c GIT binary patch literal 628 zcmV-)0*n2LP)9gYO;M!|3KwTTajWr{}EgiG20|5T^x{wc4odRfjR*>BZ zEdb03F~HXVzEc%)TC)paH3oP^dc1**Pe%Ax2U3C;@_>GAKvPwS0)m0Co)s5iR>3z--J<|0`0W7V-A!@xBmZfXAHxXqm{FeH*}$ zN*oXj`VBzGhVhJxxhVnA^rR30q{J}*68n}-tE;G}o3Ww+0Del3&j->$eJM7ds}$Q0 zUeuD8bF+#sAWGhL;PxaM-q3d!E7YhDee1-9D}VXJu`TZ#e%Lba2k;M1c7|f0@VMIm O0000IY?oiX?^n<%gh1c{AF8vHWE)DM14;}9AF8%JleYW}`ch)6$?#WW^uS@kM zCyCnrk2dFjL|n1^Xms6Zj+x(gX%e zbWjkN7)qd57GwZGk_c3u)4(lRF_Dgjfdpe^#~c!M z194`_560IGjFVs00lRj<5&-f%WHo(!2dceb4SoCvsv~Q?L}=y}>**^STwb$*KIVYK zHA8}MRw^v|L<;Oy7!2V8S$=Vi-7I8O_y8N_#$aw#eX94IBAi)rV=BEwHCkb?G-4tt zH&f!7)Y`-)skMnck|AM#r1r#Y$(k@cQhOpH2W#ShqGL9X$IpzX4+0QnG|U 
RaBctq002ovPDHLkV1iX4v$OyJ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/ic_videocam_white_48dp.png b/app/src/main/res/drawable-mdpi/ic_videocam_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..1b2583d34e8bafff26a20f89c9d7cacf4525617e GIT binary patch literal 178 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0DN>3NZkP61P*9^Is4S8Gw;|*rq z@i^Z5O~`&LSF^H2(&3uW+b>MJ-#(mEy!`0wC)efFl5M{lznoI4x{+;s;upRj$rU~^>bP0l+XkKQP@0n literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/iso_icon.png b/app/src/main/res/drawable-mdpi/iso_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..99d271e88d4449dc67b7a9b9b6190216ba299570 GIT binary patch literal 539 zcmV+$0_6RPP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}90kBC#K~z{r?UTJP zLs1mK->VWTBnXQz*lLiNBqWVRBC#-F5sN>-B1VIO!DL`UOcIO?J|>Aph>t}~U2Gyk zqSa8Y-?{I!(fY{iWVyfOmvhg#=iYaF`<~};I2?}u#@F}_W6UriflI#c-;hgr4EwPL zFHz?1h!%RJe-v9Vg{Qbm&l9mU*lZnzA6ik&(hK;ahSTC2rFDZ-Z2N`giC1E7y7n;A2rEqSPI2UBK3OBV9BplqJ8Ov?pC zH2#I*Ka>|>IY_-tHao^X&4mt)R~mC9F@x3ED1~Ce93aOuPT@WF-~oq_T&U7m@Un4m z2`@JByr{--uzBt;7}S!5Jcjdj#+Wol@!V#{^lF=hye24{3|45lfKWHU;xeq0F~2HJ zn8hqsVmY3mQ2c(3Ydm7ztB%9L1M@e4ag@(*5hrm6g+1+UN4X7Q1E+8e-|z*y?53r_ zq7@Y66O(J-$9E~f2kv5nwo88kFYqg8*sU$4!NLf}G_{b+w=K6(ucdD=*Q7o9!#Nxd dhvRoV?-LH1ugRkW; literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/nr_icon.png b/app/src/main/res/drawable-mdpi/nr_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..7dbe3365e2b88089cf9f4589b2292d73c307d1d4 GIT binary patch literal 472 zcmV;}0Vn>6P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}90c}Y{K~z{r?Uc_e zML`sR=gLBn5UVGlZ2S>yEZ8XjgN>3OS=otf^w>)l)>eA5QG|N3RHT%$Q#O7qyoEHr zGjp!w-Fa?rrOa1fbI!TVbl#bJpNT{wk$+F3YbMs(8J1prz+#dlR_nto6Szn1n7o^K z4%72K@DdMr{^Tn4z`-w=GOcB)ulbVQ(0s|KRor30Z3@t!Yl!a?TH&yEjYHhy5RRiv ztsfUL$Zrh@6}-hZoRvZiDvbOLOT_!WWSVu&Bu@jvx~>aMp;O~58hJj5BgA9ATE^ z6w#hlwCaaVHj<8(`ROi 
zDb)PK3&O@DJkiua-iIYDqrB}>8^s%RS0WG>@Ek9&N6V2&Bog^+81o6A7TzWqGWA{n O0000R;>#mP zB_2gJIy{WxDHIfl;54zZ?(Dj(@bmNJ>M(=pX9D&J05vf@IA0RFIB;_s@2aW4t^P!s z?~T5q@wxa<`?f89Li_gpTlKB*dwsOb9#+$?m6!M>7BklVVk|aEPh1DJ$>V{EJl=XH?V({#-awY-(QL|0?dORlc*XtSl(} z&d3n4k$a}x{MZxU-fiZ-$+m%8aD#C}>V?${Gwe!OwoRt{8X v))3p#FqiR-(={Oe2E;4(u$C!;+}~cs6m#}#(>L)Q#vrbztDnm{r-UW|mL!4d literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/popup_flash_auto.png b/app/src/main/res/drawable-mdpi/popup_flash_auto.png new file mode 100644 index 0000000000000000000000000000000000000000..fd2f06671d9de560b8c752d1f5dd3b9efb886025 GIT binary patch literal 1517 zcmZ`(Z8Xyh7~iT*Szd}+)@0n0LSE-RZOpMr(i(;^rI|T2@1tTZ^S-uQsSKT=ni8%X zVeXQ=jHTKRK1}P@CC7iHIP#J@+;#5#bkDgTp67R-^Su0?=l9_$2?-9+(?#lnKp;IL z0Z-B*^`~uzYW?J$YnA8`GTd@M94w0OjQ~ajpJaLsvTVBN{+ZfF zrSTl}-i*ZPp}H3EB$w`W@Dz5u518MyuU=t7BlljfIjFc{p^D&p$ObYDQl0~sN}d+Q zn?KSZnVZ-aQG%ieAdI*dU?CK*0z0el2nOfRx*oX*|?y z)knwgDBTT!KAiIBcgbrbB>m9Ifw=}E^FjrK((7)@tHFJ6H+ zVMpnE7?V-TG;$Vy(kt69=G{BRJD`A7H9^sE!bG=~ya5%JEFJfZ@qJmLG-T{pC6)E) ziI-c?KBu{K%ShZz`e}kIM^Y*FOY^qfBl5{_6~-1uzL*sR$qj&DM!t%s9-=s44Sv_3 zy_TXs+`EAOf;-yJmn)E1uj=lyC)>2iCpD7t^kmc6`efGElkR5bFz84yW{w%?{a`Y$ z&1U~7HO@>l;ks~t#S)mft5PiNvl4mGg0kwPUt!L)@Y>@kbZ}(+_;e!+g^+%i+Aa4X zOm`OLW52|I~H;8?- zWx78WD7`a|`mrl1lJ`{fM)|DajUcA9nkR3rJPo%NBOFdzaBsMj4u@a=QN^c*c-!4I z5p>-4mVVu%d;ToLK|Oa+f1)S9J@8xVf;s)>b7$4yU%7JE4W^pCn`I?C9h_Rjn}WvE zCU+8uy3TgC{kL3FurxJ$gyMqzQrwOcyI-DPJ)Y~oX2_T{W7boU1#n>Zf>!GwmBWKhJ;Dbe3+b&G{qrNYazCpPBaYKrrCrAUgI7ij0W*xX~JF(cpu`X zskW?K201@C#ffD9iv==>hvt8%+|bVd$o-!kDuQS7MaG~Gsd2oQ_CPx#1ZP1_K>z@;j|==^1poj532;bRa{vGizyJUazyWI3i3tDz1X)Q$K~#8N?V3G^ zR96(ocQk&a36>&mlTrwh)Dba_2v&lXqzYLHq)lsz6qXjQsIZNs(oPEz#Py>HAugnl zDA5oF%aFu?1ebLkf9KAb*&XNWy?bZ8@6G&w@aLX)-+Pz6=j=Q0zB^}bR%kYxZdHpd z-ik~p0GUw$20+R>g<|**9rs~IGSdOTQ4RDd+U;HA;8%3BWsYXnpMX!$kLWT%*cE!x zIo)V9UbrpV;9$2lpzk(%gN%D<9<|S%K!2k$+z2;|UT}_E8_@R|`Waae*3eZn@8`Cm z1@sRp!oTQX7+CazbKKg1zVk6#jt+-G#9(leTf^bHvgt@G 
zpCaHWw?@OYmX7Y5cVSpM(#oeOILfWjaIK}IA5fb?>F6R#TKQ-%s1JO}TC~<$M}ZH} z0dx(A+mo=4uA&w6l)lYadOAw~bi(B9MR@@|Mysyh(RsArFYAGGZWV*;`)$S*Ssh(M zABI7u37ZsJvKbD+UYr)3oqk&f`rIl8*ALna9kx393+)L5OcPAXoNtpZiml7Lg21s` z#o+o;+tK}2M_>Bwg=u(OX_(j&%~!3X!dxJif8%xt0LOkb8m_n1&7sy&hwr^LUZd;i zTgH9P_4b}v_xH#2z{D))$6)EG^G?8aE1ocFH(lu{hB~Z*PI_Aw`*&YxnG>UHb%a9a zRKcWdwG+!DA7)ZdK%1`B(Nw{tY?&F$D<5W(0cihyQRzq@V)0l+e?dpkA80F@zI4Py z>?13dhd#_Co`9F=B-(-oZe`aVCSqT>v7Gf`CJ}(2P}m2e9lDiWYna4-)ZQ8{gWY3Q z4*_hq;t8X+_#b`l-5uzAcPslY8fVE%9+-q}FO+4`Zx3+nRy?g?gLBV#R}Y-qoQ{68 zV?R}2n8Z$G+Y3cmMmzlW0LN~{6Gm<}AE1xDs|U`t(~*_D*wb5!-G-4c5etGn3B%Hn zom)g*&Nt|DD;~zMCFgf&c2|!~-u|o3p-1SI>mfReIywJcIOkS8jIo0D`k^Spm;!Kg z9OKL+Rdt-cgfpW6WJUqVi~=Bp5XE6R8C4e~62ZAibwMHl9GYAgBoe@xqyF>zao%qi zDd5Pl)CEl8^bMcMJ%?I^d)6k?4E(Y5sMPw$mF^pkpRw2t_u>0;LxPHATcIz zZd6^65JCtcgb+fANch-NT~H2iuIhqvfkRanloOn(x}aR(NYw>pMs-1%QC(0@NF%BX z5|cx9L77oqkQk0FC4EU*7Q08k5sw2Lsk)$?;7rv8!lvI6;>1s;*b z3=BdAAk3&Zdqx&eP@=>&q9iy!t)x7$D3zfgF*C13FE6!3!9>qM&$#E=^Z*71=6Rkj zjv*CsZ*TeLU2za#eekBiRc``wO1r=*)}lrk1@RNCI~dqc@Co!Bu{!S1;oNY@GvV+D zujK4U^UQ7U=04x4bA9qY*Q^-ZusJ`{mz~>oJJ-YGa9P6JC5hG>%5)v?n_hVmdtX8b z$dbMCWK%v*=d=a+%tv3|dclp9*Zr3&o2A|vu#eAkaUqn4-FxcvNUUbT{h1s`R zTPLjGt;*6nU>JsD^*3<@;M@j*2Q-&GXnbPS#%E#NVYDDE(zhTE(x83nYY0 z4}Lo;{$O3I#On5ivl4zsYxKshVduMN6C&U6{lc7PbN;LB3tlOAsLXCQk8jxifwk`N z{d3ASZRQ86R$XDa-GB1v!P(Z|xb`{e{Xf8W&O0GnM3Z;h-j-{3zIE#~{{2+%kh#zK zv!e!3tZ`b-E>*E&2EV^gH{a0gTw8kSMnZ4!_13%PjADNt%$@r=I%)ahW!Z;-qA}m( zU84g&oICe&?N64Rvug|;>)-7%_@i>AX;FFE_RF*44_rGVCH&>N+?j97=hdHWLC-18+ zeaV;l22K>0HpoU;XWrmjGuI@C{hFWDHs)+~-rJ2^C%3(AC|>uh$*uiuL-3}Z3xX!M z=)RdyqPOCTN+P$l%3N+OQ_mZR&YgJPmSX{Ul}REd|F_!5`ybGE-@CJ5=W$wu*5ZILL+ zFnn<#>B7RJ3!J+h+r^ssbgjO>0h=&o_s#{H7Zy3|J#Sj((9O4V4)-i3U8~1sdKp|m z<8Ol{jSa<3HcNQVjFf4Zc{Jw2nIyf8G(+(x4rf@;>O5DZ~Do}{PY12z|@g%}c> cY=86r+I3XPA;fJGFgG)Jy85}Sb4q9e0GD6o>;M1& literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/popup_flash_red_eye.png b/app/src/main/res/drawable-mdpi/popup_flash_red_eye.png new file mode 100644 index 
0000000000000000000000000000000000000000..c30f7aa58d4f0de4c6b4cb97400b58587777bade GIT binary patch literal 1427 zcmZ{kc~H^`6vuyF!D=2`mXcO#-fQ5sDN5etQ97xONoHbR8EOb?UQ}M75h{3;B$bhw zwWi{cV=9#(9%wFEqMNO$pzG0Q&>vTn`qr55LgS# zMR@#@8|c6T&11B4LafLy56m}Sy9?Dh@uUJvFR1{98c3f#Geel;j-I-C(mzBVR z+cF>I&)WqDrz}_}5(tDr)=ZN`VAHLHrhuO6YG&E&0iqB9FAbo40CZ;*ta;yqVU^Yy z%k)-;2@FPTzI&@UZvKuL1EA7hYMlng}_yL>EshUqeD z1_M#6RZC?oOh~xd;>t-|w)7;aH8q;tWxMr)R2FXfJmN|>Mkocuk-$|!OBCRazSNeHKaTIw?^bl#+KP-9AgtNBA7E*KY2+*G#`_`hE zYZHA{LXLSB?#(+EiP&RM%=a+r^Anu7+wT zpoz}rd&`O~`UHsM_NCTd#K<{MS-gPK-M;+3>kw(myr*a(q3T!L^5_S`ovU?J7UuKs z_UdoUIe7Y6HmtJd_D5fTPx#Ftdutzn}XK))6ZZ8DuZyyDC zuaS=E#~#$|*AX@9*;?_&sN?(YaY=(c#tFXl<--00VI>o3kBo28*d}o0x}0+~O&=1y zN3sQSKnx!yt?!joM+e`G#NI*}Va_#zs&?PDBKC%&2IA*qEa1ua&1MF_e~D=?a8$=@ zLo+^lr{l`Q2@@1J1{vVmt<7ay-eRo$q;Zb)YS|QWp5^2ZRd_oayp%D=jTjE<}@|bWpK)Q{#jL454II2P~ zmwm+?5d3T?!&O-ia;<6@KU4NlK` zLMO8<@6@Y3t`+HKSrB2||6<`gFK}D4ges7!6n9cuIMwEqPIQ6X&ZM~WzS$#oUC=&= zI$ZhbVh-iQV}qzpXrFF_+5#Y-qX<<}zzu3RQ_YA1$iJ}pUkd#b)KCgHKpccqSlU-8 QLk?2F7a8DH?}5(w8+Nvuk^lez literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/popup_flash_torch.png b/app/src/main/res/drawable-mdpi/popup_flash_torch.png new file mode 100644 index 0000000000000000000000000000000000000000..8e3ae9c6d1e974d698879b85c3a9675b412766d6 GIT binary patch literal 1060 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85o2LL74ILovJHjG_8%uNL{IL zF)Z+)mt%cHs(^^#!9!73{FFJ4WlV{<*%85E79}C-vLVnV@XwJ$eiyV1l~m?!Htv)* zy7}hb-Lo^xE%|NE%sqMTz`2_{jlZ9pJ@4ktn_U+#&erecU{^TC*ip!kB-0=PXF6Cd zS9IU_g~xo!1BN3O42FCOhN~Y|-8ff#IC*Vz?bWSPG6$T(4hDbFyK|Daw1VsWvrV!6 ze?Rcqgv73H-Mp~u-sb|Ya|hlRu|HS0~ zVwc@PTsgZw# z?Sb=-)w{OY?9qJJeEI5DAz{^q!U~IQGoIZmlOv5K`^s(~+*z};gM)Fy`TCirvzy|3 ze(zhiZt2B!lMm0XQWVTP4nlPQGju*^ys= z&F4^Tz;RHDeyj`pKvAD-5&6 zgcTX=jxV2cV)X~#Yd&qkXQwB6aoGI4v;E&|FFRJ&3B}g_35$IWm~MCxadwwjT*Fx- zo}Kv@4WH}Yi3pNpQom*KaGS`V+oCttO^(-gp3!pp;CD`&pRUIvQXU?gbo%Dez#npc 
z&-XPe7ijC;S}yzPdJVT8yZz=j`t|2J9y8n%?mK*TLbJ{;)_DzUzy19BpnH~oaE( zS<3xLfB(hF{t=TaHM6HlUuW&?EMVQC=h5@J>4?VbCaXU8LrF?34|XXS#>R?^Jd``| zvNgQmtKj*$e`gJsmTBO;CX3~V zcg}si;huh5y>zRhpIv7W>y<-wbCqNGHklPNKe)`7zvIm+ulsMW2pKl4JH%{g$mh_* zEbthTY(Y$6Lk6Iu3SuV+0I5PaB?06;OkVc7$tS%-paJMusGE1_tzdyLy;^{hzdtzm kpqhtdGzxP`h}M6G`S%V?on+*a4$RpMp00i_>zopr0N}yXbpQYW literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/raw_icon.png b/app/src/main/res/drawable-mdpi/raw_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..640a1d8215acb9404cc49e8228464bf7fd13ade6 GIT binary patch literal 693 zcmV;m0!safP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmasU7lasggtTjKx#0!m3lK~z{r?Uc)l z&QTc0&u$LIT;=e+N)@dwzPZ++h9a?bgk+jHJfR8&+{^uJZ7yZ9INHFV0OTBa%Ys2$e+S20nKTNZ6Hm-&*Yk(z)b6e z@GE$)2G;>^Qrnx9iUXcfZZxIh1mw-O!Vjmd#dU8&<(A7~@nvPhUVF88J^`!V)3W`|NZ(1XHk@iI!R>gxhX=+^&T- z_+wFX0eMlE-P&sN6}*7caLJ+=-@-iaqvIb3J36=l(g|X?s$bj_`u5H?@S z2E`_VynwpCK&N+6pRJaS@uJ5EQy3px2ZXPhkv79fa$^suwp0f#piO)QtBL7Xa1S=Y zE0_!WAZxGNH4GlZ9!_ECCSK92V!)wr=TvH6zj+cXu+EmM_;>8N(ytyHAi|TE%9ybbDBG$`Ly)@=1Dk>@} b3L%8QL)rvki7c;_00000NkvXXu0mjf+=M^b literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/raw_off_icon.png b/app/src/main/res/drawable-mdpi/raw_off_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..0550b8b3f11cf4a59dbc191573b6e8f320242694 GIT binary patch literal 707 zcmV;!0zCbRP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}90$52zK~z{r?UcJ} zRY4SnXNPD&1usWI35r29MZ6$35;P%LDI%hXR$>$J2`rRgk-{PmkUnB12%->zmr4;q z3onR7F^Z^YRM2=yykz|THEWKWynxO8@SU~h-)qmzUVHWlMMXtLMgLoseqxMk38(TH z{f2umQdQMYb6nIOBaT+9q<*ppbxToK|B9o zqYw^zl&*T*r5|5#yK=M$x>IU#0X(KFALni$V5NQ6^o`fIJ!$O_rgmBqo zkH>AW1T>$d%}YByTK(J&tYE@H;uTm0Z{RNECprM?%z3yDx?k^Dx(&aeliFp(tuoO? 
zkLs$FJ8{FKO`gv{U7umSm#Aj9xiQ+#iAU2gKC}X)JXx_1&VkNUH%!A9(EFc+=9#ES z`iWzJZoMPv6?{)=SPl)CgioLe)Yy~bKhxqXsI8)*Y=RQ_9#)`03Ymj4H=b@>&H z`B<%bW)7bkk`bs1}5#&XGNYjsX1?@G~5LHb3)IlyYG1tH|vQvf=;ig=IN=#CV76jDZGyQHAUDg p-wIreHHZA>8bw7#MMWWm@D~9Rw!VX#GU)&S002ovPDHLkV1keoHvIqq literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/raw_only_icon.png b/app/src/main/res/drawable-mdpi/raw_only_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..d91198312ad7b1454b008d05c52efe689bcf43e3 GIT binary patch literal 1024 zcmV+b1poVqP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmaR2}kaRKw(dX4}91C>ccK~z{r?Ur3= z&QTo4pGO*@NZu_%5}KFH>#|%JNhnPe!Ud7sy1=gHC8d^KFdD9uD|y}IWy-janoUSd zG4Ej{^KO5i?>WEo-+x;(_nvQkp5OU>&pFR?p8t9Kr!xBQmDWcTMcoMbAxFQUHl0p4 z`m{x~9YGieYIBh`k@@Q13n~-dAi`vLiB~62!kS$sCRkxj)Mh;BQQo$E7f5Ga14Q|ax1#BA$QPqc7Tr1^Hh|VwU*&C|z!NwKzaa}g zLC`ADKl)5(O;LEfbHkUR^A@%8wHUuZIlO}bw3C+&cbVpv9Y#fX0%t7W;nV0oXw{0B zGCI&Y>$lFQ(K*l}zGVI}nJ6sO68W((!T{XjBK8GMfux0&?~Bm`n3JFkBsc%W(V%Ji zB@=Bke~nLLKi;ZN7-0a%&5~E{t>`S2`eN&Ifjq1W&FMh%o98|wSc{@>| zTTr?RDu;uk4m7wuF7tA?9&xo#qg*KP#kg*TPu(_F|9ha5)D6Cstbr=)RZe&sjHL5D z3_$-%`*|q*Z2;H1WgW=+?*O$egC^Jy zv62JO)hf9kPe@m?^wXkr0(3yFgz+*u5`(x(s0Mw13Arw&SjhprusFkFklz0^s0Np{ zXfvq4PQuPqW3kF&tat#7>=8kCyGb>US6iFU#FtsFqjGct<^-X uRjpUG4n=(ybWfl@U15|p$ta_QQmH=%m?5GdZ~txp0000d|n3Pmz_ z`2aj)9+l;zBR8?RNoV<}${d~pA1JCQEGN!c_7G(cXNa>}ZdBwCN#RjVF6<$x_REfz zB!`wVBL{bI^SJHD!|e_^WWq39_RxSl@{xxI?8arJYyFr_R0uuzhRrIOkdLjHO_Kqm zFxyH#*)Rc*uuHPw0%0Cgjb67-#1vLG+t3F@u*U?@QkTzOZqAM7t_@FOAKO94piys8_S2Wu2kyssMK?h=epq<>9!au@J%YrL}c}$WG1=xkz zNEt94vyJ4H2`jPrP7j4J7Q3Y~VKgqgX+U22$V)?Z<1$PpZVn9dIJQJwj`1l~|o z(P%_7#b7liixfZ(8cJ#f cWf0l^0}KSHGP+ePUH||907*qoM6N<$g2_JaI{*Lx literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/share.png b/app/src/main/res/drawable-mdpi/share.png new file mode 100644 index 0000000000000000000000000000000000000000..7362f0d7b0feeaf36ad4fe0c3786318dff2c3bfe GIT binary patch literal 383 zcmV-_0f7FAP)F4>9Wc_u(EtDcO93e# 
zDEB!dBjaYeIRI$6FOaH76TSt+=|G2krl|vfmh%FsO+d_xMS2U+0S&ZvfCG@~!5Q3) zj3($}OjM2mUQ#tMQ916@@F+P;lSBx#*cfWDF)Y;su_T(Z&(K78i)s!4TC4`7ijgb^ z`EVA{lC#jz@Ii9WGHjOPbpSL)gA8$i8nPK`xD?dK$QI1Pm)A)yh@ifki6v1TfI0{k ztbOEKOvnK}NER_*iO=OAjICgymIKb<(bGt&uPM!jpQ%}J4B99d*hGkw4I`oXoW9wR z$_3G9sQ4|aI$*$yVmy_RB(BN`=zw%uw+FDbhUnTJ_)L$2h=D+zkINBThJT}!oM!nb d7zF?U1^|S5kOI@4ht>c9002ovPDHLkV1k^Vo*@7L literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/switch_camera.png b/app/src/main/res/drawable-mdpi/switch_camera.png new file mode 100644 index 0000000000000000000000000000000000000000..c0e44e5535c25305717346739376f33a264c70b7 GIT binary patch literal 2737 zcmZveX*3iH8^>oDF=Lqu&2UMmh|*+VuHBG^NoAJ|S;7os36V65DOu7&%5n`i%VZdt zY)QrtWh{v?G1lzsMBEDXx*y;3zQ1$+&;R_-bDr~jc)mO-PG~zxh&%)U07%-OwYj+G zn13ak$P2NU{GFup_+8 z9{_;Z+S^z<$6Z;;59TOdf_H59Cl3#)fMiiNS5raNvPx~)0)oxmBg!X&1+0tI>P-@~ zwoU%1)lr+X*sp}5wuc(B{q96D%M?$Pm-o)Cx~OLnqp_=eqo}ofw_fufGtIG2b$avW zI8Wy)*6RQMKWtj5eqPGyf2J4u^6fABg9p{IM(MA34*J$LoC3fX-$%=9_6(y6K+?qa zIL2s462)3`eYDTM!tq7Qz1n-JI@Yf7)KK#j>4o4&4wMxPL;;lB>dcel2)Zpw)pd5E zeLx?!n?U>~AazpoyEJzoAjtb(q(!!e9ZO0e0}HZsKD6LLJ2g|%08uxL`f#i^{Zd|S#im{5&VICORCd{YK)0q|YOwFS#Kx2SO|Xr>7mw{y}RfWpXDP#OcC ztxGqFyOHL&lzF0|Ou#!O(Nv49NTKm?&}$YF6SyNr;zAKhOiQndsdw23&k}afe&>0R z*|-9lv^^2)4rV``M##;cDR|5`gvlMhtCUSJMXI57?hkqVUEFLe!dyJ0}WN0 zEeT~h>+c@e>Z=jmCWS*QX zNZ1;yA9ol6{)!RrtFDD%4VJ;LTemRto?CfuTWjzlXoloRH>8opBw6*w4R}m=qQJ-F zVmhmPzfJ|msQcU6gpoTaoJjpUU6XxI=iItB$#H};29fZE-btK1$~zEkYp~#?_*<9L z2G*Z?rm29JV44flIvaOj2#rQi1`&)n5U-`Kpn5n|zC=&hVI=DDjj->R;{dhY53vP^ z9(OY(iffkKx)dL{eS1`b?vUnkmaSXETm$tEc7to}nn_-mLg;LNE8>jAt^)0Yc;dkx zfEPPadZ9}Se>@J|>wT2%EIuUL$JsB-HcqAn1nlFBe=2lY6jkoc%HKFVKMS%Bu^xET zf^L}=BUz!oU!8Yx1=bOdy&TFs<5?hG+l!r_C7ZLx9SQR-$Evic9uLxND*Ru-XR2-W zQCKymnOxz^D0$px{RZjbVp>yLS&!%lqWj=6l@iEexp0-z6o0JUggANt8B0umU)NGF zJS=lv6q9yroTI5>id^S~wv6>r4GAWeBP-d>;%U2f;%~4%hDV~sYPnlY_vpmO!8}JN z7e_pCD+zd>B?RvV>rX4x-~qHlNFxASZy-Au8{G#|0f`x&IY2jF 
zy-gcD|C+K2`ew=({i|;5dNwYLqGc5^ckKOKNKLf=({u6h`*(!gk6zesz3H>XbzL$h03KQ)68?JR*lB$Ytvh6Ru}seJ4lOe!zp8Lr%z~~syZ$QD6faIZ;t}t7+To9nXVZs(+41Hu9qz@o!j$zM zL9DT=Zl{%eSA#{8xl@GcaEx}Iq|`dx|5|SHap&}KQjMiq53TYXtsVH=ZW44gQ8dmf z1uD`hR2==G$X6zF)7Inqzy;*^vHxb%rTqOB?~nNr{){&{=-b_~7e9}j*iu*|gxlKM z<_+czl&FjPKTlAq-!HW(`Em}C_IN=iNa0UN?mvD^V)ENOuU@Ao^uiRdcLL<)!}J1i9#+t589lhggSIOi9QGC~T{uLN@{B2~iMV z^-njCH0vh7mlksfkUvh1*0tj3P;N$BvR=Q8O4cUCMB1$A?c6Uf8N#OLE>U4v7N50n38ZnB6zrp6nU3lDd!(FNnchss6fVR?O(1n!|FC@ zlyL{OUxC^D1MScAwZ1OLB;*3MLDu1yD0ZLAK}a6 zPN)RIOk!X6F`Pz_1G`tsR{R9o;;&x6Z=HdBq?)oDAzx}mo@_t){Op|s*h{vaZgJ}c z@nrk@6kM3N<0=xC)Ys)IGU_ioZA?(#98G`+n`d@eNSa$e2{ZVdjjzd=Q^s;9tw#@c z_9^lYleQ5u+qmVZ^+|dCC45Ou?seX=e#a8r?+ZLkn;8`yIlR+ArdCxYdN~R=B;tu( z@nM8ZEx3RWHd29%r*s{xZ?5thfgUzD{hFS(5GNU!Lk}I&a8FbOtolqR=y-QLF)bC> zEg*zW3gO$)rRpovIl!tmrpP|mq)Cv@lrg@#X2fYrK7JSz+1KVKdV>=^q!=ej{imPN zAKji!le@Gf@95ScZ|ifur15_K0*8o3(728$?BmyNyA;?`LT71=t!Q=1j0xNztpDcdFO z!i932=8xu4u(HId4Qy}$f{!?Zk5}FX5e@otDX`^fT525FzCO8><{}MyTOF z?bF90A&-S+V5N?J3v~zP@~h2x_1cvNWW8nGRVbDEbTy7#>`Zx>Vb8n57@a_rzZiQX z95T{uRulE-f{Wt$YvmDAVpBqjhG1BTecg23Z(&g#{@QNku9D%rjA-}J^FbRuE)c10 nZBkzTH1F~M!fX!Z*Czq&?Cr4*Hw*9Wy_o=eB-*CZ3Pb!4hB`5v literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/take_photo.png b/app/src/main/res/drawable-mdpi/take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..fb2f94a2cc6a758031727318aa882dc5e59da297 GIT binary patch literal 7475 zcmV-39n9j1P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmZ~y=jZ~-Bkm}&q39LPyTK~#8N?VSmD z9L1H#tGatGS(as41{-rEfbl8G+%b@aaODD$5E2N>WqWLnkb`8&m(8_gvtN=8A#9d2 zcra%|LJ}YlmVh~ua9_41+wut*gAc&6b!%q&sQth09wFJ9?&{IZs3q&Cuhd=D$8^@>!+_8$z?@V&HNqtDFCrX^upgy|6a!%y!i$1g zcQhrd8|~B(6Ium8ZJ&+BJcN@G=1|%&V;91+XeJ)O@`1+WxDGotWQ0xuT)|(C@Fj$a zlr{o*4T`W-2ytiA!s_*IS||vO0=R--gm50hND=(*ya7Mq7tGS{Yg$wr3X3r03IM^A z_kJnDNEZC=z)ujjpi^;AW3q}?#9c`kSG9Z{im_`3Br+f!r&o6QBWX6 z9RiFvseRUM;oJH^c~VYEOKVqX?1=U}eyRkS)+ zH6^QQ$hoAHv=HZ{+A0O(*I|{qcLma0sJUw`=`|MzSSTCYN)>p$Vzb4p%T^}GKjtQs 
zgpw5|gkO!*-+`*}c{eE_BzUS>gDJ$JX0VDN+Hu%_;K18fB&)jJ#G+D?f`!?iixc03 zRk~pl6bN1nbHVS9tq90VZ}SzD+fBpx`l`!nimn7jFJL%(pTOdF1iCdF1ZqdA))U3- zW`zahj}*aHf3f`QthWf#Yyu%ju;l>MQP=ycSe+-Lz5ok`H49KM2uiu+%{XmfL$pNM^ z2~sKVK)9kYSw%Ox3Zh^OfPVgXEU2p|FMJxW9~J&UPU;T#X|qLEQDhk;{YK zG{|s=#?-IBo*X0HJ(LzqeX1kEnOKa~i=7l-a$arOginHL(uMfdG%bDHL2A}^shmxN zLO~AN(?a{zGCkJZ%_21rNBNY%9xDjoIEzJ%$#EnnDWrvi+E0$gT~5pg$L!O&Wi@xu zBG6guCOW<95@Pfpt5CV3*| z1Jo^UslwtaC+0`5=^YB;XBHk;8sQVOCP$(-pB85yjk9bEa#@le83+=--G%noIkZ&u z36}Wf5w~^GL$SU{DS*PY^RbY5ItLes!tZrrO3!Y>kHxgo2tP+(CD>U$C7?1TUrN&G z@QY2=INgnS;;R5K{j+gGn$qQu({%~lvnZ5u3qMCr2|iM8C2(At+%0oY&=g!$x8#Fy zZZt14!ny)sinHnkO#cY3`MWbTNh;Gh7vV3*6>XnsEI?*}4_E8GQx zw%xQHM|mB&sYiyM3a*dl{ z8VarMfaut`CjXnIW>bsNvt_4|QE1S>2X_y?uEk0``L=03(JEpQ!IWD0eiIcq6_@W3 z7B~Nqj&_~1;llj~>=x#|S~!1nr;plyKdAOfT{`_(J1)CQ6Z+-TG~+WfW6{auRIWnP zoIUnFy0gM++@c%zHf6-?q#)%Us4ehm0qF#Yw|^f#|rd$5@ZU=;aK#XN?JT=Z{?-v=pGuVf2U06$A>dAathGmvLCJa9voDo z#o!;A&8GByERrGl1Doe@8vOASF8#@@YI>$29X@0kYdm)U8s z$nV=k969N&F>@2#npw$ukRWOkJo=fb5q{)>kw|4sp;6AVD*P*(vhBw{p023ZxEAn- zDL=l4V23ZSNLKxk)_Vo1Fr^-gq6>cr5pQt`QN4S;?tvZW7y_1c52HWp?S5%zzbS8S?1D%;WUh3}d$_^8YU^mbH%>bm_I~LO` zOdK`WUG<`@HK9O5+ny0VjxxuVgmxbor7%romVT^X_;VdldZXL({MI!*Dju@9J)dLM0qZ~ISkY=b

;^e0?@tdFP$wIlpYz;_8 zGTVp{=J9z7CSuA_Ai$~xCCC}~58)QjES=l-pSukU6N)cAq~KM>U@ zrG_Y1zqQVcUjpT#rp4V9HWbLncc@(v$}xv-n{UtZvK0XBKaCOF>v;)r?YD*hLQi_( z3ZsHqJVIu^6f{8no4Z)_@}+5e5{R1|E*v>|`T>ylft5yrKD^-yT}V9XqM8C0Mgv_cga=NBT)DVGqmO3IKKSyZM6-6TK!ie4x$O zuU}(CXGY{EQc%_Cf-BdGunVnuE>KsMrl|QryU;@+>w8%AnrAZ#Ih@XxP~n>sQ#I{J z$`y9+OY}u7-*zL6yxVH(-*`IBdQ2YitNC<$^?eAs37-6sRjljNwXE%|NxXx?nftPi zQzx*taT?1uqE3^7c$FY+n*@7!e~YF5VOb_iO^~xC6yRF?n`@Jsc29sGa_`#cn9Ss- zx!}j!TS`DF27*xN*3Hj$XTQD9ij%|V1*RhfjU~%E^jiMP?x@-miA+ z{n$ci5U*@D6a|IvRYRH}{A)g{^xShs!k&hvaH+|wyfVtQ1#=V8r%sM#Y9h?Yc!>5; zjqz`-H6qPh^)5GQSb(nJ!DGydV-+UsuVeUG8VEl;3jX#^BlDY$Mr69jEPOKiCseS^ z#Ye}K1IGuSh!Rt|a`LoT#bvWJbB#ysrVD`kslNQSL94^c3gCh@MP&-j67lIdQ`aIq zw}97fH;l$K)ABifGy%0z75Yu5#CajY@5?!J;?e7-Y3XJUw`v9yaq&ho0>uo=*nq4e z3I$^I7p(IojuH*81cPqXKDN;i2cU+!NqextWa9F}v}`dZpvE=Mzt|8}8e$@wZ=xnC z;_|dmwzZmJ=_=EeN-04ogc<~R>gFrEM2yDi^O1#;%-K(+I$IR0wSTp$9bcnls9DJe zIgw4<%m_+dw)T}0Djli34?6|e-ezWAOba!i`6sXa_zG@Ku2u*5GFV}#5bkHEYWy9K zi6Z+ejKA`pY0#t+H)&Wf>=Xd?i>7hptz|T$W9C@aGX=HQO)3i232vcIdG_^Cm|>{> zff#lQ015aOGP$cV`vt-8WMq&m=l#dS3Rwjpvlfob6{S@l@kq-{k*X=e>Y;ybEc8%4 zlWP+u9CRv9--As>0e8#iJBx6PJFn zyBzq73>h*Np@?l*_Zh~dae#2Vj3J_N8vo<@vPZpxXM3nmZcr^K7Gn8UQ;YKaL(eq$ zR29YcVq2bq@Rygxt<9ZU*Q-jFwXH|~HoNHqBR$EW^i1715??TZXFK!MZ@NebN?E?e zq;Y@$Y9PVpoksSy4MyZpkD=#x(M9~!!J0iD;3f?n5|ejO``_DGxnqa2wYs%OaQ3d7 zo(~jcIQqAhsAcEQh?sAA^tVY+H?21!Z?_ml`84~Q?S{VGW9Zq|Mbx*6V!+;)rauQUt&ZB>7%80D|W7Y$L+Mm8|nI4@M z$!}6=gm6JE&ysa=lZFy0S)rxcL}jRKK4&@#h?5`S1spG>;IC2?o2io`2`X@}q1o9m zg4b-Y;`cXZtZaeqUL;+=HzEGv3JreIwzmk2WZPi zREaAmEBg6ICCKD@DoU9^N!XX+c`fJyZtPS$UZ!r5#@*O z_oMb@-=c4D`Y;PVL66C!_iq)Ir~f0HIP!1l#KMQVGFLp**-6Wz9!n=KZ88##9c&Ce z7nnfFc%779do#C!=~o;R=k$gzH#r3O_ts2q{CVTNDR$T2zVL0A%rW5=zDesI&5lD%a>K;U`+0r-f+-F1??R9vt;M4;8?A zU6|iHCaTW}PuowB1)wQ@o@h*6=dL;mKu9=(p@cl|@179X>MMeM#DVrBVdtI}mOYMy zPcN#zu};e#cStl0-M+vmLhW={KU4sdS}Ot$UQBL?P3h}`@+7g{pnA{ z()XTS8JqvHiV7%JnA-dP*n@Qb*Jm;7kqqyA+Kt-mp)n1q_FrS+_i0!Z7x@BmW8Xvh zmM{VrDoj(D@3xxe6YH{8;||Nz4Pjb+nzN|NR7K%R!$eg*eOiRmRB)ON>~1bA1w0%| 
z71H-w1+&g;N>=~DjoR#S{<>7_5eie5V%76oGeGJ`iJ?}2-4MEkCTve%#-7ikK?!Kx zNg5i0DF6-p_+K+>|Aw@dK6%3>lltBI-@|OIOjc7fATuU_W5Y2=ARs(~noWwhTd5;MmWL`trc?uy){=3h+AzAyG^%q0U_H+Jn45$Fj^Y`uGjQ`G!dVwGJzgDuV z9Xg9-j8Us0Lwj08P^^As1uY#^dnVR+_x%ge;vz79c3WaY}bt^x0~AdypkcP4CNYBoaU}I^QY``qkG`_ zGhB$@WAU03^F(z9Bl1HZMO?Doir)Q7I!kly+~iUEux7iF{pM3?w#(pAkGUu$d|LV{ zfZ*1{rkbTLeO^2jVEMx8R&n`H$sV@0 zn?=XwmkPvTmz&i13pC{JRrB^8us7|Txl)f^ z{eNBhb8lobd%HE*(q`&UG^e}1{ZtPp;ghAzUmy^^#2r5ZnsS3G4Ep|qC%$Hcs9U`A z2>2xEMsVNn(Suyy)breQvt^4Vtm~(1*|`U*8K}Wtr3lo5!`s(>y+V&Y+9p(b?trGA z=Pd+kVkJKqZ82+oyVmwEZ(nlIdu~!*q!fUVyhlKgnaX`l-1GdD*|!ZQ+C1L8F`_|V zdz#ACxSvRK{*~n0+R>4?vME_D)pScMz~PJACRm)4Lvgs1;t9!Cx~`*aUS$xE&&m99YW!mke^d?Jtn5Gqst1B-{9m>=%N zk19b>NciORNk5j41Fnx5;!ZazO`s+qM?gPU^tKyK9!Y} zd@OD~*r9e^wCR#5ooMYWscgYD^)PcW(k`N^4m>6&kG^ZC~3 zt}ncE;lUrcNr4F51mr+BV<|k&MOc+I@4b^P_RcP!48hI*jx5)&E>!qL!3sb$CCC2} z7jc~%mB3a4#ny7$8jRv#TSxNtrG;<9*){9t@AIM?D~LfU0AVOS*B}GGaicPztBPF=dNs$7Xw(kQV?3H5$ft|fR zT#LH<{l;Xa{fz%$7_2Yanyh7dge!~FP~Vn4^tFX z0SI&t=_Q3fha!B#O$q=^BDIHwnZ>X}3QksztZtyZp-4W!0YBnGy>rFF%A&rltt5q9 zmuiVBOoJ8Rzd*Er^DY+qX7!U`E=>a)lJYI|=tI1o6&HbXJzH_xx}y|-ToB`w^B zRO=a#AvuTpIVnN0yGOwP$(NAPSMYT01^HTnTTd9={BHC73CrE2qEqrhtxvU|fFGf` z1V=b2g)m$!0ySYmM;oTnm&A||xSh`d4Z0_-bboFA{K-Bq?HC+oDNOhUHJm9w$Lc~S zrC^*}%=C1!GiX8#r9wEp7pm=ytc125g*~?oiY+gYX|T74yGV&#zhL zCY6;TQvjm=_SW%SC>LOLKEfP3v2>6C$tqBu2eJy-UictQa0Gewx9XPuyXHl;C2#BV zF{BERWA>7^8qMP8SWF~Q{f>5%Ma(OC%OCsfh45J6vEc?q4vr%hq}b7iuo$w>}3!$JZ2a#ZSl zHN&)-unZrELY#`ELlI^ol)-cFM>}ddx^8?eg#J%cm}^!h$D^GeUJN@0*qw$YyCztI z*KxsTKp{>?66J~fhN-k23|29y``E#_Xk>!Zix##);D0xSxgj0h-nj9i8N>RShheV( z15uyaIZ-gBUT~(N!#`FL3Vj)iZbni6p9Ja!?Xz9wY=tFQi^)V6W7gJ8<*wE1&pWti x6EVWVh*W@lG^E;n^GiFdNLKkN$q0e5{|A=q3KQ(#qO|}3002ovPDHLkV1kJNcI5y7 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/take_photo_pref.png b/app/src/main/res/drawable-mdpi/take_photo_pref.png new file mode 100644 index 0000000000000000000000000000000000000000..6431ee7564a617d313a513b900dcfaac0cedb15f GIT binary patch literal 817 
zcmV-11J3-3P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmZ2$lgZ2@7wP0>(*1K~!i%?U%oc z6G0TmbC<*vNlb#BV18Ix+1LnLSZfgf1i?~L3H}krgF6D(B;b$E!a@=b3$d^fv=A%= zLCGe;CYxX8_vNSGxA*o4T(X%Zll6-E!0ybvd9(Z8%+7nWE`>s&P$(4tksU6jplRB^ z&*!^_>dNPiMx$S`SnM%3&kxmV_03!mUDx08u=9Z(<4tO{TAqh*gso4gR4N^Mz22*+ zdQT)SS0E6$91I5Ov$0LP-Tv9@_5N^^m`^5?QZyQU#BF)BTCJ}$u5_c>;kSj$we+L5i$%D!ZmvwA0Kyy!{J|U+F}QA)e`A53xk_j{ypS` zbT*s4pUdSQP#v;#x7*cN%~@c;^OG;rntHwdA(P46!I1c}i~0~g-?6$S<#PEY#>hd$ zfo8M$#WbEu+W3^2X){uNv5#^94_k?&4&bxtEgFcB`;EI!>2x~Uq7K?+?plhY4q%pF zK+Q0$(P(^T@folO%BB-om3m?_6``3DbMiQ^6s_&L#G^xmSu=0!4c z1_|spt6L)KfKV792k;~Qgqv%j9};BakJIV2N0AB9#uo~OZsEGYDr>1!DzETo_K=&K zA-V!GaWoTe1FV{nFV-RC047A}VKTwz*WjugbX^xZ$IAf6?f3lJwb;t#6c#-mPXx>B vVUcvkPx#1ZP1_K>z@;j|==^1poj532;bRa{vGmYXATeYXRZBp{M`=6j@0`K~#8N?VWpc z9Myft@15CQNk)=oS%!dn($qk}56Q+haY!LF5R*11<&cJ!v~~QDR)$iVls}r26G#H2 z4QWUY%@Y^a;^l-SlxLe#Am#Ls5QhTBl581_afuWBmaT_nOS?OB`~BUyvwH2k_NCqR ze0;8F?%mnZ%>8}u@BZ%Wof*!moK+7D>0<_K5rSQatj3t2e>m^yr;ZY#%EyofbjEgV zi#MXIdZ`)#EKLlNf#)+JOwL)GVEl3_#|buIaUrbu9?JWKj6Q_)D2bw_=R%d^KK~ zf~Y6~D5HM>=}(a^LRx?{)8{aNv=!-1c!{Sq#;b;4xuPa&;B zI)}>3lr+*dq^*LP_xHq`x@Bn@Q?>#?YhQxJ2Ba&HmQh)SaUAIv=q4V=^6~EY>|t42 zW|W-(l;Qsd>CchoQCSu6DunR75aNNJ&Zga}tSl%S0Vu<7MEW#RwHSUXZzE3l1~XHS z^lWS?3y(140)XKudcPj27>1|FoJIvWPeBp9ya<3riawkI;x|sC2MwFk^#W`st4F8* z3Jl@|aSsL+4|m6#>6*igF=GNia7W{&&tsL0UkJmKp~-GAv_PfuC~{=TBn(??8C608 zAYn{FR)w{VpzjL`=yZHHDn(<7gT6-_7OLK!GFkfe-gxutDqnJDL;z^@e}ctVkyglp z+<|FeD2{(Zj?r;OJceS3fz!F!M$ZRJECOD`sx1vACf2cUDXM%0LBZmWu+JZKcQ*D} zMWv&p0)XLZ9Dfs1ClU;m6PD4fW4Bu-QOa|%MM3V{5&`GbRpcXvx3R-JOfG)h+tHL% zWksiCl?LNqibWUF`LZB5CMw)+M5pZvPVO`1@F;A8h<$_3@m>-I*a< zH9>I#2?3{<30(cM*+{29yW@uDf{UQw0zk0$5iB+%(NDAfu%mjbg;YB#t_d6;JR-Px zLr+H&%~L2C1r-2{e+>tq4DGKvcD;`k=b--58!Q zxUqIe#~hlSloxpiTX6dyg7G(D)vxhWq46g{UNAy6qr9RYKB)`sd##&}&rh_B_pRBX8!O2Mgt%<=Hn(|#(U2c`QK*lFd?_e(gs3)o`tKR&G z;PYc5xmK+QCMN-CjDHD>4lCzEX#8BUMn}Fyz`wM3bN^D62}EE4px>t(EW!A;Y_e1g2aYLG=|5B~{!nXh*L)!5=E9U~a ztT5UyEaUa**&QHYxas|ggDooSi7x@P-q%kq|7I)a!g$LMjbCW2cz^;z*PswRMq3@L 
z`{wd{N9d+lPkaeL>6D_4^bgh7=!o0rzfz;6l5aizrRMcg0lz zOA~{1ae?m9bA#r{hpxYr&>TbhQOyIWB!0o>{jL*ny*$7TNbV7(?2|&*Ut(;#xq1Rk zXjj&-I#(~?D1f%a0D?a5)%xjo?9li#0Syg(1cXpX=lKu+&an|Y3V<_>?%tVe<+5U3 z?W@iSoRxqI2<7O@5l!fq*Cp#$tL!wg_S@+D+wjqUVfAdQBnt%`^N&GBl&XOm)CQ%J zVcEfT{(#BN{~oIuyKeWIv-(w01_uEQ{RR9UBS%RiK zSGo-N+b8*~mrt?i`%yC`(t+I#cyz_$+V1%5$5bZ6v}0^d^v~8<^m(jaY?WkTd|Fq| z?w|bDK7)yWS`c`|KN_eE4LkHDA|KvZ zaPx7do9#X8nb{F-`VmRYTGg zqHF7^+W=@+gFi8NdiGf8}&?DKLv4$QwyMBUp&=3)++DO&eK0|Gk%X9P`rztYVuVu-* zKU&9Dy)PQkxJdf>4Orr1M1a6;%>`QyP8!)wG9+66WmueP<+4(+Zkl#fu$`456wvrR zzlp~G!RlJJ@I8@O8uk*XGb$n zP6_BA?af_vXEI~by#yHnEa^JhjPH6)@uoi#@VG*pa5H{R$mbTh3jiNpcj1HYX1all z05mhQ5_@o)SxP(h>(xR$D7f(@@<$@CZs6m!sRnxbs$^UMOl!l!xpj|^{X_DkIL4RI zuDu&7XPMz*MgaK6C2G})u=N*!A{yU@Vn2O}39kJ=?U0T=Bbboj zl)(7-BfLfi>n;eBU&)@mQwH-@_feaz)Mc$_^%h$DMTZhQ%JN78e~&<%M@xyH8qn%;=z_A3(2 zLB(l=1Z5=6w2Nz5s>H^J?dj?0?g}utAx8q*xvVLQP3d6|PQXF9N0lMd;hdSv&t(0Z zuPU+eB{U7e{eloTBbTuMyfvcLrkY#e0^X;}vq5vuNKqjH&^gm?{_q)%t>;8%>V652 zkpREw{KGe$t!ZX-F>LJeXg5+)IPo(mobyODqPtD1EhaQU#MNtKD7fGZW7aB-33|bT zYkv_&|CI*?fu4;P8$~poX*5g3d1DFzLlIOO)S&(RY%gd+mTR<#ZBj^~04fWzDFuO^ zS8v0ZujcNDemk4@`Dg74UC;+tnLwo9v9@X|Y4V)63{Z5r<18&4?c1g%o48hF1 zUmO|g-JkM&m{Sq(w3(q9zhZyZHlj6~wFVc=?PbOEF5H>=NwZ=>aII}S%;t7HH_G}B zq)PTOXoaWEIOFf&KcVfr_ClzIQyky~y_%g|Nf;;w|<3LqC2zzlmwS*)ZYaQEmEUFfH1DFt8%^B5M+2DaQucr_>o zs8Z*aRs`g)$zEQgHyNAAjRZ5^#lq!9gm5Q5o=h2%of3rH1MG6{us48& zIvGKJl5xBc@XQ{=Ka-jNKE_YHpt6!le`D{OmDGoHt88|t8}PX8$P1hs8&31+sLu!C zRwAP*AZ8gpr4@F{%c_Rwg%zws;ZY0wVB$HhC7!{g%Jul{N2FYah>zEfp0 zWZi#TVsITs1P`cec6|I%FUa8G3nTJH-n~tK%2jLF;8WY{7>3scW(M+^lw6H(GUI1SZRF=FI}OJ#_jJxV z4rwJ+)(QH)eIOyFL^eG4$e)0L9^BPF|G3Id!&N<8h#zC|s+Dtqx&Z6CKq!LJVb`^5 zv>~|pw4o>I1z?W23gGq5rUAi(dmKQ>I3Vl_N`}wa3+i}i$A+_}1%4B86#$VECV7D; ztehiCiJ=IJ4!M3GkL^dp{jM(3^zu(vTwRDw>wU+TYg~T;o4db{-g>71Dbv?}Ljqz4 z+}>CHb`M1cKm42Zzkf&-IpXO7h~CcTZTPEniP#-09tgcaaj=4IAFUs)?>l~sPxul5 zp$TyZ7Oz`5N9d7LT=r2Rg#;d5x_-}oUoi7gCd`La){{W4x-Bty73}*YR^@|+9kB)A z-RYpR2u0F5Vi^PW;i{4fFS_B2RpK0HP;8`#CJ`v2reC3?Fs}c@rS6g6q9I 
zi1CR)0w8pzeFclBt(*&V(9kZ{!s`RMGVb<0jgP9nyCyB%f56K^pf12>X<~38zWN_n zU0{_sV)tDXxa9P=PFs9N{DjHPCx-QrwePH-pPBCMmz<5#mKa!#c;KhlMUyIWA{0ST z0vX?UX&`p|lfV7Uf+MOZ5P`b@o1S>n^N2V%V%00+hfZ)J)=7okffoUckCt^b$y5K3 ztMQ4P1wiz~=RAso_?F7LK_alO5rjiTC&*J$wC~IK63%XWv;EAiDwh*^34lmzr*6Zs z|EtQnV~-AOm512a#wWx3GCp*2XN?){QrVm-NT0l9(~&woQbQv6j8)?>Y*6AJ> zZN{)(>Q2E!EJLnpsh6?qW`hXMNgal~B)Pu{wtV_re-C(OGG zuz9V$Vc2BidaQ1?N<0~l3r@caMkIc^z+(l-K0zz_=X9!Y#wQBi@-RHC#cscjGy9w> z@&mo&hUT`@@dRiM6^G^Z2s<9~0lstu74u~-^ar+g)=#>%D_@G@dRr6yF^xq!u={Ty zTW^*6qFQ4E^o%e(4T1xn8;pM-tr_io?aiLQ?dL*KD|u;R;1lR&C^$UFD)Gg{R#NFg zunaE=elCKW&!xHXg}(N=UcbWTOwlX1EirT{{>Znny4))9%|szktWvOer3B<GJ?e@@)yeUcRLR`?ynf7(8c3LHYAR%CkOsg3ol;pUBBi%e1o}Sd1tKknJ z68dV|G#}jAIVbS7{h*YrB9|lv=}uV;5&7-N%6Hf200oQ6E07SRAZAhw)Y9p}%(l_( z= zERUqo5vZ06gjsUP)iY$z-CF;U~|Oe*TQ0PSn6U`Tnv;)$o41#Srzo3or(e! z*o$-Alw!%B^u=3d>V*;+%0d7(3lm3MYIy8&m}fn*pRh`+1+g8~eczZq_R}{u%sZ+I zXPmMUfX(8>v8D)%&}iXfNEgV$%7nblR#fvtDLwgbeH&)J#ZRRy1z^*fI5r1y-8FET z*CG2Mqz3sx*&`02Y9BMX`7={XzqE5hOHnW8<7`uAobI{LM78NIesVwa}KGzhyl?%QQg18DrbWRjEoKK&Fe1$VMWhztat%rqb+f4o?u#=z&IX*_XbU9^kyvjG0Gm( zyFuF=)H&O33f6BhF~XR+f2{ua&fTA0P_T&@!lvI6;>1s;*b3=BfqAk6r1&%zZz4T7F7jv*P&Z?D_uH5-Vq6}&ifV1Fa0rdOh7 zWKr8X+d%${#ZLreFGriicFf@tlVC73P*tl6z7l4`^3?NZ-K17|p=tJwsX0L785neq zEqR@gIcwicwci`#T?4;(-TkS#wCs7Osc-R;hwhaNUMoyX&0T3NdAv~W_ja$Rq0>@- zFVgS}`PV7>!2U|fI5(dK;h z#kZYaVuFl0x8s7R``%G{=G3rM@xUb)mWwY!5M_Il{P_v*jS ze{tB(CcKsHOWvM`Z35dnehXC2tDlnkQ}+1BE$wq>@|kGz+MlYG)no+uKrKZ=+_&a~ TZ-}bWB@o}!)z4*}Q$iB}eq@nv literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/white_balance_locked.png b/app/src/main/res/drawable-mdpi/white_balance_locked.png new file mode 100644 index 0000000000000000000000000000000000000000..d8e976a1fbda4305459f6cbe8ae7606e8f9dba38 GIT binary patch literal 2617 zcmZWrc{J307ypgNI%7}wDrCu$EF(e&Q9VqUMv{?jvZWYnMwX;WmXd7QAF`Vn z%Z%(~OV%M-qQuC=kkUJy^PcmZ_q>1Hd%t%%_nhzD&L`obtpyY;0R{j7YGrBWz?PUl z#LLa*3z>KH*a8&jU||YW4oZDyE1aGP8w3DUKZ5*p<6`T4I7?I@00^}IA&`jjg($Xd 
z(8>&PITXGAA`%B%7Vq|F><}Ss!cBk`-zPCC1(Nm=DmdAEjn^Uey>0Y3f(C9F>*GIH z-Vr48&WM}meM#>%W~<- zS|aw(&H4o}JU`V8F`Fq<_z#TMA<8~F#C;y8CuKW*bl8E_4lx2$!Dvd+7r4!(;+l$@nxW;C6dr39 z!`RwwHrD|3OxG`5*rCU6siC0+Rv?PB7r%SgUu0tJtXZCNb_k_kmbYoBUu1jx1H|Hu z$rMQW1c(`VF~m_%qRSq&cSw6dHMzPaBz{rw^~Q6d21IfkRfv-&=;1iey!z_ho}foT z#OitA>>A3S@t8^qB*89#v*O}>P0x32`R{Ohns*j(7u;5#J_$SCVDLh^&Q9G$FDdJ- zD)vNA03xnG`0-g93YD<#gW-O|9ftq)(j-+q*SFc*^$5ZWAT39)wPc2NDMK=2;54Hos!UoWOeZ6kN+ z!lxBthAmDFQQ_Ti`XJHpEik~8^vJliVe`FXM#sBd%WsnYXZ6-w_cvgk1H2B{i<@v$ zL&N=Hwes?Pu;}D!Xji46MgzzO+vJ!AcgbH6T!k86r_BqF5WrH&+D3;GYC0b*Xj_N&QE3Y z5`X~D6>nU;^_YyRb;H16wc@cx2uhVBX6ZmhCg4%fJPV zIu-c`fB6@-#u=MLMV>(OV`L)$iHN5QyzMJ}#0TFq(>`8p4X?!LgypUYsu;P9gmZWu z`{H+5HK$wJU^4jU?u#V>llwtp#KNOA1_RnxlhJN@r0_8#_2I;UZrqKEmiCXYiN(P3 zHEKS%kL=bNg2nMkkl_7Pf33uYg{|pn$f~>w1C+(Z)t2&pHeKUQb;vn)2nFDgmelGjT-VRntHw`jYfakUsls)AP_=YW zN}oECB6F!pP&rbzw!HuFUers_+S}TR3I9qQNy4k%$UzO@M+f#uz@}Cpf}vI>kX*vr)tH7G>8!Sc5DR#<1?9^1rX>K1Mb#`OJ)Ajv&Vt736NY37vSMX4lKWz ziF=S@)Yz!z(}6;{`UWMu%(v9d&sav1`kd%yC^||qhby9S<_x_NeUGQDW}s#&9QSB6 z6LtA}(A*Qdt{l6>C-+aMc=o4r#VAM6*Kq?Q3j`&R`D?Do*E8yxC^V|hb0{k7?MSK+ zXp`a1UW;PID%~fhixPooG!| z=aM(dxGwH<_B$zsZkK%_m)jwvuKeGxn*fP7T*Gj+sILFI%2qVsGIg9GZDqi(yb@Q3Z$6{ zxO?9gVk#07Kk%H*38tNJbkMZQ6y=x~P8qk5E^Y9m?}Fm4FO-*(CRjh#BCR`-|6B!i zb-*#zQ%2HWi}K^~GDcA|spZ~5>QUJGz5@c`Li#X*hQM@>DJ-vviE zQGHK#cD{UE?#vDi-~Pgk>za`7K{Vi<@*yU*C`MMZp7^@cZ3bWHD;?^Ds!( zZTB4gK((wtnI7otTlXYOY8V)Nd4KAj8H?k)_G$b?%dIPNBzf*ens4uWRkE1L&R$1y zOw8<33Y^Z}sOZ?3oPcQRUPpf}FGZc(cs);Uf|AHLkpondmyp^ptVXk^nD<-J$TP45Bu&2>XQ2PKr& zR*XwqT~*B%$CJV<v;eG literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-mdpi/white_balance_unlocked.png b/app/src/main/res/drawable-mdpi/white_balance_unlocked.png new file mode 100644 index 0000000000000000000000000000000000000000..249c781d47117a5536086ce2a16f0b45e35aaa93 GIT binary patch literal 1760 zcmZvdcTm%b632f*LN9qzLMS2%QVn^R7}^m6BnT)~Jlau@E+9-Oq69+~I0zt0SK&n9 zPUH+F2Vy}$gN70j1EEL_1SAhb5eV(#dw;z*Z)SJD^V!)yc6WAWbDSN|OG_wA001Cu 
zXNz+csQ-5t6A{F@vP5HnfC#SVt$?OM)nx$)hgv#V0sy5zaz6kpVDT7RPXYkQbboh{ zf{r~207T;LaF%Y@e7RLVeF}Ib+K^X6EpZI0{JR!jp6G)MN+^PO!o+-WP*(nR7bqT= znhL=&95UGWF{ow+u@GDy;(A>!xF*ED+%+XN)AtVRmRrEwGT>!)w$JgO3IAmIG!B!J_ROE3gz;L`+8Sug{)dea*yd+(Lh}4^C^Zb zq>~~uvuC@G&d~?Il)k;)Uh8h{XYGymu9mIt9FVvSmR|5YI^%|(v>E*bOdl`4Qmz%v zulI6)ANMZMH^?VTwD+%pGdG$i8$Hf3p6Wt|DlA_|ITQ`36uul@@F>ZxJj`U?ZN@d1 z8PT3eee5k2o{6$^O1}m!{AIHZ=@^M%H1H7W)?vQX|G8vIV>P=NgLy?L zE}RcyL$R;04_~9p_CN+V6EzWq1a4rm)w*1X_2}4}9n>AFG~Es$=R}2y-boUYmPZ9W z3U|d!<`fZQrv0Zp838|`B=mz7nl@g+1uYOeHTgaK?Sl#haIIN%w=iAS zT075(><39SW{4#ya7#lC$=K3&dYP7L$l9hK$4Y*!-KW3(n6vyPVEU$lXqI*oa}dNcLdhC69-uu{M`9saPVh#gBsDYY)VuuldkfW1pciga zbjarHwDgydMR;Wd@iba0tlzl7{b+2k*mY8+VZ+5c{>*s|YW@(H8(FL;HFv>S?_cse zxAkY7I6&)At-&iRWNnxdgT$)SWg(0|)1sW0U<}{rlcJc4=2gc^R3kz`%=T()bWi7; zW9k~Q^6t9f=IKp|xHfr7QDutM`}uI)6}6Fr>?z!t@S9|IOEpr91=^o)Sr=R1*@@6> zS|QoI5M{?ck>8S8iB^y1;74KRasJwgEFxE2`?5WFqE99Kz%f@k58`Bb>CLxOFr;pA z(DQm|2sWxPL#%5z@WuxMmX1uC;3#Qawg;yA^(H1ZZ3Ojtd0A*>X{!3Y(aWsyag{G!&?gDZj4KDz%q!GDb-fVu=w_WU)|P2pA;-=Q~{KfGp)5?PId zyY|D}&cVDSc{e*e5e31<7F_V9D>AMIs>9@qwPkV*SvR+LnQmk124C1~megmFwH^U0 z!{Ge*LDoc76w56I1AKzsBYgWI)cf1vr&$Rd)206Ss-;MJoeU_ZoRDz8sp$XNabn-A3#loqyD0p}th2C%9MiB3mK$R;UX&wG5V;(SAWzT1 zq7dZZ2&9tx?u{

uxKeF@n&HLlYV|GFgT6%M4=tsXq?`Gw>B5G0l- zauT6T>Iwsd{vTl8rjdK9qAumn7wF^;7O|~%+J$~zjQm30sA?$y+WT2dwI!2Kbsy&s zCKJ!*g9ll=)iogelAxf}B-F52t6jqQh_4WrF1UzK{Gf8w0y^yM!jDc~0qs{OX(kvM y3=iXz{WxHuii=35Hn*CD;$7tbZ51|;w=5#IPCKpgxG{pS1lZvnaZOhKX@3L0VmTH7 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_add_a_photo_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..e481f66628879542551faa6ad26ec886205004dc GIT binary patch literal 763 zcmV0AUCN48t%47(xh#A%sKt z{{H}vZtA`4-uHMv0Q~T!>h2NG^U{Il1AKrFaGwwG0Y1P7nDYT-r^3jpS@eJnSH%v1 zHML>~K+IXO10W_Wb^v@67CQhwC>J{b-UzJyk@43Sywf8rUVxU+Ep`CFxaa}UJs~y3BLuv-6R_q^GN8-Gjew?PMAwCYzNDhc14CY!Frdr1 zq~gVPz(lrCqR$cy=5z>Bz6+l=16CYw?VH3XDx#k9i?p#9(A!$~cPuuvwl*VM0Wl>} zh}eft5JD0=0r#SC$9^`{Md8^_z^U3Dv0`6y0=2!e5ik>lCrotIHsB}!fJm)MtT8bc zg>n7?4Yg`u>RjzR`3GF8)!?uz8_oO!Drz;w)IjZPxd({C7p5j^e_}gehN)MzujC#; zs8(;78mfIY|9}g%8e-}}?QilAxK^uMOjXssoqs@2t;!^rSc<|Z|A3E!-mUtgu*^RI zD&`Nx=?~)6z(znT^DBB&6e2qTbAn7CqrNB{v9c2Y?F{FtNJQfiYz4%WQ*J#8zgcac zz^IVe3xGlD`Syhb4c0uRoqm~u%>Zag3d-CvicL$7dS+qN#j=MIT%5F?>)syUlGT*r9oj~?O tnF+2L5QqutjEQlkjO3holc z4uyf^M6*O8U=vZzmVeZdC#uQvk9MNV$T!vzN4fkWO&`$>k&<8RBA&7GiAvrQ&lLGY z6LBq+Kcwg(u2u4f#l*Kw{?JBzt@4FY&1E=H;o8D?Gx6tEA)x9oTU|Zg3Jiifon(G-Jy*=E*)*V9y(-$Tq4e zV9z^d$TqsLshTXQ^yr4Ym!kkCbd7%@^$JV1jI+4HrJrAUjxsE7zDQ^GMNy zD?^-RqD-R^m%ej~1u}_B-r`ypEo5X0yYXRwo9t&fqZAiudhutFdz@iE+gQy!sucwd zgrUeXMMFCwIHjnl;}gE+7_I2oKoOscELS`n!>5Caiwd&%a*34UV;qn1;SN-H}5dvC0i(y4^+@d4{^L;DOKBrdsXlO=MG6H~tl0u#0XeF#t;fiYVEK~R9lU;<;F00cn;F4%+;xCc-K4Jaj;KttO}So8(d zN;t6S2{0gNfB``R3Kv8W2!l5DtpKQ9|nL zuuz09083TBGQufasG|!`hs9mUl3-7j}iXQqJoeIngNcK zpORp3sBdTm!D6h^(2B8&!(dC#<#M!WFx<`Y`T!RkE>}!Dx^k13eDz+l=0!J4&R;bc z1>O+zaQ3pth-Xp1`gI#!-(caXfBjrJ^I!T?VakiH-xILT#n)Utc3J+;_51kx`3g#W u{d^5;Rf;puGnq^#llca7nbG{bi?_@G0000%+#6 zvpYE$brm_X@*9~HIR$UIEAgIj5MWuWlq6Pgh%xiW@t9{;d(Ly+`y6}!i9XOAAb1fm 
z{p3&gUz5JfnEhXK_xW?*TOQQx_B^-!^_`mCO7%QH+h6{ae^RcvpYi9D$@LTF{r9UA z|0F%Zf1lo$Wj~iVY(Dp=c&+MM6YVGKe|pXj8!zuw7V-^;`l zC&m5Mhqg?+Z~xHsazoAKxn`BuvUT_W=hFN+PhjqQ&VxUf-F{Ky-vM+h26ZFf4(zQT Ytalfw2+!7JjsS5zUHx3vIVCg!0OS#(YybcN literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_close_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_close_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..f5a903fd7367c4eebb8cf02f697e412ba51c268a GIT binary patch literal 389 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7!06-Y;uuoF_;wa!+u;BaSMC+3 z%9t1%Z|0{nBzFbAC|k88a7Qb9hIZ-a*r#(u=Qv0G-~ZnXs2c<}@Ra@v-JjV|^>=0V z+WnV3FS*t-|LtBfxh(f<|D+Jk*RQ=VZH#Q%w)DNa&DHfSxzc}|x$Ga#tSiizuwZXj zIOo2)jq&ygSN7C3wYC4+>~_DR@=(GH|9$)B8)S04U>D-~Thp|DzW;}>pMfgv93MOX zUD>!#@lng~AMu;k!o?3>Kd=7d*H4z$0yF$COfTm-uYTv#uX|5b*5}9a*P@v4AcnEV z=}(7o{bELtxczG>-w)|?)~BzMS-$gv%lauXuO=spvPEyasC4-5fr`28eSd_7`)hut zhso_aE17QRXy5klH|PENTk?!`BDW_#)ZABe>{9$fzyCkCdP9PPVFz~l6F-pPC|LH+{xf5>R>FDqFCh-++0PU%J-a_C@8{83Y!T6}hDWCv z)pxJls&(k`Gv{580?qz^6|0gluUVBYbJ065ev-<|_fF&zN zdhg}S%qW#grBbO>KkSNKqtqHW6{kiiJ#2&#j9{ZErG>8p;Oo)NupAD;2k?2EhkH=L zQvi4z72JsP@L9YYJ7az{30A`I0q}dQ6x{;Tu?T&*O>3uAWp#=Q3p&$ zzp?dWai;ORIG}Mv~FqJB}vS{!;1CZ_#fB+k4bC)wp^6QhZXBSOdTW9yF)tXwOv~Zrr`<2 z+Aqd{xwu#wRczT-jahh?V*RAf#P3Ve_}+M16R}1V0Tb|$;=F)WqE1+ZXA|TJt4E!% z3a*YKU{A#v!J!Q{N}Z)xCq+xz#i5`L#WqUUA!XISYsY#`dOE8RKT!KZBsr7#!(`5TY ziU%+?a=%7d--fvJGHH>ej?zEEio4%VAiyZ?>vA_sHmSI8tqUwyHgS04qh8!b%vB z?IBEPkz_fPQq*uZG=n|NZirPNhPTN#ZjV^&x z)%GxEwA)7QPe+Htr`6}Mb{rK|yeT>q-h`^!PHpGII{u8M(P6L@e^%Q|STi!U{~p#DYkzfMYAaj}=?oT>E|F(~s36l>`U34&W#(m$;;S6b6h>KQ@f?k{n$;j6Q5$ z@Jd!6hK=7{=|+C*5AVJipTL%p6Sl-BaI?l;*>-b&!sF|o$FFcCUX5+BHdc;*!OB=0 z+v3$Y62HRp^7|~NL<7nT9P)WNM+RSBVUp9S#grGyOMGg1xTvaTd5u-JC-GrSkEF_r z+#bTa(v}p;s|*M6&2*)Z@-k1j4SiTt%rsM8XZsW?xCiIq^Y{P`!gA3@N~Kb%R4SE9 brBdmyyS*ATIJ(5s00000NkvXXu0mjfMbp*& literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_filter_vintage_white_48.png 
b/app/src/main/res/drawable-xhdpi/baseline_filter_vintage_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..f3704b611a84fbbdb2b70eb42b52b161a3be3bd6 GIT binary patch literal 1190 zcmV;X1X=ruP)Hr#!|`uYviu&C8<*o3ce*UC|H!9!TxOLsoS z;vIVOAXfLu1vg^%H6LN|A>Uzlom_A>zB*Sf*alziEEmj$uNIICN~y<3ZB)n&w-8Pf zcd!xjGK6Ys7{okm!W}dd&ckxWh6Gp34s=ikbYKT+32p~Vb~}wAqMSm>U!Nq-)C@8KlM<@pDM1HSPt>7r-T5@Zp zMgh@*R&slbZVHDIt~4(220TJ8FVjVZz%j(hW_N|aZe;f;H7XRAAVOx#CAuxt?tz1cW6%r-v%CvU>_Rqijwn>`_l+VhWDk?eMU;d; z{dbEPO4QIwZr4((;OM|>|9dcZMVL?bVO;zYIGm2);urL8xc@$)ocGA(P0Hmr#?e4F4UCZgSdJ)}M1LJ*n0zRBAr$L| z=wOiC=$7=+Ljj2Qg7p7AL-kk&geP+U7Y=F%j)GAD07*T0zYYayTmS$707*qoM6N<$ Eg6MoRU;qFB literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_folder_open_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_folder_open_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..860c80814c5518cce01a7f1478cdd9c1fe30b7fc GIT binary patch literal 322 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z{u(8;uuoF`1Y2y-(d#{hKEbN z9&-mAF}XA0f|AvgSzPBD4ha^xw@!&^KGk)^`dh%}-l1s;XVgtn@tmacG@{7S z{rtvtm;Yo`B&Z0hHL~ibIH?wJ3z#(q^9fiqYBwC|?AKaJx5L(&5$W{|Bm(ajy zKb2h~;lRRA49q+dTKonF8ZI(0Yw;fda)InBW(&gu4X>Dt-e}&evr3SzHH`?~^tI%G zB=b_KjLRnP7V-K^isdZUO_!81>zR_0;yk&}@pM<;yq#ZdHZJl7d1T5@R^w~;W<0wu RcmWtT44$rjF6*2UngF$&fG7X} literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_highlight_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_highlight_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..5403eda88c767082a40b2492529fa46956c61fb4 GIT binary patch literal 333 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z$oJB;uuoF`1a;PK_*85my4nv zERzH{nshkcFe!4dOkxlaXAmkBx;P`OL-dN-@sPiK3E%AZ_8K0O@b-~s1{wndVROuu zGFoL^`#7CpZpp>6n19ZflAI(S{F%OF^|r^KRE0kk@AHwEd|&a&CtryP_Z3?{u?w1< zek~mI^WM+j&F;Fdgg^b{myObP;CNEbx&7czYun#HlR3-ZMyE5!71ll5@wu7hd3v9< zy+;9vGr7fMU;q6%^HtBB^Y1ZS_Tirx#1RP6$15*ut=_?(p`7Q|dll4$Hho?#G$(%Q 
zgFmZSo`May^iUr>mdKI;Vst0Jw&H AkN^Mx literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_panorama_horizontal_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_panorama_horizontal_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..9f73cfd2b41abcf27d517e559168ec265b02bb21 GIT binary patch literal 689 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z*OPs;uuoF`1TevPi(Y6+r_^* zx5GZ<+}@T8L94grJ}$dz`&CnKa^%M6-IMEsI22n1oH!DC=zEXZ7YcF7T{O(s`8n|mhPu_=O1;wV@V^U} z*DU4N*n5k=qhYb_;;CL4Pb1y^%N?W+p7Q&!i0{)OI=9y1@Yr|9J8U#~cR|CV&ptFM8_+vQ5;-CdJ%Y>H;-ftE8FhS@hy2I`gW zy?yt6;rZ&jzxRD><=^v=)xY9^Y+k`W=A49yQx;!gTq@2eC_orl>g3e^-ks6Z`SSBe zwPoEhx#`)_cX~v54Hk>OePzS{!$J11@7|_w|GsTo{WkZf5o^TZh3meqs{a}ubGPQL zp8M2{G~Le^{wA4~T>rKDpy9_ify#`67w*c(6;Jk_`eRt+8&|J=`-I`R)$OTy z-&ahpeIPz}mEwZ+?`>WhvERPnl&LlI`mJxp3)^d#tvvXYC*wur@~GoJD>KgI89%R# zoqdUw=UeORw|NIcf~FNVSpUs9u!Wsx&Vw)K=O#3Ugr#kteZ0hI-#g8I`JS*3ODF5j z&^pFrR=By!D1Mnu*w?j9`E%1AhN!N2#AL7a8O8IueV1?(1vND$CfBv1eYx`N$nSOWyoY)o?nyEhOmAptY6KoBBeO{NmN z{}zk!nlai5=U4EW1aUEmr2HZ5Sn2ZG?0mhgJB z0*<-OEsnX|8wHFvCt$&2ouki!)jo4mK^Sw@fPpUXl2yKQS3wwa)qshX@Sf;5PZWeP z7Y_*6uO%y%@IpZtbNPU9{Xtm5P-`%*88A=~mhh1qS|g0PXuw35;4J{(xT7_~n9Bu( z>ksHKuws45?|D4|q`Z+vRpTKw?z)<7+;k%e3Ox<9vWbkAvc$zYpk1 znSg+8b_#vJ|HMwX{d_=q4k*L{4sd`2=2WFDrfm-xwmKlb_sdfrqqYZhnYKD0zV}-g hkbm!&2{1D=`wv2CNW2f)BeVbj002ovPDHLkV1iHM?EL@$ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_portrait_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_portrait_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..b1289cb11170503795f2582be7d5feba69b84694 GIT binary patch literal 461 zcmV;;0W$uHP)D!Z7=~dCX4f#Yq=LW|kf3@8Q@BYqS5g9wAkd(z5L^X>)Z&4p3|I0_ESVUCt}UVG zFW-9u^hr+Xf6~)hO(~_6Qc7KoCIM?=9$n0ufM&6auQMW{%o0Y_^I!e~7h2>bFvOLH z`PP2o3?RvIFA(5TL3Y5(DZnZ_Aa)86X9xW1;m@ClO$dQN% zz9|46%ZqVriDv{rhuxnO<&KU40N$0$|0a0u1C}@E`laIl{M4<9?>1nZ3K+W$*rWnB zZUf?b?QR8l)+J>ACbs@JYW_i1flP@P1=_PV1A8 
zk^%G!zk~0W2%w!~>0BZT#isF(ZCHG`@S=00000NkvXXu0mjf DK?}!- literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_remove_red_eye_white_48.png b/app/src/main/res/drawable-xhdpi/baseline_remove_red_eye_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..47e62c9a2e58d99b0a15ec74c88ff90aba92b9e9 GIT binary patch literal 1033 zcmV+k1or!hP)k4q zAQ8Fs2=N48;t>sxFL;WO+$wb7H7*jXi@ZT+2^L`{vW9qC%XlIZ44&mEiEx-fl4!JL zIf=BAwvs56W;WR*S{8FCE%8TFRuGgGG?VyZ7}p5WRfbEv5GDgVCrDx>jpQ#hW&}x` zz)mJ%i5<0AjovXP(?*Iw8>Vp#z17r`*wBU}Xn8E6oTBVlf``@-+DPK_ znD5X!No$D?%{YaQ#~8817(8@N(p>COn24Mf8|}RW;UYR7U8Q7HWFJ~n#D~Z6D2m79;=^RL4pC7`!YgQKoYx+(6?d4SmUcwAK_~H0!Mr0~u&# zh=En;4U@tV;skmx8tLZPXl|rmMDI8uDGal)@vM=afsGlyL4l1~;zwCdW238){|OsE z8|kjtI89kGF%&CitHC~O?DMrvc7}+FO;|Bo7j9tVy06W#v&sGg)D$uD*JJtzBG_3i zCWc_AwvpeDjs3pv!_JV{-^5*v{Lk3<#YlIF^-XO5K{(Jz&&0+|BR$YM?MH~?=sjWbRq)^oO&sb#-Uo)=AvC5}tO%Od&0vc?MKr3Br5MoULqs4yYptF|Hojlw)pB4p3ZzV8Bt%@=QOHrLW#DJNN|xt z#@P^fdAuZ_n1dW-uEnPRw0$g=w4=cZK%kb{G%Pja(j} zT=AiWpU6RfVO~z8AKAP@nc_nu-;$O7Lm3Csfy|yErT9?K7i6IbPG>5aTqb{5nHuFp z4IdRc%&{81$Ydw^K^=Yhf_mjdRpts=zOffcH-<&z0}lc4Esc3Ayh{|yh!hWy?kiGq zja~THPqZi}DtH|WWyMXTJ6&OkE~=c`c8ix|p*yNMb>qg>($toF>UoG9a|f(umB zov7>O4(;fQHJ#;^2PvM)Uch24W?)UAR<5ugQ8%jqqH7%&WCFr!a7$TaWd9Z3Jbep zeW$RnK2~3ag(a~@DJ;}u%?Q1~RE345u|_H^Y!dpPcpt29RVp6FI!hjLmh%@Gr((S* zk9ZgBa+QjWu?EQ-s+f$mr%FXFQESN)wstj68+?j&gFN9ztoD8;#6eWb1DYA<>K|Y* zKQpn8l?Plz)T(lWkFmN@=3$5!h1FTEus8UA8*nvIC(9L5bita!GIEG5nCW_J(co~b z&deugn2+H^T_=|)X8?9@+8HYOgs8DJ$}M&?2OKWrzwPgU!{7mX#N-0yyzTRd$)Jut zd_ld_SJ!K#8_yzbddx~AyJ$AB1B3G)vwBA(KLKB}fr`LSe_H!p_sIx^)%Cph1J! 
zPjLfwSIi*t1c*EVB2R$G6Cm;gh&%xzPatV$#RMWxfJhVQWfk_}YKSg@xj2rOu;NId zh23}yE0zS>IDj88B1&LAzQl+qfr+>mBdP?p=U3|UI>VF=a-2`SUZz{wpvdKv^BcFY zdBx**C>s>Gfl~hEN>-*(k|^i7?2ez~3ex?~Sxl;tDD6pFl@$&q-Np3Fpp-{wR#sSs zG@rAK5=eI+4JwNUrdJqDz>g%|NV`&y=01k0EShaokEvy&h2Q%T?PZa@hl4`(KxTcT~9E$Z5?WzbkN;P39Ut;YiA9xw-cvXP| zRWpvmdPVi46W3w^2Q#jA)r-}!erK41;S;QdRWA<0x?2Hpt@j&$F2mYW0kJ#Q`KlLB zVXdaP@FCWGiVNRiO|H1`FIKbS!k<`8iVNRjO&a@`cpk-tC$UymT(}Ht2L;4lSm&u; z9D;R?0^)Y>FBhv~{Y*iDP~b=Jf17mST1bJgDAqqTgt(zOPJwVT)&pTC(m&Fn02t0! zSlh`5X2i9h0$?|+f9aDSypHu9E%J;OzQMXrzOX&6L*yAJ;94!@Xz?FrlSfR=-&k*w zlRq4Z>urY1o0Q@1M)HYPe!z9Q{Bg(YpHh@x>`c;e^2j{kSV6v#=Mj?5Aulh?Bc`(z z0u%BxN!QXSZ)oNbKBP%uupDXLW+sgR7NJcEG%%yW;SkdN!A`0P9h?h1>m-Cnp36!1 z9P3a}6_|z-`2oO9$bQ-5Betct?B&|}Jb;l!LXop6=Pk}*BRM#OC;1H{vjmA9_?xoi zz>$5zynOC0;Aj?OcEeG1-YxM!Ya2V|acJ){etnWFM?g6^| zT_NBjPG)fm8mQ=HRrci?Ik6R+v0m`kU^Iuz{6KH6q=rIG;ZT}pz5Np4JSi?(a3u~= z{mMpJZy(^+B1R0$@iTFGjzOX@oDwvymSUhO=g~-<98OlZKMT`Q5;ywr5ykKZEBkIA z((EE}Vm->RY2Y2s;}EuGb%xNIb_`)nw&f5m;B8#Oa@j)S!!Ec4N%1l}&{iyH%Z|Lv zJo4qRk;I2%2vX0r^piZ%k85$rt&VjhKI~6+jod_^0wU>V8nLNib%_r<1$_7y7$ydW zaWIeZ1%EJ`6aZ=dZY>VF%`ewZQ7qP3QhuU`!G`?quDD-6tUMZHA=xD`Myyj*GE)SB+Y5H6< z$yCMm838>PuWx7~QE>rYE~^@Q^r9M@DX8uX=i`-TUr|^M7q26uhp+$U_7e{d!Df41 z@;msYIWQxVyk^o;qTo!t9uHfg=bY6L5xk8WhtW118p4%iE?OGV)gRgyGQ|DQlMx2e=7e22{&qHqabTS}pL z3sWWSRLViV5~c~C<5h*Kus9Gq9j>&a0@Eu}82XaNbgFJLl%vqVaJ6*=jhIeQDIF*w zms0(5PQ#Rq-48cchrlwB`j9P zl%|tP`4Yd6M2zf`Vz?BH&ZIG|C}D9RrcZTfXLqo!JN^W2kVyD6a+889FME<04ibqk?5~rscroA&Ur!KSUNcG};$TcKN?81eX??Z*HG=IaR-8mGFRMM*$Mmy= z#bivX2f#RjmD;NuS}+~EU)7#NG0l*$D8_ zf~Ne4%{<+AGo>Uf+<*sgv719Dv0w{q8Uh}`m9Qwo)Wh6vai3W6Ngi3K2c|L!i>VlN zoAWnv+ej=}iC}-|F1~51gvHO{i&1R`?Zt;SqVNPkUI{2he~I1}Jyas%W;RfN4IxE# zH>y1j#q^Sd#nqUumPi;wik~tbCcC{<`c;vy&LNmSlSuf;xWTT}kWFuu{u!o2BrH}5 zXigV|T>*gUdXVcp{JvCaok(L^rI4A(>VOW-f#mCO|Bxal=+1j-V8?zBOw}}%@OTB& z^ZIs5$;U6cuY4gIrF2wVkHl0?hk!2S%TfqV3hdOMK|a>iePV}9m+O@+6UlV39?oS!XtFh{WCpg#+ 
z*E*qk7s0*}g=IuxKfLPbBGGW6bwXkjoWKdy3`nh}`GrSuey4XMU&5*;KyEQgCce%~8I-oU1asm(V853}Dn9Xl|${p-XXWw$0XUS$BJBhaNE_k|$MzY&q zii48~QqMJ^mYiLKgUQ}_UWgX`?POi_=dz0w7wb`;$Fj>q*^bp2Lc37QF3VX@ijM(& zL@|8C0I^$$mmW8A9!+I_VmW>%F2A#!%zw1xS{$NsxR#c(pwNe_si9CcTthEe(CEfV z{6bhib28mz;lV0g#YZ^gQqRX+&Z-OZ&#?9~eum{RlVA9Nhd7VDSea&$NF)-8L~Q;B X1YlYSJH#bD00000NkvXXu0mjf**i~O literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/baseline_text_fields_red_48.png b/app/src/main/res/drawable-xhdpi/baseline_text_fields_red_48.png new file mode 100644 index 0000000000000000000000000000000000000000..22b5cd8271c9f048761005c1c3394200fb5d27c4 GIT binary patch literal 739 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85sBugD~Uq{1qucL5ULAh?3y^w370~qEv>0#LT=By}Z;C1rt33 zJ>#Bd(*qb7m_j^V978JN-cH!KHo;urNS*us|5h;wn&*G%eS4vDQ!qR4)aHHNwpN~HM`q3bwB~(MzuzR;jn9_Zyxx6d@#GZRC{4c6 zt8!B=CuLF&&+dsTx0`!?CiNzoPfDrYFwt|9;TyL}Is9fuo@Q-#Jtn<9=%#k^*am6O zO|~~qsN9q&Q}*1(ET$m`Z~Df*BafCZDwc@-#eM978G? zlO@Uy{IloyBbg(kk?^mc!Q)bEGXJLX2}K8HT$>YkPJ@9#V0z%`;uuoF_~tfaN_3#ev5(yf z910Rm3pLsjB)C|d4MbcGMH~$e2si{d81fty5OEIRaTH`=aW!yYI4Hn!uwG-^$2|X8 z<(t-@w6|M%@zSC4#gG$ojp0A*84V?B_JoPXr0uC z17^JpU0=PZLacW@OkWy@~++ZJeVD%*9xu0v)1rIFF z=QFZe4D?V-2Lly?e-^gUo!`YuAkH}v-XTh|4RE+5(f?> z_L)_FZd$Z$(G97LSIhxo`v1>9_dWRgctf3YmBP{0Y-dis7Ktf#ytl-Qaf@u)iwWCx zcI{`-omUpp%5dytW8k{B=(4stQKkdQR-#y#9IPo@_^k>d-y24s`8_ez5qX)MWGfKif+FcTF#x^5uFU>jM2awg++d zgH;$`urozfY_4wEU#k=MOz_hEn+sVC{QMd;|DX2|ZP-^KIs1H1x2|1r!kz82niyD) zo_>BhE9c|P<2M%mnk>3rsORkS40ro%VMm6hN2v^c6}MjU<-gH&WVrMwHOH=3dGY^? 
z+t%|s#P4}^plQ#;KKbmKT+A2DJs%uicb(1Q{2Rl0-xgL#_D|kn7nNaii=#l5+aW=u z{?g@0#`Ve!a~&sjGCXszWx8PLZ6tU=<)jNk_5xYP7d5^>p8f(!#uqVaGdUhGu5e|r znk$mhSnHoyyz#wYcWr;fo9ZJ;_m$iFPrfgEGXaO x{`Mc$VAf|47fgV~IzSTG2sFhY9n1{dSr1)4Y-(tF$OdQ&gQu&X%Q~loCIB{ZWo-Ze literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/flash_off.png b/app/src/main/res/drawable-xhdpi/flash_off.png new file mode 100644 index 0000000000000000000000000000000000000000..d020a143de366a9297ac60706afae55c3e8f4333 GIT binary patch literal 511 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7Ro>U_9pO;uuoF`1ZD7j&q=l+egR5 zd6myhmaCRZUD;{y-0J$zgLBxLfgH}rv1)q#{?nuDf|KSH zg|4eV6~o`6l7C(C$G!cG0x|Yo4}Mj$TtBb8qfYMN*Uud5<^%QEG`)T<7-Qd$O^RuM z9b9U^jH0qY%>Sa69;br8tPPAnOZgh?xBU=kaQ@-{Nr$1m;Dc_5^AF!o8Vv0lK0D8F z{_!bVpvB<-dsha&hcDg(3H_6k8DtLKi$5fD$n2yWgTO(tZD}lw6?oA;OBTjue(8a4 zr+v2NX<}$+T)v4_W6pa<0hSxQ`{pY+Jghliz`{5SEb0Dg*A#}cKjqjQ8*Ix90~+Q( ztKn}@IPmRUcm`|6tamm2H@7eNRJ+cqN&Zml?-*ObKT=$Go@WXEQDnRS+~|kh^n>4b z^6WdG`k_`Yd;WKcfZuzLH@rGueFhA!-INT9JQX#6W6B?3#4>og`njxgN@xNATI=Xu literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/flash_on.png b/app/src/main/res/drawable-xhdpi/flash_on.png new file mode 100644 index 0000000000000000000000000000000000000000..ca11004d56ddcd466d6091fd3068c538ad46c889 GIT binary patch literal 347 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7Ro>U{vySaSW+oe0xK10#ks9+r#af zw(aAyJX-i}r~kX1&*v@Q`Bb3YG5EY@rr+h7`)5jb*w1BTVEB;Nf4=NX!?jb)YyR(D zW1sfd@O6J={h43M*W@?%;wL zfFLLPl)~y$XX-+yMMr+md3(c%B_&4b@SGVAhv&%Z`RtU9d0$g@rR@7T+i7oHAfVvq z-*R)YM;BV|&nk#`;mp9jQcdHk&KJ7_iXMI68GTt+p8Va=py|NK70|#WvVehA1C4oL zf{#!k--=Yr#V0$m9n9U=ihHlNG${ed{E#lj!v34W90-1qHHU6XYn zr$$O!ZEN5XBbDyLZ^Ar}Q$?6&-AvOvqrl(bzh+B(g4NN7>~l8BC)k#j6yK?9w%yV$ z{ozbqlk=U;_6Ob?Fkh0){Libtrorgh4E7m5eiDqiZw&i4%zXBMS#M^$#d_(POm{>U t91PwudGn6`e_}_f%XS`s2F|27{<`uzW_Po%c>)8T!PC{xWt~$(695LBt~vk! 
literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_colorize_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_colorize_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..043d03ebe8969786d9a66678439f9b2f83cb23fb GIT binary patch literal 528 zcmV+r0`L8aP)L(I>iZ?nSO#o1o6Tmk*=*6r13vSUue|1(M*bzgH(*UOUthusUsr;YuP4FFmyy5* z`)mdB<0r7eN0NyzA(J0BVa_MXgv~&Hyo3e5Ga)xWPQntOB*%*hh4{0CYQN6xI}l@T z1oBDps@i}y6JJ6QpM0tMCGsWQGi>kGA7ghy=$>NSL}e!I#rYPLm2iYu<6Q~-<9PfF z;~U@j#uvgLaGxKV`89yMITK{^ft(3)`MDBg^D`4}#Pc%}61K(jGZH2d5{mKxfQR1K zsOkg{{~?uM=P2bMPuS7SuQK76__KtrW_}e3rSfk%c0BEA;hz9(uKfOwSXaXb0JeT4 zXyXF_+ngxo0{}am#NiLi-~#}=>?!8Qx%Z84{EGRf#y37WV|F`*oa=tS_6WldFBl+?UnoE#zd(RYe(nINe4EW?v)OF6f3Yv>SpawU SH{jO*0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D1RP03K~#8N?VUeQ z6hRn=_c&4HAT*fRTxd{&F$5AqLq{x3Y_y@F0KS10@gr!VpFv6*Vq@$`Y$*&Omn#H` z0tps02ueV`bAH?aE9}n9-p=mMpZ7^NJF^^c{GPpcXTt4GKoA5$5ClOGgm;wqUYU*Q z>DM><`yU65#=vpCUio`)z`x=r<-We4Yk2smR4#A#OiV2F&(6N*Us9sfY&LmQ%$J#& z`;Ft{k0)DOcljaW3LU+@LHEc={m=gX)4`>s4SraNk^qFy&h$^GySo*Bh!AkITK(D4 z-94rvp@Z*Zsf~_SapqI_>YVQER9KmQ$GbwL0=kBVo>SRL&ad_L2m6bQPk1*LserQ! zE|HkV;bCyJy84QDW04BLXot6mq$p%3ya=QIvtib@??cUYVc9iVb^{OB)^_-zYsg*! 
zQxtwYY_#*0HzQ%$VV*VB>O~_jK}00C1_wX0(asy*Z5tBM1_=LTbMp~v{%9R(9}!0>+k^yh#`;g0SSm8ukZm4L$8n|d{c*^){^whr(vi@ z@+>=HNC2a#@PYRNiVPokEuiS|fwuxo2p@PUz?ATTcLGcbA9y9ewD5s90!$1acp<>l z@PYFJOb#D7Ex`2ffwKZ^2p>2pz?SfVa{_D%A2=nzw(x;70&EN)I3d8+@PYLLYz`k- zEx`8hfwcl$2p?D}z?JZUb#F^a6F&QUs%$5`Wr(q&`m?=zTZ)D=AK@b~(Iyk56>9{z zRCdBxA;8t}f$;)d4j&jT!1eHfu>yPu9~ddXm+*mc^MEwrD~$71?k|wt-*7Qc&@m4% zNF==gZJG5AgBKH)b9{Wv6{ozXI?P?)+24Q-6@P zg{b_GS@;T&37`cWQjmH92Ye$IDdSxtA-7p}va&3_24ICa-XxMzE`Q^#Sfm25JReqw zBa(s4D&yk|yc>&D0Mgf00-ZS16 zq9g!oB4TaCp2^8Nm7{o&4a=xj>g*i5x4gW`4+{|vL!-d*d}k}fT`p&vcu8EBz)%yT z9b9lx^Hs`Dn8X6YV4lEsx!zM{Aj&uOMUhGXkshUM0FfOepkPQq!H|H0Apr$L0tzJv Z{sHTz^Y~}u2hsol002ovPDHLkV1hWw4mkh- literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_exposure_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_exposure_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..66014de15b3ec1e9ff01e9ae21e67de66cb7aa57 GIT binary patch literal 447 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z}V*L;uuoF`1ZD=mvW$l>qBE> zJ+~(7J=Fo8Z0lT?h%bEMb!Lg*`PoffDkb+?%ipp6%MZ#AIg_^eX3n;0{c?Z5Jy~o0 z^Xb98Yxu91yL9u7vnw}JF+PxA1Q zF-6Aqw0OmGANno_?rd7VNPU{z~7S(w=_4xzc#YItc6@qmGaRrG3ZH`){ zORLxVCOaC+OTU%y&c==ulSuG*=XJYV|1(VsQx?SKa>=5i4Xuz7_m`~M>7~wkDyH!gSI|b28t^Ftfg)MA& z95#~pp78p4^f`_d%0Fy=oe=z4Ic#TNI|kQHAZOQmNW&UNppSu-w`BO1m3acunF5ee5 zw=wjj&foLu_ym*^_o&+|BO@{-i%|0FRcn-!lDPI$G4MmZQ@5_aCZ7culxIh09TeA= zr2rofZcQ5}lyqJCd3?kr1xh*0hCzO{wsfyaCF}sdTtVdYRI|%SbzAzUy^9tmIvT!owq(%IO-mtKSvc9L<>0XL zq_bHuwo#b}qBCT1g<&KS%C6{$tbT~bcN2VXo-IaUFAvJ6FH-eSS@w};6iX_@!`pw( zcsXI*k5O!)G{iHv;j#I%xs#)IHz$$o0Onm!=6;mFB0smp@UdLglI3i|OOZD$#y#{V zyt}zmR(nW{Dj;8|zN5`VtKJG!xPMMJOJe!XuRbPLH>U>~yJ9 z3)502%71LHU|v2~oHk5NwiklnpwU979p z0p>WLrxEclPWIFUJ3Nw+RL(`f=KC%CnHN8XVB}S4EB_qf%m5QA>DVh`!7i0aaS_~% zGACHXAoL;8_wOSVV~kSmcDl#fTG`Dg#wa1~$Ee4lWEu`lkFE%NHm zt+jfn>aCy_L)1TjsEjs@cjiYyT(QQ<1QovgC2F7f;$r>ny3>bqI+wiL1ahpxmA5l^ zK@s;&HNw3`0?bJjh z&Ow(aJ^+pevOwZg?i#gaV62X&UBlQ>>(T55q_WX*f*quGzX?VNAG7f|i6wZ5=PE%c&`DZd| 
zw6wB|?7Ik6*9LPPyc&wT1?VD{p3DmsW3)3#zU3#yLd_2)y>}=`XmPFp1B8g>eV_%J z`2hf3O`!F5j8?TDF!QPnDhZbyursaHOtt<|;69Rqhf`SP?7rButQde4=j?R|f+fu{ zV?hS4*1yn1W2`q{9H1ilMufuf-#2eW(Fm~pa60F$qnVna?J-A(1KR5=SFZ||3a!n2 zF2(EAWu(^nv-uY+ zPj&To&%J%J;nNu_sJ@4t7Y*hWZ?gwxr5>3C`-pUGxxJh!icSAAZ4zl=(L-8Jwkny)x&lY$ZZ(=w&vvN@RysHZ%o9=hQ0=qjk$p2|~F) zPs=-9%FBY&Ve&y;1?$g^!_enx=B7josr7E+><^Fa=-i$o3V>3{F|~&)cjr_7cGW-Z zELqNw3VNxb*pa7~$&e=D8`11|gT}vpy_>~7C~DH#)}aJLkFI!%^tQ<`V-aP;j2CZY zJ|Sv-K!aAes}ehP6=&w8I$A&D$t^wnqq;$w=k5urzrmEocpZFku!9* zO3=6W`xS?jOj82>Ga->|JZ3y&G4puB#u}7};O_)^ETl*mw4hx=9VkX6yV41x@6Lm2 zti&*lTO#K8AUQq`Ur|dYaey;LdK#OW!2`CS8l*K$VbwKjeCmj!KpiZ;7EtnRs z5ExC@v|v?5!QV-k+&c@R_kqbQsvU+Hn3TN{orR-8ZXUIX7b^_|nf-&G=OwmOqP=O6LxewM>t*{DYfofd+;$ioAj z;Rrg+m>?dduC1{e-)TCcvh$))m(tMCsyITLgX9fDSVvD9Ozj z9%}nbVjMCbb}&|e1(l7a67B@Ff literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_face_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_face_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..393047f59b90ba95bf92ffae88e103197e0089cd GIT binary patch literal 1344 zcmV-G1;6@|!w}d&5g85?NKDM)rG^V-kOjv}1ycit#F%)4Kso_=GsMJ5R1!!C zG8oZ>K$c;G7hH^YNQgrcuZR;zw=tN5GTA!HkZt{%7%zAZUC(pc=bXbuKfl|Di|^8P zZFr94<>lq&<(b1|YS_pj&eBSphm0`7L*le@h6XlLLpfq{f|(T)<^VBtv{BDeLQhLT z0W~!70H=~PvXBCQ14@a~NiI5APl-PPA+~T26MET1(CdJe+`^}2`aJ= zIE}7Pk^eTIN0`=}ZNN%&wJ{d?e=8Wlv`97qA#R~-vGOlhF^pMV1f2(LM%Qt~vZefk zS)Vx#DCKYT^fCp(y0?g9(p`$31w_#mM$)c=-!N&dlYl}x(X$&Fb>rBLIqeka2h^bF z49_AN_ck$1sn!o@LdWk+Kx`M{O9nCFh;BeJ53qNG7ZJ_9L<2+Q>OMiefH3wtn1MK2 z1$#)4n_9hq0~#-$X-F0;=3~y2B3t|P0%Bx#jtNK(dx4KQMjU%1sQ%Cim`rAScm^@D zN>=a%N4UtJ^zwiahDdOOU-^lz`Ho15Ce!mSI(E z^n3_a()2I!MXb&{J#YG#xE0mwNQMn!l{9+3%K6*q8B|Mr4G5z;X&=yl>T@K+E?{-e z==p7^_B;PD*N9};608;(J%0>U)cMEhn@Gm(N+Ke2OCcZR#C)hK#(r1j(QT%gjFX2=K(@Q zxHL-JSoKn7hd}*(EsbBA5cUK=^UlR;2UQX>0DxL?g2s>G19qi=ug``I!LFD ziKebyR?n z@Y~id5{#ye3O6o)m(xNfEkv24af!K(%VctiiEdrLFQNfEf6>HN!c;MpAcYhXVk%XH z*~T$?u(OY`?mhsqiUe-yBjUv)C>1p0h7(Nl?jfKEU6|Cta<3o53bKK_m=GsQz@G=< 
zidjoLxrnibkl&9YR&#{=I5ogw-eJts@-S$STJ~`b9j)wT5dp-+1hXq+0d>@KimTiv z$uPqtxy@BhvY#l`OhQaf@PGM$d_X=RACM35Z~p>I7`+s;7n+Lz0000Jk`5kZc z`d`%b(pWe;Zp(hA<@4!}{&k#tHlI|O|CsZDxw($fndcEq4Ei#ubIyNcNjUAw_DjZ} z;laevwknx;20M$h5&e(38SZR6lL4d;Xsb)UE39Gs@u;h^X8W;^tPJxaCv!?27jN)C zr{wtL2b04(nW>MtpPUM`+~=@DX7*$5mQx#vVXl&y3DG9{RH15CMV=ubG mfmJJ@kt<{Y6EgF|d4A;|)$xKN5ok0IKhUw(#P@Tsjo(9svL-5cgbU z{JPd{A*e#Mbt7I3xlWA2hx-9S@qxialVCrh_HmP=CMSGRZbBNVIJG4M zw%~Tb2ooah^-V zzdy+CyaJP(2rg@$B7>b|Ge=ayA_Sb+ho-Id@YuDetqMwuLMh&3iqz^qcL18dPc4<) z6w6}tE40Z z7+IOq#(qhmT2N?wu$xY8Rn$M}?81r)PbpMb5_sw!1gX4xqdX}HK<;!tpcEy9k|5Ug zJ0D+;9)QJ06BoO3$D&NV?`Vve-!={88#I(scH7~HZvuO_O_;?_Sxj|onB$Sf-;i|i z9|_3CXw?z*hZw@KiQ1o!@}j?bOohB>m`;g7)qxPFn`!xZCo4`5e9DT3nW`*bpUqL; z+#*+{D$+xxT#hp)306B|5;5{4Z5{^&=T%2=>KC3Hpz()YV1BF)>`!^74|zY{-B3Wd zDhbl(OXH~fAQ=S`hS~ z&BE>}q&A=u#|XCyt~)lm&JsQT*Jrt!972Ic*h2Agm2R>U7Os3^SRAt~3kp0~BH+o! z^qnN@$+q-__E{8aJ06Q?PfL!Ch3Gl#0QbgjOkmTKb%9-+3FoZ^sCa{H^<8M#*tYGpX2X(j(!;hgs_Zm@n%a-^HR)J%6k0 zE)A9^Y~KN^R0fx=#G&!ZhOFafQ!hycrq^y|i1 zm$+Gvl~S?(4fvck4j-Qrp{JY3sIrU6KvkcYKcy{e6TB{ou?I6CSb+@x`DKTfS*y<{0 z%^e5-AVB8;#EXJ`GCxPUL{w^Glzk<-EE1hNw-9fvw_b+>X4jI?S`Lxc?{>RIBBf>- zFGOcC4G(MN$_9G9Ei_SWib(Lp^O(s)4A?EyZn-}1pxhQ?q++ES@eUk8^&xt6Je7cg38{s zF3m3Er3fF$_zy!OAPis8x;puTjq2W+cK_-_fyQkZE}{Bj5Ox& zc+GSwr|3R-t@`%nkbF71JCOiISm+AWfy+BU{0pbj;hxxg_Fq!KYovV``!+{`M^+`; zIrZko5&41dGkbhmhLnN6aRYZ~OCl`uv~X+Yp`W?BrAIZo)P4u(l1ML7BC@@^uBp9y zIP)O(ZqVv{h27+v6|+VCcB^~`ENp+}@=}HCzaG)~c3E|nh4EZ7h_AX_T!WL@os7L! zaGzhmL?8;>I#9Lr_aP4sOo;?gO=@yP3os!&?>Ga^G$buC(B8h+EPCcvRha!*4KC<* zjiy!kMQ1H2<$a8)HkN9NU=2$TenRlGMF= zD5xU|*w}L?kRi^ODD<0B0CoFvhB;24Z~ogg8Q@Zzw%*6T?D3Bi*x(ETzw#)>%RJcMr_sJ-$4rAR{O>3h_&yk;%{;3p@uGCr_`OVC!dC8 zW1`Z8SgS?2v(bE;&e-;R>~ z>n$ZHOB~|dXz*g6L>%kYDTMaWih?FYtXcSvli_(KdWY;O8#*|uqAS3>d1mny2E*sD z57S}hFVOQAc$^tq@H>mCP-T;-wRrL-5hFsJNV+i?nYO1o6B6F+>KMb`NqO_(P|L?W z&pnlL=Q9W~osw$(Fb3}fcs6maPxgy$GJ)CkYE`zNroA`k#D$Fge=?!8qt=2xJAPk4&vZ}! 
zR~;Puay~w{FRlh*s0xDS8~vY>os-lqOQ|T6GZd%9Zq@#E_ce959*y@y(5uWeac-&^ cC5pQuV9MCS$`hWPa99D@**cx8w7Hh_ABu!Y`2YX_ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_gps_fixed_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_gps_fixed_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c55220a5fe5552f886fb309160080630acb8492f GIT binary patch literal 1379 zcmV-p1)TbcP)_+pOT_1^ zKs6(T;Te_lQD7IJ09lSF^wLH>m6Q=9Mj4gVbC8QXB?rh-G=yECnKY6#w`gPq3OOs- z!)?+?W@!wwK%02CpX4NEs64ZpE>hwd{57ls&BC8Q#c_&JK2yR;riBW>gi&CZaN=*V z9%Yyc289c$!FGWvKH)Y)J1SrfF^gM@3Y!I%F@oC%YEc%lg)!WQiQ6l11-E~wKv~KL z-r#n@R)KoH;r{c#M_J4o+!b3G>BMQA;$Q_%;dCpn*PQ+IF-(dK873K~kA0M)JaZSPV?hcOF^=v(&&Spe z+~6zVHqCW5dj1lo(Y>e8yg&<1$331Gy<`DdhVv{%A*Tzcz2*fTqf1hZ60?byt*Na9BV`7D%^L3hxsz`eZh zKgGPnuTkXR!XUbvCIt%kf=+%{>cvD4N=OsBNs|I~=(3cGH_j|3GU8>lk~wsn{TFCO z_XH*62B!K@LY|{*@n4_^U9ULU_=>41aboc@x-u!0(oCTli^=N z(T{j>-$tbjM-WZIQ9ID}2OmmY6kp({F*QX=cteR+bWc%2u4C$2n1i`GbUAVD zWg{6(q^TB;xXQ+Gx`5B<_Mn8E$HZCWr4(HfCB@Gq({1GGl)v$71bO=W4!VA`0*BD0 zS&b4>Ljpf0sPger-)2@|Eje^uC^5A@DopSrO3Z0=8PjPM#UpenN>E~I7$qMgRH4L_ z@(JAolxcwjIGscxXDK~o09l%|9!p2vI5h^D+#N$V?YXp3LmyMZW9u5#o{NcJ(Y*^Y zxvMyV(;&(-%W38+LnKI(CczL_X`%$>nfo{$MFlBPN&=@pP$A_{oW_DKa3~JrG)o;S zglr=X(h$>a;llYX_Ug?1r*zv!|4rcP!_Y6w>ah4YHOkU61Xc^JPXyl z#qAu*R)ILraT{X`%0hl(0=FSz_6jH}N#Qn2r-eID8n-0tZEv?}U>0z@LnX>ERon;M zX4nzNhNnhy!t!aC!S>Zsx`lb|9J|BXa`u~00Y1`AIV#VTb6VUwn58AmO@EEdBFQjF z6Dv{3Sw%B<$s(Cyci7u58%P1N9M8B+J3FbOoH%jHsbVJ`T;?z409le9^Re-@f?>ii z#QOQ#dK~8hIc&;tj@bNe{{ER4*fPr2h1h<-gbv0DYK(&vF5CwY#T?^3rrvRs;)VYN zrI5WmB#R#z9?(ePV)_VZHNSI{NxUTKr;W13`We=DY^H@X^yAde8CuvJeFzs7hzi(E lR3Iu46J(NS+q$z4oBqkm;g_O90NlRT|*mtk}&J&8Q!9gYuv@%1eZ8XD=E8xeoDZNG6=6Rz&-xP zuVtnfV0XL#{qpf4WH`=`Y!D=e8BUOi5ujfF=7bu)WC7FuV2Il20qW)Z5Z>b-!nMd* zGI0QI0F*)aAT$9UaWHy71z%YtY;cuqXn==s(;@*LkP8n8Pgvy_M);TmG*Lwb+o&hU zVY<1(EDl>$pglCeL-@LE!VA8o)8|uVJDrTP%4QYm2oEqM1o$jzAWwn+&!HX~3`^MUx>Lj|`CV&hp z=sFNfgk$KQnE>{qTcrlEM5to}-QM7UUUWZO{mSzY-O1p9Yu=k%i-b{h!@&V}(S3qg 
zB=n-YOTi4a@S4~@b*19#Bgsg0}zHFl)nxfz}pIW zgX60^80IBFID!ylr#2zLb1u+`l(|>A$Pzk3f)H#S6xpmbCOJl(&+q*>nz$a6Aeb!62o6!^aFH~E zFcyp;nC<{`MAh%?|Id}-2HwV^2MB5USRickbd&Nqc3$GM=m2`v4Dtlio^p;V-!t1K z`r>_xsgk2ivrdp4rs*VI#)0P&y>=eMYM_VPJj1VL#_48fnYVc*dh9+&R>59QGR$|l zyTfI=*ca%3Z8zyj9)vSp0!RP}AOR!*`%3`!mjM0+sxvb~e~9|>00000NkvXXu0mjf D^{fwF literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_hdr_on_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_hdr_on_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..342b4d8c82de5b756b1697316f1c82416209bb26 GIT binary patch literal 320 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z{u|D;uunK>+Ln$EGI{iwuj7} zzA+8l2GSl2qc?iY*6qk_>fU%>OJu7-hW55ce2c>7+bbF=l}qe>ap&T9&GQB8_KC}w zFawQ)fDP*IFJ{(+NxcqEy}#M1{1MBanJ>TgH1y1!!MJ;U@VVW><_=j;?{><5m|U)2 zcEITUIbWuAJ3m)2+OgaTZaAL*igE81X1hy#5-&76vrP`Wh;A-n-uE?Wn#X~}Z_jvs zr=4WYSst4n(=AkRa=vByt%7577qRv*G0iiz@3|gWcSP>&g7jKT_P0u`KK=Pq&!mTL z%l~)ZY3U*spHSYvGNp4CfB(Vg-FwAm?`2!Y(&rf&Fjq4$d^2N_Dc}Cx>#`b1%+uA+ JWt~$(697UqfHVLA literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_help_outline_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_help_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..0f88a23638966bc4aea528f101104931d93c2c65 GIT binary patch literal 1578 zcmV+_2G#kAP)Fi&TWs2kHZ+o5)TS=jf?^?Ud6)_gM3D2|?Y_Tr{!i!p0WZ$$B^4AD z6ciN1hH4IQoE|=Bh$$9WW|;-17~(U!InDv9buXffr%5qF7{hdNm{Q#vDCPosZ#zZ;R4z8Ql^+tP9NC}utE8vhfK?~DrUS&noM2F z4@by!her9LoM%Zf$TW*AGQ}V%nkknrnph-Li*mCPhaXqUh}F{4=5*(?td5#9wvz`W?=OURF*)9)M zF@rTik;38$T%GbnH?G?prAYsWB95_uD4JI;7Z8$#tyf<%>qFPZfkAtVa|R)^Q$dmJ0b|H`XNuVG3)DVnHc?$JL<# z)L>002n$$A#eiD=g=>Iy3P35=oPw~7l~CUJEz`J0`K1Dpz`Cm-EMp~&MOGrSSOStKEU-i5{iU3vAPt5<5>O536(5i{hKmH z#79_d3c>-bapi=gxb`SAe#Y9bAXMY3R}T09>z|5@hj3LY7{gevCcp2CGiUfxup&Qo=ih_+yW2M=v z2yiF@_TieORMF6ZYlL!zhC`vTiBVjwii#vZ;ObLoI20OZab2ZE(a}t%R}~Ttg~V%Q zdPce7AGp#qDHI$Eg%;AdK2ffyWd_$GPbmZ(3V}l`;krSMaz_i97HL)>9127WOJr(N 
ze&`}onhwQ`*GQA;wDQFU2FRw5a>ayAoF$ttSf~6^#W2~7uval)AERXR9h(&owlPjN zX;M@v4^-1bc4OSH_^^X%Zj1)Si*1bHWrlayp$OQ?yUgO{JMLHf zsA2#=7ii-_1>hmt8N|;QY}U$P16>69iL<;xBfF`glmrRNs9`sayvaxWOpw#8(@LU+ z8CJ>-nzZt$+JT_kB|ia1QtN{QA2WxU80@-fT{lxi(e#AAHO zEuxyGmq#hmy@4c8a)!%XA;DPbB};p_0dfAo z$vAy9#+6S9#ELYJ@(Lqa#%C_802gIqArYp#f(wN zJ5D-2x}I^0EQMtFA?SeRyu{vHx_o|^)~`kO7MeJbsRD=Aw#N4d<{5Bs2vT z(7+gW9I~nSN2#E6lJC{kK0$L^4urow?@d3Y+FGmL|;O)mE-^F4Bc9HL2 zQm7ENoloT3SxmqprfnYoS5!#bXp=BSqyK>0=yIe`A#Mj2x&gld?d0n~g}ejgYx5J3 z2Egy^UyUJ3s{D( z);XXRX&hy6FOb$b2OLD|GaAs3)ae{>38~9yz)_?g=YYFNn~esfkZw5#3?r>T`I=7o z+A5@n&H)oh4MqdzBfWAC_=prY8jwJmaSq5K#f%2TkiLBX0pHn2D2X)lKlmdaMq2s3 zeU08h+F~>yg>=(7pcm8C~mULv(P2h^i$HWskl&kK!W z1ZkhKfKETJl!_jtL1O{;k-FUlY(=tYHWIKD-3E67NoJ5v83{OpG)>%HfT9m6OI73n zbC^cDf^r+M9^H}115Tj(#eF~-qezp~L>f@f6jFv+?gJDZ=*~qN(2H(2%1b~cZ;&k3 zL>90X-2_Q*0g5hkkEx6#pqf#1X_VK11mozgMH0}DZj8A107V=5_M<}HVe)PBd(>b6 zoyB%ki2Dr--3^rAfQ4kyePSajq;2Lix=9xJ4^Xs&O~Q_l{`EhTudUbvPZ!B&aUg`x zzD2%ssA2=k8N$vrDp3KgnttpIGQ0QyMI9OJJZ23lfURW|J0sKtb?mZ<=h(66p%zud z>bZoyXEX+R@O5c{1jurnYLt)7;RI9Id&-iaj~+Mi1PVr+=0}fjTW6S}kP#ZgIQ(7D z5MMJynq@Bjr=gSke9a&=VI99OrxzzL=;tUYRxzIhF=8Z{&ni+J<2o;Ka+cZQUVzxi z6k(a9Es85BjSLWm8!U+K5>OlC1Zk9QQD4JKaDdkYVS+SqGZ*11*vk`&kzqGU(^nB& zxXOFHn&t{WGwWNq44PmQXZZ&=hUsR5_|TXQ5UXPyo%C>vhrD8jFMMH!S3Kk<=SkB- pEh@+Px#1ZP1_K>z@;j|==^1poj532;bRa{vG&=l}pD=mCOb1snhX02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;Eegmrwuz4;e{BK~#8N?Oh3Q6jd6YCHG~-goHqn$we~L z(?>!8m*wzeON1@SmaMu;t+fiZ;;t8EEwXO00+t+AYn7su$F94qpd}(l02LCFaD*cv zArLuARIUmkTmeGF)wTD1J>58&`DZfIGd-Ck^H+UUq36B-{r~s(```ci^$e!4ge5G! 
zKz`1Esmr|&mJ-)rCJcC)`{X<`z7fsM)LohN!APKf==kh z7|&*A-hwgJvHM*xG~%OX49rWe5`_C31nFaT_gV_ZR}HlxN*n-tDqnsX@K4B-%(`r zB$!Rt_X^Uoe+(Oz2U}H}dwn3_MC8l4x#_zFap3_`?4+CzB^(f?&Rv4A1UAEV)rVdu zTDpm;vWe67i&6{aY$$`xh}J5ee*`|LzVw>e07D~}=2*@3qO^{3t~X(yD6M@XKR*jT zslJ7@V7f+cvDqfG#jTZcs5b(>Y!!t2;G61W$cd72CZwk)pt@ms^ask3UXCAj3(~yO zjEs2rs`?xfqL7cx4Qgw`@ID2WR#&NehmZ(@LjGK{d2pjB?W7Fui(u=*PPIzv(I6<~ zFUraq(jZCuC`0=)n9p^%@xXz~MNs9&o!=kH*MK0*%F9aw7b+(~6D8;Or?le^NJo#^ zAMPOtL6HHdcIrg&6Uw0eh&n;~Sgno>FHtegV({>aGNeCary#rpZd8s85h#w@Y_t73 zkk9J-3J!?k_n%t$N#IE3D&P|Fs<|kqNs`V|283bXF@qy;Wv47~g4`BYFG%aDfy4L- zdqr_A;|j;00b380)U$;Xen$-$Mpv-({9}dvDQdt{QN*0g%%MziIQ4i5mWob2m3CvN z?3W;dF*|ww@6~2wf zhZ-PQXdW>_Zh3!&+_t0EC$ud8kK9~ZO6m+27(|T8!-tc)g8?=RuFW-@b(N(`?%ys53n|6HfI>bakRsL>C_!Mls4;?pZG!L$ zxKugyS_Vu`%t*yMGn7)J;qRUA3?&FXl#Uu%MKm@!c_28oQ|p~XZREJ$QVRV>aj|@K zN}2rC^yzNVwqYYlq(5jYFDJ*+aAe99#WvDrKT&S6JqS+i)OIhJDsg+*j*e31502hH zRld~FAb;E0DR+7RMWE5>WVlpc&uqM(wA~-<;g#Jn-rDvm4MjYdk}?GEPg2VC;_$uq z${lCUkc2yeA}5%*%4|Gz_uZrqdI2sUPEE}Kw|0s>OY{b-^$|*uKRB~uB}up=C_;Jy zXIR-w`QQ)AIqqS`ZMSJiNk44o`yQplAG9AnOp>mFBBG~o>EJ=q2Y;|b5dH~nRgT@t z0JdBwH;UqMN{K&cKXQa59-v6*Nf;3T#_5;0m`pc;V>{KfM82Gz&9R&n@B?lmdU@901C3kLc)-+gJtl z#VfDKJIl)Ct)oWCn>kJa{>C+YhBjzR>hKaq1OQlNwf+Vi+o^UHCI44&+<&f+UfBnkxy^gLhAMeqXMv35Na96ZWrQ+g2co@jgoYc>g+Fk4Zwvjzm%)S zj3J2yDu)l3cTAcj?_e7|{7tn1paZ(7G2FqZ0PGXR(~Nz$FL6Zaz)H~zi6+w^aBZi&t3*v1 zl$Fl{V92}uz67A9x|$?%-;+HzzIx#T+1?Qd!mg=PUHUgZ`z+a>f>8kg zW~Qgxz_p$7t`a>vBcqh+6bN`50BowZ-g>JGfvvT*WP1u|>)an}0a6>)-Wf&)V0v1b z0Iuznca@mu2M-=WbqWN$4Z!(Bhg`_pHerHqn=96~=B~S3^jti4jBf7)BLnbQTAIkX z4&4BpEH8HFBQ`c0T-zz{D$&;1SToxP1oR+AgV!mDwR0wJ z`^1T4TTehAXFWZgf>4IWnM^~$wVm>=62*21L-&c&d8)I&@HPOKTU%Xszc!nEuDO|P zPXTSw$5BURX{mh0Lka5$qXJORHe&H66Hdm+DhQi{wB0;!$alWH4FG_RlOuUR96~m_fHEU{nBh@%$ceta9xlxR_dH;{$#e5BL&*Z$A3Sg+OekPOMqu zxf)`h9bdiLvN>67J z5rpRX^X1DeEiQdWKZwvtlf4|D}U#i>o3 z?I>Bm=d@}6g$nN{v|-TjV&2;vmS zR>ARA)SzMfgbJIjf^o&EoaxH_&VmTy%Vk5yB`4n*$jw<_1Gu?^am2V9qH?A?^*9Tj 
z;NiXY{J&*pKgsq6;UDdDLX#-|FgGjf58z1U%HvZ{UqxGx@jQe3u(0qg%834q4~7*k zW8C1812_U#0lWJ((Na-HyeX<=la>37GNeD^Q_i-D?X-%)4FeuI_`UGD6HF`Awb?@= zBS&xJ`3B0M{seFUE*K~HUp7$n1T^(H3#JvGY2f$sb8QI;w{Pc#FDS$M1Ge$PK}&4n zZQz0dc^}*aJoPvW20(d*_gnJCgoHc$V-T3n2a947ZU+Y{7k+KHd=(6Yg$7Ug3i6pD zH8#GHGPEzEhUXhq@(WZB3|Vx$6AXw~ER-PR#>S5O)XD|)7&8>Hkrf{qk&$EJv+8?~ zkgo|=do2k0tV9~MoRwp!i7DfIGvL$vto&v^j)bqO&pk@MCR#c|9>lZnk~u@OpJKa0 zLtb<53DgPVxfe1s{{-JuAK`0Akgo|0!6Jl80Ogo4Iw@(==Nwm!hf|bOy#cVD`B3$X zq@;V{69bMG;bTx2TyF&ip$tPUjQ`vQzZyGgVRp`6>qN2F{ikkw1+jHecFv3NLG=Yb z!MC84n*InjBG?TgN1{HKhQ%b@{Xt>Tzk9>U+I~^GzLMFR7ZZciEQtXr2fn~3LpNE0 z6fDljk$4pY+w($((h14QWowHpYZ^oFe^q5SN9UR%>uT5tTUDE3JA8mIhCN;|N-#~e z*?@NTIIWqFLm$J;WKeKYx!JHLrtZy;qye#tzumQHHHo-R72wP!uue<9#i53Vf zWN;D{1*SRj?5zbcF~3}3HveTcO9Q(E;W(?i1NMDo=wdoguC-X!VvO9FmZmk&CW7Ex4fY6?e$h}Lk(wZ+AB(} z4T8|#P+a2ji|)9F&-Sv4tA^wDud{HQ(D#K*^DHqwX)-Ha#;Ce62FAje7#lXg7LE8` zoo;*)IEs=EJ89f`$hjZ{X z#N7)*0sQm}p#2-4p$*!i5Bfp}bU`O{Yozz9yWLv>Apj~5kc9FNl?aH#K@dnp%D^2A zQjxa-{Ech)3~kUBeb5&=pbI*~r28mZKmd>j61Ct^&_36Q))1m%GW{Ppre2Z8o$I0i O0000-fw*~ht^_A8_p{OnG zCJ=&RLKX>xyg(2Ffg}b*r4fXvxzS2SG!9Jg69q=8E8ar;l2$K}N zfBHTR9w%Ug7?|=Rv~z$@`?Q7!*w@FCu!L`ag$LNz_ak&r!M6%+;Q{vbeF)HF}2 zNq_`|Ws-CuJfOf-BElv~+6Vv;5=tcLC;&i6h)7fbfROM$kvc$5PzT5f>Hs-G9Uv#@ z0_21~B6WbApbn4|)B$pWIzUcP2gnJ!06F13B6WbApbn4|)B$pWIzUcP2gnKP069Sy zASd(^sRQH$eSn*A=ugNZz*qo3LPi1pgkuLpl%MaGvADw(fZS z0eBPKTc_Hd0O0ZV2aXG!0}-LXQ>xuhAFzs*<2#ly5D{9jyO=&;8oQ4$b!(w4BJ^YT zYx;m8?0$Bf_!9(#U$FZkeLyF6_Z%l$c-%Chz(ed_69XDPXWn(37$9zv(2Lz&ObEaP zcKRDn-{mmW&UBFITpCY2F~OJgJdnNB1%p$$X4WA zo@f#b^0l%=Ow!8y&Tn#pA)R%zNEzP_`GYPD1*WA0LwY;Q58S3vH~7YSMu`cAwD&SY z%yOOjt}(;syo9kQCm2G_|2we)GyVj9fSjNYkQ4L)a)LggO@N%B01y!- zvk$NdbJP~|b@l-MgqW`|Sp)bJV!p&=4d73R8OCH5;PKHmjFAIWmjePW2jqYpkOOi+ c4#)w|08MwkKa}4vHUIzs07*qoM6N<$f?~y!82|tP literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_mic_red_48dp.png b/app/src/main/res/drawable-xhdpi/ic_mic_red_48dp.png new file mode 100644 index 
0000000000000000000000000000000000000000..67db462fd2bb9d58b8071c3e938abe8f4a4e01e8 GIT binary patch literal 1557 zcmdT^*;mpD7zLM~;esAe6!)wV8arrYE;A;B;zCX+W*sdPIAUffauko2V{RSS#-vu?cY_$Efv}RH8ZQ`Ch7dfD_v>I9gt&{xO2w z_aw8iy?voAErOri{_t{%u@)poc(LLEtzKK=%SGP^PP*TuQH!TV~OEaTdP%xs3h z^22AdofHZM_oB>LsTUw_qHPE(Wjp4l7qcn`F+_lst24B=Mo52LR{PtLGz3!uVA=Pg z*M}|IO&VE4_O&7p7~ad%++$kNLWixv;ws7Ipm*)fB;}W_%FIl4tq7;hG3Ae3taj|u zExa}0%>%tB>Tj4;>&INXih}nWD-kd^#dtG^b4*gT?O>C>dO(muju^PQbzRE>DTbm3LjpmH~VOiyGAm-`R92v=*GLJc)_;D_K(~P>e^oMcPwomtcHG zZ?+eQz)`Ff$v4i?uXKAR`<@%}fq1Yp-!@1u_87)(uG8Ue<6tQ0ft*^;NpcBn*~1mO zx7vOs0jW8}rRew4ONv>#fi`vPCyioXhewipc|=z)sg!&?S5esQ$@a+~ z`Z*9WYT%Q8+3VRKX7$rZ^zs?xBNAbrA_LrRgi=wJ+RHW+^YpvP>H58}1>u-qRJ1P- zL~y#svE9glB(M^`=Bsl@fx$;2DDsmW=kHGLdc&yEO6w9xVV|AUysGrgGo8@C@AP2s zUX~Sp-diP@t}$sud0)^CYlu~*P*12%9Low8)5QrI z^B=UH%*;1%cb=!o%RV855P~Xh zGRiC#7Bh^}Ntir84nIyB<^?vXZ2Aci`~X^rl1`N7pa*oYk@g@5wBW}1paz7A;>I$S z!3^lft>-}usKUlAn^5oqI`QIq@B&8gVmNpKGk6gRUVw!cvET)GWH>+q;)Vkxz&0Ep z0c(Z>Bw*EWfCN~E10*13I6wkc3<3&#^*}S?8E$oJW_P2yko|yNsHQc41r;pg#v;Xz zxdlW}h5Yy(_=@U?W<(PkvuV`KY(O>T&kxyMRIjvS+K98BIG42Z22tJiC!mI82eqTl zvP3$+IjNmfN3uOW1ZckN$0*Q_DyNS%&1T{gO10w)`GD%Rf6qckUBCBGNe4qjh!G>g z5Z5So{U5Z4{d|}kMP=zP=J6k871f}>PosNNE;mO_Mz_C@1ymN*em_p%A=!6oGr0Yq zB)jGRdSDCh9p5eWo4WnuERk%S!dxt9!pxD(<{sNUonr_0iIeOH6}eiG)v-dV&osK8 ze~Ky1PSK%UE-4!{3Gj|4wyryG8_i5;H@fO`y(VrTiq0aBI75kJd>Q9>%rA7J95b<~ zKfny>&GD82dgx_7_I{ z8(X#XVd0iV57ohY?XsA5K5n?6ncyl#L4J3-lvcVJC)GGzv{DlAcc+yjgJW^CB5V~Qo?rPBIjxF5(@@-$fZgJnt1Jncq{CA(4WL{hp*xPlb^7hm9 zSzq&}2G7&V-~TVzZf^aNibH;X&%giU-pY1%*45R2_Re;H&Iof51H>YGShIS)V~q zLza@rQaqv=o;4B5@>JeD{r>*l&*$88&-vbS&%K||_nyy5b#g?B2tot_0Dy>{t(6N` zWBwC}U*Z}df20Ev0I0hw^!+NBYYX6PJ@Ej*k;wmqXWCC8 zhU=7xwDyQ}4fBg6T#fJr;I0OQMrwunMk*q-w6%0H=JTAmaS?V_$O|#vpC1IJNTFqV zpDMP-E!(?9KQ6><{RvC2&p;0^TE6W|7_vAlK9wUi8llo9<`>x@msE7AV@3%}(NrezDwzMDTJ_|F$D^cNZ82k^&wb2ZSF z7b;wy*dHt&BF>y48pa%c#%~Jxggk8k`pK)tyAw9CGpWx8vam5LzNCddmEmzKEr8ga z?T*lU_choi+7d-#qfLbg2*3V(5@Fe&eg5tBA3UurAZg6aw@)~rKP8>jfDyZX|2EGc 
zNhn?e`y#7{?}Zjd%Yb*I_)5oF_8EHL2W>`O{29KjJI?|YVkFD!^+5r_fU$%qI_w|8 zbAU>%3Z+c-5k5+M@e+(JQMF8k4AYJ*B%C)7hML_bGTo}sdwY9hD-%sY)1?lDUTv+d ztE-EPG=y&-wW{~7LYxZcdBDY{z;`Cx(XigUc|({oHHbeUncTGiK=*dpVg&r%kXP@a|{AKb9>@) zLu9N5-2LSp4^GD^>TVLECy~(Jyy?6vvA6a4;`GFXk)5Hs(8R<PYO}E$S1GC_w{{I^zNCus88>2h_^E4%y+xw0x|$* z^+gs*KfRyd{xMtGdE?p(vdDRQza?`0av-=*-`loCt;id9n=`x{G*%OWKKe!nwi!HV zvmB&Dr3fFdh zD7xtynW6@d){0!_ucOfADRix>JAr?7Rjj=HBrv?lK8iVdY|{Q%azQxf@X@G$49g!n z3N$Y>lCk$}fEkl;K|)U!qLjKh4Hzjq|Gaxv z-9vyX)~lGw+wM_~biH)W8BDRCmLsi9wQWCTQ&oS6J|i=RVHhUeSsu!0%0?&hdDLHf zb@`ZrzYb|*4#se1B?Xi)$yI`oWF-VZR$e~Rq2sv-^)%{uChsR7u=HE>@mVJ--{R-b zw{#7j+L`gpy>_<6^4l25bfe&JlD-xrSoOZIOJgD}^8j-rM#D0lM&G-}TPp=+@`@1v zZc$MB$3Ex(o1vM()@h z*TOD`<9%gO3^=G6EZdDkqM1y|3Ld?%=@qZ+ACUhA5qu@Fx@gXf3xjWQb+ujJpvlgR z_gAXIj~PBr)xS~+Nu8p}7fmqknC~`H5M;KfRf$Y~wV-sKk}hks!nGxtrMil8HGQg_ zd7LRS3L__1b#_0o6=Ag=D$`q1pcB1N5KF2EMzM+y)}{#5)YQy4RIi?^RZbL;d%3I< z3?)Zgy!R`Ur-)^KYzJ)hl}l3k*(?Za)-qVr>EK}6PA4oh#r42V|NZx{tGK|NY4E`d ztqf)TF&GRxojs}w&p;jA5$Awfb3fP&h*~)NUe!M`5kbbZLra*jj;Z8IQaH>Wc&$az zk`~qrTYZ5}%CK~XU={`*Ksy2)>_hCbao&@#&;++%ejM&gGP_X~9%;QBA<3mesd|ab z{cJb=3tTF+Mpm5nuorS_mrp_nGSx_r-r0m7YSjq7koFdEEZ*}Jh;Ucq3dI5yvjHbkD6yP-AM12z^{6fks3bl zZ3!mRb}Z1^gVh(CYecNKTfXIQt!U7Re_KJ6+5NJ}I`RKt#9a`f-TgUGrLXa1rufxi zr81|-Ip>IvHHy^67aFROb(e1*f<1~+=3*0CrE3vsr*$Zl4Fjt=r<@Y#ptROEO#`_D z9FE~NUSklLFQONgu$c;;u$)1IeJayD3hvaNlE^LQ74rGRLuZNo`LXen`?;?5B)st< zQF#w>PE8rDQZ1TXtvyTwsOo+T1qq6TQz!TT0>jjFc^%d9ZW3eB4%JC$us#oZ+rt)X zv2$YQ@AtAQdLU^dsng!XvIs8-P9=RTg%guDcyc=VBY1ubPU zs&WYnkK=g|#1wBU$eIz-&owp6K$L<|sQ2YYd5g7PPxV+s0{UHW$$RTG;k3DVesZ#e zjrvH>#~u9)5A0h>iV2OiRMrJuOGO#0GT^>6cbx&{kHPCg&7A!QsR%v5)`NkT68TiT zab1~ov0|o3B7ut6XiJ@GHm~-4!?eZT;oR&LA_=T+DAimGhpTK5Z@Vzy$}~`#z^am% zr>o@i=@#;2im7(NUaN4gRuu-PDlL=i7U8i_om*n;s<0kMC$>iI{NUs^a zlQs?TOWK>EJ!Hf@zvBjcPN;#RZBA>EX0D}$?+WMst>@Gk$9(})GsW0-ylnGT1UW@t kBH+|3(f=c9obY|#s+z3W#+>?EZifc2vv#zqvA`t!3teBO_5c6? 
literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_photo_camera_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_photo_camera_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..777658e95515ca47c9852d00621e2e6d45abc5c7 GIT binary patch literal 894 zcmV-^1A+XBP)zI9LJx5Cecy~F=}h8lwKtS@9n)MRxm2`AkqpItJs@p>)C(tu-V-#=CId=UD(s^ z#Zy@d`v;fOn~Tzu?OJHHNtIePI-lk`KN@Bx3G-g)`+Uz|m=FBkykXu`7K_DVu`~_~ z1Z4r$j3}CsFA0vr7p4ddb45K=_{;=>kNJmsIpZVU0%^)LLWz`4fe-nOMmgjIT>|g$ zGtFS1ZXE(|^ApYDN8W0_z#v8Txj;7;?6E~Um_R#sF$;$xalZoX{D6r(A?{D$E~c{O zM_>?haY%a$6fv1Q-U9oW%qwq!3ryzRTflV87q9|W;F{;GFv0){qSYiAAj>K*xHezl zfJJ&yHS#`7{A#YiA58NG$|F%`I5JV-9to7sbaLM;flIzZ1Lw!X3#^Fu{wYByi=R|$;R}qoTB8JI$kiVUUtqx1 zx=B!Su6`5*yii_yR9b4GhtSXus6)G54OU*Fzb<9Fuf)`*nQG!IEfB;+M}w za~X69{6e&@G0KTnj6*#FFeADVZuofoh-l4&E`cLDWuFO3wP#-rPc_|?^a;Rylnh;L z@)wd48zfOOJOnKQa*Rac43Xn8&v?osa%9ANUyhGPfeM+%FRSSkfU`OkhV}j2l~kBT z1bVSYwP7Nn+I` zNiohkFLCTaw-vAgjq*RZ@U(}??0O4qVKTS91=2W}ibKjn!0?Ng1(Y9wI8QK4e-7{>7-2!>!R_PTUes%Rm$Vr^?-acLICZ(tSgz%L*+#!k3eELCi*tnaFXXr-kf zW(zf&pyqFP;oiB87URvl@H|g<@3JqyVwhr+a1upP6h%=Kg&rfOv;JkB;s90x*MR~^=vu<1( z;E-zs0)4(<^O+61&Ypz)fP1s&u5&au`N<8t%$|h&fM;{Bx-&Ps-?_3b1u$XNyzJ3z z_Tto~;}R~iuk;C~E4%{WJ?nO6PeN|MZ*CIea-JQ&b19z0Cm}E3A)%vvZvE4WIw#;Y zD+!j2a{)e5C0Vk@ZVo`p)dUO7NmwS}E+Jv-{|`7~2}$;cuLm*UgDb!!20V2I7{-7r z9J>MRQ^kP5wj02Jun2ILw=Mu%-Je-k+2*)tz<&HsK(?28!XAgk0S=fjq|%pq0R#|0 g009ILK!B6}0sT9E%uJ^)AOHXW07*qoM6N<$g0#uz`v3p{ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_play_circle_outline_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_play_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..7cc9f823e80fa0e890875daf57dacb0788191b04 GIT binary patch literal 2700 zcmaJ@dpy(KAOCK188(#2R&II9l&}zTn>#&dE{QctD>IVoGU2hMCzl!{xrE$?jL5B) zJQ}(5NRvrDE@|Xao?M27Zhq_W|L>2_`JB)Bob&#?&pDs-d7V_OnM#lt1UI4-aNSC5@FI|XMH`LYFHMlvOixsW=;cRE^Me$!K3Q6%E 
zM)p3z6xJ1UO%0k28O}L#Toy=9vdyZ8H0Ifh$c8|+(#pg#W)al|;5`1H!6`#m>l%Axze2NoNb5!xV zp;DJPU;BY%cEu9s7C*jZG|!BeX8;4~(KJg=u0bfOg%GcTC|hLIwDkkEX4V(lQ^@ca zJfmkX8(a%oDBGe{V}$!rJmoyMa9DFeI(VXib)tjOgDHet0@dWHu2ey0W@bleY3a!) zmlv*F_|QXFv9Pds6hkJz(@*%i_M*}yKT>M)Dd}{>xo(w&gaoubC-hL+numKkCcd8v69Bi;nmJR)i>-%x>Mn>+22GzCO)CgNHY2bC>fI5E2B< z4Fhb#%WH;#(+3%pV5wfUktX5*Pu|VAql05K(bb~tpOvO}JrTqjsLt$x;pJq(VI7EM zgnuBnF!p{VPIVdAf?F`c$Nd(3yhftStBD0C{<;>_qnwdtJ7uu#k?mU z-fx-mGB{jVR(9e^`QpeL3~OnykFLa~Q&VzROv^ zOVo1NWdE^Az^*tU{@HbSM$LQew8v3%6W1=Yy*&`Dwx+|(LBP|oc8XOQOi`MLU?Pr2 z@;S7O7cl1^KYqxR&;DZwL8-afAmRF9y{LGD^yvoXxH&++jmQcdxSLSa>-P$tEvKbH z2V45oI(v@0xWl<=NPyMPXpUK$NVI#Y|4EB)wEwmPXh@uX)u3l(^zS2Z;H0+`uJQce zp{jbsb@0p!DW`Jko*{<_xl=}lVh=(0YHl(N&CNgKt|rQ$&bVQa;%{C`Un;3_xXvL0 z_sVQ}=_+Ra>AIo3N0`k>Fi$xNA?qOU?vK)Z7X#eeXS;LBBtw5qt0aCDgCzJ=759(1 z!(sMWBG`M3Ywb_IaF!(cs^CntgxoRZZZXs_8_PrhtQ3(i5$VTeR?${S5Yo+#TiOmY zf|X&HQBcW>tg|S$g&#c*2(T&JUV^m)+7D}_yUSOiq^`||SLs@SKi#+`xs5#Fhv>$; zijc1<0|zr0ugAvXh--oa3GnO_dz=U`)jtfm$Q3@#pt~vW%mj;=33xo&oeh8DLYNz&?uE^+4&WqXTnBiH`;?_{ZMHg0>@isXC?3P*N`?n-$1=2R$m zD49jma5@6SCKm>paN6$LS|2?R{dYmRJBrQ9)L2%b?z3?-FpF9IoTeM<@!+s?Cpom7 z0>v2}5wnC~y`GZ-DU^+xk1GezWcyz!fI3^DUCyUAv~@BhdC9m$ef#t_WEeIn%!VaN}&8NQPjMAd#<+Yq>`9ieId zvmSXj=PQtnh@9^)FkT95PO91$dT!^~r|p?E@x|J63*y5Y&EY_wh@|MX{-I1Bs_dn% z6xksUKWJ5#8Apf{=?!15&SQ`g5)!R1V5Vg6`#7Us!tAq(Z+2woia}6~4DL^B<>w!0 zc%{L7AY_9+jsKRaNtexA=W6DA!{Klti`%TJiU{A85foVGn!hWq8N3qu;uM5 zn)@&R#S3q0eTPyjzr_oEzkK#6iJ!JZD)4I&))DeP8#d%-#nkpq{|L!GFVZQbLT8!1 zTd6JKSIqI_`m;^<-JATb?Aflx0VBFOOOa`E2HbR9L|R|<0ixMmIsCj#xDwV<;DGex zfG~M&Z)IBx9ZTXfp(Gcn-%T5t<++rCrZ3=(twUA2KyNRz z*}44FCmOs?%uD-5Fwqd5;CTz^E{Q2r#+HE?N^1s`oBf!T8uCQx%@k7XEik<9JDLsn|D7@_*`(t5eebANY_h7;bD^IVlSiE8s3;q z#5iV7^-?ExNXIffsq4E5PZ`Qhuk|JC4hEP@_l-!&b)gj*n|Us2Gn3HDWQ9QOp`Kzv zOkP6|Q+sydeE1bVbHUx%hXflWnscXyUL z-Lx|-J4*YBoxo3hm=%a)qKWL{Wu3FMC*C_^8rk_-6bq|EJRf--a5p$6wW=1>{U~N} zGUvE0p(r* literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_power_settings_new_white_48dp.png 
b/app/src/main/res/drawable-xhdpi/ic_power_settings_new_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..191836bc4f8c831bdef7d88a45ee2f318381ca00 GIT binary patch literal 1074 zcmV-21kL-2P)(Nz~#4HO#Ng=q|>5sQk3ItmWp6Q4r$8y)ci-bOWT^jc^R)thkw+9)IaMYqv_ zUaCloB%=o$L-mgF%jrC-qtOGt@P8wX2Xt}^>1xz~9z3K;D>pQGB#(!p1xz5-*<&T( z0UAi-(E>h4`o!8T<)=uqQ38@wke;^|FpTskiSPjfNDX#c3rN#MIuJf!4CyPYPbI!V zdL?|oMWpww223G+5I$fIX~b&438dNZ0Y4%=Wi{X+QYn1EGE%SAfPSQ<@BuZX4yyra zq)PaJO{DEs0}@EB_yKKJ1KQ&UtRwBQ8jwM%#NUat|F@m^2GTQD19I^XqjUa~xy5Y^ zX(oKYhe+qF223HH4<9g&^qtj!>qw*F1D-@`vCG=>@ADnR2cq22)R11V7BGxdAra-4 z@+#72)&edeU5R#2{5sMG_gh(hh6d800zB zb0<(u8V|^$8i^jTgWE_|dW|l>pBmCKsptXz?;}hb4Y(ZqKaU=;n>AG9#+E;UYK3&% zS4?lAs&l}|@(0;Kbte8hvUV0wEz+lb`43Y@HBVBS zjMLU-f;1R1G4OO8k04df(i!2JCr^za9>+|LoV|)iu;0m(330((V3lAVqb7&`j??77 zG??ZkcHiY@ahRDqe#SA=W0!}N-C5P*d!{%^j(#$<(@utda*Q#>4{UNLYYf>KeBI9t zOepcFt;yXTe1u*e7f9J1J${Z~&}E+7R?JSf@dm5JwZa*aTQ~f@i+8z2RLe|~-umhF z6fg579$|P~V}#VbG=AU11anlmn+jJrP4~U}`+)?5oa6$tlvtulixw4@C^5r%Mj0Ss s=l28lHy2=ka{(^E1-Jkg-~wF0-#Q6Y<49dt_5c6?07*qoM6N<$f()|p`~Uy| literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_save_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_save_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..bd80bf1f74f5dee36deef2d8fefbd1b59b2aa1f7 GIT binary patch literal 504 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD0wg^q?%&M7z_{Pj#WAFU@$I#EFX=#uwvXpI zW{Pd)=D69^(an*rvIc=k>%>fl|gI;!tRK{Z;84l|>e7(Vy4L%kU(lfKjiFI>oQ3X>ech^2hYjIl+l`2ky&>*QB4^6P)!3>_E@en}u zf_ao9X`r|JZ=3}HqX9G*X&Rt?K?KdX17Irvz&%$tbahngDDiMjS_7UgNHoz0&Xi0N6&XIF72kMfZ@bVwKW)b`a;6z2I&+E~|}G zwTpOh9PMyd)mq=bgsz7;jxgq&96`L4~?pJfi;aOAM>nCPq;LU3)PI{*hH|pd-2x zViYsbw|~5a0MOc2KYEI4n(-6(8LIwb7#gvRp{Sk_DMnBUT|VmpfXreE`O%dZBN&Zp zyO@9{EyWV{qZ%$oa2C~{wgUiS0WDFTHg*Bi<+l~U#-T(ZbZNvJ%A&d|rl7NNC=te8 zRHeilTA`Zb!vKJo!a`I{#TzD}YU|qoKo&8K_Nc~+H*B=m#WA9d7({JUYs6_7B!=(_ zs^dWuz*y}j8>*Xu4`8Yyk_y%H00gjeD3JoyTk)k(fHwc{Lnuoyp2UH_jIt3`H3tGT 
zb8sMIqT^K}qq9|_6{>jwEpv-ZKj5Mas_Q-s@Vn_ceGgSh@rLy13i>EOdNGN@=u(S! zoI%wzV7HX5QJoSa7=>z=?Ers>B^*FCOpKrcx&pQWWDrXzfUcYvK_pL64GzeIt>LJi z5+TMg9o1t}S_;riEF%@qP)!k|D2A@9g#ekw2D+dtCPv}n45}w&^y}mG%sfMNiZC&b zpTXJ=G***M7aI6U%pj6m=!*O86;nxcw}=#T_ygTlQi&gsz*Tg=iCKhmK0vQo7o)pC zxR^&}R2zsA$I%IQmBbnr`+fBr;%*5pv4)J?^Xux@;vKpNWD@JB;>Xo*%p2TQ6MGo! zkJZOT7d#m%c9D!okC(c>#EXxRhQ)ZYndDAgsrLex_(ptyVqC+M)1(q75YKJ^049^s zX6KoSXP3$D*e91-&Ff}uOOc*lJi)U|s%yb5Z>HoF`cL-8cNZPrQE@rVI&tPQJK4r{Qz;Fs^UB{ zu)vW3OUNYtLV3>mG{6Na;u3!%Ilpk-CjoBJgm4K2!ug()wgQ~yXCft#aPbwBcx*br zQ>IajFbNKXQ--0OFcjbnBdI{71S3*Wj3$g>jc6Fncv@11bP^I05)u;PGk*cKJ1!5O SXtogm0000>P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vG#Hvj-3HvuzPpLzfQ02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y9E;EfJ{bv9G16fH#K~#8N?VHn{bKrOh>1nP3LlG@SOnQ%aE}!|)-#BUx?u%p zA};D=)ahJe#im*o%|u;{H#a}hViU(38z0b2)CDVak!U9Bz_yneD{_(6<|Ueo)4~c| zMKl+uVLTk(XT^2xTz~Ac&s=YdZp6H>!UnDQH_V&tvmm+=^N{s=S6ESlh!y^kvTpY> zVq#Y8=>Eit>e@y25HY_5DOLI&-$@(8gBcBlN9WrK)`D%ke%0V`q;&V% z;0tRLL`)Qs_4`*@@q%`$xrOKn>|B4FwF!FPh>B_W5zdMiL~JUoO|aU1vBws3!Y`xT-h4=Y$KVx_RQv51Kn z+uJ{6#R}TD;|ZdrSoS&VBj{rxDgvxfv4~ZM^%0AhaBO#SofRo)`}?nmRwf$^zOlZ7 zJ|?2V3(SfX^!+`emD%6fxx@Mj&htl2l2N;Tjuj{t(V1a={iBVI=;~re_gh+=C1_)p z-d7|KE6g&*G**+cN65>NJ=$IwSV7K0OlP&QBd_q+49nE#f@!S|S-=0C72_=QxnNqW zLD@_6GckI}pFDQ)eNT%J>Cb)+c6aaAq4&kci1o+Vm|ps@0>t`b1Vn42$|(=gw6UY{ z8C!g8(QC(fv2hDuSGqRtQ^eDSEj%xxwZq2Bi)fluItmx~yMF7t^#2Tej)p30{j}c!OHb&uZhyGaM-SdBk-EAJx#YWmr)+QFX#7fVs%z=ex{Az85t7cMA&3WXF0ls~C)m;)CM zC|3RzhbhWpQLANU#?ub!`}zFZ)_#6p-|y#F@5e(j#u#IaF~*pb(TJ~6<%X;HuBf~) zgMNJdOjkMK5QPq^d{9Lfg+5cIa=|8svQg!Nc82my<$)y(XR*owH%UC=F^LHAteyF9FQuR`+ywE}# zT9gmQ@qskFXRPwTIzk;d91e%W;cz${4u`|xa5x+eM+c#`$^#Dx?bRsJLg<0A#{xoq zOw%Y)#}7jDl{GFAx~w8_mC!|Hi+cJ9Em9F!N~n)IWr+iXUZ_aCCbVDKp^{EQ+f^iX z5&B4_vcd*JJxo%OsNox-^~wrQ2%XhG-~yq?GR7EV{ueT)w2?8Tjf^R6WK3xzV@ewt rQ`*Rw(niK)j4{R-V~jDzm=XN}Cy}nOYNFTz00000NkvXXu0mjfpF1iT literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_timelapse_white_48dp.png 
b/app/src/main/res/drawable-xhdpi/ic_timelapse_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..9be8b3818729225b8f287c93cdd21d2a8a4437fc GIT binary patch literal 1548 zcmV+n2J`ueP)S~nw}E!Vie(`rHF`(Fz@`+|%DMo(k-(O* zIeg&3FgJ~)S8hu-RLmTiu&`uV(pj(+u(N_-A1GzVc+Pq6`uqF;PydexxHzwuR9042 zR#ui88fm46*La^Hu8<{9o-9`w;y8!tp_K)?9Z^d=2N@xZVGgi{jBXE9(8>Teh-!wT zJVu3XCFau2MUuEc7j?Q7$naApNW&NOQlp!T4n8GKqpa7>!cyL&6ppc2rN^^OQz}z* zsFbMYw*>iwx9Fvf6*N&zC6!du#0uJYo}-KsjL@LusOB_&4pXg^kl_e^PEn<#ID%J>jY=7t$>ViM z$?z;*Syn4F@=P>pT?-i&k9Q6cb@gvs@0g<65b(7{D4QqnHTipWG>fIwrB+ zR5;Xf1M3CFMp#?rU=P-HY7_?TSOu0UHo}^wNe0U)U_GHQIEeLu5+J-^$iatLFDnd2 zur@0J!kgx98FXQtQ7{^DEmHzSu~7#1;+n4@v|@dtM2O;VaxjLqN>!S}W$+Nzpn`B5t4%SmlX&*X zpdITS1>rE(^NNg}#MLH)KCD+2gdVJ;iin-WRiIu516bV(LMzs&B4H=-{Z$S=!&<2z zEWp*Q(AY&18)R@lu6YW^FxGPliCrWy#n)xfjWwh&IDmCRp|Fc&cFVy@to;guHCP3f zDG<9zW|9UOwBTB;Fvu{2wOaw`C#g<3*o!qymBQgD)&#Yh1^py-lCQ~NE>l>)S6DoT zt4BrAPg0XCk%K;5-&a^vZ~<$ad*q9LlKP6rt@5CbVO(1j6*Y|FIwBvm z@Gs){2TSCGH*j5`O3|?%ug&to9D2D<6xZ0oo$^69UTYN($8hColP?ysj~T+4=I1oZ z7i%ft`jg^fF;lp*tWpuw)5$4j^LKvC-712|`4ZP8-%@;Z;FaY`&4O>Rj$iQ?&M`rb z91{%lXMV|#Xw)oN%a?eqQ!*UFD^I6VMmKr9UQ$w2aSA`LlTk{juJo~9v0?gwT3i$aii!WiN={E#Z$j%Z*NTRFhH y404$na^#rdGK0Lst8}xH`6??bD=RBY&Hn*bQ5x#j)VK}+0000l4N5{jIAh*NsJNktu<}E zJQVt%e?V&*Vk(H$hhl?O+iF>2DSarpnVTlN*=)MAU!Tv+Bw=RGoNiz~=Xv4x&1Ejh zIU*1UsDV>L%@QbDK|l}?1Ox#=K>n}%4+sK+fCw+s##dbBE>om2Fi11SpA2)JH`z@n zUjw%BK0mU|dQx1agDv?KP)su;IFsXN-XMaoU)1xFo`6k!${cP@(M>tRvi6b$zNWz6 zfFgQWz^fTf60(jDz-7Yz1svc3swCKJ9Up*iDexnpl#8g9<1B>;_K-yO4Ns^J*umfU z;SU;E#Ogz}0k4o`Em>}GnnvoVCQ6tvQL3qyjD4D4L;@K{jAfV1)4 z2kan;Rhl@$<9dr_?7W6h7f{M5Ru9>UP_Swl!|Hb;>H;oem7oftXvG-C>N8aV2W-cG zdYw->dx_tSH6cfxihx2MVD^wIgwg;(jB(8FQs6zH8?!V!5y}GuwJc-yj@N)F|6vwK z_!1y!!)%%|ZvmfRb_wBIfM5`_PA>t)%pzN4i?0DOQphF=yANo@>=eS+06`CC$J__} zg6toPeGe#Q7TGno0b4QaM))2e=tY*}Id=i?AUwsvUYa?KUl9nS^@+E$cEhn zgh?Yiq%GhGvLpr0171S*h%#*f6&T1~a30W#>;^(xfZ!&wxbuK7k)75W(2wk#^MGMw 
zjamcZ$cCH;{DrJeYd}4+JI({9kX369s6jUFJRprMsx_bz*^=`B16f#WKm?hw(F3v@ z{YNP2JYaI8{}SidW%Rc98(9-V<7z=R=se&HWM^~+sCblUMK+=};3l$W?~l`2%C(LE z6b7%s;HdS5M#U@TWo-dhkbQ@66Ywsw44br!Z+)qM+g(5mvmPx0eX9Pf zBDjuhh7#Y$FJlhbPYAaG$1&^jJ)jq}*WCvcF^w!mm9OL1u!L-qLiYiJk1-qYHDJi; z_k8_Z%Db%l@wYO8S%Q%F0KtB& zM%k=%{Aaj_S&qG`{?UEGHX)|)tA7ux^9WS|B_yyKqgKiIyO_Z0HihZ}1lw7_YMC|# zSDp-3^K4VSZ}cEp>|CMR!)>v20d?9I|k zrGp3NKIX7z(Cp{l=|-}wW`zNcP_fRP{V0PxVl^u?_`Ui2APcM|%dhkkr=A)ri4dWZ z8tQ4GpWiT8%RKvZ>|Wna0zceho38Em#auwG9Oo$1dwyU)_faLm9`ql8AVMeecs0Wb zLXYnRM+JRMo{X@jblXe=@3vgC-|Nv*0abE?L3>W$FM@YNDH4c z#2v;-k>wFd#<;~5&eB9JPX-6!f&g_Z2nYg#fFR(1fgwO7*(gng00000NkvXXu0mjf DeI+{> literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/ic_touch_app_white_48dp.png b/app/src/main/res/drawable-xhdpi/ic_touch_app_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..3678975acca554d6d60805558a75785fe9f6bf2b GIT binary patch literal 768 zcmV+b1ONPqP)KN|29zjlwONhH9=yT5lV%*&|-HZ@odLF{;K*H z-qtg66`e#Pkw_#Gi3FXj;7j&0L6IU8?BPpZphNLT1CO$mx%|u{G$_8fixVv5D7Pz) z$nqhJ^&T0;3zxH-rP#@3@p}E^EY(pijn9u~H`UldT5-UKRO_91Y@XTV`Gk9Dp@}A1 zxQEZkGn-rO_x=fH`iiR*!qt4sOh;&tB_1WL!gHDfE2t3mkSwv4u<+}L6T*Iy9l8kn zN)8z|vYR}4cCntc9KI#2T~=5@P@b#YkyZ}#UkAC?9l3@ALC?twUlR0*n`byo*g;b6 z=;s7|E-UOM=pHxU$ZXcRqxTWCOIDa5sKw29Gn<|6Xd6LevO}`P4$MZCM9N;2eAmwd< z)6a@nMNo~itcpzp)f!+mX>S9ZLDrD*HozHTooNJ77-9n{*&r(Wx!Ugl*hjP90kGQl zfE~UE6to;@@-~1g_<(7km+#rkO`N;T0ml_3I_W2=W#ls9C};Ild-rk$g@# yO->fLmi^SOkLzTG6l>WL9?7s4sjZ z`dg=nZ3NGX*QczyQwj`vc|$kX%>MU4_u27-XQ1Fsx=&8bpK^6=W#{+5{x0v--*iiI zK6_5eHWRkq-woYo`i}j$!>)2&V!wgH0wW@j83QAeoL2*oWZ@_>deZ&>ak+XyvA!1j^LK3P nm_Pheb$}Qopa5qqD6eI%v1Ev>S}gk$=xGK|S3j3^P6YN+qP}nwr$(CZQHgn?)=}Fc`Dhpt0$?8e*3!3wW>8hrBbO>Dizk_ z=!Vl_%%yy>0v2m3S6qO_HOdu_V)24<#g|z8qC7DiRwFA%%!t)I$`Pw#wSjWPo>(2C z9C0pIS13n3h}AR75ual9gIbGGxPu=!i76Ds!gR-OWd$)A$MQJ)GQ7&d3G`Lt9#&Kx z7T{rGJ;(g2!!q1XL0@O7985`f2K5PhF{b)qLT<;z{;n zb`64&*_g*kah&WJmri&+%KQpoAs)l4D-+3z74hlT{90u*df>CFtQec0iT67T$bd!p 
zjd;H?Ufd`2o5ZQHq>PxCF2wneY2u$lmk{G1MPN;0+`uUEU{g9{^R6OsoB8J5S$@pI zmsl*XNK8XPbKfwJ(l7@1(9H94;8vP>jImS(_NSnco#epgG*aUThEZ8Kjj)1oT z5O$%;#Px)IEGIr8>^7B&#|V2`PCP@{lPVLh5O%YixP`EnTekx@$cY z%5h=(aC^&d;yYC)E+(vCEN5v`v}81yVK{;sjch6hcBGLGj%QeXLmKr>G> zuF^1%Dcl#fuXRW2`<{8^$If)e=5|HmJ#4zNgLhcz1`K>fI0q8r21bhiTK|wZU6@x! zEJ=+x?=odvt@1nZelv#p`CLo?g|W;0X~18rY(Y1CR*)4F(;crz8WvlR;MIw7WXFl5 zc$7^BzwMZfeR-M`hbRW4@+)cn{wCEojG#DdNlw=GWq@2fBM%lL4+XQz zhik~ex$gm&9SDHwroXr~VtMrl}^u&?p6EoI4})kyrydCbHx?99KU z_>-+A=HwDO8&ZZ3vr;gq_t}Nf)DM$z4_>!0;ULjjb zVm(qH78!;WxtAJ!9j~%50{ij=$1t&iSQ)$SjjnB`;$(i{c1Bfe@e@{`C`UYp)dR{A zS7CLIa>QX+?V%j85mu`xN6d%S49XE>VpA}j^2Bdge4$+N5*CjrS6qw51`&ZbiU0rr07*qoM6N<$f<}Vr^#A|> literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/share.png b/app/src/main/res/drawable-xhdpi/share.png new file mode 100644 index 0000000000000000000000000000000000000000..40771e480181cbf5144668d273b6b3dd4bc7d1dd GIT binary patch literal 666 zcmV;L0%iS)P)xD z!_|(6O+=Ux*Z_M?@(N0<#N_5P$G!gmmyO}H0OoCd*#VD3&vRXXmF2vJF9IxwUchw$ zipdIqNzc}Dx^@_xJ4!t2NP4=EkrMzz-ZR*`=dChGguclLfW)JgcweJ8YfOl`plX8D zWc~Sw#1~lD>{`NB4tDtSCXBC8gk_shF!n&A7y>epF7cK#B5WLZ1&pspsRr?Ze!b9D zgkXvqwRo{W;>A6kAuUMnq#0#PP)#yHQ$@S6+P4DNAvSw}rF{ zJ$@7;LJmM)r)F2gGvNTF0eR)k50gTpC8TAx2i6cU34@y+FddOg;>9BE+dM7eibQyVej<(dLazL!^fYrY*5+ zuMw@5SVM?gH$@Xt0yKoURa-nICqUyJ@B3loN%z};bH$K~0$?%b5tZd_cI2x;42QN! zQb1k+L^$*Ut_x62U4XA11(z2BoQ9s~v;Y{&a>Hq*Nrg$FCzuNTwwl`diBD&Mz#=CW z-Tw=z12gvSSJ?|$;!S^+W-u5G27|$vEPezS0Ffm2y!BjF4FCWD07*qoM6N<$g0})1 A$p8QV literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/take_photo.png b/app/src/main/res/drawable-xhdpi/take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..aa8464f629c0b879af933172b6c92aecb677182c GIT binary patch literal 11238 zcmXw91yEGq_kX*9z=D7@A|=w&N{GNx64KozCEX3Xw1k4vy>yB+O1GqRNvwp@-LT94 z_|AX+Gw;p2XYM(3&pq?bocctotI9uwJca-O@K8}fRucd~_e&6f_u&3x>|Sc~ALXGb zFAY@vda`{_VB1QmNC7}iJORoA=bpxQRWR@X08-cgUJz4enmqt;1t`i&Y5SV)XX9s? 
z$fa*p?5wOzE-OauB|t5u@eHgedjG!9A>2AjP{CSLV`3s<&Ii8=dnG8b@?F9TUh*L% zT;(B2$|D*px8SE>upks{Ffhp!ja}>c<=`whRSS9B?y(zrP@EcYtDPNi(1y%4o6?!d zKA5_kZJO273AIQ&k;@oU#&=yhnSFn?y{CLA4)r~g;6indoxNv=OB^xul};qi9j}X^ zeD}`#n47Mdl{I=Gh!4~Xoyp`StF)psSZ=SqD+CJ;I*|=zFMA#qFH-3e$zgNBKTPNi zNEI;Ya#AL)F#~I|BwyO;Ef8`9S7iwT+%F4@v>)RdaLAJD0yxDC6%sU(?b+{B2agKPG4^T;c3z|(+6Kav8DQyUt8j1r|D!;Fovzhm%~3k!x0&i7Q}?R7%T)6Qxx zkZ-u;YQ)LFrFKX;xm&F%>u-!eC2&$0)qr`e+8uBGCcPU}A+?26cy#Ndi+$Y#VU%AX z3p^IBzhD@P6VL!5Q|FF?e*E-Y;2LoCIYc zB8kFia|dsr4r4#?sQC(q?zQ@$t>qp0-&xBUaKPzxF-CUhC01`@$;X}G3KX>O-$>)u zJ11)P#+ZsKv>C#iX^pH?8Q5uYpxyDXJr|85H9?s^&=yhs4!z-VD84@rVoqvBo{h7R z-==h{^ZGdEfv>ERAM6Kv=odY}pdy|@TkHVU+6o1pK}#3 zW{e1Dm_md-`?L6x*M|&qyfkeqgvo>~1;C2OHFKj;e9A_?!}8PEV?w)#ydgpg>iM`e zcP=3%`y#|Ma_B~-gsN%%MaI`QI@I4cMLQ}@2s_Ylgc8bkKYwywFJV|*<1?#fcYNp} zEX5w1phLX2TMb9)4hqtc=Ak6P6sP;hsc8fNXRHdKOUhTn@nmG&+RsTT%by%^Q4knY zg%GWrE|;UZGO1K}c3Pnmobb4#*b@GgT#HEQD4dRJa>GaP>5rt-G<{iQ>`FN1&oO4> zCsZ4Zs+1@@jX#$K57)PnbCQHR+zilf1a)7KCxiLisH|@=6x^__j9eVa-{jNGfIdrM zYt?*fNWKQ}48?wi?8UunrhyQTD>CU6ZS!m3dghZ1M~MhCsFh4&s-3qjXh+mHI3V|j z!K4Jxh?j;G6x$y~MsD*)6-9pVD-kyJKE{5{8PVFT|2kxV>t5j=m8;Utv^@0>lfMN8 z10i{x8l{i5hQxf(`CEEKZwx_JUxL=ywr+U&WDYaA$3B zadn$;e`sVQ5hZ41MpZw5zIyo;OaL>G>Q?&!n(~S}#!iu&BKwepdvCID>Q`fYGODYx zW`1M`w+t$3i~^8OJd6xGiYQd}hcu?DO7}u1iOmTI|S^T2S6B6lF5^!Ni*Yp-9Q?C; zj2wp@^+`O$|Dh$m{o|pC2-Q*uJe&dB7+*SgoX{Xj5FO{l&if}Sk?0sZ%X{P`JMSA| zcrj2*1LN3eNPl!2p4??U6t8?;Eye$s;Mog(^zg(!eN#`(IWi zL!WT|qA)aZZq6f#0T1+|yA2u>BXaQx=9Cy&H~i`j7euB!v)7G5S5=Z99H4k?EqUr! 
zW%a4|EDuwappci$IXS+UdlaJ{qX}ABoN&jbc-YLG0JC?VFeoCIELeH#N(p`tx>ZU_ua$|eZO1doN9$C9kF-q#7WU?f1WbHO*^A(?riEB))&J^H>!^rR-U6* znn~9k)Z)26zVbB)m!g7nn!~NM-aRzoBaLm)!ndrLpdpp3x}-!lcpm)xx{^hiyh3*( zJ0;`<6k?O@f@U$cYStZio|hzFqxJ&=OQ~d2->JZ&hD;LxW1joKnwHiVwrtK6Zh|C7 z!WXB|&OYGe#Z-gzkLTCPE&pAn z*;6Ye31BX@IncpPY`Zyx9C@k1Ou0?;Hc_m_7?fni_G|y$)moCwY;HscZ0nr;JPSaK zrxm}fhbyjy;MyNW5pz-TRl;5z{%T`TZ$6QjW++L^>jXI9;FUG4!^4KTTje?Yb$NC_ zH0oyiISNb0x(5j>cG_K0{dmzwr?PDCXvFmEIk45~+WA|KnL5(vu)^up;d1mC+F-;i z?efcdHF+m)Y7;7!e8+$4A<@>e2biP@!eou)X1@b{ucMwsF4nB$kfFzvBPSu`qjh4M zgSI};kR}~!@%D=46aU`IMWy%~Nwe~isVHT6{P*@-S%*)&i zU5!!cu>=8Mn*dI?L)vAhL+|n$n|Yf05vPEnw-Egro8!{H;!MGwbhPdqy2tpGEfzNM zykRrsbXVT&byG}4?s1-<>qJ(0d5eURN?Uxae>B;y*YK>zZ`=!vI5x+B(J7I2}BUI-m9E85znD^T?H2GrZYc%V5f2o9|ZQpbUAN0SQ3Y z!<-uO#3n;jgKrS8=O&l`<;rOH7$E6YL)3$&4hxE?dZ?H3yl3iLT~@@D`U)C9Y(}Y51d7R5k6E>g+R-% zHgN_LRfPHO4%mMx({f}i+z1LG{5&UWl6yEahH(~Ij!37Y)^mooTj5#HWEH(C-#_?y z68RzI>S$w#zj%-@L=-b$9%*?D#v^&kRwPgeKAO9e3jXC8Mkd`R*`~2MTK?F&nUIY7 z1k5_u9X!?WDeR-GXf_#NAsEQpjx-aK4+=XF;4V?7Ok?}j&qEm+rV1#7YxKV3_oIYWGfbIJF2>-sh{?b_|U(u&gD@K3v zF#38y1Q-|(+r&Kqpc}2?pDl*y-yepWaGQtw{=3Cu1>aJ+lW4wm;$r98)tFw$_Y+!!5C`=(aWTjau% zbreJMO11-HzCt%T$3b&jv;6=p6HPR2H>)Z&8sV-BJlm+4^%3rv$#Ql%m@F%lFOhr1 zxU#MNj_9B36^o9h>f=0Ob|!S9-@AEglLF3963eCFA@izIx49ti&MVmv7(kul_glu*xIy zIdw*B$$ zGxgQnck1J(;~o>ICg|xdx=Or!90d)wY5s;S-Lo&(E0908NIk>+;_8qKq6|th64GLq z*E1_;1yqU28f2*U@l)|SExJC75IVlNMvoUiQUr;R9wUQm(QKR}AJlkp_ySHQY4|_j zG&_qBMAD+G`1r1+q-DG7YeXL(1Xo%0gd(eU4{YQ}fVW2B$q9*Xig!Dn|8{tw3-IfB z@!a+Q>682rMy5IhA}1)>amtJ_~ORp*2<2fswUQ>4JM?lmc|>8S|vHUz|aX zZ#RHv3qVr4O( zlc85vxZ!7|czfF*bsY1fSMHxMr;*qeUWx1#*w1zNS2`pI&IzGr&Rv|=62LNFFQn5|ii+7O zV}vB(NGrnZ$M-vQO`u9BL4-S~K6%LS?4+VMWFC)6_qrC71EyX(>dexpsVVLw>aaIuiMZfQa-g?&d?#{}$+iqlsg=7}db^Dgt6IlwT*hWuijah}zSg<^;f)>z(bLUkr6Bpu&H^zP zmOmHoU96#MjGB@139}FE@|_o%itrcm(-203dU$bA!zh7ISt{lkPir-oAah2hj&Z3L zN@)EFDNm{!Z1%U=i?{m*ex_&(N%1y`L&3YP+im1TM~9WOBWCZxSef2Ye~xz4djTR1 z=})vXCa^q(7iV~&V&%Ls?{XlWDX*Z#xDTzh>orw=A8^05nxDn(N*IbUZPl2Z9e$JW 
z>P<_gdNG2?6{`bfjHFi9R%p@nTR=tN<2sgBHkMbI%pbRfG}6O^(?r;2z!x$xqT+0A zr7x~LVim#0>Uj)!H$NmXp3C3g$uH`Gvk36wl}wI5r*r?j3ILe;R8}i%g)I2TKjb4)NOppVSeo@d|p} zCyRG@`&hLA{GT8Y1?RkT?7w>!zLA0S{8w6F{NFDop2UDaIvC-%)`$&yuwxgJ-q0^C~_TO9xE7V?T|e+) z{dnLa`Mw{-L7&*9y0MdrQA2Yd$>_cEg< zzm+b$1^VYSK4Jni&B`iYTN!#pmpkZ#!r>+R&+Bx1SHwnN4)rEyZ|*69Y(aEA5V*w< zwJt!Y)n5%ZPZQ`bzmLk<_o+#oHPm5ulhuX-PA7+GLl^h)#W{7>1+!Q1UIFG`)g#>G zhqyl7T)mh(rh3>8Rv?()yrgEE)}BAE_Vymg7ZGh@#?N{G;AwFo-B{8#F{WX(wUg9$ zw@AjGYbI})It+S#eEDtnQ(!XPD?oP#c%v;FfRy3K0;9-UKSn0&8Vhmgw7nPj_AF7L8ouqvQ*p9?PQb)qqi-^Cr9G<(UF3YMSd5@X|G`nv2r z6l3DnW48E}E>NIGb>YXiTJ362dZ<8(zt`@n%h!U&Z$RU-IpKV=>crn63FSleY4pba zs1L`D-UD*i)5iW)GXb4L&T{^3?KgS9nUzzS3hQfU9y8Y$Zj~RiX|Q73?Std+FK(Rr zU)6_1eL56ch9-D@+$sCv)B5|z;;dZ0hQw7Nw!(*USz;V@bfYR4ItFQz1^wx>cmmG% zw?AlR4WJ4=&$k=VdKiX8GuCGLCw+U>gO`X}LsB=D``)-;9h zZy*Pw^n}t_!{*Ctad5;gruU7wR-6v!q-DnqrVtxO&duAoGy4~XN7MaPhC%-0+nP7xB~8)-XF72Og! znfe5;`7R6{!(0lJyme)Pb=cM(xrZzw@KnasWq%HZzxq({=O|Y%x;Gp9HC+d7>0h9hm_aZ91 z@b3;z%{zI*J?Gf9-7Zt-Y{hXg9>?)Ae6ok61oW@#^$JqUahOm$2n2k#waFZCta@;^ z@o7$r!eRq^YZG=l7`90DdUDp$=t_})vSzJ#nA$O?jw$c*+_H@vYPj`>2>|8j2scfq zi8ppDrpbzeYKN+lptC!eYr2;+#f!OrdyfU&edynS$PHoZpC}6VUW)n$0Ek6pL6R>n z`1DCEZ1HKba{-xEIs=@EUaVYBZy$ZHzcDDD-7UVi4Ar0*F~O%81Fc3NU$5=I=zbiM z{gHKYASz)QgELicrR=pf=4DR1pcP|6|>cgy|H!mZKZiEr2S}XO=!ya|5IW? 
zW#QU{gwL)&NGo#^5+?exla&VWy=p}Ieh1-v%89DtM=tCXZ5@5)%Za^;(&8ee8vFVQ z^`^E6X19E?#v?lFDFXt27bWQm>3dc5GH340J|`u(J5c|y;_$%n@%5a_`)zLFWph`# zKM4uIZ&S)Ac%=Uxj@E2DT>O+cnkI64#*q_C@4TW!nIloE^6sqR{3^%-Uot}azF-|} zQNV>ls%tw!e*q%*b+GBg(#&qg%?hh|7g#N%IvJPRA!CGbycSWRZqBFBDcsmw;$jy) ziXgU+VmZ6-TokK+Fu`;s=&q0@v2kl8h-?ua5*6L)>l)bUDF~GIvvp2=PHP9huGsu4}$s=XWyZTER&P zbZg9WlaU>!KQV2GbbJ&cyqV67&!D_MVKmg`FiG$F%%ZMRDL0H4L&c^)hABbXP*gdE z+O7rG!AY&tyI4#^NKx0QSUAn(;Cq_xwXS4xKV{0rZS)36;Y?7;vs)hDz}Nax>|QL; z?lCu*Gvd}|zm}f+{H}56SAa?r6}p{_<1hLFLstwbu%E2sNSmao1AdOWwx~l1mud1k ze{?c%Xl~oOT%P}qPEpA@a(=lCRiIcu1)UU5CjkT)zG`v_2hgn{*P=7kvHpQ5;D$ao z#J6^m&sFg$HXh&%5&RV1L9t89Bz}n+tj>;*lvFpZ%&x#EU0I@V@l$kEILl06I5PPx zz0d-eURaAvCz=!wLRot+rVW%3!dS8(C3mxp%b=B$qt^dd!O zi9e05F)(o8KJ35^YHE!8exstk5>!k}SY|&@P>qGZ&=HvLJc*`?%TcBDxqIXvFdqXC zMzS4WP9BzQUTtrmA1<1XY(()gP{zXQrz;XoH}{JddBZy1dHNz!c2#&q%xxW*PwAg| z3J3X5Ym@d)#fNu{Iv+hY@q2J4Xe(SuEBO*9u&aXx#!Jv9*ZHYQ#V${{>E|WS%DC&y zW5H=&8UKGjxn0n&f}2^UxQJr@LP=7u*lu1U8`m4!EUm_!+TWF*g=i7$(IG_|yp=== z0-1=gX?Pg_ObEZ>X-910ccMSXg3}_A^^kCp1wx!qPAn5D+#0Yiu_pF??3FlD&D>;5 zy=V*=EW99-L9;|z5r&ONd)07q2%6;zy)uQ}RhDZwZZC5J#%A@{u^rEgslgTy{7yJmZ32C%8Apf_|2k~r4 zg9ZHxz`rllwI9bF^uaQ!DCP;whiIry%|b^i?wA>fp>U-o+{qkLb6z-yt>YMs>tcOI(DL8u7~#QCgjTc~9y zTnXP1WT<6kbp0AtBogXV%)5x)op>~m*h&F>o;`Yt;_}XLHs_;;pkdNcv-zu=>9HK_y%<-cq!!|TsGT$IDzPkFR{ zConC{aQsX=s6I|bKR1Gr|M6Nx8vgJvHGyIAJK{ip0H~iGf+k$^s&PwAOl1oM=AqEj z&7G3POvmpSo|Lv^?SS6%A5?c|*7FefIKm|n+<33oI6*ZfWIXdCS+f4nnx&HvH$xbC zttCGdi6wBA*Juv=L0cHiUG~NA>+K~=0@xRQGO-IC$hhAmUV_j4_M^RRdFeK~TwP4S}+%N@KS(QKIUA^d7~@O=sdlBrg0;VGopW^nH6FMD87ueBIh|5)qyg6KI#6n{rc0aVyeIwi2{U8Q(sC? 
z?OPu@?i<#zonuja*?m?iSS#HW_jQ)Vi$PNG#clJ<_t_+S zJ`)G^r9;b6$vo1I1(AzBmbJRv2JfIW9US<89~Y5n1$4#yX4dsQUm#9}-Q}-k6c?U8 zl4kcxdi}zBcl#z-@Co%|FfZarGS2WX%T&_?{Eq!X#-91!mHIZf2baIe%1Q)4Aa22YRdgTzYjrzpnr;rD7rgEyCbivJu&zf9~(ePBqd(lM^Y z<5N~Q9(~Jw=78_@XVUBKWA|5^l4i!m?c3z5B`S+NNfy>=rREq_7(-HS*w>n0E>4D5 zAs=l9Ui~76n=zG6n)OKb{7zYwQP8ozMFEjCM8OI>b`hL3Z_&X~^Jn?nueyqG8E1JP z*U1rlAN2exy@`wW^}5-u*xZT}z4ZOg4n0>ack;k(n=RYFMU?``3(waHkyn}HQJOg! z0<`eduZWd~y|c0P69NM4F|Q6Oo!)9 z5gx>h1b$#cyo#u(vv6_H{X8(j>}}gCrWTnRl!3Gs5Kit+5nOA^niWxjL!&3sgp&TY zUukDlZ#^N)mUjXTF9%SZQcTVsx(x6u3xM~L044uwk>DzBUgmGkSMBZRZ-$kD=gUT>;@AA$htHeB`>PAe><^2?nlbA*v}~x2w_6anev2o{ z=>3En)7`Zn-dBfX@7S)0x<~7 zfqOR2F(BniZ1MS6AT-3BLLLfA#b!#w(`%!#tmBG?IC+fp(3FR%YD<+YD26Gout&j4 zYga*$G!S@_!zwQRE*V91Q6|-=CjWfzc5-bH31pg+yEQknN{~IeM+r_th)662eUTo5 z2yVntm)bT3`%ONcOAeONaK-uajj(ij)Nl(boV5|E0d7?uXeE|hDG9hOtb8G(nxMI- zt@+0|wJ9SnRCD|ZR861+GS$!I_ld^+4eO7dpRxQj1A?AEM&_b2$G1%0{l+QzLl33w z(Bw=0{viDCEzV6BRq(jtl=V=OqXPApqb5W(Hx@^fS9}N8({+#)lNw-FYUaQsVIP^* zy#nFoED}20~PQo8Jon)H*%JCdJ(!seCCm1j{MyHro^ywzZ>iBJB#CM*!EO>l6 zxfLBsLQC_>$%!;+@C#Giz~NkXkBo2JPj^99{1sTjYXv#Bk}qwg>76q+-{jCX;E|KPGRuubZFQ3`$5d$M;rDN*%hpT zwQodtUKNzvhX{p8|HrPnSE7<6PDE{zI3#!V)A4MU(Oj{LSJC;$fWPGl7}oz&a*{9t z!jh!U;Tp^czB<1(6du+`xeAiMdY>Gu9ewuFf$Ae{n2_S3Tp92|bl97LE`H{9IIJ6Y zRDH`ezq*?I?mQ}p(uk>SLu!NZ-h zlW%{$M@^nJ*p#w!WITbwtQ>Ia;oa88MRB$fxgOXDj9qi4&C%X_6oyw3<7z&GXQBQ5 z+c;taJ(P0$AgjR*?B0c|V{sI<+PjEwy@z{nX(Us9lbM1}eAS(upt1T3NC)W-SNh-W zwUT>iOv;a8;*n3WS0-B&S(cFyf-TiRN>9=&h1$Sq<9wYMC(>Sq#6Rf3p1%ci=@a=S zh2(%mYe>=6u@j8(DbW1QGnI)e>`ux7sW?k-mTQ=28I``Okf6COsk?RcMYHBcoI?}` znm`KG+Xu632p_3sIYt&>_16L2G6YmN{$5FZ(lB88mVfKtT-qugXrE)1TTRFF+r0bW zD$Bd`0L^=-(A#Ia_>o{n9ANoxf*`c1JsX?emhSP*l?V!ErKbwr>MvZvbO@OW1trpO zDk>R*NN8Mub`r7^PHk*e9@Yl@NJtBxHnk2UPugbimqYh^%2drS4u?B$Ofo$Y>Xmyj zEG~=nu9Ypr4o{^Ax!})Pg^zIVo48$)Eh5G)9s;0c*8cv2(w zZ~mzTbD1TkN;gz>BkfZ7qmn%gv`(rX<5ypqhkwa`yri%nv{r0nMJAZ647p`Ub3 z^AS<5NsVHE_UosKpRJo*CfQd?l314jc~o{F*}S8>;amJlKX?wn^%#$(E6lR708|tMq$VO_tIJlM1Oo`WEZ#&%DBX 
z`hHq)U!FX&4&T!lH#w7EwBfb7fI*zREB)_wm-V-#*Ro84fqb6737#GcvXnD$>Qx}g zbOG@+FP~wznfG2S?VKgx525=4R(oM&XcN?#Z;8U4{pniGH|sYvx&S)C;FYo|qii5k ze^lKlpH?`c4|zB^J+`jy$7_VT?17k**zh2I*6=j0Datk!zMlU){>yp9^MX~+)v_ZD z6Ob=cl;Tu|9bO~jrbuK{I!Tp zU;DZP;)IFBvYE#6DOlC&)#uHKUy!(C-c@QzJ$=xIVq+u}pI?VSKb+TsKdC9@Ib*Bs z@zi1A2k{|FNtiwe+x%Bdko?;Z+#m40nA($0NBGvMv+5T$Cj+o0n|4H&I#$2*2xao} z`*lM)!QyS;I4NEbQ*8Nzc73P==9%FQTlF=yQGi%AxSEgyOKpqyXT&SoaFIbm!5 zDII^_5~)M*!tw5TDqRiBDMkN-O5n-ob4aPv2@b*WU!EU9xI92vZW3Z(lFV_Rt{cMm z>|dqtrX5ZLl^wp0o4Vegw_`7H9%O~%CVh!_fXf;d6zMr`_k>bFL0h|YaJYw*!ecHn zVA=0bk?^w+`-iD2xyBQm`)%yB0JSN?^zzqK&$L6wXWTxR=qk}m^AZrRw{zya_FTgIab#J&m}*L1DTPB^DN|*7tjupFy9uMXY<|i_rTD;u&g>u zB%x>eSeN&cwg{2taene@gQwN<`*BrgCc M%Bjj$Nt=iM55a?b@c;k- literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/take_photo_pref.png b/app/src/main/res/drawable-xhdpi/take_photo_pref.png new file mode 100644 index 0000000000000000000000000000000000000000..618cc59d09a300876f686a1e88d2607f8b22292a GIT binary patch literal 1540 zcmV+f2K)JmP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmZU6uhZUM{ jaZ1(``iK~#8N?c7aF zBUKy#a7t+bwJnsAAR?5+i&-@qW4st+JQ$ZiG$tl;01sp@t0WsW96fm4-K@I-HF6QK zWDgq;0up01Mh|LY;>Nhi#<~+MiXN4pqbSOC`WU7~&&to??x={9 zsQeu4j{JWAdHQHW={5_(FKDybI_-A*lUA$MLD`yMrsILKjg&kwJ3BjSGMS7}@(_sC zZ76dqlga$m-QE2jWy=rF=4%J%nt8-xv7hMc$qmnZ`khTD8jXHKUsIThE^vj?w`LdOoF0l>50>2y9pNmH^|EDzD95JCe`3ux1{?lqw(m+31^b{qhM3B3*o z27?znJ3HT}q%L6I{x>YQc6WDIU}BP@?Ap4zIxqCk^&XGsMY#5OfbxY~US9TfbaV_* zQZ5*Pg9)2>c%_Vk{OIWDOYl+NVhjEpW-1pUNCDbasHLT)FX40Kq(=ba@%V40RD9ur z+9=RsaVNy=2I*P@Oyqv#mZ*dSrGL7^I8LMUteEOYisLUlq{Lk z={yDdzDdbEdwY8dZ5mFdL0F}A9;?;bPMc+GYioNOS^=K*dFJNk=Aj!3QPNOS1OP6& zQd%<~tUQ;?^*klZR$pKLAhsY%-hnL*THLpkG?WwpfSFB5tNZEg?d@?m9FI}5Qg&Jw z%FxD>gexzi{@TS|$%*<6v=7HAsdP75l(6<{ZB~1XDnwp*~ z)(uThPk#hsL0Pww!{P8}#m@fb=H?eDX*g*DfMCx5lvXRiikz63I1`J-exR(PAW%VY z0b0%+W#z#jLu-4MlIC*UyP?SM_g{cR{eRP}aDm|91NZZxP-rThPDf}Le0-T+Sy}lG zAI4)Dt$lI9pTT~sv|B|?|4dj_)!0#$m|L(6KCH*3)bh}Rex{G9nCSz65{X37TgjCy 
zcq~zW0AkwP(RYJB`qbdy;HOY=Sb@ia$LR0xKj-my-k>b0NErYGSI3VJ4GmqxXTOyO zVr*=zdthMTN=r*ib^M*Ms4l2bG#dRL2j5b#iwDEC0y0M8!C&c7=)_{2f$!L0{|_QwoGl0)gPeDwPW8QTa%>f7XHUiqvkmKWw#Hb@}B_0-X6bq1zduBx8&*#u#Ia qF~%5Uj4{R-V~jDz7-RCQsre5eJC$@` z0hJC?rK&VR0+F8b=lgqJJ}-85@9sJG+`Y52JF~lwY^+S#nE9Cj0AMpWGqM8!&}kC{ zKp9RemwRR2|5?FyrUpRO;Mw(41LC89SswsupRkZU=ud5?Kr^Rc0O0WduYi_X3d;aM zRLa~)-{HR7RxV3}%ayEELV)9(W8OvhjUWocbqa-A;(t{l=SE|;Cukd#!}$~|-8!oFBMo-xA3jfJ0MSK{FL^(#IY z(w2Sm-3_KQdt>wB< zFs|Lj=d)7m#efX`mCrO8A6WVH*Xr=yKZTX@K03l_&@2;(h+)d=BJ&cs4IjV;9rUQL z*U3IhZg%s&d04J)D!z;tH3D=z8!|FhP!$e}iGiYsmRG>QknY2@TLlEVZjM`WqH+Td z#F+EMz{BXCqz%&1-sfG9vEiX=rI+l6MsOW{h{#P7Z5{Br-(}QT|MyU}Bq%$?$T}#} ze)$zq9}(Xp0>*=ja@zRBqZJONHYKryx)Obr_-AmCSyH-{HKx7vxa+s4haFyM(g^8L zz?5AAaC`f@-`D@DWXin-;hDHo%bYAolw3vt!42*PX@ zQWBOAnkw^1If7>t!~pI`hXZUZ$9mlV2)_;2*&Rl9!8Tkw$c~GBCtr*B63Y6xh(Z3j z*aDPV&!L7$xmBC$Oyw6|f!T`mpf&u%MStd90b{81HQ-d`^VuCtwNAEZN_~djh0Q6s za1QOej`wo_aukC;V`3$r9XGSOc=B>Pjwa7dzFDaVJ|_!I966G{(JpHi3C`hb>L-&C zj1v6l^O;|6;G4Rf(8}0F@6I?j?W-*j)q?)A&K5xVYbuY%M*dZyy(9}}ck^l8;D62< zq6~a;qBa2_y91)_w>?H@6Ci~|N~9=iQ|ZI{*yRvHK^7A^twJWu5At#C2fduxYak^rFb7z;{rgY>OJ(3%_(p*|vJg+!RZEv-Ao^bEdL2>UvNG}8Y@)p@(;#2SDj}-34#!<4QOIrJQ`h{3Hh39X5q-T!UVZdW55R0yg*8 z(hWHIbJ~^qu&Kh5LO;l&(bt9cxyWM1!*CKsZe(6Pb%@!Q-F;^Eu;8sXbG8A%&%Y-* zV4Z8?P0#NuSXM}wd*7nD&*kL?@UM_>_6kFuXQ&Zu*A{twAa7aXdPXZB?>4C!O5y!L z%7af(0(M)w$v;m_>Ef{{+Ml5B;ahpXNvd-^eHFm7T;bS3^#cR#X9~-Q)Qmbfd_zcL z@5E^kq9O}i*rl;>&WP!cc#eP-#H`(K{ z^q|tX0ZtOMu#kL@pKxnCT*>zXFqIKRN8oV!#aRW(VAC-W%si2lMWAeiS1cDN>i=rA zOu^KjJ$wN??;BD&7*w*4Hn4okD%4iNdpi@jwn|eVw83E?j5cCA5ay2o?S(G>9hTG| z##4P9?Ky|cw<{nHd~=6A4>Ch>?(C{Yzr^YeUuU9n*G!Z{lh2Ri05P5CTomxcI~sQ~#^VuynT2 z!AzA-`7*L;Eao)RMP?|-?s%B2Z)l^sy(kw*W|Fe_epYr+Ni7>Z`Y_8TIjD4q`^YY6g%B%CI2!#C#a)fO_d?L(nGt<~-2tie*d?MHwBnUa zViEGVnloc4E%KISx0u>qp%HqsK3SG;?Pal)gP{q~I_TwQ+qFXMROaD!jB#F@h94J?1QE_GXx*m{)$`hVV#; zg8A2UlW89AH!gP+s{l>*v7`SuRSbBGa|T%#j|yg9bd+4y)LuNZ3P!(1PAX1bSJgY6 z`GoSpJ3~aDJ=MOHK7JjSfGs0Lw7+wO^P 
zP={n}%d8W~2W*qKZ}0xzTSsD#C!0R;4OX9Xz(X={|17&S7%hO2+6%Aib+^M5W(7td zgSm-$#tYLwZwKMuu`V)GyKDP8oyHIc>ZS}ehTwfrd^!J9S;H(0Q# zRQU?NCpCXxe%3P}b$~{lX3~lWhwPg(uDn-N0WOZk+8wZ5W_`VJZyG*s7VI%r5~i0T zsZa|MNuioyZVcSVzxd?td)MUxgo*mcFCln=#eZ^T%S_@1699^N3RSA{4(VJ|dP^7v zWQpbG*i9p&pZTt$O>=$~tceF&03d zXnV1dAcU_X49jJ@VZRAJ;9ql?`cRu(and zfzdC?u`}5t>3+z<&q3c$?26|v7>8Fhc)J>X0nQGbd~3)bo_Xd7{glN$4g`0Gf%BuHj#&(n-YJ|W zV1PfTe!pKZ;N3eOn|fesW`r;+Y@HtM=c1tr?rB=*Wg&^uCy6X8y@0FS9!iU4@ zmjX%IMRPcajlIPQH9z4+Bs5&_ed-Jt3>)*^A zMy*l)?ZUS5r7Az&_F^Ga!N5gwPx6nPiNiA#>-BNQ&qwviAHz~JE6)#NW)n*XoOHB# z<_*K9WDD~(Waz-d_Wn+M8ihPLF_X3Q6^KxbV5mB%_t!WxKzN5+B1B(tXhNXp2u;A*NX zPn`xSyvn~$W^f!pfmNQsgkJ_B1=G`LUnD6!DrQBw7+ywdQCvxTkvm@#LStWA0+p&2 z#w@l@z^k_e#rCAra&}%gR%rz2Ab3gkiXTsg4XS9>_1&saOI%*5D59e)$UQH-eJ;R?CyLl)b$QACKV=Kvyw;>V>ml`e6rz}6BP z+w)vlv@^3=rkC6uVYkzUaXF=kA8ww;TMtTkzb$^htMmei1|jPIKW}u0f5}}MS!~$z z2lOd>Uj{r0p1Qb4X2MO0xJAjV(|{9@h&4ZA-^KYFsYSN@Jlc~`-K+O49sWv0NkHe* zC0E&l7OlM^felvD9V2nQe|@UN=hZT)6=#C*$L_?;eof@hV)lZdr51`$4&xIYmF27l z;)9*bNAGx6WO)AkdH9CP6D^rMh_tjM*kMZtWiHcL|AnS10h-@~{C*r^-qPrWzci3k zt&Rb+x#0=j*WJRQ7XnLklhVd%I3KUMBg3`R_&RW*3`CDr3<^C{AMghlp&<jE19g_^|l|H4fu36AYC z7smQ_-zZqB_q+<+!WmDbJUo?RR4XDii$$Z%zCUi?Q1!YBn1rKGHQ3>+`MhZ~*P4he zqGdbLNldWCBR4R{>{v;SJBa$l1GI*ncM^QAuow^gYHtqM;)-|nV_FTbmEgSwz*b9u z+sxMEOrXqPOK}+nHZ`U=yj&&6QLX~hg=Vn!N>~TGh)>Ec-)w^{&jiDHJReYqXmFbm z9vZUwdEI&|hUBzaa|ImuE9vV4Eu;>2JZs?ZtT(RP$SX)`bxe#*yS=wiek?s}Xrgpw z^oo`CKuhm+dKr7H!DAN`_6PQH%W20q%IOSl3-!Q(3ym49-B;PEL- zD*vmwO-_5Ac@coVuZIx;DNI0|4*q|gL1F-`NroAx%%n)E6a#kR+jqE)_2E+Og@lAO zYood>Q@AtU2CzUe8eW3OUlsjFxwr9ToTrTiH_QPa#z!8N#lFAfiiCEc@iKNy?LpIs3C6ZGO7<0Q>A?Y;0U^y6Ur)J@Qbm23dySxJxojy1^s z(JYTy@vJUyaz8Lz@UN5r2T@Yaw0^+Mblf3+O_u|q#w`q6_}#A3F|!q{hUSdfiNR-w zK2S@mK=+qWSD4xh{8*s|41Su3B)TAqH62|Iq+A7H5al6lnlp*0rA;8>?#DhX0bPss z0A45x$^!j{593{1R%;;1bTBMCYBA=ISefKL=~5l%5<-!S2i!}+KsLjyPT z(Cp$xw%dQY;sSZl2?(%FJQd@Wn$#%W{#jTKs_4gz2()qIsX2U=w?3P9x_LlBQ@TGd zHececINgtn*+uS~?Hh}@Mcl~)5hHv52rTxYM_>slaG+iTg)kFF`C8P;ffY9}25ZwV zF-uI?2u>*QpmYNV^{|VoS)3ro 
zn~mVjM3w-xxQOnbw0RLA%y==Nl8=pt^c;s5ndSg7vS53r|I!#h$~8tUUhGApH?eq- ziYQo1gS&~!E~*IXL0f{L%X&g7=_;r`@O(0mcsJ?{U+S?ML-13Y-J)jL~p%)Y!7L3{xXz)?h>Xj#YFzrN>ewriib$}Pb72uB5Kh^NOFA0W` zO+i9&MaMD~AAJw`fbS{tKuUzcuy@p1Hd(rO*5wD#4`wvB$7tYcuFq>)raSFo6_B|H zNDdQfyuFH9;dN$3#_fd*mWQqRoHqgGXWJ)=w3GV?ms9seY7P*WBev!Es3@s3*j9V$ zCZAo$K?NV1J%evdzcUUm%P$HpBxsn?)Hlv0pck$D)4s%m;d(kYKVkmH42b5ON?)7Hm~oF^0*Iev^rM>*=8 zo5;aCs{k@Lh<6MqDe6T_PO8Ld&y8oVgakCi#PEXsjVhDep zhSI2m_(n`ZHUvGn>lBr=Cgg9@)8xHaq%=~|Xf3qOi69CGr~Z!$Qx{Cj#wfuWgeeUD z3KQeO@63TiC2@Ms0I4#Tp*o<8E(+)8s52bp^>eTg2aUsI`roKZ{b;h{JiWBiHPVLR zPTx{$J<_JZcw8OqNo^~wJ=-qfn|B}~O5oUbnDK+MwdDs=9se%Ggxr0p=X<$>&a%GR zb}zPEW0xiEyFkA4d6IR7rfjJ0?Zt{vQ>r8Eo__@&Uj}`_Q!X41e&aRatL6@))jFZ^ z=I@m66FJXjEC80bh1-Y?%zJMyFAuzkL)_O#Nch@-@wk7b6K5z>d)n{dgL~z>oc<=g zU2a$rHHc{=fq~MG>MguE`!YPB(NNv?CBO^6hg|uz#N}^dQU~ax&l+RGe)&X3a&Mv; za{~_+d^fnU#M&hk^1XLR$_C#aXZ%&;Ke0R`3)li!2fr0$qxddJ`I*|>AXo3ViIg<) zQ$Pix>d5g{xw>b^I&F+zeW!}BlOzGmR;|yQ6cD@z*5aLjcLgF1ye=k_&&!`MYGbSe zW~tA$OI-<{Vc(4ekcwdh>jn(0naPSYK!2}om+Y99btd}Mp_m1J>^??i*j>S1Z||71NdfGVXVCs zq+s*gyD#HGe97mfhJ1q`)XmcES?-$~)!9>En^<&G+fa6G(04&DNIujaV>%Yg5cXpd znCzZ!+YqBe-~4t*qByaVi`~J#_zXOoEish-a#Zl9bco|m2O!KXX2y-}Lq~QklMlVu z^8&oS%S8&**OVz4!~W`GFGkIs%ZeT=YJ6yDwIJLScKbhwvCBdeGqE=BCh2`g_F*pV zmJ17NiUlij?Tc7oJM6RJRVHSquBB)~oDJ>$8DD8v+K2`Z&}IM5drJgM^Bgz~l#`?Y zqMRC4C+cg`iQ`M}-dAr_t!vWuo+bg;gqA=3UftKY@H-Zpgk3zJmYYvm-Shkyr?ly| zzwa8>r}>Bp^})dAxn8fZIQ4}`@+nAuDQ126h%Lcor&p=-FQ(pGOd<<(y2hoqzoVM2 zjWLOVaRHepBR^hFgn&(HcO+$^xvGwb@q!ldM)d6ks4S;A#>#a_me85#jvt9txeIu@ z!_cWcQ_B#2mt`>80tDH+$qg!H;q{8NA;v-ecEuXC=7D3H8#xmwDuVrPAjmNrXt2?6 zC+J5ABL0)vB9+Bvp*6<%;GVyf)iNI{A+Q?2zDi7VP?lbBf#{pPg5M7N;JeFB)i2JOCPATo8AiVtTkjN9h9AxU4$gh?H6y(oh<^tG{SEx6Ct_rTGE0W! 
z+{_X8U!s;PcO;PM%c%O@8ZS;wSO@nK%#yeL9rXi;hmW)N!8tm%+H!1Mzyq;HbHl)Q>T_{Z4=ujbd5Cg&5En(BI!I-S z@4fRu25;D#SO0sJ@{k3kp<3|ReX_tart8c7zIjlg+gJaS@2s3gn|M|D(#?eS1Fd=Q zM1BjyDB%0Ee7V~fU`vu=edk7b;I~ANPJVI2A}>z;d^kpAEim|CSECU^7gw~+!<+iC zY-O3b^S;C%vg!2n1jmiy?Hz0802M5=6z3x6-W@h#toASNv9l8VtoAzk=uW{HZ+mO7 zEQTH&Gdm{VjP$u2K$M~@L#u%Lm%dkpv%@@JJ$T9n6^#~_xUzjcEj!!YP^oUZ{+CLzFR(v6q@$|xGbgG_r^n^8z%qVklPQd z6^|@c+jNf-%F*cQegBn%DbejODFAN@)K|u}P~^aoTq>R4W1gX0U%YhilG#JdyUst+ZgF%)ya`KIZe=lTb8Az1d%pG_=~os7r?m$QU1y=j1RlU z*nX)1c;SKqwUDl>jKfIb?$Ob_JbJb$e4@NcF33UV0J3-^mLf@5$*dj6kBVI3 z=h~6?4U47UnA!aovhT+XDhr(W)Rj!a6JFKhNLx0M@aGz0g5up(%5qFr=FRVD4DdWt zockG>5235y>)+3g^p81!(KXZUw_;sY7?}|zt=VFMQ<-5UzYpO^u(jA8rt#c#_Y~s zRCUgznk^x_hzh+hzjG7-p0jT^#5DMS#?6J5$@<+o^;-`* zHi8i&xxwM?mi&wex`5G>^J_k(9|DveKX@M0P2VN7^cZHl0diCG?b3%6C;8;s(&whS z1;*X&y)sDt)DCXuDvRtoNFis9IoA)Oqbkx)@;k^GFXDV*K)h3XF$Ew?!$Ag1UKcx+Qjd~=K>-x|@j+jbiCg|z_ z2?1M9nkQd&%S6D+48PxYb@l?g-#+NM+)hkj?L^RQu%EA!#@qD8r~(L3Cf4{5tSySU z!9AJT$F~yMUh1DmNng~A_M|b=R?}mzxHN}vG+sxqvx85>3z-pE{_{?85k{Kewk{Zx z5XZ#fwGU3Vz3dXp)@e|bU`{P{QPXui770?QF~ms|Q*Y<>k)}`rlZTELsqXS(K&YLQ zVF55-9`}MIo8v@%Gz^8}+;?W5!0)kJbXN-_u)GRR7-C5AkI}Pb@`nz2cM=ogmtO53 zi7~JfPtNC-Zg02j7VGYt8sz@>a;zo4$p6m*tn-%4*&%s$3)7QMMd(GFy>1mS0?vAN zivW}?ow8)6MHg)aBM72ju65ys8Np(Twk0S(-_mqSq89_0Y`-w~*KYAB{2bfbZlil5 zGLAH~lNuF}n2}C`qAV1gXu_rP*~jNo$J3=l+43Wu?9ofFh&6VHHB$U?erf+K#A2M2 z05$B$GzjEsf%UF17O`4=kRSA@!QQs5osQ$TKs*%7m=n4kmvHSi;Te90Da0r5o_w~f zAunP{cFMi8Fkm~Is-#;J^0s2oUs((gvda~9KozsYvwCw&i7`p(g^v0oFR=eSHNPHA z>pw8MZ;b%;!9I9VIudeJ+&grT7&Ywg3mwc<#>f-71!*H)r-!p zY&|=|Vm!#Wq`*0AIy9N=>Px# literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xhdpi/take_video_pref.png b/app/src/main/res/drawable-xhdpi/take_video_pref.png new file mode 100644 index 0000000000000000000000000000000000000000..855ffd943891b83c3a94bf35c820951f4bd315d7 GIT binary patch literal 579 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGojKx9jP7LeL$-D$|SkfJR9T^xl z_H+M9WCij$3p^r=85o4JL74I3o`oxb8g6*HIEGZjy}j+|cgR7cA#qivXYax_DF=_u 
z-EQ#6&HtgKP0Kpl=8qhCCCaZ{gS`}iCZ(f{!gFr@(kDW^^-H^ zFahmA1`Q<^R%y#FX2ghhA8kIFvPnCBSJwQ;7OfvE?yL=4J$=`f+F56x_2yKq`&zZv z{O^w2>RpTW)y>~|EpPk#FUsm!TcfuBn$aaI8?2uHf9mtfeSXEheT$uIV>EdVPxo3{ zqP+`{T5x_LW*)DokdHXwLaJIp<%eU$AQf%dcnGOJ-RytywU8 z{>j<@n;E=QD&HItOlVxwKi7ikjA0>=<8b0V<7WGlGpx!UaVW%Gzx%qxiho%jP@3cZ z_x>gYi_0qmSp?=ifBSlIA83DlPqMueYt4C)NwH3 zgIwFg#eTg?4HCU>e~Wh3+1(H!n*TU|BBouyR`l2jer3mRTn`SRx#h zB`o_mHngmK+`*td!=jI4g2HPcGyB9R3#9|wkJMh6-x5A|m({aq5m))m(gK#>XM2lh w#$Pxg58p=bVV{g;f$p$=sKcVGB{)#aM*kLyq0Hh|!lvI6;>1s;*b z3=Bd=Aj}w-8vPh3C{f}XQ4*Y=R#Ki=l*&+$n3-3imzP?iV4`QBXWa8_dH@3hW1^>v zV@O5Z+nWo$Spo&vA21(O;Al!P;50j!BJ9(2R^=iCr?G82GaG0b9K4*F1<`YqWJN1q}u;1PL=Gy)jPUrr1Uq6wV zveoX`qA7*{->7X0mQTM|bj-N2bB;@m=;si(-__SAzWv4ZPx<-J^z#!RpYW=c-5+|) zLOy)r+Fwy=Q#a3Ez5GPrk#NIqqfJM5_T*e!{KM~D=0iE<_OBjxr+=2bQrZ;Qub#e1 zgnwdll-uqiG4HQ(mi=q4Pr4Q4c0vC8#J^Ed(wpAj%S?BTyY+Va?h|pkKi#@CUfex0 zc=E4}O6Q|yZQs+q*k=B!-=dZCPMhxn8nF3?W3@Ig0uW%9>C762nI0D%xJ_GrfrLF> L{an^LB{Ts58Ys>` literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/auto_stabilise_icon_red.png b/app/src/main/res/drawable-xxhdpi/auto_stabilise_icon_red.png new file mode 100644 index 0000000000000000000000000000000000000000..66e6ffe8816614c3d8990559e35a20365a83b856 GIT binary patch literal 463 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGoEa{HEjtmSN`?>!lvI6;>1s;*b z3=Bd=Aj}w-8vPh3C{f}XQ4*Y=R#Ki=l*&+$n3-3imzP?iV4`QBXWa8_dH@3hqpzon zV@O5Z+nam6oE-(&E;J-4ax^6*s@~w7bjWXmP)+@Z#xKI*9f>VRRKDk*Z(Q%GSDv1} z^Xwat>tNu;`QKH4KTh3|VQ;u(KJyKWj^E$Jop0DRoIO%m-4nc0YX96F+5N>ip@;u_ z?fI$IwVep2!HD-eIxI%zXKgJ?>AgE_*g>#{Hv#TPN;Re`c(- z-fY8?BU|+QRxO%7W5%( s;jiw+S&2XAzQ}grgM|Q)cGw=a+W&4U%Qn|bkCZ^-p00i_>zopr0EkY*EdT%j literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_add_a_photo_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..3cc5b100d22a2c93c871820a1bb480831bec2213 GIT binary patch literal 1123 
zcmV-p1f2VcP)`JV+?MHu|O8c0$CsnWPvP@1v)Vn$O2g) z3uJ*TkOi_p#sX2W!au8Lf&5agEs$SA+XDGTTv#B#h`I&xd#7%J{1yZj$Ztl)0{OjK zpk2lmXqRsmXqS7XhgCkC6$_rZC9pt#2_b<6@>|ogKz<253*?v3vOs=of`S537NG_5 zO9(8G->n7md$vG+3k&48vOxHiBaj8MKo-aXS)lC5$C3#lL%LjY#u;sT-1D6Y@24Fo zWzLX>cyP{F7N;2K4|fET5jBRaPAAZsJ{8#$RUU}TA1LOLsvL+fjM$Vp(3~%FBpUqw z6oJU!n2O|pfS2+GN*PFw2+I{HqAfY2OH!6V8!n{)bjuQ`FPTxNCYceIA<#&6pu#mj zcw+?sq^wvlqAfe|qB2nS1v$=nBKFL#F?I1E;J4;L5rOvrF(y5B3xft5y@7_Z6*t6W zwf2p9Mp^@XP?0ToBxN5P?mP#OSZAQFY{4YoJLo)U4D=>j@Qp*=i4#>;`T}*tiEa^p zIT>mT6uajS4U!^ud8la%^qhVA^>OxdIGh?q3C@udU^si zoPQDXlZpdvHG#x|7CCZVnB`1&piA;pcmGC}1frh(`_2C=&=9EkPlj|?PoR!C5RsQR zabTz?P+uJQMP7c01EKb##(=zBYd>nd`Y&n~bTMwJCs0CF9GH`bCvl(wN&0quaP$W)VkaCQen*U;)mO#)h;08^~jW}@!+5*jF3x0B_M{%OU zAANz~N}Q-L=MWRwf<9;rwDhb@{mkzTK~!0*4CH^~}MFoUuN zf)8$w`t1yfKz|^=C0B9)x_khXKM=r-wq!C>S}UH(Ud7B>tCnX$o7e;_R^kOi_p7RUlwAhp)AFCtFKo9v4vr{q=kMaU`X%f4uG zI#Oz~Kc<|DvH1CSc}g#)CWm1_`K5H^aEwZy=U$G(fRwUIxRK*=!4&2ETO&E3Nyw6j pGFY?Vk#jM|7-Nhv#u)R7{0BSS<%|EICeZ)@002ovPDHLkV1oMa<0t?C literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_bedtime_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_bedtime_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..dabbb83879dc473c9b8ef74b23542db5cc15879a GIT binary patch literal 1130 zcmV-w1eN=VP)Hr@&bBbDMlcHJcc#PWg&jerD`il~wW$OBp6Qweky(aE0jf$uB&>AW_Q6C(NLaXl<20sG^&wwaXv8PxP|#1uG~K zy@m1v_cBNvX3GauaF}>Zk`HJjE*aU!RQicah3w-K;xZ!pSV)PuT$6qD5}y*4vW;fq zb*F5jn|Mu^Wi%1D*|Ll-;?^X)c!>BtE4$c2{9cn)jAMZKeJrcUl7ydR6?;j-kZfW) zBP3&{ETWC1td>Q5Mp8D*BF>YPUt|yUB&Wz!S;H$N={Z@$*Cgo+S;HSBX^3&MgbYQJ zwNjQam!$2MC1gq3Fg3D+H%Zv73` zs^u2yaZ5K-)umpgwi@)(P_TT!C}pKMmtm_k3kxWHYC z9?#>8R>gq|a`<4k;=&9HSa*S`iVrP}V%aE56fZVn*}IAxRrFxhcT_5V)Nu}req*AN z!5j)$)6Xm=hxuH_k|7$DESed{ifc3}d9*Tu1y^ZNI%wuHuDeW=(nBNtxNMMzl`iIR z4p*IKj?zaRJ-Dcg$#Mf#Y@~!sN_;?-TtW*4Tv1?|+`|kG;esEildEW>pEM1yR&HZ5 z9gLEO65E(23uxpRNjt=RS;A6|kd)&rmqje%AaU!WS(eejRtAW}5FIqg2V}^yhhd`f zH(j(bUVfp5b!?}PvAD!;)-YKeD3`uNuf 
wdGeg#AfNI!FVe^a1wjx5K@bE%5Cmo9ANmB!xz%0UhX4Qo07*qoM6N<$f*Vm0)c^nh literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_bluetooth_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_bluetooth_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..9b3f0e3eb33e7fb55ad7f876906f3e9196ac32f8 GIT binary patch literal 620 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFd2KgIEGX(zP+>2mpM^@H8IJB z-Ob2V$iktBam!Py17dv(Ti6#){(L~_?q{E>gZi~E-@cnW6AbRA?aWz!ymUdz@)JfA zTzFIjnLUp<2q_8(a2|1CGMwn(qcXweVHs0zC(9&_i5-n9NeZ13D`slkmic7Ctfj~_ zuXxhq7LAoa#+i;uVoDEY3aQGv957b*lImzkcVltTe+`sU6lC~YQyIr8_~A?^L&j4V z%O_PnEDqZ0r-J)@zQ`+dGI%I4OyX!zVN&Q6@K9o#^upg^cFjZW@2M_4B?pV8_H6J| zVw?3jM8NREl;tfQjcGQfE<7d=S1KtjVDy(3WZ(Ctb##bF`Hh*6S6w9%!QkwOVC(Oux;){h{%j;K`pJR6T#4y2zL4}{;B#^m9 zKH^kkW!_(x>+GMu$X6yWvgBt--LEF6AoYPEATAu}paYldSBZ2qEdPH)R{&`I&J+cv ydHm0&pLpqh{4m<&t;ucLK6T!dHc5j literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_check_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_check_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..40b92d9fba24e535f41bef8cc0d1524ef2aa90a4 GIT binary patch literal 383 zcmeAS@N?(olHy`uVBq!ia0vp^6F``S8Ays|{O<-*>H$6>u0Z-f5n!FImnBdsZ%L3} zFoR3+|MrIR?h@yU)_Pv2+7O_`FHaZ8kP61PS3@0H9Yok39!*Z*eRnXHOQ4abf`5^K z!1l_6X&>vk-fX{D7o)j!N|0CPiqfpI)uov`o>?`n+V-nq)idkHRo{L$tPw)Gle9HZz_cU|?XJ?&;zfQo;E4nxda`pn%)M%tN17< zShufUtnr&A`?m?ll^8XOCvZ%ORAEXz$8*XiZ+Zh;r{mWy$C<{BuSA<3taEtoIlE;y zQ|dnU?cPeF8vX};<6X2a+>Wi7#BSK>=<-QHp;BOi4Tpyu%aW-~jG4I(4pDCEakt!T9!EJL6#k0p^Y7Pve&Uoqm>O zWzw%irX$5b6%4(4S@%8|_N4-?DXQL^d9p{3JZ<@FA0G#7Hr_GAnGai)8fZn4DL*;={T5nrR9F6ZA| z8s1vVySq~A>w>~rk0slbr!TQyI;meV2I#T|;n^0JXFuL@?gqKj)78&qol`;+062@B A_W%F@ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_face_retouching_natural_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_face_retouching_natural_white_48.png new file mode 100644 index 
0000000000000000000000000000000000000000..9457ceb411ad4d4a606263679a68d0cf13b89064 GIT binary patch literal 2383 zcmV-V39$BwP)SBTdBsgjbI8u5oM5=_47$vHURKc}s;Zlf6V?_9{uYY}rOKsns-I>{$ z-C1kT^WJmL{O|6}`JPNBrIb=iDW#NBa-3Mr>J$Y(fR1xKML`VDVhR*koFX9t=l}o* zQY5_Lna3>vZZR)K!waAm{zolIQoD>AvNUs$965T)GXOw8c|Ih^pJ?NkY-UkNIS(*+ z51>IFa37dz8ac{Ess=5PT;lEU0^C`pkwSU(WSrfB2QALAhndf2^{(; zb@5}$(#ai6(M=BVJxSU1MZd0h3!D|~AGZgXqzmk#B2a!$(eHZP0cR~;*rm@|=c~bb z&tI)~0h~>owhYb{e$CtfE#C3`A4! zDJe~sb4JMHOiU2*ELSZ;Gr|!bm(pXs#O~J>@m|D2x-Eipli%~Ol-*PBadL^Mhx&-@ zBI>wiaXbDM4@&4Tlbw^yLK*5~Co}DWQ_4QUb{L-i&W~g)XXeoo!rmIB#hRzj0-wz= z9Gzw}B~szASGXNo+HGF35K?2Y#!D@;9Uo(rWUTqP!Bexqaig(oR#`f=rUpQ4;D%L; zC)uAETeypTO6n%f27}>?sdvn?((=cZY(tU77LCH>yBaL8aMX~4ROtpU@eOA}t}HaPIS(5JX_&)2=0MP%5^{#lqFVCC-=%k{i#hGX zHbYB!J(FRP6(8a4=`Us)?D5+2GQ<7X}0Ac-m=+G8tPy3OSCUyts}TcoyRVe(V^3`i2T-R0 zMckP;b}cltsNy%;k<68z9KSQ+nV0odyByLM`L;1* zwVbVVjk%nP!8s3ZZ%laRg0524q2RgA_l?{1Gug}m`jwoHqHHXS7@71`m7Y3{A>jFs zd3Yw|yM9gwJyHR;mnBT!AM2@cOdFnm@<=8EqJ*Vn*~_c^g&y+vwuH~gljpA-<#+s~ z$jbV%NZ*5c;xB2!)5C&{v&3ExjX&1g7Dx1A&MvpCF@w)MbpBGq;ouRiILuxLyRpWv z>ZbzDzZHcScFFzLea;JX->lB+c|~~sz}GU)lQ1{DTb)(A0ytOvqJ>TYB+Bl+s?O?^ zBsjNdNy57`wyK{AR`HfPE4eWGYktKi4gypsSVvBs)hEHeGT+T4jD!*PkJMS^ef_NV zuSyUS2hbwHivFR_N)%xDH+$6+xXdC;6HR296>5PY`A{iZl{zbtBY3|?T(WgsWbzKJ z!Oh&Z_)sSLCd>geU6IdHkD}o|mMZIGgb&lr0VMjwlp@SLB&ezGC{ zDY)cQcGVgSpDF>Dyt!juA*$_c@W^lF5Jsplc})Q$_`4oZbPP@|qQ;^KvMi42yl6RxLPm!+uur7e*FwFy>p zS)Zk-fa;47WovQ4GecNC+o8`?T!L4hIDqPfYiZDDDlX#eN*q94LQA|N3|mnd@F>d? 
z1F##y!MKW71YUAT^HIL6{ugo5_r7(G1I!cyWvmC?)I%*D+t?TGq6a6n-jCqY_n5j@RabjkLDXqit-yQ(CjQg%|2ScZ&sAwXo_LhI0?@hpT5O{ z+?-Dpael*1`ej;nBZg`>(YXZAj&qUI!B{A%FnC5655K3qc z9B0uNStq2_xa5=3xv^AoGEkpukzOJyHE{bnJM71cn8pqtkNy8W{41mW;^juRpOqVs zt*yo5KH;$OshtgRY>IM2|3_%I6W*tqcB#hWyRo(VNnaaL;E35g8p}8$IrQX4EN{=a zFo^tw*HhH1x55-jyq|jMZb-OlJglK{qNz-{AEJ#~J!^zgYH3p%k=y6Q_17AgAKl;t zP2wThMG_vPi4)30P0u(p0YdxwtRc>Em>1YWEla5S%1L29RV<;FExf>C&S{NR9%Y|n zBMz)yz@|#qth=O|!H{kz;OVv+&4POWD%Zc_x;h>ESP*5Co9Zh&O}BHCj5 zAa;vK5hVLp8IMaMvni8Air_MA=rUG<5EDrDu2Uame=;hRf_~^Mvih(37CrGx?2K=*ZZAROJ&9?{zx2vGk2t zEEbDVPN`yz;V>36hdmUlk9NES03^Aa9L2tmeOSr&{fD-SeS_0U@MCl7qu3XCfQD`0 zK*e~R#w9sZ(Z=mGEy2NxE>0%R=4_;2bbGD@UM!)rqK6U|;MM!&C~EjUnYdfg!rr9G zM1q|a9TYN~OiiaiQNd04ah0NhV%Flv0`j9Wweaf-g&(ciNaNx%#MQVsSz*V2aPfx1 zjZUO-QA3HsjN5Q^UR=SNHc;V3E;YDXt?*)dENrc?;!>Itw?h0G3%^%bF&qm+6;^zJ zh4+;GD5ewJvKt4n2fgV`i9BFB7N*JrO6W{)_TV6PV_P~=tl&|`q1?-0R#9Iiqj-^f zIFWKSf|XcU-o!-sWbWl9Mp4z!^BBZE988JqQNaZaC50CWhH(k4RS#(_q#Hc?a5*DM z;YA&TIge6V;TZZ;N2XE?Vki9opI{-OA7D3z`grA9UgHqCA%~L~jelwSv%S8>dMvEf zx7dZjxOk7_C6UkF)MBNb z{Gc35@6(0;Sa_4}jKfln{2`5{RlJLZ|FITJ8{`*jh=Td@iwQ)*-{lw25d}Y!U)(?x zoF%{5mnhgqeo;V*uv;q^j3jt-c#WIck3xNoBKF}{US$r!drmI6gdin&p1x`V+w&Yr zf^@uGP(gxVRPk#%%LBUcYibC_CJN<-!Pu+j=b6oa=)ljZ#@=gk#nD)s!ItuYt(cCr zedUhHSQ_njbwqO+b!QfH;oLdzvz}6wY7*i1svYET^euQOs{h zwP+)~Elr!s32G_q!EE=POE(%MvoPivq$fWM)3S6fCx74Vn6L7A=^q4)iCYJqxpH z$+eQqEue#fK^f!l=k}~xv>!`}6oqY{Mff#b_P7xjkH{F0;Nm*jqPz3fb6?}Wv zIgG@HV`*}>l^?fbz*{l{YsbhH|HEDtKc_=h93&qgH?$)i;8ldKp06TQ5sV}ia>2DN ze3@kjo+n9=E|v@a(QMbD_N{jv>NVyNypi&c0zM(^QWVH9_9F`RahbtK!42|@7m0!w zyaaj03 zB9A+$!bni;v-J7i3zMVi3$8PkeI-o7(|Lp z)zY71WQ7VYWH`yJ2%4->J+$ExhLOaJB*VCnGTEb)L%64jsM43XhZAX~Mz9`pI-u-iB;Ig2m?3&Rvv{0Us~UwFgK+h4g&7y%>K26=CDh;|O=pE0@8IG;3Omlg#We~$ z@>zgi8)>8P<7)i6Ur|6l%kX0@#fk>5$B&y86%;a^OwFcH(ZSB7$V8gG6)oIDCVr!+ zA&2+zY5^q*JBLKH1p6qO=*LDjC(UV!E)FJ1({59=aW+XB_b0_TkPS2>{q@u$MjsaP zV+l@I>>G68KYZT`_EGHn$mKp#0Km(%RjfYtU=E8pOpS?APKCY}i^XEG&j5tFwV9~mISGP}aQ})(Z#O=b 
zs%65x&)_icQ*+5#jJh$Cj=J!Cb`TT(IGcUbjE?JbCY^QBnQ+!+&*R1%wWJ9W!W(^h zI&UZ)bCD5BR_e?2TmEK)oBerSk@B-L^La$dJ(o8YNty=SVU}GeF=s*-x1E$m-bII< zHRb_#Dl|lvFG)G3{+rXs_ri!bPcSl4lhvw1pG z+T_+{hFvddyYz9YWuDCr*`udV{9bdc=#<{-iId-QLIV~UymE|*@>jb)tUAaDj4uXH LS3j3^P6gM literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_highlight_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_highlight_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..1d40aecb2b34fd8a1ce0306142ba2ecc12c604c6 GIT binary patch literal 462 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFi!P!aSW+oe0y8ar?pVz*h6l| z3Hr-l&-p#g`q{hFvY(%FA6DRKt`Tn*-pAeazwGzE?O*@YSihIEZ~s42#dDHMW?cu5 zfn>CpgRh@@6|Gf0!i%@I&=cNx{s&bv@^M0Nh#Cws+ zzv{=_!`DCWa=3E(vq^Y+i0_s2m;JAt-|Y9rOnmo;82t}#JW6KGKJ0fkmeIdsR)tf@ z0pUez^8`dcw5;e-eFBG{T9zv%ggDT2218r>(U_(me!5BCTL<33Y~ zz3<(0$wjPkE)N~IoM_gt=?JS7Xl1{s#lPr-Lex`75jmIDHXVnWGv=}fRtT*9tiZ*; us8r5AwO)(?6f_tnqKd4r2ZpKVCGn|_=EeI=?QR0&iNVv=&t;ucLK6V%!Lefi literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_panorama_horizontal_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_panorama_horizontal_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..812c66e7b3d006c103acc7f061617c2dc7265925 GIT binary patch literal 1020 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFdz1GaSW+oe0!I9x=y-8>qY%- zxwnx~n7XxJ+S*-i{$KBGu0OQ-LXH2_4rE|Ad#ml+@c3OP4*k-U?P=6~Zxo+haVl%+ z@5r!dd5aq?OPviCERB6povRzSYdU*~Z3^?@Y12{*PFo${zUrOE-P7k6yfWv%b#=

sL&1SHC@7>qp zqGvfmzAm&duXcEfnqg05&NG|!D^~aA{PgUrCTeEO1bNo&U$VkD$=+!?CzL!62?cei~wSVXJy>g$M%xj-y^TvkHcQBpcz&XW%bIY>L zdlrlCeVmj1?VH0>{bO6_eXd<1TJxahwdD3cOC=j`8BFYreHC2#rPDhnXVItrXRNC( zOxIlVSmOJSsOsOhZ2PutJNYGa)hziHld@z^-@3l_-esTnyRPk> z+rDKqw>WEOwq-|V&yw{)R}VGra{Aw9()H0NGi>vWOF!HUr~LE2e$_reS2ci%+Q`qy z`at0Om3J$y+|DSzQk{Hzaa?VAgF~C?`rQAP-1W=N=d{|t;X1zcM&yotJ;{>4cU;^3 z)<9>&n!;>Lodx=nBX3{aI&+!DF{@=Oj+(S`=016Jt8aG9t=z|IGmr0F(D~WoT2UD{ zzs}mLubE3jKv6Ko;p)mu)puThtY7A}|Kf|yOs0DtJ5IN{)OT%r&rYAb5a#K67v>-A zP4E$(eJm$z!|Kg<-e2w$dQr@8b^KkJNps+ryWY=#r*GI~JLh>~{Pw5?;d7FOjiq*( zbS9q7sy)8*`u4B9U-m}7%@#a7rEH0Qa^$6Z?>+^`GW~rcS3T4D|I#p?6g&0`kHvqP zY~L$zOYFEZ>!8lV6!~wz9PYoGGyhH!ck&dmMf#U-99Fh^e)rPeJL_LbFONR9U}>u4 z!DVuxrNuAfrd_ z$|~Htn^<``F163-RXE_M%f4V+?UW#C8FpUA6$jW_-f*YfWN=|Dxpiw%{oxJYOuol` z-c$X4XPx}Re(t<^?q!iPW`aOrGppT(qaEvh9u#|Dd8#y4d*J3 z{=VdO#k@~v8@<@?To;rpc;KLZ_O-+>zAx*qTQweh%4?Exqh?O2lfku4=GwF#!)=zk z*Y~Uz=qbJBU_bMk;}u+z?} zt)kOx=OQP|Yn9AXUl;rAbopq(o;=qoz+d&_j+0>Kg@D$C&SzH{{vK&O^|f<)$MmcY z$3+ewe@*cg=`Xgq7RmfoT2p@C8M+k5qq&_7odj+sQQbwXpt0<+;p%eZcx& zw4Up9T;^bFdVop3xiHV*nj{nNRe|dvD-5pXvUgoyzOQJFwTwpG{9iB1Z3FzpdmN`T z^IqLovF0}am91YjkMHVkeDpQ5bj@z@70dY+))zLqrI@Xf&uBkW_KN+M!sgyzjnhu% z?5ta$X|y}qIN+c9?dwa-n0%kqe%bH0fN67ziE=1cNYca{v87#&UN$N7b-vA<{EFq% zbN9jpTkfaVK8*cP3v}{KP}GDzMnoZBN{@%{R@ZVp_FDxDoF-dOXZE!WXfS2hGTBjV zIpN3}X4Z(31#LiSu6HqfB6oJmtS~bMgWufyziJhFL?3?uOw|mYu6{1-oD!MoNABB8w>a3=9J!US{is$%r*Fc+xa6gs^K^5{5=+$z z)2>xNpTq9)Bze!|pZaIkhMa+eq8`yQi^qL}3r!yN8J9(zlwa=FZ@AEGnY*t4JhuLp zd&<3gDy)1u7IMz5SvSw3;$ElJ&GokYN3J*@%NDdvT0Q-3Z>@~%1&hP8?PQa5b}yav zS)=jXX69YJLNCm$7X(gmSp25JPGiRM<9q(sHL?hKD4-L|{`YBR=WIMK7riD>^u_t} z`Inx*T;E@}$Uk_2*HWjs2iu)5GJc)JeDT@S%R4`EzZGKMI6rI0zW5-8TPwfI-%@2+ zbgnpS!SnTNT@#sjm)$xnHHD|+*p=Hgy+w>)m$SW^&e8FTXZFp-H|t~D*|*qTj{8+r z*$`#Sxb+nC7Hi=P>l~PV>OWI@n|n{b%P#B3hF3cdH_l`>c3;Q$_U<>$H{5qDe7?WF zwf6b{sSZq>DiffD>vxm=C$w(M6rAF3`@@uQXu|wCMF-4Q>r4~skeV%1bTs>s$&0Dl zd^i7`x*gTTlr&Rs`W|1~E%lEl>75p2Hgxaq7P)@r`Sr`Jy3ZCdC(YCQK67u#qj`Ft 
z1sN}<%=kImyzM1>)_tiC*1n=@+mnLajm&|MXW8ubD=z=N%|WNo`QF_PB6=4%T}y;C z1f)f-y3KGn-pVN3&S|2B49hvx~!={WwXeyv_i!OSqr}E4e_Zoo5Zj#Oo--0aC~+DIBD{cohZN zMG{#pnY|PguOSzUNyIA>Sxj#64qEXCUh^C6#0x0OA-w7^MI~!7ngqNop3zykM&3s_ z*hfwYJxcKf;o=KQNodiISnN2({mfxF?Px%0a*>Bp)TaZ(n9cp1!j9OG4%v(%6}>N+ z7kD2dZ~<(&|n2xDeR-3WB~QqM=DyY#5E#`LMw{0k_nWx^0ylY^q869Y zIEYVd;vqE7QPDwcbPkRC#U7TUagAEy0JXS=#xk*k5on~+MI50Esc4KAJ-U*L#sYDO z1!$zwS%1bHkH#w!1PU;P<9x{(Zg8Eme8o|wl23xbOK8MXR<|(x6pc%PA0+L0nq>S* z;%VAT0OaEm8lNZ@j%-3_M8GqFcM0T8+BzL0VUuJItw}@U!vOEjD1sH0{;SLf=(LfH zBRA*JX(C44%iCnc+uSQgG(qQFCNdDVqjN%xxSJozs2{moj5vYLcF7oOkb+J-F~G-B zGUGLT=FtqD6lzNFh~Q&%E)pRItS7V9ng3%hpz{$C5-i4ptC@kiXw-8yS0_qTt{n;1V&!2pmj9xY-4|R-l0*pp4jyolNEJbg#_(dL4&`L4Rk(<$58q8+(vct{l zQBVKwE98~HnCtMkqZ?Y?jK>`}tR8omk&AO^wGac|akGe{1zP9H6}ltW4Prn&H0p{0 z8_=2{!6Jf>&^bkf7_iRCq5Va4J|;qfM@^@(&uboxeQJa{I6RteaQK7e;P9BE;Lwaa z9Y@b^T8y3>IgFmoXhWJdqA?>o2ME^T0L2UMppixk$s9JIGtxr&%UiSySQMg1~EN#8XCe z=tL?S3&kN8s*5=F7>!0MZN(AVc%Tf}LfV>0EpdRFBw9M!(Bpp3T}7y1@hXBIJ`SRB znfhW8b-9ej5qx47ksL%Lp5bB%!-z-Yh^rf8PkG=%T}h6k^9+&hu4F!kPAmgOfk95M zWa{A~3avw#zSMgNttfor8Y|ICAu7Yy!lOtXZjTdsj3yQ@i(|B8EsAgmuR26Q@dDcM8(#AVt;9RX?YTCAxx{NI z#BP$wYDw&&pm-JKv$$310m_TlQJhs-((HCM#Uxyio&KC8R8G*J>=J&c?7ZRjWwud4 z!WlUk literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_rotate_left_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_rotate_left_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..fa4295dbc5b459a7f656f54b6331edeef4d109fc GIT binary patch literal 1372 zcmV-i1*7_jP)3N|i=m~nt!`?l`J&pxn7=OpMjkRstw)bXj+qONnZQHhOoBih5 zU)MNE+hm%P%;bODyQd8g`Y`8EGR~9AGP?09=TIQd<8Bv&c!W}Mo>ZCIb0xXrJnklN zb(|-a@mv(=No4@X#(CWBp&8ZUJnlAdt)h%_tRUO!P*~ALZI+O2GpViUq6Q1ewsoAM z=%Px-iRv+%%!W=|E5&%yAO{?iM&g;2P*_k*8KQVPiR6fUyh8$a zeaNja;990r&O@S6q##$E!~)!XD*GtSTJV&pC`0`Ok|;(u-0a~D*+zSkyd^5+o=7Q? 
zo9}V6fFiPqn@IALsK4J~AKu1I9qWge&qM|2XOeyKl=z&f$f;>qL_lh2{`##d%>TH_VT3Adu-24HUEBm3^aOtsklvNNo4JViF-QTG^;S`+Qs37nvX6wkKP+&P`Un>Z7!0bnb1fN^I zU1Tw4S1BYoAF~{aDcdNIld=j43Xy}^naVaU$85ergbCK}g?J6K777u5!t5Qzl6VfY zW{M?oJ!ZodOX66}rYV+0b<7qimP8rM)+?4oLCkU#OCmpJyA(^JG-m7MDn3asRxF8Q zFq^Je5;tHrLa`*C$E=CH_leDAU+VF(H!Nt6a=pHKl*KHVDqM+9t1aG)>k1w zW6TniRxoIQlcQxBmBTQ3;W7y8?INGzWu|Hi^$d#)Z}9d$rB%2Q}iRZqJWw#1{QFh{7Zjx zlX(e2S+8(0{&??4LC#|QGyrPNNO86UQs|r<7qU9PTWQD z4DXfiq6@o8$L8qdnT@lZbm4vuQVNvk9y+s=bZy`iMFHhZc?=fQNDe$~6S$CrqmpMj zX_EsoHG;B=0!kn6tD0_gTNT2L}QG0000${;!Ro es;a80s;UA%4aziygtCVK0000FpI)03KdOM zG*QuHtH@T7&1dHx6l2MvFbe-SwmG(K+qP}nwr$(CZQHi(iPo5RpXx%okkt1Ey}EqB z@J&QBv$GlhmnUKjHsLxxAc$i%954X~@+>3E6Vc8VJccDt#AKX750*R;b8{26JQ1UE zDYiTjWo*Ou*z!b-#bdbSiCB$qDXx!bQ#Mh-c@*1wjH7I#l}9MHw;5B}#6-M9Ft2i2 z;5?%$o0x{L@#`n9VKqARD}s1E%Xz-Sj~=dPHR|*?{CkPcp`GVzcBEPV6Ch8UvWcB$IcDT)?xbY+|k4upPT}Y`y zppw@iCC3o6;L^ubkpi8~hzKKaZZj%QT!hQ13e3n866fJ^vf@J>J=lJvBSN547!moi zOFy>HDOFrp3zx$b!eEYcaWO6nDK6ZMt(OjkVB`snDR4Pc@t}=9Z0Dq5NhWt-`-Td| zfn^5!X@f&!x&SP$IB*!Y50whk(}V3m#eoN~9if!qN^Cd9GpVH)+q_B%R>SsXJmX)s zeyWrbjD^dnDmGTe_LNeDPq8hgVq;Hirzl0Z6x;49HqOVkopQh7L2Sz?_ZwcoHnnoU z;T>#aDEAw_#MY|ZZy3Nmicr&q~ALhi?Pqk8lHe4E2Y}C__ZGNQ$OJaLRao{OzM=B*a z0ozrI1IMO(*r${)uu!$lZMgnAL&f#@$*hvrr@2X1aS^7`zub&j7vXLC?U*+OCMvTXBom{N(7a> zfy>E?8>`~BffB%W$sIoi%6Su)Zy8Uqqn#JAosz!AEH6@-Y@+y&9|Mo$cAk`yM5ih# z%z~GrQjzFmT9h14$IJdHNVKq$L@n>*~ix#yohtiTcD6eNKy#$KgIV7KlI5 zQc43#xGlui6p>KPA&ezY)GIA$n?%Rlyal|=Sjq|}^u0Y!M2x|i{N-InQ&uno zKlvVs~;k2%l) z$5X2;VJW_fEzm2>r0ijIE?}VOfqFTB3S|{jaaU0T-NfX|I_BreA_N-XN~YFO!4f<_ zh=KYzj|nxjFoGqxoSw*mo?&;!(ojVUn{Y2Zp#nY4Zj7tpfGXx-S8nD5AAx@6QBGn# z#@BE}3v;l!9N3U)Dc3LU!65+v006_F{;gl&auR?L00000007Sb{9e9jH~#Nx00000 LNkvXXu0mjf;O~Br literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_shutter_speed_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_shutter_speed_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..0f0081337d4617172e7c407da31762e75507ca98 
GIT binary patch literal 2741 zcmV;m3QF~fP);anZQHhO+qT!Xy<=`q_x%1iC7(&EQoU8(J$v%{*rmu?80-@5r#4zV_k8^@?0#rj8bl*jIe#j!4!!vmS!sObdKJ4tiU}q z5S8hiOtDXRCh&*%D9OD$=Lwq7WHv~YGJ$`5PZ!A*4?N3toIrmzWes{#NC~B^PHzTqJlFCJKawI`lUPbJjS@a2 zhRmF?!M;i%@SKmIne-JSR^@nJVLskI6HoMIX6}Y(ds29NgHFcB za$kPKMS|6o>pGYc7Ifkb{G2SA$Fh{+rI90yjQR}Oc08HcPmg^jBLp5H0(yxge_u*wB8yG&q7=k%U6nvxQ zby~a?*?@(3xkxgHt$o6?mS`y9Zi4z%6zqcz-l3gnSj#g+jjSS>K>=g2t7VfGbE%;O zw~-d$X|yzgWktg#)MEFQWCmwrm!yv&puq$5q$s!yvw8Fu4Sh&rcces*rK!R0B+Fp1 zi6E*eRjyu!I&5m$Ni>{<-9ox#ckKMOSa1bFoGS{R$4i3UMZ=re-5}9o85UtP#XhB= zkWsjrNP%+Y7WheVfS!C|3N|$nkEyvEnn~~~F zhn19ag=RJv1$W_PwmLdybrJ;8#CqCYJPw-!BnrHS&0(V8J5&=W))KT+u{%T*d_YjY zh=Rkg`9{J=7ZR9FRc~4LMF-c5f^8jVU;RnP!CIFv1+(S|I%-bD<}_{AG#wqJ^m;HJ z#-v%BeHAf*bWBn6MVyMwu@W}k$E<<&z61TC6lj^jS+w+!C^(OFoh^T(Jq?(>9sUfy z7PIHHj&U+(m+F~hKvzyzt?;RubWI@7VVY7$aoG818zm{v9@s2mby4t)S2+QXvpAGw z8vT6!?yztjX0x^L>mBTV(OPz99{(6kfi@{=W^ufu-P5x(GF;?m%%0Y!7YU!LyTf(I!PUFJq`XGmDM9|L`^ z&T+W;Tr!M1F}qvB!iSihqO`dQSBFa=>_u=lI=npIm9X%GLvr*rTvgI7q~CA%vNmov z8!TaA1ZEp5Z3f})H3@{?1ofL%_ir{{!opO{R@K@Bv6}?K+XORBt3)!JC1Ifqvr?^1 z5cB9P0k9@1f+*LfzEKYqX~wLr)+U%|BoH1Wh?JJaG)qcYScH+5Oif@H!hD*gXu#bf zZ8gQ-CgMk|yV9ndpk~oQ0^kPRl_}SCx3&oj(=c0EX)~SR9+Cj)L>aE8YCDk3rb$>B z>F}EWm2_+yTH%dun8Ry+Wc1mfK1!RH2~J;o^+gdgar35f)lQiGEMegz|7sv^B0YDA zC)RLW>(brgP%-qegoS&plhw#E1iiRrI4(|bn5^C_Vd0F()0P$-1x(R0)&p>{p+mak z^b9A=!bdyi8`eC&z)yu!!faqBQm#u>(bMl+iv=YVI{{H=E5W5+0J)vX{tc)g5|i z%S-4y7M3|WqKSr%A2EAYqM|PXG|*GKqZ8OA)Db$*VfKTBiEA;N>%4I2?;FqRjz{$O zs^b_ak2%(It1z*H)6#Q)($~zoS}ni?bV=6KmW*n#*~cN92s=oaD5Mdy=e6%^0_l3# zcT(}TQ|0w|?Vf%CvqlOfTzukG{&Fhmsn)7Hx>AkF)=HZ;%*Eswt^B1Cvri;!oZxcM z1I0`y9cPJxM==}ZyXj!6R_l72&qzB=bRdD*6zz{XhTtYRS9k2I^;44p-GN%SFcq@| z?K4}%d4yIOIEkS8i-KRU8>fYGU$6N&M})Ad64CgkI9t8&n*`TW(#*?DA z_QDvC&7KkkiYUkC2%mB**V_6LX5($1_EOk}e_Z8w|5Q+v?X3v;{EVxcMZq=r(*yAy ze;7=@<5q+lB`TCUZ)RDB`M8*`)z+^_BmQdX*8PT@!so!;Z|y zq>fT;_EkIX+3c33gC;+t zK(dSW48z4&^vn2BrNe12*~ik1$>3qP2QU~nV^~IV16}w9SJk}8{ zlP|3eOYcF%}28J29*~C-V}>VM%xNb!1@J 
z*w6hZkrl}2EbxddW?>F)Y`~y(2LHdtXV5 z+OvT$JbL=QOGoA`yJR?f#wc?rhXZC_gNQ8of8qT3_&sGea%vh6YXUPkgQu&X%Q~lo FCIDb}M7IC{ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/baseline_text_fields_white_48.png b/app/src/main/res/drawable-xxhdpi/baseline_text_fields_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..ba38086ada1721410c28c9c8890158e16b619314 GIT binary patch literal 124 zcmeAS@N?(olHy`uVBq!ia0vp^6F`^|NHCnYy)O!+m`Z~Df*BafCZDwc@=QHl978G? zlO@y=L=675H_A`p?&VTs{PzF9!<7x~!Lwt(FN8x|m6@E@oi(+{)is S>i8!OWUQyFpUXO@geCyD-z3)n literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/dro_icon.png b/app/src/main/res/drawable-xxhdpi/dro_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..87aee1901f00d4b68119f03179b7545b74815708 GIT binary patch literal 1667 zcmai#i#OA69LK+!TiEy^W-d{al3|Pr)n=1hw9O?ZF_)ihnf$b=TuRiWk^6p8@mrCx zl88=Gk-J43W-ckek`|Rx==Rg^Tm1o@^L(Dq`8?-+-p_eGpXWSJ7SY!os)kSl008Rg zK_G)U{70xNgY&affkJV7)rtBlr*iX#Wg_>~*(Qz(qLAEr{iJ z@K4rZYC<#+Po>APtmC3trWk8mYy0e(zli{#tl&w&`5)Q;rj#xS2+-{g^T2qzWfYg} z8d34APp8=;klc0cu`9$VrOG^2gW{ry{OF*PO+LF6UjQXxiV{qh2#cCL1PvvChqz2}-W7=AYO#bzCRfN?LsH##nm5t^!UFsL~s61C<8lGxNiE+sJb{4;= zy~q##s3~Q^_!(k226C5s2kkD>m?lGx>_=_uQ09l^r)ZH0d#yXVGg$?){Lh-}n$}WI zC-QZL(*0{o*C4PKL2;`Gkn5%6%I_aQXt=j6;aPShCR1xqs>eNil@PVMtX<+x&I`Xu znZsT7f@syTo6Bv8A%-2^npo5O7NZ)6Ys@o8NAu3u;h5XoHhYigO736BU}{_d1>x19 z$ZscCSdIas*Q^Oyp&^oz;f(1--AFEF7I*m$1fD(puX$G|Lu3}n2Cgfe zZ+o16I@N^C;SPhk0ca%&Mhaom$OnK`iANK1(0f3#8rW1rxc!BzJdI-}dJi~Hi8Av9 zwOKMZOw`mEpI4xi4*0w8(AJPcCes$#A0J7l6f8<>*ZlEBCNy=$El(;hK7GxlR#@9(3%X{ zC`ui#hd#5%kJtI5D*1`gH0rr)yjgKYmO=$DHLTh^GiXvFC8NMFZF(lgTJ)T;3EIfT ztG7oos$o8*@K!p%FPz8dnAMot#4J>Nbh(WYRKXi0He21aedzDarVVxgU8gMDlfiQf zi~$yG9eqRG%#QDwxoJi6rVB#XQs7qvzc{J`#E735Q&5(}9qs;I6U6K=$=LV+_U+Qc z=-?{Qb+h3~bnV_>u4T{wj{9%{-pV1ZlhHf%5+a_piu{aaDkl`Cch-f1ERKzaE?y`p z-%|(j$J-(Wn`=gf>NNW;aU|Te) zp5%&RYL)8@a{3wQyh8hIcxZW3B(ZROj#9}>5(^f9x*Tl1RrVkM4X!UkU|F8-`dBC;tHre%oMeU0#*UQ$f9yiN7J 
z2=C;zP$$r7nRCse-lELn8^{n-2curITSl=J9wy!e-q0y3sK2OVMe-$qH}}QEt(_^2 zZe})*92UCV&wQEtu7}~mfa#vM(a8CiJsCZeBFb6lG_&Y28@V?LSoL{1qrZ*_;@sa* zQP?w9YcwjVU$jKdn@r}C53J~IUexZ#whsd}rnf3;H$xW!RPx57zA8Zje{9uRyJ75P z(!DH_8p6>57_)hG`{~)LSpY_;w&{4qu+Q_pNGpzpIt5KQs`xag=Ii0&{PbzfWmCm6 z?MRk!TAIaTbc-j7)FV3nXy9S6N$`pO!N7j}2WN3>a&t?{@L*WY!Tc73C3Hd6kcxzW zAx!9UN*jnTol6ob=rEac+xnNVogO$9=i$LMDGRkR*Y`3gv4js|G-c&!`p#I@0``&w zVX5KZzV4_OPoJwf6i*f8A9^hkN3rw^GOr~|`sLcJB8u_vU)s-ML$YPfXXiF1h8c3{ Q;D-Tty7>}nUBlD=2b8P#5&!@I literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/expo_icon.png b/app/src/main/res/drawable-xxhdpi/expo_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..e68d4dbf497f88a9869534343930923e9cb7e33c GIT binary patch literal 1644 zcmV-y29x=TP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGmasU7lasggtTjKx#1_4P#K~#8N?b>OG zRb?0l@VQdhs-+P^5=m)j3z1<9Qy3Js_(d?x9LP-Uc z^}*5%N~e;r8jCh6Nlh)M#B%yS@AKTldCz_Bz31F}W+M9k;Nd;*dd{5p>~}hdh=_=Y zh=_=Yh=_=Yh=_=Yh=_=Yh=_=Yh=~4A`uqDUw%E2@17U@;kWA2AZi_3G%Ga`h{&SE_ zmPN_K?}+87U#nRk`YlCroaVx|zJopwX*SdkK_c6Bawt&Mb%>papAcIS@1bv>6c&MX zj6re{;$noXXB6UCL@(m=RPt-5Wx?v52LJB3?qAVMCMy1I`LV7k8TtA!Cfwq8iC0rJ?*##O}}{cA_;2 zkbh2#3%l8daH0Nu#4n*`aeEQdY^bK>6dovFSlWS8^GhW6X)bcRrCIY3!*nt6AjbL1 zmfQ6xcGBgx96`BUYTrg26AFqu4DlkyIZd-|!a&{`Fb{D`otS_aXWbJh8;btB- zfc!`;j`Sf$T9X9IyfffhglxyDhzk*m5T1=TOa$dmX>|>`9NmBzk2nZ%I^rrsH^S5K z{+1gC@+ROKt1bln=4kiG0jmy>e?*HTcUpA><=zCGXVnRm$)tGBwI%`bleM_Ut=7ba z`p2|cjj7h8br__y2@vSJRfmWyv%2Fo8~GXGKCPiV*8~WZZ`A6@bygi9zebBA?tLl? z?cUPnkyov{u7SJ<)U(jiv>DjggOM)RY@`d<{`8||tGr9XNtVhEoQOmoH`D$?bn9YM zPg%NJ^|R7vZ3B1s2nYq=whN5?H{(~cFD~#E4-(ev6iNFWBq^Fax?OG{ichF z5tvwS+(fGrFbV^YsTbC!kvl9+n__8Nbc1e__x!)vQsM2W^_Dh;iIRJnyqsk<+#jt@ zzyk<5dfdPy9EG$lZ0s5&&909h+S$@JY|j-)fE`6D-&GDsKY5wKOkp0=!FK;@`LlS4D5NjuR{`YCU%P zPQ*4W??$XZ43&x$`F1l?7Zal`P1|p2UVmDew%1Zus}mp}onA-0hIkqAJmP7@6NnW! 
zvgG4eGm9*($9?wC>1c#}F!RU zfpu}AzR0(r1Bfn!dw)XtCTWy$rB&yG_8YWYq~;AiZO|`4*dSS_cYW`&G;JZ;%h6oK z9)w)G-$$%M$Vb8Hh|xIe=OcDu*|Xi!w5u)6D|@5w!}`4YLVnl*WxomEqeDcNSl!>6 zjocqPI#B1G0cTrvA?SCnc8`2y)lH%8bnOz^V%2pG>KT9yk{jK8%|+w~n{=8c-;a_v z)*Oo7IIA{Unl=&ZKF5}uk*BS!H!W=)HlWOpaVxcZWVTfY$bX~7k;kn%f^zQ+IMb>N zLBAo09btsn8;C=!nG5v`Ld)15MA6GEog@IAFA2NhA?;oXd^#uW^anwU(c3labi1S zyj8aiW6AFn@nt1AGaQM0T9$8AMXnMT!+1B;rYfue0000K`6D;uEwU13>+rCtm9ES5tW@j}#TGb$Xn!{r0K4;zl zpYUmt1>UvyO&koj>L@2j&j`M+{K1b|^y=FYNXHK?u@1aDigz3}2 z^5jH$itt@#w5?J5=XG}FRF{VavL71!`6V8FzU;NAUfQnyN6S z*28ONe?Qo?Uw-c@-o1w&W;OW#*ed>~mVN(svAH0wqTTuKD3J>X-Ul&%*>&ilSir;L z*!ucoI)B>Y9$w&o@t}Ou6RYPxw(1(SI@>?so1b+vNF+n8WbKR(K^8d=%3treS9_>@ zWrYY|ZA0-NtG?G?KU!~G0pfZ7GTQRZs4OE;Q|6!ihW`5&hwavK?Ow> zrteD@)Rg&kUvti^nbS5UcY8c6uzVn}-}4!}k%^8}tMmV7cOPaS`nvwZ>qrx>_P+9t z9#2LKy5BHr&!}hHj zGHUtvc000FCcR!#Fry^G>smkGg9S4VM~U!F=JS?h{cUuAdugDBkCo0+VI6fJQ-^Z} z5)U>^;kvQuOv7F!zHCih88!=@REg(l#~9kTE-2W+dq(+Q8qb_|`!%iGQ!^GZdWA6V zR%>MQ_&$j<|L_{Wa3S6cot*gx{;vx>u(iGAn%n*lv7Gy~50+2-p(k|zD441GV;#%=sSkDw z)r2>__kzmf7Ib~I+oMLj^S#o?Se1RDiapFR%_^U|F^IsNau#SWV;|6{wj>)vtk^@eb~Uw$#eM5_1H{P4RDy7D3g z3U~NgZZ~>ZUr4cXoLaizP(KZi$1Fg zT~XX6#PLet?z@Q#_%1d(1hi*t>jSEpm(sW;!%NPi)9kj-1cA36ESwkHotZp37HRn@ z&7FJVo!e30kUK8;h<`37_a?}nuUw=Q^+D5L#b)+-m98)4N%K4!zQucpa9#*MAUG zYy3@k8Rsr=7Aa6POo|eD5oYAkCX0oTYNF&3wKwAirc04 Svt|HO6oaR$pUXO@geCx%s(l*( literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/flash_on.png b/app/src/main/res/drawable-xxhdpi/flash_on.png new file mode 100644 index 0000000000000000000000000000000000000000..803683a5ced2ed5fe18748fcae421e40564ee27c GIT binary patch literal 487 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q4M;wBd$farfpMLui(^Oy;@r|nw9Ju!% zf5#3KkOd0-zVQ~Q;(Rl+ZmryIIiNtH-~08pzZDoH*cc?9-2dv5@;9N`vnIUhz0${6 zm3`eu$|wHN+xB3$M~!$VvXIby*GEJNu4B1B^}%i-kcnO&VmbF|E7poHdEfLUcEUdH uOXZHg^gQmfzTB-qP_PkhyDbYO)SLMI%VH!_7$;u`@jYGrT-G@yGywqQk>bq& literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_burst_mode_white_48dp.png 
b/app/src/main/res/drawable-xxhdpi/ic_burst_mode_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..bf58c0aa5a091184d801bf81395a54bc2781a17d GIT binary patch literal 613 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFzI-@IEGX(zP-czBCb%T_2c`T z+h&I)re}L>eyf{pc6f^EZL?2j3~qnBpZ@Ii_Nw=P@6PjIH+kNgWS|LfP|&L6Q77-U z(Mx*{Szew%Q46 zTycE5ZNAAAhyS}*Z~7*_R^h_^OIL6GTX<-f%pZr@TOB`2rYc@>c;77hqN{7wgmW6( zrix9gVzIm+eRcEt5Z8W@yRM>}IJW4lXe_#Nvo0vRwohu)I@cwCtVGW^Tz;efN>nvP zyxYt4t5!v0ndbDr&YMN=$#i?pog%!(;d;oSe~V*otqN$Ldi>FXN=L3QalSe{q8C~J zzTS5#GG>bV@n+!_vt1Xws#;}#>FTvd%etzBdK^CMa%~NtRVSIu8OWZxQ~t$*PZ1$f z3*^J151Cy2>aacZR?;OW&*>pIgqFqJdR?@@KP$5^Aa8A;t#{X{7wdL6{)*yS(Q9;v zXUmNsm&dU@B~F#PsasBNb>I&<802xZ^z2fHms!c0%bxvu;54V7$f!U{^>GicoZ`*Gz9NsFv>Zr_N;B%Uy!h;tDnm{ Hr-UW|N<{%K literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_colorize_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_colorize_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..347295f43fc62d352f665e3fe6b29ac3d3c8eae5 GIT binary patch literal 725 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFwOCFaSW-r_4c-Jo|K_Pd*OPm zuN=IbOTH+aG}QXal^MQQ{&iT^c2NNjk98ONpYHE=;c`r<-q8Lr+x~o&+pO>Ez0bMl zC@aB0meJm;;eRjK|Gj8__XV%g`pSE+r*HecvV6s!4?bHj<`vIXES|x>NP*wQQLdxO zCRfsW`+B=`wez?KdrSwW!dijk8m-)PmybdXkjqD{R zPL)2|*m1`0a{tygcDafNjW9x}^4YHy_dis)Uk()td~0(~RdD9hnMXdW2y8u6tP<#L z<1VoE*yV{UT<7_@X!lA#Svg08-CuptR?S5*HdBS3J}EqBS=$w}CRZT!`O8n48EZRh z=eg$FJQe0W7i<eZ(CV6gU=eyFICjG_d zGMA{@++z=(`&w*XvkiO4gPS*BKe`(C;mzIR+{x)BT=Uj0ZaiM?VqB5K!f(TAGyTJl z%6I-}l3zx%+I%>`?7z^)8|Izkd;&I&6CZdoG#%%jGJpA(`z@tRP3jCKto`yY0_7ik zT<;j@GW9ddx+KR%uPU3qJM!BtVykF!kXxiyq13R1-FNoWPNrMVS9ttYF7O39+gx;r zcJcjK!Z68fmfOcEKfgB!Gk8kJnjgC?s1cKEGqds4I~n%g^I;Kk1W-7~7J)+c@z0wz}mPgg&ebxsLQ E04>Tyk^lez literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_exposure_red_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_exposure_red_48dp.png new file mode 100644 index 
0000000000000000000000000000000000000000..47818b9378c7334fe22a36859b39bdba3eb00a00 GIT binary patch literal 1624 zcmXYxdpr|r7{_;U87t=2b>=d)E=1C(GtA8B6x)$p4q4KvXeCo}*-$B)WUiUYttfJ9 zMVi8i4dvEK8oA_BZY9fII-PgtbKcMA{l3rpd4Ip>dEP(XmrHYTT&=o66#{{*-sePi z1#6OgB9s6}DML@e0u6C>BtgpB(c@sFaKg^n4gwMIkSl(QV6Sr4$twf`QEQM-Xj4$( zF$hHQ>prrbTbS?HK4+?%2RcR;ZeUJANhM;bW)foe7^%02JKks^$jR+8C31q_N=u!q zQy}!*S!Cza@pkg9b9O%K{!IT{T4?+#CF6)vgPC>oXTGc7boP&kc43EUW~;S5al}SA z+8>EH2s-$G5Ds;A2o_I-&IkS!U(-X1hiH!_W96nM=ka*_tYh*OS1NUIL@IR=-1V<{ z&8U5lkl3%wzs)MKMSBUl853>OUB{1?b!KElhgUsaTV7$kfqnYwvfa8nQJrQ@d~ zmLFj@sOTicpU4iuL^s!#ybqj-I4QEqtrQB2Cjuvt*0H!w8{M=_2X*)^HK(bWvqe*S z-S_7T3L+!c@t3sSjn&3Y7Vo+3y;}v7pPNlOhusrC`*7b9zN=c@Xd&uVO-+zj_VmG} z&LWb3$V|mcYlFAg*Sm*;o4A20*P8_kd+Ft0>9ra?GjHw${~!P1I87 zg^Fvqk$rKX1lV3B8EXTy(vI)~^;(02vw|%nWPF2MIHw66J>;_A z95WYE`~Y{?^6?>z!%yEutqWWC4K`zC&50J_e5DL-ud=8@$GHq{m_5eah?0r08Bh%> zi@6qr+dV+I!qh^^jIe7#$b7EO2}}8Oo+wHs1ii0MNx+GE-1!UGU2NM*`%@vhJ$1GE077@(}wy(FB4}#swcPAIAM=Q8AOr4G7fHt ziL?R*rQ)I(PLoYLkcKjknzTL%>+HBFc57RXVOf387hriQjdiEyv>Pk_vONVnmY(ZT zvZoEzz+>XBNG41PHtP=sr|Lj8c+bp1r@c14-)&Zd(3%8->gN$BDzp25tLaDW}5xzS~05b zW))O@yj4@9Q=`KVCZ!4Fz^M9UU65y9;1DADo_-r<~I#oQeDI zu}ghD`CrhOsIs`TX+3e#XKdMf3&78yY_xz34Xm8()6!DsxA$V-x-W>Y)jeP@R*u)? 
zTuzuGS3SmCvxUCJZFSy0=#d?jeItSUUHG4F2Eh)y-X5G&}Z^cGNZf6!@J_i2Xyy-zB~o+I}(LZ?oL)YA^fG<-g`KIc&HsBmSrG?h*rm z+^NfZBBPS!_SaQQbeKQU`z`Zn;?D9@iobLA|GOc1f>ULJ0~46AY-AC7aqBnxgKNtr UR%*B^0aFlzr>mdKI;Vst01%&=l?*xcmoC0V7&l{*_= zwsjM+h$Up**IM6w|AFrhGxN-RX6AY3d1mIBNkUngK)^6C004j>O%3lbW$J&#afZpM z#`6tK!4h=GL?2L15?x^`Z1)kj5CA~UOHR5wJ5xU!XzCaQ0C0ExM=ak0D!iCRp;fgsY`YA5^0Z3Xn9PaChTcgL&)?CnS2R=0lR)?WM2_5WBl=}~Rl=~nNs)hQ1( z+Vy7mUMQEv!ms)6`s_=LNQzd_@!op)QjCTri{1Q3wa1~l&%jYZHON_r+=2|hc~bAtA=KJ zBXVf_%Rnab_dM1h@g&Rm;O(5!a|4xJy>w!3QFcsy<@x72Ak>&=zR4qBJP}Juq0l#K|=1a$&i*bAGqYC_Hs!LSDCMty`bTc z78@TZP?x%adqDhIgg?2O|L(U4(i0=wv%9_?prJ1;(y%`>9#}1vSXLv=2aGHzoR&$g zNkdyVN=v%J1}tKDZNRi8RUaFr7-;vPMt;J#i0h_ zH3Zfa7SxGt8vL8Di=zvJ)=4Z=6;v#$(>W9>kx3_tesCM3_Chrn0ICN<8u!*c7NsKc zkRON^6e&P@nmO>~QRS@OhDl*8*g4(Q`&sX}5z#Pf7V2R$PRW^tyd@@g+^>ZS!Gkdt zETUBO>*8pk7PbAU`+w$IG>G~M39jjn=wySoZ#iwWPpsc3(T9!KQiJ3YHHh;9dcB=| zzbuDwZIfw*`|}zj$mWzgk~r1&$qt2o>$D!bJUZAfI;kl}CK~z|9WdwP@_UNp}z5cn6-H(-~ACXe=3~+nK_S0VWeiTup;4#)3DA*Os5p z@3+V1)nu$#@$NihBnAa5NaG@7&UZ0pLKyVIM=1hWOapIz|NRofmpx^Jf?9fm2{VQ8 z$q#&(0>|mcv-R35Rsthp`|#&Y2Ta&YckRpfm3+%|k{jzVYlOb$Q&v(MPY8=ysWQ*u z$G{4XR)alM_UBcnUV1sO@1R7!O?haURjs@|ZVj0j^<6oTB%lN7XZ;X25S6*(y;j6~ z03vmu%}Sv?!KqV%f2O7*jvA(~YN}BXf8<>Qym2u}&*slaE-+oVA!y zTsdT$NZ20uY-J5-aXn5S*$09WtTNC;qn5V4iw<&>V}2o$~(}&i{!hSQ7=>0*9C1RH6?L* zlxBF@`6Rh3Kg`%8+5HNjycTVq!o<#AYpY2649xkV25v86ir1cCRs-qx18g~GB#7%D z)>e4w-dn)(fOk*JZYD8A0#83FLfk#DRyvU*_TM2UkO zvT|fZ7pO`P8%GQ^-AqU5|hbkd_n03(&Mx@~Y{5=fM5lN+`ye{oKa-E1<#j?7Sbr z(m22M&M9Q)Hwd{PpZx^>$+bb)n=>DXT4-O%JOXmWc0C+d3>LfCoW{WfUhsEU?KJj7-UxW3U=HKew6UhBTSqqr>1enwZKaA}Y?YaM{13Y1 z)eGTkzVdAlX3k=BnX4VVxW4ZRdEa%mgEH=M)g2lXMfTt?GE4m$X8(qtGohOqO8W`5 z$yFU$g1Wm^(lt@`fMUE*?O5n}0lig3(|v*P@Y6p_iwGB%U(1WHpgcF0GSj};Rt<=k zIh9qHJ>oaj2}U?>0l=c}ioHWg)&1gSQ`#{fm1Qr&1(jPz2PWdYnz5P^17w94($EM0 zO!k&SS5D4O)TE#%(YpY6WZC=Rh`J`u(dxnt(5U1=Nv5%QDu(>9-<^Q+ zsKA(bDtq|fHH1aZjXWNg6!%juszsTP7?Hz|1e==+?J-AExYqXA4}%kOV_+F9I<$7( zz~+W&sd7&~-Y&vB^)J86f1(}|P|_njA+#tbr@OS4jDUtcBjzRsRq?}kd?p5Kp@T=} 
zp5hck@-%D5{y{F%_ZhG_beRpbp8j$S-uvftXd)}M+Z^&I%a^m{HA?-#_%VVg5b`BB zt38Ke*W+IQ@wMJ3%KlEdPh_G-JRqTyGx&O;7a_T#+p;Ic3wOwd-OIX-Ehxnk>xw;pW%yCiX#a#eheE@~`4}O=L4C6Uz4-J_`Ld z=c*VQ+8{KItzf+pt2yiAJ1RH=xhs`#cAGy<69X&TppH!rH{-Ctv~m%p#zN!Z;O?w!WD~rZ|-njFAt z0a0QPx<7J{+e_ja-4Q=WrMeSY^#A%^Pzg+GCBt7`+i4`!aSd*1n0lUdaMPHJz!&q$ zWWK2=|DFUrXVD*&b2TDH85uD3&}=`f$<$NMfi`jiVBgYOVi+E1`>Lp}dtcznO{}}! zDxs2$`)ntPlPLzgT}q0e{5GkNmas>u6=AW@XB)Rqn0qk$QXlV8nSX%Uz_y=0%-lfo z&VWNgJEXA@FLW2uzHd(HYyB883~{>L-iBE?-BhRhQ7+A3Dwa*|VMKvo;KZPXu6`bv z9Dk93I0}B3N|Y?t@;6m7%2%lS1DC)V7!*&u*OQPrba~LP$I4)QW1vGi49aQ*WYN9c zp%07JddF4DjbXl6AkgwSD9c`MXGG8OaQT*PWPz=zCk|y1n3&>~DZB>LqtKEDZ?jp( zX^e4ovnTN6iG=n&7ZOnltX!v78Nj{)y3c&6V_&zjVWkzkQ#2W!4^AVhA~qe*E(f+g z32vG#tg1p6y-fc3Q<<`H8^lbL!f(f+aK?Vm*2bnMdGlj?QLz>C%tj0w!^!~)K925F zQ_8d(JPp4O)p#8{-nzT?2lb2s-lP)cI?aMJdS%M84m*YNOS96|aNg*0?J=cc5FV1P zG^8cyFi0?idTVx+q#Ae8j9`#1L5v31-LgTIh_wG_xamK;eKSwJmvDMA?Z^C$07xTC K!)kpu?Ee6rC!+}f literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_face_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_face_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..3b4ea3c628aa1bb6afa8471455e45b42948b6434 GIT binary patch literal 2083 zcmV+;2;BFHP)gX2#jM|)|~=o`zyohbFVz1{7>-Y#EXl|+4>-JO|do@WN< z^Lzg!_g$FXnP)O1BL7b!kw_#Gi9{liNQAL0rJ3!#$_cvYAK$ZLu_Qy-4EDcAX`#Fd$DW(NG zP!-RT!=?=|IXUxDJB!;5$Btn@u>>L9K)khVYTam1Z@5u3j2N1*%3;l;`SSgo4hV^4-@ z`U4%ri`&%y&mR3z?0H>xphxj$pY#{iaqL;FGf<58@S=}e>AE90j4c<5=?m0^H{VjO zT+M!LS*a^foDcBgSjo>S#&+!JBBm!$0xxbb1yN>Ja}yh$&=cq^UL+9}%tQPQ^G@jq z)PNU1cW{$c3iIZ9540EeI;cT7m}b(LwcBf;F?@_$A5x2OI8!)`Nm=go7N`Zc(#%4* znj{}$&Qsn3oy46%mLOcu2!2QgGupfanoJQlhFF2{I1~8^S^QU^)_tHB+{m*Mp=HLi zoqyrCX7_>qfO9u#L};9mtfLb@z2P>{FtRvxgCz(b=8OE8Zv2s^#$BL?ajKhH2;U~g zLblP#ZFHOOE>J7ZoMIxvz)T{^Pw8Ng%Jp%KACW_Dv%5ftaN-x#AdF6&89dJSd4UwC z_$zG z$@7UkFUvo51M8&pZXC8E*GH9$8(ES(Pb&%4A=SV-)%5G#dR~OeVnhh%5me^6z5OMF z+zX)$v;(<*s@(-rABC<_9lAU{28jqnm^11i4?RiHhntoIgZG&$sMQ5T}^!Xe}`+~Y8igZ8^Z6KJpF 
zzrjJE`KT0_9f~}2DWX#EJ&@oGDt`(^pmtQ+5jp}riQ4MG=UInZqnIK3Jd=wT*!dvq|MV7ccXRGEBc5t1_U1d%XUVb!{i#T(G*V)KyVilewKS&EH`f=tw z_XXwkStHnm8v~qS2R|T512Y-ND5|Jp6yuml14*9aCC-w;jU5alP%mIx!vI0iuRbNU z7f3Lj4uYVaIz$iy3F2%bgH36kCWf#dNIlc|9k#s26olRp_%VyQh-vSz5TSqk+tXZ0 z7pA<+3WPu7@5b*FoWgJIG!jFEAdq6_v6~!z$g<1-Jzar`xtFJCqkt|2-ex_YLij&s zZ$)Y54bphgPm1*>Z@IB?$NPLXbAWD~xXb}IGtc}T{sMb5hDEGpEBk2U65aHXqdgsl-oV>(b2C<+u!6etQ51&RVifucZBpeRr@{|2@_f|9Y5^q~L% N002ovPDHLkV1h0y*FyjR literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_fast_forward_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_fast_forward_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..133aa856dd9171f222ae9a2f82ba0f10cc33f3d7 GIT binary patch literal 652 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFa>+MIEGX(zB$+k6k}*u@OP#7 zN+u8!3}*bbI?}7vvsk0<$Me+;!Qqjy%qq=}p$WU2g#>n7Q+mPof=li0fiqmob|vyi z%+p{~nYu%YhclH||K-cy@3{B#-g&Z1x#EaNxwhZVr-?oHg!pzz{}nLblh}R!w$wt2 z{Q27FZXSG|ugUKCu~A>JFY;Gk zY>waac+>f{NmbJGif+CROOD!KdEWJW^wBA7VliL#hWb3!f4=95`}^qL%s)?bZ?~*X z;>w=4`Rt7>n=R3PkC^UK-Q<1x{;D^ZK5dUG%lsK_ zy8coT|0#W4$JwEooLON@RWp7reY)z;rJ~Tf%$-p{thIkjj4lvQjgMM44Tyni!=Fx{ zwi<+)yk5s!m-_(I2b02t&Gi|^J1lJk?HJ~pH_j4LaA;s)WMbi%z`(eRokIY^Re=f$ eD1fBEf|-nZd|$S5wbh4!l8~pXpUXO@geCxcVIgn; literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_gps_fixed_red_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_gps_fixed_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..a02f443174ccdd6b7242b2ed874875fefd3ef9d5 GIT binary patch literal 3622 zcmb_fXHXN$77n5JPN;%_kcft&^d2bzximo#1OkYHv?!q^*y0Dc>|g)@_9 z{3b3A=DcV%|Axs}BAl&ZfY<%9OH6_7miZs%06=Xz@1ZX{Q|3m(uSEa=BB~eH$;D1NMPYl@&8ml+8qP1kVU%aXnrAT&Hlx=|1~u zVID<+rv8KesPLW^`kISd(emrHs?1X(yD^Ky`jz%>u5Qt8C6JO||2vkB>!T^yXwN## z&f3g{w@Uc89XA4%lX%Z0CWtL?WS3^jLUY)F&iOKS1gpDm97VX`zF`_t(!Fyq8wG{` zi?b;FzBuCxz6*(OyXIsN0pt0=BO_bWG%?eLS_`!*dlb&xOBWZ7%L? 
zTSS{*wK;?LPfXPhA5KkGms?#u3`DO@$FGMM@Lhe9X81BCa-LG=0EWnFg=pwZhXk)o zuFiKmj6cT$WlXFbw$8PtYZa6uAJp909vv&Oz+qiGWSPBjQqth3?HdSc^3h-jUH z>B3^;1Iro57oDYIy(m@+Cq>Q1cUgnRXF?#9)S46c;&vf@9V%I3&-hqVm3EUNSs&t; zj&|A}-bLCd;~^yK-D8cJ>B9j(>W8k7h>eYwk&)_MlYEOTQVv3+1G>Q1_BX3>6m`WD z%&Ga(#`mm70wy?EeKhe)+CTq*994=eob!QvvMTv)Z&aU1X-!R{Sp({rfw3e+;>^VM zw$HM{LjUphHYYgSP7$)o%fTJ@PT)F`3O^a^=MWymTk2gfaifiad8Zj~4Ebkkkx*m{xZi1Z?1jsiW-YtC>v zX^*~E0!h-eRY@+d36>Npd>X$~Y$kQ#vE8=QFRrlMbFUq5RUFv55*s@2Z*6&kv*FCs zh;Tghuo&v#NoWB!A6GJrZ3he4R{V;FA{L9S!a+aot)cwCoyloY%@m7bXB8@2G%q`n z3lQ00pG@#wruVQ+g*)U)|g^Qq3ZblYJ5B zMttcqGXa+iRT?x&88cHy6V8${2Gd*#y7@K7!U0(={Wj#QlYCQ@Qo&M}XpimrHnE=j zX&&cA1S-b#xhKO$WKK&_tS4g6m1{_#$n(G0P=_)jA^-|M?l@*7xN3|Iw$@aI@Si!8t0kAdENPCmt#2$nV zyByhDRxfE&s*U2Zb|W7d?HFL!{h4NIw=d~B)W=uH>e{|}P%`-2#v|uq(ZiJpLC-WEx-DQo7Y|QogHB8>g zsIP~H4HIE%h&Wo8{(+o^B+kliQbe2J9LU1A_$YlR!qZD#qz>BNbGP)$JD7xwCN@Sq z;3B4i43zw0cDXVyF;-CB!6ICFlqG6MK89MY7jfrLW$B3QbjIVqsTkGXE(4+L=#_or z2cx_;rR>PvXUMMHK!Hm;bA9yg6y1kc<4wz;ap{lja{mnK)IuV!yBVC%AUPr~st$O2 zzUY!|eh%XAw@wb2DeaPlJ|Bw)SNGp58$W|D-F0&+=0$nRdFgm*~Gr49CdJ<1%cysG+wkl3Wg?Re^I4dhI$P$zVy#j#-@siJ4~$OwDu*L?k1XN*_lZd4 z-Y+%V5_M-9>%$E%ql*Pbm(q9U1BmX>)NpE-8gAo2^C@PR(%{q{enI1#g(b zCOyw^9ZETo+&)eoyrFm=?;;SGuVzA@QtV*){sjS}E7VBZK1o_9?i1T}f*ZdDeY|g| zqA()pQgM?Crkdc)ty$s)jeA?K2S`Z}C3Z1gPf|YV{6d(jC>(z4N&hlO$g5h@F>^o} z8Y;X2E%cPY{?-mUBNT1q8rj5xwgo2coQnO9`H#}U)O98_p*YJ7PlEHrt#mGn1Uts}HDbeRsLkvS~{9M8ThI zQ2ME+ih7B4MyZSQ#Hx4vGb-v(2ui_koXkSv=$K)eW$!_Oi|6 zu+h?lX2cHSv~KiZq%!UY4qJw<$=Ey&jG(6N7Lj+#zNDM%*9|VFi0z9oLYi%eJjR8h z$Xgn&)^zIU6(TrPK!P%-_T!5<;tBCeK?vP%zmFqKVublx>-PjRVLk|1Ov0!-)f%_4 ze!ZUR3|gi6#Zr=&?RqEMMsN8%h?tbI6GjaU=7;Bo@pSE$Y(&4KjBMlS6B1Jhg0&=EtoIpb)+z)0nlRek==fy^d(30$w{3!4qN_$wS^QJ zw>YhCtCVTm*5E5uLw__Flqlu_=sc1uwwpDVdZl^Wopan0t2_01iqRzhOLzB_b`Vpw zd~n69jq|GjoEg@N^QY%{_lh*g*DHa04ptepC*Y;LP-Dxi#FG7p%O7NuqrUU8_dF5K zZnQ`(74u;EhD$rOi70v>tLI9?XtU(4U+eid<~e;+#pjU|Cl;PX3;DhBDHp)@T>o_N z$UE|$ka3uEU@CnO!=Lp)eQrP{nWh%E5DL{gWiRO?ZNLp_;1F03M6cv$pEon3Sn2O8 
zKTT(B38u6*u~+_#^MNyAgo#3>`jvD1;i^+*l z+!xbY)F;u>tlgNHhs+qeJFB;Vx?GjoZRnMr%#MAce)^WD_DHj5rDi56pWJcu^QWMM z+)l+xw^eW;=A+gJ#S!w@Z@kn&q0rl_pcEDcdm;wJ1Wlz$+sD3`k3W)?$e8t@9PdLS z37XL9z4K3Dmm|~YK5_PlQQjnufvf`cTI!mb_v4g=%n>4e2g&*Qy=O#1pN7uvY!r4a zFGo9WFZ|7H4CFps>wJX)`T<0ibU=fQmK}?y{0;0n6LkNE#%;wuZ)^?;>IJcE5^x9X!Z_^?LK|(uj**aw<8!R?Y zOxdZ>>-z(h$s5CPUwI7$uGXbOn)3LCJ4E-mTUsM}x?Cqw>rKPr)s^~I0};h4&f_3H zKExPhZ;F@+D7jm2yB5=D9IbKdttO>i=68c-ddz=vb?JdkWYz51ukXxw4hZIdneg#( zbv-3o!a(I3?2|Z+3mb zFX01U+|l{Ia?wgFo-6lRx}`pjoNk?;%H0cno_sE$SrHHEq*}en8#eTs2FSBQ?BqD4 zS9{{@-l|nCHf9+bM_ckF0df6Q|9+$#zcBV2n_A3 z?VIoAb{O2b=Bi&5o%L>v>rol7XD23>H_056_vko!sF<^!#5iJu(CHNa8eCjpr zGoA=sFBvWXvIVmIf1J&Lns1MFJP?!Mc@6h1rT+Ebi?C_q4O2B!{|d(XWFGSy O2e7fUw|EUh-1`?y7to0S literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_gps_fixed_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_gps_fixed_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..5b3dc76026c5bbdbbcdb9ff5b47e104284c39209 GIT binary patch literal 2095 zcmV+~2+;S5P)C}#q7^2CR z&ep1%nmEAV4?~7Pv4Lbim@H9QVTG7%L>dYipQ6x8zubHKg?szlKXBfsuXFo;pK~w1 zAFrS3IXrXkx%Ygz=O|}79LI4S2OSmtD54)1$OUqNjE)QB0=YmgkPGAjxj-(E3v@H} zY^DvJHa1gF_4h4M5p^_klp&-BIm#ysuE#)ytfqr;Oc>)Z z4HRBKfvWf&eVBHUW@hJppn1H{D7GYdmzvxSRLLJ1!?q;9r#$xp74RDd3Cnl9NGz8E z-9tBF>*6Q55vZ8gxJDEvc$K1T4|E&dMCk;#W^b}@|9L1tu2pfdh}%)ePn$+VVr1M4`0%xBEZhCn64JpW>)p)Er`k8?_x zXhzh53Pf+NK31U&&r@6yz0qP(2Fep`np3ng3uR%-XeY%~{t{)NolK>NpQ9|!BF-_D z?NJ4Kgh~AUm02juQ^w!$m*$};0^P<{{G`}|3Td{H!cRZ*!VXkSH-3^liVAU7Fp8gl zQW$Qa*YIZ3R3883nNe^SJB&oig;c?ub(pefm+bpha&npfZpr21I=R$ogQXoMWAwe z(MeEkH_-d&q%z*AxQQoe=NLT2BbYMG9m1vV(f);)*+6yZED;7ijY-|Y5B&$xS!6a)v-TW& zp77tB)+F3e_u;M4Y@nlf+b#UooWQg=H;IZ`ofWLX{OAHz3RZ6bo32Q{heZ}h^vj3|Y#NtbsuN`(7s$QE+(z1c z%tse!sbKZq{62qC+$cKTXwwsdHF%DI!rfV&i#?N6+22{Uf4HLyTfWSe!yWZ_>qQA> zD>le4_`6{LfLZml4!O=IfempgNCn6f<7>PnY+Qn;IE>EYDDf<3m`n}vn31b;_n~YAYCxwO6=3e5 zn@pVK_CS{be}&F+TNjf5KRQnl>@PJDCx{UmDL`Rjo7rpYLUP4#(YZuf;GeL(K>~?5 
zErEa5Qo*-^ub2h`$*=TjM+KhQJkN0^(V65+wor)*Jb&`ZU2ZRs;$3u7EJ9^8H_|{W zot)(g$PTqakmLe#IDmzMvr7K#E%# zz)y-7Lj2@P1w`0 z;_nn^!_97I3KPK@p5caREvuCEd=0`pKggEbCC}g>QX}l=GDP|7yS;#V;qm@oB z5LAknv-PI`57AH5E>oY)kBz9Mizt0TO}0OHq>vqq6NNFFDaegS*WAT%!uB~o&ZUPR z#n?taVY$o}@^kO;S!KLV0^8#3WM=L@fUTN$NLY;X7IVq@NBC9H#05<2rIB)!?NNj? zF;?&~NlZwvkLAQrVT>ZoDP*WdF9TP^iHna5{t!Ru~@8@&=UU>0$2o!KoKYcMW6^2fg(@@ia-%40yXhF?nEps z!&mtZPq7!Rm)Jx%3)2MD#2_%jQl#q5Vg=9fdkU0Wfe+Zq-Q+?7(s`(wsD%x@!x+^z z&U-veJ`fugM8a9Q2L3kvV)v^Af2cX zJP{mYv0I?@6Mex-`#}DPhUjMpYw6_cw2&i1jusYi8_RiwUA)T>l{Uq8GJb&U5uN2V ze!v2xjNQaK_H&9#>Lc+3B-~Xa(M5hmCt0MzW$2=hQOa$A*&cu_5Y0tuSR=h0r@W3Z z&pwcSBKyno0Dmo$Xtp09p~EYYAgtjm(>Fk3A1EzEd0wW-biM8#NGBTeOeC~(g6Z1k z3uqNzf_I`kZ&A7_?)3q53!?x55DA+pQmSEYsy9%MKLZ3qw3-P@^$`vA1^PKaP(;fp zP^#_q1nQs&!4chFlIRMz)(hwwK0(R*Q#66zQIh%q{e;T9P`9(Fh~DOU0A0^T2$<*z z^oGg12ik+)5efki9YAlVYoNtkMQeyW9RUzEF@)9_3*7>}iq;g%5F`kq4yMrB>k{Zj zuAub_f&@df2dy!h-2pw1))}rv0wQYS99mDi0?Kh7tzHBPifAobpOLr$`XO4MkVOI` z%J4B-tK9${Mr##<1V_|^)|)PXZlH*{zmY)#B+7CebK}gl53~ubjR+DX(R#EVv=4L` zb0ah&ff6-x33G4T25MvybNdh^Sfc%y8zr#|bRSxsNWerZ&}z2}v=eiu$shp}WjTYn zEp~y9VD8rl5;V~pm^)|_D8ne`zK;Y>v;lL&Hi5o|)&eAOqD5%Uw+HlH%$-1x;E6uP z+&%VywqULwNdeIz%x$y>vv52yu60nr?PK#PzR5Y6`o zbQ_WaqHp>GT8^ZEsLLPFBS;E}dNKDUMP&+5<#(5NAt@kw9&@|x{Wre}bMGQ4AbJOL zkJ|(4#@rB+0-}?cTWJqyAzEJvo~RA2HhVxq0dwn+z=_si?mS`>=mX5{M*=6>kGVs3 zfwp4q6q%rj68??3XYB&rjaC;DFi|&J_t*u>F|Oa}3z+CN%#G4u7f5(d{Zn&-C2C|8 za|aOHK-I1#=JE2g);vxxbT58POzp`#{1$v>rfGMbwYD0O~>OFG#A0?so&! 
zz-hGBq=@Jwi5no{S+vfQPYuye5m!L1jHC53k_w{BG`j;5_MugzJrzW|5SKu0OrUjw zeDFjsu)r;l@B(`MNbp4B8mO5I=siBuh=j`bBzi?wrJTqDP{JU36D*raL_#%N!d3JN z+&*)NgzC0~QWaAE6j=buag0(;%#^c8sCJ9_f>ITEVkQs?)o&$Jl*!e`5^7t*0MmDlwPdW~B>b3R0M|r9 zt((me%IRZz$XddAh<~7Gl}M<4^U5TGyRPih& zbkoOWlw1+nI}`e-q%*w11{Rh14(cI(%XyUC5()OUjVUVaQx374N9ke-H`B^266Uag zZ_rIII~m|}Dy_&xdqjfG-ODh(`xnc(gA2BZ>|dcB#AkotMg*}(B-ma9+qr^|CVA?A z4*#Wghy)kAm7~L!B2Wa1 qKoKYcMWD1U0!5$*6oDd8+g$-*d)K~oIC=Qi?I`{)%f4;9#$3B<@7k{pv2SyYc}?c$ zGI5$rc6iLGGQokH+vJwpf<$>%sc%jT68TxAiekDPZr7fFyQ=Y7v4Q`o&&kX8tLy1q zuqyWw%$WUsXXL4R*%#|qSN|+}S2Vr0@myT}E$Irq3pZbH|MdCJf$euPbS`Avonrkg zHUG)SAm+J#TqWE8yY9W>;4b|B$2#*ZQE3aBJIiaXNLTGW;-LOG&Nkkl*fxH(+_%jf zTTbqMd45`YDF1IsmRmX1lkJQy)a~kO|7*`*QuJ~8bEEUq!uK9jf6M-=Xx3sGp1*;q mB@06HKR7cYhZY0FjJ@}t-uP+mFL5XlB;o1m=d#Wzp$Pykn8iH+ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_help_outline_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_help_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c47c09c0486b5cfa5cd3951d0eb90edbb06b4f2f GIT binary patch literal 2463 zcmV;Q31Ie#P)u$MN@c+EKbiL@tHWg}6jiR&c!lB79*r^4(2T zZpw;{niQ=?T{XeLCjPL=ny8z-YS(C0bcv0iUA!~`DJm2Jt!rHnVzSw2BE3Zyu(xUH zZKlut2R@%O=X2(BK4)s?@%j&*nVB=^bDHTztfY#k*hv#7>7nZvn2bw`O4J64%AMbEKr9ZSllc=VNG`6HTKouAK zFaj-M2LptmpI2Elo&y!Ij*ki35!MhJe}Q7$#+SHqg01K9VfeJX9Ay0wPM|}Phf81DNRNHxwDXiuT}Ta~OeUlSHrC;IrpaP7uTw%rp2bYC*4; zS@r|{1HDrQSFYPg5>%3P1`oR1(c5V|&`R_M4Zin0NSbV=z=h|e8D=OY_|mY|dJeOg$~0N%dg zX;x82f&^u(;`f}wTR$@qU;7zpbgs=l&<=E7LVT_UFRg4;+@!>*pdBxNL40lpI@_}i zG>HM!hI}8zPvsnXyC^|4n@Bx+1HLa1KVby5ekNuYs2ZJG#OEH(;#CmiS@eF5_}r`L zY|1WB6KZ1y&$&KFXP2)psrBgm6Y;tEj0L;UWID=9qqf)JI83LN62!+Q(vI2~(-EI* zL@mX%;DM^qFnC&7Vc~4=E_CiN_#UrwZ}313sC`R3>nqqd2FGi0T2cF}!5PtM)OH6A zlpu*(4dQ$IQF|IO*dI{ahxpzLsP#}7EYLD^mLtCRHEOF2pX922g^>M`&5~e&wxM>` z=)XEGWR;rybI;-wRZA-8~4f@kKgN>k;FnnC8HjEgo9<{fF1?oWUKE(M-P&;S% zcv`JBNT7?*xg2r6OHgYw{9_rNN#=o8qSiwUalYG8`y4S?j6T$sn+K{w?FbUV>P+tl zHTwj$O7lQZ=^y_)-)sg^t3Zrai`o@MZP+N~U-h=48 
ziWqJKY9E>hI*Hn1#OZ#;C~6}CuBS^;J8mAR6ScXB!`({?oj)VSn}=GPd7xg@E<_wH z&Y#fxHzkPirl8hk9%vA?35b(T=Ky+1<|6@CjM{*CpfqZ6#KD%*f!;9xANWwOQEo?~3KacYtbk|nGNgH6?EEd(i`rx)#1-;7UV8XBqM3sa>8LOBF?Qjlg9V6Y z&M)&P^~aN;?T>glK{=wE`!9+Io!@G##>+>VGaEa%8XuSkdL6aLkWe<2|Dv~t^AQW% zX76V8Nyo<>&!D%L1Y%_`+B@#3LhUFL!e%jq&S#vDSX#5a1G5#V^%6s@?|0~YOPR&* zSV`2DnFpGT&V0oB{)5hH#M&03bAiprK2?b2Ekx%X#Nr-6?GzFu(7UL;fmmKOI@cf; zx7+-0y@QuTc9x zC$H0P;=uw5@1XV_g^0zSMr{saZAF|xtsV&)=zet8A{IA{TCszN*H9TeP$?ine0-Z)An?P0Qyo`*a?Len8yFeGvkJ>PEka4oPjH1@Zj|>7C z{t^Ex$T-<+2JZvoK#R~xu>ctdyP9!AvOZ7LGP}d53~ur zo9zUOaT1*~Ov~v&rF5ZljF_E3!aDTc&EY^X_Mvx+&0k((Eu zC?^9gWdxluuCp6RcoV%gN^>qy8J+0WA+`gR(}!LQ#W@wIgk$J+Gt+(`;UT>2p(tkp zC1}J;6%t0E7>DulCh?pI6lXVH4iXC|kT8dy42g;&50uD|r;GA1eRx#TP9Ba`=-jyPH=`^cn+D_s8I_9}+SuxF1`-z1j(?|lft3O8*$TOVTF&5KD_0;<2NKF?#!NT$ zY-WMao5lhb@&LQ(!OX|ZKyo3Fkl+P^=;IS=*~$i%GLI<~6DLkFQ<%q6Hn5EsX(mY! zH54K41L?PsZgNEz8xT7a>}@uO$OQ)}M{+Tc5MwjFM5&u9_x}XaY_r%&G{&f7CSrF& z7+c0?xYo>di2Vs+Z8cxu$}!EoR!#>JV%*9{gsq92UH&s{`@5Fy^b>|8FLMnN?gST` z$R^$=g)O5rQb`FC{secM#=T}Y44RviN~R*NCb(W9OW4BOv|{2EZ?T2N6e4aXM7K#S zr;;bA<3oUe@mmN5y5YC?Xl1LXtd1LcA9>?+T%(T@uu2h;*7q`@H4GM~wCZ#k~z?hh5!XnrPcf&T}GO^VxW@(iFN%R32 zgY{AtA%e>mLYGSrQM3q^Z9}y&@n5MF36R=pJGPXWzV`|IerM*KIkz94*XIF{z7x(h zIU|usBoc{4B9TZW68Sx0K5I$xHU~M!Fr%cHCQFuSQj9XpmwduoBw53JB&Z2~uY{-B zMK7b+ag|Ws_$)}8A&j8h72-LuJ^7jkx(CqLqnqW2W+WOb7Q6F@m zb)?D994A>%0x57syvQl$$j=ln1Q}=(cgV+Wx_A_EagXsKx5>vWEkOlpBTH@>KA;-$ zU^R4;A-4>hg9x-KuRLdX9Px0=_#&@FEq(`D#~pIH!OMsb+ssXJndNyu12vE)mm^dl zKCO~oa+#vuuRt|i$J#VW#OJj$gSBf^`w^&=A*}sGJrckg7{S^g_jn)ZW32U4i3GGN z2C(*#*MYWTt(Q_Hz?E?lYi(WzdW0#g9HIybc*PvX$^^Au1&Z@+jy!QB1WV*dbb+`x zf&Ph=UW$;Ate6v6`G*&Q9wv=mKcz^BR>lB&lPq#S&@uFWq7n(&9^_~A4!a%bS@fn0 zzV6YHg0_Mg^j5nWD8~2bC6Q1D5?(`ZkeGXcHlcR}32h+ZIC>l03KZu@bZ$@)>Oc#) zh0ZWBcLKGb_c9XdKtdaO>)i>|kIoq+^nrx0&^hl$peA%OJZ?mwr^uqySa_iQ=zM?} z5lGmF&fY=;mGc{FGgKQBsFqpOrntYbKpp6GA;ts}_M)@Bus~mXEeoza07qc&ncPztqjqXSi- zHfBH2G-{`u5~yX|4HN~6{);(3NIwuAq@&ZktqVoX+wAB<@yp-U 
zt;Xl}1AT&8r_q7lbnj;ME!6fH9q4`c9(N>B`^M-%{q8+5TZ3APn6ZK4Oro~RexUj2 zEHyUJlWsn=7p|g~G&WENYFFHSarmjRfqLA1akvY$F%m`wDq`H-SI^DptTr;xGw3ur z3sj;#=D%xXpf1$LDRLG_e-ye-k#T{Fxrtg2;xte@IxiX*s0E#E&IA3CENZ8W3-lRk z87iFz(!V~O<1wQGJ%P>{q>wz0IKyv=zN|?gWbS13EXU z40WI?#?bkWm^*=l7W9sVI8cmJ=>5gb4+~sIuRXMZcA$63-A_}jMQ?_NPzGvZ7QI#O z1`>ME8=(pb*%mU2-XX;8K#Q0}Z-BB;<++c`=%x6R`+JwcT zu<~ybp~#b<2P=KVya^=KGLa)uao~AMa^xAO#>>wHwqflAWdR?1PGjw_UVmt^8*2kp z1$gGVjI}Pr>p=H1h_#<-hbBLdTgjO=-_7%%LjZ;q_x!qv{;&&in zb6$zE{EI((5~q&+dH(Fc-;f{z2`$W$k6F4|;%b~H*q8TDR%}3m3M4$w6#1FsG#g#G z(k`Zz&&byV&mlnu66(1|{%-IdYaHIQ74ZyR+$4WjS&jr9NT_BIdw%6G?JOgf=S}jH zbkM^%_FUp2Bn*Lsd+ElB2`&jlpuiw(yWaM z!eU%=OB!e(p_U7TK_4|p7y}7$b}&hhQoKqG2|kd1i}(*gIiz{k99$sHRx^m-{-=47 z6UsnBj13Iqm+v%hNkbAyA89@3@me42h#>|9(yWoar18QO-83NHPVi&*vz@bKa7%_W zY~v4z_Y?eE6-kbeq7dWsu#HN@uLbeR(3Ny@lo9N>!cjVDqzLhMLU1dmnJw(*0B0Fu zgfY@&$dG1?5r+7j1MFrCtC)ubH6fbcfucZBplDH`C{PqA3KRv30!4wMK+*mW%P81z T_=9Ik00000NkvXXu0mjfJ3gG| literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_launcher_take_photo.png b/app/src/main/res/drawable-xxhdpi/ic_launcher_take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..69feda862c7e94570e8a25b54acc3b706b830793 GIT binary patch literal 6772 zcmYjWbyO5iw5FC=8tG=~?vh?wDJkg`SV}@xr5l$9K}u?oTxwSVrE6)V1VrgZK)Unc z`~C6WA2V}i&V1*)-`u$~XYRxr=xGuI>46v+7{uCIYK9MO%D;k-`%rIctbBWDFkcyJ zf-!1F82>zUa9mV$RWLA6$%HrdfQLSTx0d-U3=EE~e+4tb5y1E`Nb9F==4a&P?Dxjr z*9pVh-u0!Q&`T#jRtX^yA@PKbOalxIx^-5` zfd$0x2SrIPiH%ggxmUOLc-6*{x3|5$UEKC6ggL3A3A%rNd%dtPH1*eaShSDv)kC#1 ze%Hgn=)OwLxEo4b6sgk3`nX>Cp#JD?Ni4Ep-7MF_n~Z@F0NN7J64~;#cXLbTH!^(B zJal^MKk!|zVD*rw=xtlrd9$rmq`MF49x)%D)01!jA^$g_q+c`igrz96CLOIOPyD;P z(?JFMzfA-VbV?9ymga9Yw24;(r+wRMrN|)V>*1kQt5%;n7SWP@Za#vm&HTVo9u2yZ z7Vjh90j2Z@(9#Vce+`1z_=c4dH>^7 z!YwKK#oDvaF1}He>S%O0BYZ2KAbQg(Bn?Pnp@qhx&1;IBD&&LgJ*??`J#;9X-3UYydi^y5Q&m07^Z*fj9L09n1~GDd!UB}u0o8AHDfq_&oa>=G_NRBxfM6vp2S!ZzIFJ=&g&&=+s+23FB0)3Fk 
zUZg=ix8v8w$6Cuey6wHpFYD<369n!)wb}e!T)bvDQeZvwHft)q{jR#3ATP9U#dSC<+X9`FG;!!r{A?x?D{EZL$`F=eXc>YqNmnQ zmV99>vx1^sKK8n^Fen7!Y6z@osFk53?T7yOJKzNrWzM}Mb+AXH&p_{2{v<@vt%^>G z;O&rsu8!7!D7-|g+3i3<#za)RHwLDri*CKshQz+ed_I%vtKfhDFLW;~RZu-D4oaVy zoozaTU0_J;17iYl9Vgz3Hxjt>M;6HeW8YYo9MM$Z%AoU25LZ>OM*Qkb9aS{OY7gRi%e zF_#NdkDOg<#G|HUwP{^oS;6;uDxD%|1G`IuQc~;H>ytkwJy=j<0Ml-=mYiE3Cs!$3 zhK7!yXzO`rY!`(|o{q^dTL_O~x~7`EPto{ibGq~#5l-P+WX9os`4d?*DJbS$!RYsM zP^pH;CpQB~pL}MeX5*l<5tT1Y=?Pt+KANi5P*VXO18%;VUu`twu>ZSZvf40W(R(NQ z<1bU>483SloJw}yTSG%%PDf5-b;gxU;0HmfUlMKF!~{^|05%SD&!Tg4C6BCz9WO8U zX2pv!%GY{5*-HCELTwQrnXt0_sk-{_!__|B zl8|3wp0{K0HStvd`RsZHs9n7(G3%pqz@GSzzs!$DTS>93+8qGA7~)~DZCp~W0wz*c znw;syU?~Sr^c-a*MQR>*2I?Zuzgp?qqdKJVQbFI5M8KT>r@}MdRdKbdW(HB|5}F8# zNBRBVPp5w`S?YJIurw6S97|}hjI2m-cTx9Yo16Q?8p*F=I%kUWag^)4+u^S@_xxK$S?zUgF4LPz3ObCQM!G+T z?9M;Moql3~_xUA`=Cs$?>_tyW=6gjX5Pg*=KY9skg@+?`NNi*Isn!LzRWiu%A#(Wv z7IHSWwRti=MJ5KAiFov9`#;-4ZhTt*7V8Trqi?+a7ZkuYmo8Fl`E@}uVZW5w0U*b9 zuPrw|rVK2WE$={6X(qePvV>%v#8RcI) zgB=0tz(5rUYn3FM)toG=v#m*j7aaIC;%dUskV97mz`F3u)>!ST7sy9fIpEjWm6He` z+CDNK2@Ahy1R=cnWz0-9fYA}YA(l(ZKB5n-g#gt@Yz#*QFJHCXS(Zms zgN!ksC3*3q4VCR0eg-nt*+D=Ys?Ve6XPA2sSM(<3CUqRFgUgrMHK6oVZ zc~QZ8&SF0%1uCb|B{QISkIb?*;HqlJ*5M5DOxK&^uC3hQR)DUpVM1j7_#sP1?5 z6P2}G*m~jF(}Pci#s_QLZSd}G1h!SJpQ0@0et4hom}gEq;9*ujoAUf4MDuz>^pU%% zvex@1lI;o3GC=~!@npq|LWW3r=j$r--=d=RO5}b@cflTuik{6`d$qK387*H$Ep0+? 
zX`uKta`@$OC?*AKp_`Pa@p8vkly?zz!x0t?H%XavSdWqRkqe&;R5An%DLv!K%)3+B zU(j_*z!p4?`D-EZ#vcTp8cMjOxAm%h7^nAOV`Y|+DSVpg0qGvoN{dl?-7YCEZ7_r_ zdOyJ{WVA81X-#1G48Sam0j@rDHA9j7m z#FvqYq^X~2=g|=lJIB=fdBBwcJTV1l(h;mNQkWxC(EF96u!%eWK__0ix&n3`0JEBa z=%yyr0TePUErFSrH!*A5vl_^DSdMQo-k+PI8yKD)+mb-HhilG<@SsB3CS(-VW>Q9 zq325f>+aH8wN1i$lgeGohv<56T0NX!?ji)?$ zg`Sht*+Yu8e*if8`BtKxjO|S+D#Bh1nJMF%IA}1}?Tu>=eCCv+5#rb{sjGF&AD1nH#SE{hFpif;&t8 z)*sopfZs~8z}9Wff=IBNo`Yucerb6)wFOs|=6_4O)QzcWX<8g=-Ws%iwUiE(B+V@Z z)&q@jIAnMNGJB$<|Hb+$3shgJns9vXCQv_#tw5}RZkEPNbMI?U(t2@BLYP|7F6p+U&o67 zrw5#sJLu9P#d`k|VNfYzq>NF94JCipDIIvUG}3 z$2oGsG5!V;0377gHpe}y>fIc7iw_L;`|hXpKi%XGzcaG604WJjlw+DU`#WP{aKKtG z8v%X{isYI(J9O}SYbxIyut>1bNJ0W~?e115Slfp9U>#f^LWHwORRw zvX8jCn+gAI@#^)FDr*K7co&YTO31dZG@)0mRj^R;sM7dSaT{~IR}2%t)~>g)m9OwS zHBSzDun!6PuET?Vq70LD5SAE2+-T+uM7cgkqqcF!{1xyax#F^z6uFx( zUt3#-HDb)t*c01Mtck?b3?FBY-iw}&m5V|QjN7w=vja|D632mvkhIUAYpUyIvU^9Fm=KlhOmi8sW;QjRw2HYW+I8hlyM68x0ju2k zYkPq+-I)?|0yI?O=+H?}yM>B^d`=aMD0ud|6VIZ%GqU*0ECuoD>F@ksUrrYTOM=LC zGcKo0@~nh7MrGRgXh_*9$cU_I^@{!soP=(y&BeJgn}kEY*q=Z=cY@DdXSsCU(me8H zp3RE*a{M}jL<&=evdUfKk~>!2vQC%Y>`D^>)j-ivh<#Tf8e!9PUjzlo;gGMoId1@q z37XcDhMapek@yMPS8AkZ<5tu8ED^OG%{YwLIEq#bKK!DW)x_Rgz0d?1cp)6ct6(Ey zq;z<-@$7>SOo{p;=#`pI>NRVFeXac07wucoy=5iV7Tq&G2r$W^DwU&VZ)vd=j{|-E zf~JMHaFox4Qup7GY(%>1&4FGUL*+;adui#WVj}Tiy7=e23MM%c^Ag0z@;KR<0*Viw zl%k9R4*pH#Mn9ZwbhCG(ks4w3Sfi~VxCVuW$yjHb-Ox%yLIME{PW}`vfoM&Z;4!Yz zTjf`}sKV0MvAy?!D2UGkWNPBD9bs@QXp-ALJ29{a$a$uXw0)oX3$;21Qm=;dZQCi4 z@uP`Qk4_{UTn7=a%46`Bms)koTc4MlH8~J(_LT$WX5Kv+A+^cMO}G`R#n=yIi(zZ^ zacdZeqIc04BYw!5T;0Mmjtk1;RHrpLXxtF*mJ$g~N9^v{$HumUbrNP+HTnY`9{Wj38RNto&>Eo% zB0lJbxVqIIR?;o%N)EGKrHi$_@-X}@g9e8g#lVOiUJOLoyX(f`YfDbQODP8kQG@ut zcD~X3Dm_TI7^vd5y$%Z}Cq(0*P$h4R0o+qEu;ge&mybfAjY;nXGXhSs5jojOrVq#X z;)uqwq=F?19_4T7a7~#NcIm6{favqmuNV^gj3b$bu@x0+-dhZiM$m^OF7o1wq7x|GZP zvWK0RI7OD+87oWAK^Lo!2haBXdtVb&QOzgDwJ{0xkowmih7(i=@a3mLR7_`IBU~*1 zaTuyWXs1EQ!svbGUe|9bt#W>}3f6wzQ@QubvO~)*2)g@_Cfd8`QH{)}iR${BTm(a6Z@o8Pm#kra0^X~^Amc=hf6r+UKfv-MN 
zB}<%WKzPnZkQMKuNFAk{mc_>zTJttTJc%7OdAS=jm*S=I*4lIYKL3>caX=M>yV&`z zVRkmI0S@mRsArrws0U@mkSHkeLT`eS-`EQ}ir$}YnT0omuAPR+^YIc?4sJo07#RCH z-kf*1^uJkp$c#NKpFeq%EhAoM(Wl)+k+y={X2t9`0;iY!zkgItnk|^EoY%G~u&=(p z=s1Ypg)XsQ02SR|4fx7%1+x2ff0pjzCR&v%Ep>@I+?xv>?QaUKu&wqOswIQ>neQ46 z?vD(&5PE9N60>yBeEu1p{ChrS>ix4;@@aAJ5I{FQMQc!B7s<( zzwXasz-iL}8A<5|MEp0&1F|2QYf}bDYbEc6>S&8LD`zG`KN*23a>eTc&wac$Hy;v1 zJhtYy)vnHAZ2lk0uPy!ee&Rv*L+F0Ls3Z17PcrU2hR6kt)x64^&Uw&ir23qAbD7c_ znQ~y-Nf%*1j~>RczH7nSzw!O>w=BYmn~zpbq;K_h>rL?0TzJzbVhzhNt~y;H-h+^3 z?2E2HPz%>ia928uqf%~M1%DHC4Ion*o43gp@#Tk{pG>IX;%=u%4!0d2#Y=%E_(Gr|1h2DKD32QOl4 z#rL``pNrzPt>jm#_U1#yMfuO$egOdD@sv)%oTQAEPU9cHFXOrOh||a2Jek*d(>bj9 zlUj*p(L=`BKy#AnKL&LYlO*q=7oFhuvJ@0g1(rVz^*yF|T~v~F@TXS*mmv{< z;UFO@3Pn>^VtcKlH{w{kLDg~;+jsm2tZveju=#{Z?u_tVwIUFsYn>JKgZV8%h48iF=a80-xRk-?II2tMwfZxfJnwFc%hvRMT zdlbb#F;UYjFZeegEc9Qf65qwRdsmNFD)aDQJHd6*6J6{&Nk`FNp5aXbpb?P&6W&9v zM^xog%h401FX>A_MEv%iq2MhJIbL;2ISARWkn#;QLu-mOd$;tdFN}=mK>EL*1a&$d zLZ>1RdF;_H@k5VEOsGm;F@*kHD#_cyLk!yfYT~nX9NLrKI1qV4j@FJ$}wUd@d|}8 zf0A8;e#1K-^o{6Q!7okxNc0kkq{kM;QMG<#MFMfz@V!-HIq14)E6dD$_!Pp>R@YOj I0oz6V4}&hz-v9sr literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_mic_off_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_mic_off_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..b0a10fbf67a8ded874cae95f4c17c937b364efd7 GIT binary patch literal 1326 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zuzdA&aSW+oe0wK0V_K@r@sI!G zKB=A%OVM{?H=SzrBE?j7^P-@{c}v(IdN_5gc&M_X&&Q*A(h{ppGp&r8E?BML(D8h` zKwxXG-`P{D-;&&17ru7YoImgF?T38pe&70R^ZV_yUFC0o{W~}R;PW(_-TdFSpN!O+ z>a{dzWeA9x;j3qj6XxZ!Oum#>O!E+f-tt@tr_A~AHp;i{ORml$nNgM_|(6I&-RC{NWecA)h`c% z0vk&feO$G`!Rq3-9YChlw&VOugk3qF2>o3se3W~MaBjD}hkBPB5|;5$za?cq!B4~% zjt%oe7JU}&h{LK@2u*Fg(yK0XGo?J#jdnd^ztp)1h?7`~I!;*yo^d$FD$o%(SI5CB z@Cgu~0lLwJX_w2*NbP5LD=&23=%~EjbvwXfL4Cn)`OoioR=9|kndgZgX8hZB+vv=0 z<^a<`ZnHMqhTH2OHuydIrtvJ+{E5$%#@n3*4!;YlB|qnhKJnSYx!v|*!>+>L9G~;V 
zoGpdl9?SK%SfE~L-Tm{p`ULHZ=Xb2-WG|7AJbQFcG@qrA>Ex@23LL@>LLLjReq7P# zvbs>)&{k%`#s1sv694Wp?-YD{^sR!JP=;_`YFgrt?Uw3`UTZwwrl@lvHsvc%TYa$Q z9PW!zo3c2$L(c2(_y`q0TW-JnkpFr8gsYDuSQp97UC~uP`KEx}3$abV zRz6xfEi>oz+!bed85`%=|I4Z`>y|t*^+oL(4u{;7&_`iEPt5izaW`z}vD@@2+3Tvh zw$;s}hHRR~XB3WC&fcH8=x;<|!?X#p=3$>-8So@8^fB3Q;c!fD(`ymCVCAgiT`VT; z^(R-I>2h!h(@{v4t>};wvHR-RxFW%!_sfL4^=qbe{NwUW>A64ar*M3%Nt+rMe~M1J zM11=U=~)^!6E@1#i&rL{*ggcSYis*i7u~`E%V# zzcNIqgNrrg%EqqEJ?ZKZUfRt}fe8xb!I6h09o}=_T6x@{7wF{OZnu7hEYSSS*u8P$ zOP7a@cg{Yv+_z*~o8^( Xx!o}wmJa`bjQ^Z{Q_oYoh8*QHRc9rYf*r*#XcIGad+oN^F3|!W;`Byl@MaRdRjlJq$`AuW7o}QxW^|TD>CBQqV z*f#-(nx3d3j^3_=89HpX96kf}lyMKJ* zO)ki&Q#j>nkQ|_tycyMJ3#C6$Rr#~$A|e+`fzlQ4-|qxH90DFigoTduC&rJZrY$uA z>e5Y1i!-+#2+w4~B+U%sZ^@2(dv~}lk*zC7qs8P9uf~S2*d=|t5;H&7KHDF83nUiG z@-kKoTqV%S+p;R3);TA@G2H6GCVEoN??kXZU?pD9I|$N8Ksy`d_M+JKyI zOe%7s3tRLsEtjy7!;4K1Kd;*KnWpa$*Hx0yoOc0z)K7HCqhmE6iLYNbvaA=rS#Cgu zey5%OztVtUxpjjIFPcH@X4eanp=6{&oDx4gZ?zoR-;DVw=F7y}FF|YukBdIK@!3o} zw@@B|vK@m7bx^Nw@89VGz=fgDt9VwgdA!YvzYwpcc?c4aM`(A(zBZd{@n?Mq7Jw^f znIxW>9h5#O3)K=s0)I6p8P(9(Rzpo;hly`bn^(pC4Ya;{XDKv9*#b|x+YR~W3wW(? 
zN~*R~cIxyR<+75?ot>pYC02dT@+@*JAN*|D3*4m+JtC6a5K3u!P`CMGF878awqBkt zx8xud_^^A=yNT$d$g&SFqJ?XuiJ4#RIH`Kdf|@F;a8M$Vb`verJ|uK`I|}jIvD`@L z8b@d9NG1mv{a#>rz`e|E=j=(|{*|26!sATMLky$!g#SGK?h&PUpyh>)iS*HW|J1)F&`*8nkBXgEbpiY76tTN!sgxBBhHsvEAbIoY+kswld+Oh!`Qwg z2*bs|f-XNcVi+$Y_19F@J~(q?o+@AQCIjSL{MC94oeX*xNH}C_%)Wy)Ys?gL5*FT$Ovb z(r{no(X3W=O>yK%AK~IkznLEq*D2R?jwBgy+6n5K;btX7!Job$jC=AMlLT2 zS=av%z)g0F!ga;63Y*jY12?`Y?JkFqNV6T7WVU?4><>~i(==)MOhTC1Iq^G_=+ywB zE>II|>I|r(`d`hA3+%2i<12d1tz2O!>nA8{Bl33NUy%h?brrzD##Y2qu1&u;o=}iH#mj!k8FS|Y-OxJ%%RXNPjm^NhF z?rwJief+}s>a}Z1__xMvddP7Z-=><>t70|nf#!bDD)1(Iv3rqXn}t^vEuC-|)Aj3;_av@DyHqs=!F zDs|PF_ocfnzyAUnWs}DGW&fzfNrM|_1cEpfX41`N1sK+uegz@*?EyXDdhMRORYff{ y$6~mP6n*x3&`_dC%(CQ2QTiaeWBPR#H_of)k`}ln^}2DkoQ_vW0O-&Cc7M`%-j-t;mcWdPi;k$ zjbAsdi-_DUnvyYd)0Vhvd(WQP-u}7b-2XFg&ehu(|NmKUH|crhJO~P(I^&GcwhM0R zf4^Mzl#Y%#qf)sfa|!EYF)7d0RTmxDf0kTWkaa}EZfU^To{-XoeTRA6v`bgXUAcHJ zp)_Fms{I+aM-v=G9f2Hfpv;`26Mtg)$qv7q7ZU_|TS~Px=cwiM^<*4wi#1Bp zOZ=~)DWjaTu;+)6wA}>3ZqDDHo)4oB-J2-by&-X4PsRKVbG^8vId3Z}s|D?0Ja(cZ zwxxE-k_~e<2zMQf>)IM?XyJNevlo|fN73S*8U7pJO%?p=vNPE8A(v5$t-5m1A1A|a z+HAfpyarw;7f#(E+ja0RPwcdnT?h3JMNLt;CO9o|P11(Og;l|Z68_URT%W44OfYq8 zl0g$Q&(ZQL8f@7eMYAUzG4K;An-g_3f#1YzqZ^<6p zn57Xqs;8vAq|61Mswj(g3$9siV0mwG*v1`mf?J;Ma6I`~deNR`duE(8{A78mhwgK>dswpS3!VjAj|WOqp{^ z`1{1ePZWYvIKRpqQ#oy`6~E@lns&9U5S6S*NBuNkt^6DXKkLn*EOL*`AJwcVa_?Gu zBs)c;!8|id!`G;K{-XQ)5=YI%qNP0k>7h^u=?}M;?d>)pjdpSKDqJHLs;{ w4+GmLCW#Z=2@3iRByv96oJT_I7yaRX*O6}bL!V(IuykVZboFyt=akR{0Lg4I?*IS* literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_more_horiz_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_more_horiz_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..902df1e8616305b8d989dbebf32889b792fa8ae8 GIT binary patch literal 389 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zF#349IEGZ*dVB4zr#7QVYasW> z{hE#&IX)&`sB`&ZwqVJe1*|b&6%THx6<=&zwd2`|DApPovy*3Xa)3sl0(*tuJ5QMK zN2W~t?NdDO?)?iZCDZlmH=l_<9h1JiczM>m$2RFxm&=sC4LNIkGTqO*>{XquvrO6R 
zE4J11*k!-Wcih9srLWj_osgKU2b5d3m~YqXODm;c zBwlgc3=$Aq%r~q2rNoQ=Wn|te|O0%HY@qk3z5_P?mFyNwmTCSR)1%G z^zx!buMe`vmc6>dyJhln$LD`er!Dt;>f+waXZ7OR$~iC2FMmF#Ujo+1Lb>}p6-0R&hMr- PKS<8g)z4*}Q$iB}yS1u! literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_pause_circle_outline_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_pause_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..d141a6c6b7cd7972ad2e3a1623af5ea209b70cba GIT binary patch literal 3904 zcmb7{XH*l+(ttzg2BLtp&@7-x4}_}p-dhL=p$G&B1PGz?1|LbZxDH09#eB#C=Zs1#t)6<9c z>6s7tEYCt8IsFj1?~+Xu^$wfdK>MjRd2!#~H*Bkl-koHYMwdq$ZtqPo%Xf2^LVr$e zx7L4d^&M;-)a7^n?`^ZP((+~*ScP*2Xvpr>DMRz68PaV1xY}>g%Fzp7ceHO_R`%1M z4MqZYN_JOWjuT^hy=bRthdU$L@{Z;A0&aE&XtGIr-EsYJj2KJ%Nqf=>W;r@3N&)Pu z5iDuaVAHTwfUz#W8hQ&b9ryiInNVcjSW3E4Qw7#4ptI!&hHIb8eB?Rp*({JW0T&}v z0qp{Kfpcuv>J3t#~^E@WWP!u_uO#o*aTIu3;eT7)OS#jgIGUP$B7 zRnE?|V~&n?cIQ}cT^*AfDk~T6a)LyZ!b3trHrNa)j~+eBEGGtF`lvouUkj?nwT5u_J7(en2ahaa~0Dhf#d|HUwFrH7|MjF+YTx30xfQ z&+g@G1tuiFF`;GI+1u;WXJ#&Gvey7hwG&|g&VmIQX|UA&v;5jG zlVYav>jO**X)&j{xfz5V-2OI|7ep2|WGayGQb*Bzd=2Mn@|di-F3TvdgpI7TUpy)< z!#O4d^5?=WB4WOHD!Es)z<3x=!45HW=a42Gj`X$fSic#o*WNO z`JQ})3_XIMwArNfN3MMQ_|bG=U_b^zkXKh$R+cfJl-|N>&Zou(MF!Kw09)(iq&q#@ zF+qNQes^#e^`S6W%*lx)HFPs&=UTb7VGbybD+B~92ORew5qh!QCq1kDJhY9 zQ>40|jl+tEgof&Y3=(vrGAQp+cl{SQ(Hk7L>ZMh0X;IGuRqJt>64K#WkVsMMwaT4N z8)NbayHX0So_XPWpl)12ypC+1dZvftQL?-}L?6qJ6_ji`>67T=h?AQrX?bNn#wJXG zTLsv7bgjOf>Ed2eH(ZCO4w;>iu02m?KI`>_xX!}?L++dE9pU4+wWhH$kT3C6#3M#S z%C!;=dNI)98C675-Sz&(vd1y8vEJbF);KL6KKN>BwuPn6@xi2Nk+v^yhPzg8&&|!< zmdwyk0BvX`Izy!9T)c&cdmu#e-JC0t4Wd+8#rX3 z@gie1^K5vx`NMW(I(dZcU4=(x(M*!Ut7_roDTof3WW%(O7?e@z(z4UU0Hv} zdoGDeQ50el;o3c6dI;|rlPajv){s^&JN5k1{s=oaSgiB8TRBvw!f@t_{cWwfG)M0T z=Ze^cT?5D?f+}AE<=&|&TJEF~G=%pN$-VfrUwE(HzP?tc>bfs%3io@hDmjse!BCXi zmT#alz_*#>>E=5xL-5Q)GQ&c0)j+uVJ>)o11Z!`i7_~ygr9{3iG)mLZ&o!N>=KK#b zwhfpA;bbX^UMRa`2JZB}R~PJ?B>uoF*fZV~ZtX!aH~z!M;zCzxeRez&QF_B|glkc5 
z9x{THfvg)7-9!tCTg0n7cWIKaocaRP-)WiTR*Nli;Dv;p-@up~Z+J z`6GqVuoT-JuL26LfR8cjl&?wP@qzCoMVzdUj5*{)EfYQ9|2TSN+SXS0Ub)-P`*w7+ z>%t)~UF9SdA{SOeAYd+6?@lZ+xrlu}S z94A=+rXH}(Fp|H%Dm^5HQD}|)VXvt!i*h98aUdAdmETpkkE03FBpUzCNKD zGycM!g+V>Lo()3~-moM<+w*${qK{$sR(Z|JyS7W=AtxhptNaa4(>*xj^u({fnSH=@ z<^BPj{Ym(E)NV7D^p|~shAP$HtA&W(DmH4^e&9D zFbD4-RYxq4Ja#>J2Qu{#BH3ul*F;kV1||4V3YzQOnGZq~mZDD(-5Mu6(r)Kh_okmZ zj3|AQxm&mM}(He=pN|_Aa-*rQv39ac~h9Y32<@qxL!wZ>IqpVAK!%l%M-s< zO((xO`_<~?JGXJ*#9lA6{k_?H;0-=m7(Q4)IWjw&AOQr##igLzytdT#TPw z#Onr#c<#f~!_DE7wV`7|C>OY6Zwht(aoMWS@~5iuU&gouSCm#=#(M{)sro-#kKiMO zPs)#GrZ>CZ_I-&%fe*aM2C@9UTyh#-;t;;Hdt5O#KEA=3$ZuXE6tey012S_O;h@-T zQ^|&$?wLv*US3|dV7zzA*Cj~aUuT7G2K{xts|2-crqdvd==dsv2sS=1#AEO)8Ry|j zJ@n1nsHa7N9ztNqnj$@KV%|5-xiGLZ`B1`__VqzWJxn{DAC4hrTY&1>5D*E8$y&(Q z+v=yX7vi`FpQxT7iZp(n3{CY$rh}WBn!HJpvw08YhJS2+R=peZedMr*qxen!xt*v@DaAxu7LBvj7 zC@gXZ2-_+ie5Vml&jM@H-bAEJ^eOb2Js6#sNCM+uwj!H<2)$(7Z|hE4@D9b{a8)>m zz>6M>iLBYq8M%nZ3<$|Tgb34napR)k@9(&xqC%n|??m)^zsAs1e#+=K8o!=`|!$+kGmYWfv5D=Ry)cYe%?DgFa!aABm4 z@*cgs*t8c&9uW!#PCkSDQat6{E-2OZoopvvNKxTbu&BwgxCvtq{uYN=3O-@5PHqo( z_GMGh^2xRs!gi0H$2Y1hOrHPVy!eBLN3I$5bW6~eOE~?+bU;~>Ybz^Hk-AT%t$!pU zeHk~eOI<0|uk@eiNAuCd{UmijV11o)=kR&B1bu4!*J+uGy81h5eLXcyWQAML3q`uv z)o20(F?afUmvmlUUfW6(pzsOWK@Tf=-q0Bz^)HVTl$8bJ3e>dvxi-%TdD_tMRZsZp zi5Izik6)qSdx26wMZ%xVM$H#kYLjcTdmo`t(5DqKwR1)|{{+p(L?kt_WyNXthyxm7 zaQF%=h~{=IR?vB8sYB1epkuMnE27nj^UTdgK$&L$xoxsI+T8b|u)Hs~`9VInX<8E* z+?Vdx|IlN9Ycg^uaJmFk`lP2uTgOecsMM@I9CdL~_aK+!8G|}(`XxU+QhG8$?8Crf z`ucF*K=drGtrWWQ2@!jDhgSMYW9XGtKrd@eiJ|xlX`*gV%*vV`9UYBE6$XUaK~$(O z+|;q)W{_0zwT6|FXW;|suy=qhp?fQs+EX0gdonXKxh*X$-bJ}yEC*e}8Qa!dzeFvz zsEcKx+FJ7J-dh)28#-!|sTPfWdW^;l;*pO<-?(5j3P-&5Quq}wcI+kI2$vUkB3ir! 
z=Y}4Ci{F*_r7ZfbcYpfLy_j$39X1`a=2>Pe=^$4!wHFI`mw5DfRu9Es}kt` zGoq=a$WUE@?I$d~%~P`U;(&xR!3`9~{di7+Dv5t?LvIPwJpjd4?w5U>V!pAJfTTWd z=>{8+seV|(>OUzA8?>fKM&x^DK~i#==Sd>xB77Ja$F6#EudF9}{&FEQk0tfD?vZNb z!3MOi(L!(IhWwc_RY@A?(jxkR2HhjL6xhNiqb)SIs}K<`WizFo**8l~2#cLCAX!-j z@aphmyTs~O29r%%$CV1My$OUEhmbzHN2dVH9pM oR1j(oIVs@!zf3f;w05~~v4?`rQ<*mo3S5A-xr14~iQnb_1Fy3xO#lD@ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_photo_camera_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_photo_camera_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..a4e7aea72dad80db6724ac6e961b8d942a7dd03e GIT binary patch literal 1309 zcmV+&1>*XNP)ymi__c6RKd0kD>_QiUlGZMZ=)w=Gn?ehs zg_zW6b*H9Qt#(^*zu>{K;JMl%+lgTFf8JHNW$LuYsC*KqL5tW*-C1=O-G)Firji`kqGet#5%& z(P+N@e*<-Lf)Som!FOgD;~rnp=`K)?Ys?ZFvs@$RBG7)O2$d51lLqQ1bo!G7+D{tT z7dKFjDbhfRY`j1}k`@Nz1nOj#v@lDq{y-;46JOLHXoNIzxBfuSNfV>>2da=Js`Up- z85&3fX&?=xfizGIO5Elod3xxel?)kL>7a)^C%MB^ngfkg34LkskZ^@u|^ zQ4A^21gj9ou4W>XK(E~7F(jG{A<%T>i)#^`biK2aMMxw!QuaPjgj@569Ab!P{Ks3K zF~~vMkoS1R_dtV4O?$XO6&qEC=|*aL-RD4~HQ#%(+#(9sS&mq3Vcg$9wceWZOc9kL zyAiAVd<}FDvDm}~>g6m=h{cC~23l*)^8<0XWKFcr$3XWHix-H?8N}iv{{n46jDAKE zb|FT$`xa>2G7if!MG_`yw~W*#{0ejqF?x%n4B0PSp91w-UJG6DiTOtDQ=pP%6n_I3 zTty5v^V*+4w-JL&sp5h%OAv!2{scOS7(9e43W&ke{shV+28VFP6~th{pFll`!DqPQ zDPnMqKY==o%14h$}851`qiDsc{fjgC7{&zy{*ODL4>8z{ z3%U@4>%pf$=jZH);sM0yMc;<@3lM|LDUyVjEJF+~U@KyD4@uaG7~KKB1$u~B zJWE_oAr^o0FA(%1Mw_@q9KK^7Vsr!e80Z0FO>~BOIc3c=kH>xng1v~{m+VBW z7Qoj)W3(Vv+ZiGX11v+Vwld*wAQ(n!x{5){*qCO3E~KV6!RJ757$pWf=M`_;KpIE`X&?=xY9I}yfi#c?(m)zW1F8Q4L4egV T4fF-=00000NkvXXu0mjfM-)<9 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_photo_size_select_large_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_photo_size_select_large_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..fd01c03f8e80a71a1f09fe930b1d9416803987f8 GIT binary patch literal 815 
zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFn#lMaSW+oe0%r2w@9MQ@sI1f z({vS=hR3da9JlC2$n}m*t8cF@Q(Ba~oTEv>>T11!%NB{ZhFU6%_bW7Y9I=&%?!4sq zi;?T{x`q38e{q(TPmf6|c=ogA-gVo(dZ)h|^=HmnhK&C9Up&i^?6axCvPf5ZfBxr2 zE72;0@V3=lK&R|v{k7S$=`G_|L%s*R0tZ-_JsaC*FuiYhV7pYsq1LBS-$8IuW5t5P z4`Li@j~cWdm*b6SgsiExsKSFdu16UhG1Bm%D z;;RUP2h`Hf$@=7;c9|f@iLLrPi4vx=t1lkpQa9jP?BVQiG4}YsFg1}2zibzCM+ZM( z4w!9r@>+`gwuMpu8ug0%RDK^+)CrpCwy#kxL1KGnyy)#MMu~Ne_cpN~*7(q}wp_ee z?1|_LYxa%z-#(Rnw|ANN;aJ^%ht+cqUJFT@clP^Kp>3(IQa_n@wdTH%$XyiuP(>_y zra`=ezDB~tbHaCdbpjpdHKe^bZuXY%wYi`8v<*>leOJxg4c%Gieae@=*{{0JLs{No zy1^pX|L3D*j%^E_Y4F?O`^FhFt z!d)~oMKq)Jas<~@&dr=9!rm(tx*g7ICaerSEp;&K%3kV)BYjD(zU&JS#sd5)F^N~b7i2zHo-k2Mr;1hNe^15|N4ZUJcrWN&_1kwh@R64A znT=DT7l`@3>M8p)Wn&kUCMY^Vu@8(iRba$JIZNsorwJ$Csb1>21(*>SJYD@<);T3K F0RW44N9X_m literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_play_circle_outline_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_play_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..7d94234bfaf3dfe3d1cb7c82181d26609714e825 GIT binary patch literal 4092 zcmb7HXH-*5v<`^`ktR(cbVX2_B2hq!^xjkGU4%$)(xQYWMM6hVX_9~tiUv_S284it z3x+ODiWEV52hkVreShDNch;ISXJ)>$zge^Pe0%RiGZS5wbC7cY0DwhbPs@VVr~Peb z!L;|1&RjjMqYJdq)c{oY@&BL=&bX@^s{;VURHk3f^t3UfpPqFf0KgISx6w`C7Ye0K z@&svH1;Kpp2ZcBXxB>i}Q9eO3K5jw6P?;Mta>4y4uzTd9x${1M;iM#7wDHm3(ai0Ic9Kix!z*ry~y)L_v0)mx4Fb9 zH~H5(dNoGtZUj$>reUgsy^o>I#$I)8iwBYi)UNGE$9aM4ElblU%1bZN8H=O)VSPuZ zeJvj&`d6Ea{_p3Qn#d_`LVq!415+3yJ;OG0eMV^J$_*BWr!)PvTOq$*c9HJCrQ4K~ z0sb<2{7_Q_8%&-cy(XoSeChbqZ6KgutdB>}kv5QS1U@{G^$>6 zMf3io)3y{kyyTP*w-3ZZ7jxyK-b8b;hOxf>Hcnu)PzrZ6;U zLITM@X5@>gyTVq?S*8Io(8bs~j1WN8?>1qK#ZS6>Z6_VNh$$%&I@3}uPU2NNl7Qi5 zU0dlx7wAtvI)E*K3-5Z<$jPA$%V!*dc9*Fa4NC?Js8niGLHJ5}Au}^G+Vy0Atzx5j zr!Vm3%a{Fj4FL-o+D|V$O51cFKUDh7^5@x~Pk$ENXWl88EE=xOtf?J+Q5wjlGbA~r zy*U59d~0iq;b3jz;4~#Ag=w+B@Z-Jd=CIe|bayM>y$htj%lD1Q@@)^j`yPcln+MLi z-f{_BpvYi^>4;NH5mFA_yr>ki{_9sXJ=49-L8saFIH8sGb%&=)LtHx~}Z4t&6 
zIHhjOrX`%uyIen~Xcy1t<13i(nN9vUfc5#`TEiElgC8%*dUtC;F#rdR)`pM75ysxX1LmAQS?{tLc zcXHoRxpCvIgW|^m9tfn7E;v7tuqM$u7x6o$F4&eA?V1sXHnsEiLViV<)nAu*a*wjW{$RoUtHW-;08R z=~nd7cIs6Id7vb+Qh`%7R5T#IV$7}BCG+e+?)kio*=9UA<>X3)f3_eC$Oaw~{!X$}W@e&Y`Ze`b1~h z+2qCS>@1%^yh<*R)3*64HpMS%%7%Z!g7w@GxOIZJfr@klNIV(Q!-3G+wGKYX6Px&r zDC$x{#QI~^WWqOY7FD_Scv!lUcZYEeM+1{S4wM(~a%KAQyW9-_!%gSglOXK^UAVrZ zB2FX@huoem(ab$ahT63MlV91P*h^toE%Q=y z8`df?cuvJ>v2o!c!-kOb3$)I9^&=RfAl zAf@B7TIWilb@qytZ9epX8a20{0NoiEEUgkLw4txkkOT=HA%C#?|U z;-xGJ5-tYweIn*t-W^l>h-A-8*eA-&$#Ih5%50v)-VifuZ39%EeC)P~j<)>p?IIUD zP81YlduVHHxgf(!Xm9mLsln4NAP(@uaG%TTzCj#j#Dc#NcT=lG+sZP^G5I~6xwbw^ zMMZ_b^lkQF>C%7)fQ?i3LiKccbFY8OAcjJTFqp>yDTf+xJ6IKi#7N*JEO-;Z?&|M; z13{J2E2c9KGevD4X_}iG?|BxvsE7}EV20E6Cy$L0H<>aXsKEvU~Uag82oZEfwt&Txa|i~r?jc)FR!foQ1gCeWF;!p0keSNoD$ z$KlHJn*Wl=pq!)%f^ZT!+}5{?YS1OM!xff+uetdAExEaO2zst8mG?!q>qY+>&;%8u z@z7({<2x|_hsFPye?@ntsEp3Of|AP>=B*|Y*U1Ix|E*)_@gY+x! z^qS)4aw$DdD`2NeaDHF9KIH`qI?@y_3JZJQ$1awIFdv}-VK7*XIn`*vP2>5novj`A z`mHZnBStXf6+4}{n_rRpz#oLW?ojaG%UVl%kS-D(fSxB~FIFU-wBzZxuel)tx&%VdFG7qBga4D2 z=n70WnM__sWFK_ev6LlTtKi6*TBrvt?P}8$>ER<=fwxa9PV_uISHmcDs%0aXq!%?5 zud%nYz@fJNeNFdf&ayFx2=>{S3Ga;kvg*Pap)xRhC=0>3u6m$5Or+8re~1TF8`~6N z!$=qLzN%`CVi7Nfx~lFgd9BFDR&p&8i$Ov4-r^dl>+d2&WLvwASw5 z-PQe1vNx|dU2!;VX=z!_(j16oDHF6nKL7pu_o~2jb6F4OuUsz!*ro*NkQ%~<$_hwD zG3kOw`(La6^4Qqe^gUr~c-DiA1UB3W%CQn^HZ(Re`f1k3*vsM`*4NwnOC|tUH3H<; z^rQu+56j|F*`X%_v@ou&c+D&y#K1bq)9?j384By{XGA4O^*{|uYo7P@_1&VFN6FW? 
z^*npO`Fw5bnVWTgL;({jnNjy)6@4WCJrhe-reCRhtD}*?)*2fJhgVf!J-S_gquS-Q^QAXy@6yS!3ZSuq;Y~*( zdz=z=HKZUH8$XE10dV8S^jxE%bndeGx3=Zj3&UJWrB^)FI>p%Wvd#)~t5T{~@^cf` zyB`*gNp?s+Gg*CRlu_=H6ORH@3jf;5N+VpcQ84j1%+7JR$0c6GvcD`>^a-?-gqk_; zcY*(ooU$?+7Z9pKq-9k?xa#WsSU3M4iXk2dq8=B@DAA3yQnvYZ?rm2`M~BHb&|6if z7mLN9uJk>FtR@R@Vxo7m0k103gT zhhF-&9-+DM%I%Dsd7pY~bcYipkGEIVj_x4vRL;D+TuS-baZdaT_f98#nxqGV-2>Z# zZSBHiip2tY5<6-aPWxaw5S8{v$*w9^{THGG`Ppzy^(}sa;WgG^pSm7y9AgQ=U@PkD zs$9Ppo+^h{db*e7XzXE>iNoPS4t96#)&sq;*eAA?emTrpZ4YrE*cM`HVgh*!FAEwo z<*Il`e0IM3BDt$e2*S&2-LH9+IeD1LXA@wm;Un3V-?nN0;ib}7T1xD29QHs2%iAWn zYV}F*wnmyGnePi=gF|^L7xhFJw25XEv}n)t3AN*x{kMYR`zU6(=PK-!^#A!FCx-n2zrgeC VY&_SwN-NF*`r0O1)f!IG{{z?AyR85K literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_power_settings_new_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_power_settings_new_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..0b95e724ba5d239c692660331efcad2fb92bd90e GIT binary patch literal 1699 zcmV;U23+}xP)@yeZ@NCA zZIcC+M15mos>T4C^`feI_}d>kpXwd~@Gto=(dOiU7oWbw!l_#H^?!1&TIM3X}pR zDNqWO0;NDHPzsa+6}49bip8MrQ9*v-V@muQfB`h8v1m>+2*9uC@USv3fy(UT4FkCc z8srVX^O4ABp1CJ7%r%dlDEmP3c$7CA$Su$yZ}u?7d;>jz%RnxH264GJ&pjB@aw^a0$RwZ4^bWd>3pDa) zirEGlppkQI%zO{Jk@GZiXO@AkV;b98*5~prwt)$3HP&VoXdcgEn`BdNpMyS3725%d z*#!CuwmP54^^^2%%wqdu7J=T%X>4E5{kzzm*haZBe4u-vvBAn|!DuQESOpnfdRQHU;3k)v2{2@~iKShk?(0*PC&{5C|OrOe=V z6h#@Rz>By|x4uyq)v8Zvp4{$MoX~E$U7;=h{}37R*M5Bt`YdUGI%HJb)joc2Z9Xunq zYr_Y6LG9q*)~fskS}wO?wS!~Us{93dyWCEw9gNtk3LR)v?ckK$D*gg3GGDqKjLL1H zzd$9qT~Iry%B>te(3IN2#jt_eAK_2hPsy!A?Vu#Ln!i9bxpk@?bcYXgPHsySKl>`r z`U^BBx20+a6>C-g0-ciE3blik%~xf+LalgNZmZ-+K_RV{+X;Vn&`WZAkJ`aHxsCV> zbWCp7D}MG>9`pB;>l!Q-6#2X(i=SMFRh8QZ6gT?{Cw1K4BUaZTx!tVT^~vp^@E1sW zT5cavK`|?;&t8$wKu^hSxi%xxxGdGiq$qPwpZi(096ll+zz8i=O}J7R6-o${1cXsMww>|md87|b4ArvyuYNoNgNM{szx*5hY#fq?j#Dmy=9z~!bZ1T#g}1$j20rCk5@)9lBg2I;`h)Tq7FQm?l+|R8qH=sG^lxt#!Iy@6_e`uD&JY z2R^5t^W<%UTamvJerGL*$(q_fd}-FJQ>~fQK|QS}FHKb$ 
zE1=vIwMy%Btyb!4RkTElRk}1%?b4jpm`>@0Ms!R^bX+Jeq-k{zN|OSmKq*iPlmewd tDNqWOCIw1?QlJzl1xkTZpcE+0{{UuIEK@Ii6O#Y{002ovPDHLkV1gh+D#ic+ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_save_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_save_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..3b9de2bf0b8f973d025309e65cbb95600361909b GIT binary patch literal 751 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFm3a6aSW+oe0%qxmvpSev5%K~ z-?^5r4HFOR=2p}Y{b+S0q`ZmaZcV_HU9)BfT-e+7mF<%ti@rn0*K)k|+u@KwRj8|gIuSonXGifXD@hgzG(ArnKXOJM6L_AqE6DYm(FCnptbPq+T{!( z$`WOdWLF&B!I?4Xj`;<~gFBS(I~1oaU_I}U&hq(Q;Y0C^*#e2X>l&vmV$l7~I;)Q< zXU9$SuO#M7{t_$y^xykFeM6~2YDJ~6JEqbwHpXGO$=88Pl39yG;HTWR3&|8v`l z4QJ%0mstJ1{Qr>LW5&OSlEZgZEZGrdvp~KgM_KNId(ob&NfvSe4>Zx*!GUm_tv5Qo74|<~`roRwH)EMs?TP3A}MQDxEm_|ISAf z<6chG&dM>!*|*D07#?R#m!Odcj;1A|Z{#B%+&jH$dGG;XLT2!E^>bP0l+XkK7U4GM literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_slow_motion_video_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_slow_motion_video_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..4a8c613cd4f78ca6e3d577279bd73f298877ad25 GIT binary patch literal 1885 zcmV-j2cr0iP)rScI#)KTGrO~A>lgOM6I+#; zZ+A{B5{X12kw_#Gi9{lM6AtHe?&AXn$bc94DUWdhjfiP)K_Mse0%KSWUy}hJ@wLr- z%4?iWiRv!uaUWx-;A^P_`q~aYY}2o|MQJ^dU9njO0;HB40@$k7Id&!E7Zpb7&$z(1ib!oUQzkb175^6mSN=VH3$) zM|=5!rYs?miCj;Sg~$0SJ|kI`T;R_dwiJ^p`f;Kx;uv~RMKbnqwfsPHmh)q^3~;mT z(vn`(l2|o2$q%$(1>ZNFPI8ELOiC)zRq_KZS;^N*xrdk>BgP$UA(1_tFF(+Vl>mH7 zqYR$^J|dAy+Q|>JW(iLzqlob+dx&ohP2?Vh^r4C3qCiMR?zXtwKQ+bF+ z%p<-(xj&Kuz!I9tL+sDz#CMb1&0r~CTS{|zh$4m)PdP2+1~JC*UCU@84^hZ4;+agI zT;KtIXgMwAA&U5ncph@I*C^x1R?tcwVn60$R!L*oMt_2>q_w9j+G@-O$}UdEYzYPO z5M9}W+3B*0nV3B#Pw^;bvxvzSuE6XQd5ahyVRof#VG$-raR+9X$OfihGL2YTfdDy1EZ)Cg>7xTCwRBWye9ifzMtf>8>7aDc1C#zr z1K;(UoT4u#J(L~}WE;V%Xl*SJ;CK*RMX((lsB~}-CViC_eyo|Cp*JRXDIH9}{Xg$@J3D^6xa zn+Y~b;ZR5g!G5o_k$|k@cLXb=!1jpaZ1;hHCbEjNG3ll-c)?0>1CsHQETWK7f<0^Z 
z4KzX)kR(~eaDok27_27P!)^k(U)6YsV5`DicQZNBO(6HH8Yf_Kh*HJr1lvx2#sdMe zfPA(Q>=b*O#96We1GkSF(+PI3QpE=Z>yfoU@T$_pp9uDjQpEtbOD`cQEj(+UO?Zaz znkh7{BiQ)d4`gL5LU)2Kl5a?pLZL0eR@SLN;fMVkMzHca7bx84YV1p}9r6v9C?ymS ztR@PyR|a}Ddc-X6hd8_ry`-aMKD?KsjiRGjHP;htyuHKf%XRXw+Q;6wqh}TaJ+CzJ zXM5w0`v^8CYk?X&9GKl>Z{)gzf}9<>7OTRLo?sOeW+>3pvWP;;2=EMkw6V)73X5oMPX1t zIl=y%lV5-N1HsD3S2&C!SSiI$13fG2C}Io2M!NWp>rD;=HIf5dgUOu=izBEe*yNo1 z66bh=)o{4NV;m;!9Rzw_j&M9U`N)vFFd1Y$P?M}&Mz}-i-~cKKR>RRba}}Wt)dbtl z0j>rFFw|P0m*o@#z}bd{c9?ay5~!)1q9e6V58h0~WFC1raG`D*CX;0Y7h(2jT7h1b zbKHa3MY4nWm{iij>r1^2DaB+VG0FY4-YMkCQ{?d(W|zqxregMnJjIh>`$Dn-U8%+7 zUlhngoJ>Fq?4udWNKQFDxtQXRhlum}3uPp46({88j~{Jg zEKk#!A|-)B+VKRVsU$ViX&^sPz|RR|7Ypgf>oVX5dNPZhgzOR>pPm z4~_WDi$LRPAzyJfTeA_!uzU9r_2@-ymI4i+p>hG8nVj)JGdWecgo~Jyu|SKsnwWAA z=QGZEps8F&Ou35__&pU)0&S-!9aI;vFSqh3Rn`L4FoC-{Ky?d;ayvsQO)F3Zqqv`X zs=LUi6HhaYr6~nk!ziBPWD2$S;2=)sPTrx940x6gxSumQOnZ??Boc{4B9TZW5{Y~T Xo=I-!At-lo00000NkvXXu0mjfy_{Rb literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_stat_notify_take_photo.png b/app/src/main/res/drawable-xxhdpi/ic_stat_notify_take_photo.png new file mode 100644 index 0000000000000000000000000000000000000000..2d348d8dd863334fa3e2eeb369866da621f01087 GIT binary patch literal 3574 zcmVPx#1ZP1_K>z@;j|==^1poj532;bRa{vGmZU6uhZUM{ jaZ4U0)cK~#8N)tm`* zTxA)C)5$_Yu%%RMqG&bMQoCW);#T3f7R6c*E~Vnq;?^pNTEtr1&{{#NxWG9sD4=Mo z9<*vbLfa^awVVo^(zJ9H(u&ZA8X$)3nfN|8_fG!#a_?l8JIVN-^PahLlia!Y`@jAB zClkuX0fU2s<$5_?tQV(=r|DePgb5Q8Ixb5llii6#;wc@%e~Bj!95`@f=FFJ`{Cq4? 
zbOGtX`HW|ZFA^7suMlU7wc<%)jaVsq!ts$55eDoQd&S4ae~5SUIxsMBu%e>k2^|*| zMG=rMlqurN#J7vD5}z-gBF2Ho#0SN#;%0HL*dYznO9=zPh*QLs;tsJ-EH>K3KZ=XR ziDGF=$-lXa#RtUk!ad?r5z~zY1xs5ubFyAAqc4ar5G%w?z^`B;$Nkqs;(x?GaZm)+ z;;G`Z#M8v-B7SVNYXkh%j;^k*%jMYbZES2T$gvkBAPMJ8y<8!_Rm9mw;Ysl!@$cep zu}$m{iBdS^OsEnkixWizHCvn`5(N=t6k2tEH|q7fiinR4oyr|~2}mDI)LAU970=*Y z2w1se;+^7M;$e}96bH~QKKE?#)goFJ_AK1PUAo`PVA%jD>rZ6eA}W$i$>GfG~^_pzP`Rm<>lq;v=SflPg#(w z{962ti1~~q>ckI-ZxF)-)vv`}-P_xHeRXwp)|#BPfOPSxdil2aP7xDxf$IKE{E3LK zDabJv0Q}WU#EV1{Qws#MzPr2os+yXbtQIgU0ZBL{eb@a}J$j421ML@p94TYDe1J+ZRuqIPrc7$v>5jOagkTUVbT( zoVg%`qg4mQe1*g3aI zPZ#YX{%D)`EaO!h4q=I>qZnTbq&NeOAToh%P{yfFvB=a}zVX z6T={Ek_6h2nJogdx>B6Pi9CU6ED@K9HgIGM0t;!0UFdUW%$RYmWRPZ6X>asH5-c)e zTY&yRB&P^-l3e2%BAF{~)G$0vBqHXcIbpyL{Z6C+4g;AyZ4~<1v{5Wrq;=vv@7*xA z?b@~L{Km${|N8HT1>{SP6dm8=0{eWw7?ud48H7;&?m3(&A<$0QB%&>tmc>~jNv>-} z2=*=FR=F8Z!%{!&@9(GQg6g{94+!;9Su&}>XT(wy4xs;iq4;j^-AL>d?;AQyfwbcz zVk%!s38oC%t z=`ypj#Kw&qQK^@Er-#7RUvR-I7gj(}{D#;7K4nLZU%oKG)mw7G~$8h2C6%d4;tlD0DLk#|1mnY|Zzm z7?*&;G{{4yh!Gcd^O}+A1z{dF zwcY#?z1aeQf3rT0hYKCEL;xRQ?~w-za}W7|`?H-46sI%M+uKXe$J!qbG%nCSAX2(O zK=+Da+8YMyGn*VzJI1vs6kkZNa`BYVoT#jL^*p?RuuozoSbocf3Jxm@%nr(ME&>xCwz~=)xl{S~b!Lnn7mP(XAA@qJnq{Ogz zqZrpcqKQ`6U6UqFdLprA%^LRdG1hT`fUHCd0U8;W%a9E-gJaVb**gg!nnDPtS;v6U zHnaeHu4&Q40Tu_BVujt>($dnC*tv6OpE5?P>V#Elw77r+WbzSa5|}ue^l6a|5tJ8X z&$KsaLs-+|TGVR@JucYs4^y6P4Gj%FiKeC|BuI12V!oKI6U_!VReC^Sj+J|$#ezW4 z9J-ldi5t;|-Z>^4R|G?IQcZdv{ZbmX^&~wsM3=q*A zidky6IKl3fXAd2wzyf(z6+|kwvepA0~Ao$JoNL161$zV%#^x_2~6rZeJ0ZrVtd+5X=TLgD2`SO!$;|l}OQQlet2{ zGqO-Fuoxz?Uh0#wB*1Jj*5Rz+eZ~v_hN$=hE0^9M} z`erj>>NxTmc8Zpox#5(*E@Bk{SYT#=d}F{xpgr4N?E$GM=0b@h+QNQB+xEbTGdB0Ewk_0RgNF?3H{nak!EEF`eEj(F^XlsA4*Ks?h!PNLNL@s+=x*jD{of!G-s3Do z#scI#XuB;_I(mD1FWshG89g0LcdkDq_ok*NH8=#7{mndH_Qs+Bk)OvOiDWN_p!LU~#Eoxbey85~Z z7x<~j7kF)cL@B^5ol2Vq+o%E`1Mp0ZBGHQf_%K2^TJ+UNj~>N!WE2j_`hj#&q8WuN z9Y5P&2DAu>ZtUY3FKuF@14$r@<`&%n9@BHIX=!P>xuKyUbdqY3(~9fjv>|9(u|v$7 z8{k8Ch-{w=c8?WXs{$>cz~u{aQ9UfG^ySUX&3DY3H!tg?Hs_o7eG(0GzPk~1Geow( 
ziZtUXMMiwhs6YflVUaK+1jB!Q2zwrWcYAkt_wwrM>cg2Fav1VjT?vU4V1`lv5-+x} z*lz{}z-Q6h<(ooi6qAdrRNIH^BZ7P7v*;7iGk>iZG~2@Pr0(+uC4%c~YilW5b4FeQ z3Lrtzh^H!ECEDW81rJK$h@Ya@%hnZ|^!O@L2%;fs&+nmqI99TGTA@5VxeRSrN}yoN za;B-OZf%f3#JYX^_T4^v_UycUVnG4|zRA&7`J~9iftt*=Ml28noj$@X?OuqGf}|4x z5g}OT}{^o)gz9`U* zmF?NHC(+Z>bB2zu7WrR7{7Z;ppmkae=r(yp9JH9~2)I zN#NSV$90lMq;!Zoy#GXMYp07*qoM6N<$f^o>j%>V!Z literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_text_format_red_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_text_format_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..8b287d3dae7ad3e9bccb5fd233249a6b3c96386c GIT binary patch literal 1455 zcmZ`(do&XY7@yL*(z+!#k381RBVsBS*K@?0d1WYM%VS<)5B4&7RJW8?bk(AEM;52$ zF^|}$-Rn`K(;``p@)#QO7?Ti5y7R|9oqNx@-#NeU_x---e_xKj9~`tzcN+iz03kd* z0+f7X13v(je&U!+pd=My0Q@L``%-U8DOBU!eBA(m+C25O$Sq2(M(_+J0s#B$HlSi1 zQksI|m&#@qFM>b=R%p{euU?{m;AiA-}kG z-aCPdTFklYMt^T=N|lyQ-V~I5^Xx3K!5zKj(OF_VFP?wQ+-{7(J`AmoqNJ}~{zr_= zjEvaZ?K&x5Uw@QffYH+G8ty|7%;tL+1IOZ)AHB%2H7uuHV=)1cg{eh`OP+m?Q+rZo z+Umo-=~@g@F~QWdn4^jr2M!sV^k*6`Z!PuGd?cBqGI9jta4S1WTMwu@h3N3c4&RponPfU;~k?-F9hVxL@+sH+xS)9 zC?9BmTKf}Fy|+(&cu)##Q1Wd$0Tp!N@D=reu0gO+q?+a%6D;a-fI_{$1nFeI1Q8(S z&r@s~IjZiqOrIh0Z*q-l-C>T!K^|=+0WE1dkzJCqndy6ZD?7tbRKDGvL>X;Y$N3YH zeF0os=C43TWW0>B3U}m~)Xt(AmJA*lD-c~;{?YvDD}hQU%$U_^n}Sr!HiA&%aEH+E zUr%8eWZPmN10fG33u*G#$$kQghRoky%oaP)5#3)~lRRRyFQmd~U7#q4xxBN1;q73; zD52m0L`3r&WGH@Ky!5K!4qfd3sIR-*8~33u^xbpXARCGw=!mAIuCTq36#}ecao-A^ zQ(2g3T|i1Qcdr_38P~vv8&*_?)F*c}u$0sHwL7e&_r}?Ej7A`aXmiFFOMV_oQd_RsK-k=Hz&va(KM#h~NjT)_ zlGz6efJ`mhnWLHU3upP&D9PapP2nEA8Vg#nhUbQ7g_R`|DTEKX(qNOR!V_1~S6c-{ z8Uc9A(E&8$h1#Bl1;*~QZzjtRvVfE}P{$4S>lR4I*-AVzQDQ$ys=?x1v$a_}cxH;I zwCasy<79n}rLjD$N4Hn{wr)}Ci~|Gpm0^*9Y;}5=-7B)MRkZU2D%$R(w6~=$Sv_|3 zB1>mEF6pj(B?dXw+lUF>!-(1~%Zi^{vZT^v&zd0G+fv}!Hg`vNVe;>f)qJc;pa&*%jk*WzQN^Dt{HQsC2Ro&x1xJdU+NFk zaWv&StBqK1_yW(KpoIzRWQ)Jn2M-vZzNFlTMr~ELRqT8gxcCsk!2C4j&`wscOXn0w znvG0RzWj7pm-`M_JqC$w8W9#z%f_f7mBLzfe%P`mBlwxVTvDSr*0>TrK+B-hMK9K! 
z8NsHxvqWkKT|?`2r4T75Pu6kjPbmruU}fL`$f*$n-TaT7uz`0D=WoHK{X5>Ml{!uL zQUreBaU`Gjl)T9-?{R}Zcef|4r)rkKauwglVK2*70}RNCjwsU`|2w+r`;C7`9MRAa XG^t2E-*tIubfbiT`FU`UMqKzC^cS(G literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_text_format_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_text_format_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e38c09b1e8ee37793d6be4d3acd5b4b978a69de2 GIT binary patch literal 961 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q1xWh(YZ)^zFn4&mIEGX(zC9S9aWz%u_{a0- z%+^j&Q{NU5Abzi>z@E&D04OX9{BTcTGDdUe*MnepnC;fH$Klg&V9sBEB-}7S5bYy1)Eut3-5@$Jth3B zvv$dD3zs(wJ~fBAb(X%=lT$TwTK%cX_6XZnhWo;|*Y0^>w9$Mk!|x7WE%_T>IbC0) z8kDwo=7!3BVBKh}b(mq}mQdl?5X1ZiIvf!8{|(no=JGEWkK|uD zz1~E#bdQVe-W68@7tQnXl8!wS=wDo-nJTmRUY}O%7DLCqz898-6gEt~u-XI|mD&%b zSeJIZT{A^)X$SY3DWARg@7_q`WMOJ_P!Qn2q!Pq#yDe=KzOnD3QNW?Hn9PMIRJP?t zu~lvG4GQskaoEho@w9{S;TylE@I^@+`?hadXI+l7iMHsQLuO~2qtvWs9^4kZ@`2Dc z<5i4o8)8GGZuqGBuV$<0^D@(9opb10cUF^H-SM)Z)$+1FiPi3=hj!msmT^7H-G1Bt z$n{bSPw3{bZ{2nAl*YF6A&^!_2_Lq6-x~vR=-W3ypS(Cxj)z4*}Q$iB}V0ygf literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/ic_timelapse_white_48dp.png b/app/src/main/res/drawable-xxhdpi/ic_timelapse_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..76240a406807acd812daef2cd70ee676bf4951d9 GIT binary patch literal 2368 zcmV-G3BUGtRmP{h&Z*S0aM~zHQLq*BPME`F_TP;My;ewQ>>!NH&W}Nm@Y8yZF zC6gINCMkK-U=^by(b};xQB>5nR4TEPOhzZwf)bJd3%g=~<>~j%`QNv_clWHlzw?>1 z`nc@9d(ZV;a%wY*FP4>A@b zRU`Q?naMo1P)|Efv{BC%=8}0k2g+vyhl!GiKI*CDo3~q_TsF{19IwPV!FqCTk3e(S z#ZCMW<84aPIZy_roWpNt`5_tU7brv-ACWAVC?%9mffjO+WV^tE^ayk}he(z8DAYSp zm`%h;r9Pe|L)SnPIgbvSDAF-dIeqA&m!IktD4Tb1qnk!{u#sgHQ$#M=gb9;PE=3fx zoQ+g*if-JfVU#X`CeVU2oxDpqQ*56yOywzR`5b5dPJs@Ais`_-sSdP+81}?D#3B!lFkv2}o;dbIS(M5^Wej3RKijznZ;zNexrH4Al%^`sD&klh zpo)A9AI4o&Gl;bzR;D6QnXNpBnT+A%n95OGiAs|lXbFQ@X=gb`62=qUz{&uRCNoem zF|5>*gOQ|hCws9HWmd8RO`rpFQGSAvykv8ovXTWA2X$FU-X0r-KhU%62L#W~8H^9>splt@1O_M536f@!(AP 
zf8FF5&ojZnnf67~f8KpC93$m7LzVaXZF$+q|9)*?|88U6&?h?zYeUCA@^1ut=@ zt#?r!W}fjQ(A~t4o$zc?*h++-lV#(x=rpoE3cU|>2-znd9=Yte!Xq}0XjhQcc^zmW zvZ$x$J$C(pJ1l1U9ygKA_cBn3i-)+433(H!6xlKikB4*5xqkOA^(N3cBu6kb2sCVQ#qb!C z(_RFcgJgioS_Fat9!8#V8iPn?Cmv`Qk}3>M0zn^Bk>`93$(xA=%H<|%{p4#D2>ywD zt*3xns6`o*SfC9^wqs}&2zDUPT8(6FVu2b_i*t{5fncFH>ty1nH6#)!U!3+3hK7Ou z#RzfU!>A4M4flaIAX%hkAb4J!_i-c@?gPD#S{oUf2I>|+bi>?0?SR`rnM6^01w+$7 z@S-^LcGMz_bQfqIk`j#rwUg=1-^V)87SuWkX&gviw+PdP+6(Ri)uZ+gwWC(9d7vT23v;hS?W&tVc}S*c9tfTm=AMou$9bRysC8p#AE;iublij5T<3u* zP-{g0KwpZ7ku#{RaUN(JYCF(B5X^M=rFE9=PoPhqQc{ z*FO(5`+u01PZagf1N;0xQTzb4<@)CVe(ms9p5x8~)h6xB>M!x$YX85izKmLx{&|3D z{$K8>K<$+NdFT{hugSX$W)Gs)O-S=R@>*$#2x<>F4>T6ZRL%3iddC;-$2bpU(1zMm zn&%lH&*8Gqzr9^Jto0zzR{ZlP@m2SscEH=E=g-N|I1l{T@zV1qcY)?2xnJWvSH+vC z<|CQy_V$-P)LzvykHu}}RjBna!d)QwQ>dM=rg^^LYr-p!phLdE?>0~+lE<{n1C`=y zFGEu9KG3&_qgJnFo(uBfgCnR7GR}P z@EZ(qypE(iu|PS*P`kxlTID$}--^iuzCx{!I}!_I@HUcajq-H!9Sm{QSZ`I~K2Qmg zL8fYwr=NKk;+Vk@lKT=5WN;SAQ7!VsEiMB$AUTQQMW7YPN-h2~-mw39EUqp*iEObq zfik#+bf?<}`onngddIo<{`cn{fS8_!O8n2+(w zMH@SBjN?;ewHRIpDx?qDDI43TjpRjoNmLh4+ql@0#aUz#^1Tma@Em69Y+WPD$WynI*6G{zqaUV!pvJXSIn|l%SqxmF~o6JlWFsivI{efg!~C)P(-gqBKubw zzs*MeOfOctIm|PRw>@vzB2PDker^-E3UhT1w{i?~AC>%`_qjwH5#kKdLmMA*h~Kh~ z?>k&U$>Jd9%KYAF@(s+LO5xMM#&H&NJ2CtYltnYL@5K&QQ%}R#3tu@)%8+Fr&$15+#(f znQEGd;6@dhdOd(`6}{-9n=&0A;a5ZxIygxonmz>h#u0000-&_^GEfX)jT1_j;PWU2GyifB*pk1PBly0s2utX9xfl z5FkKrd-wlaRektz{B`=3mX_9B*xCsNhOAjMnMeqjVfO*IU(8%aDgx20+cZlY2{8Qw z7N^^SPUiSB7lWuov$^e~k~!Cz4TGDB3>Xk%Z5e_N2G#MU?XA5Xz3p=iY4+23pL+`L z_c`y|+{fz=;M^zoT)emSzWrT*Qd6cDwO-k0)u?*aseXLr8qu)&bWy!Jqm$}Um;RwK zr6i3&)mo;N<6Ep6#WSeGdRzPSmWGrxlH4pd@F#}2S~)s-fKN@u*Pvo{l1|IY~P%}?(3EMvAQJQ1rISuTh!p}hSc#2_M8Ri$vi;)N5b!vPIbUn{9f@}X_ zE3*pb0oX^KKY=QEh%8CzV-pnx@&LR|y&r*=bBv@NVR3;x0Q;y(J5V({7$XgNen_P` z&xF^gNHfqv4wI&XEHLMpurs|tYZ)MIeLTQvqJ1}w_hMz*qK%rYQ+iiD<2$Yk%8qYP zns`q(Qw!BPzMC{pc6DoodXs*uQN^#AjMF^FH<*KBtc4Dq=L~kdM^o~*SRHQ=S3l42 zB?2_b1+3s%2JGi?A1K4$h^LDVYS1;TmT%H+GmrZ~bsQj$ztc{DEUb*x9I=zfeW0pX 
zH`iI#BFnpnbGGx?4;1iA3d!?RW*`S^U@Li>dF%(;LLnzuj2vzWCvD}iALwohd5Iaw z@f!Gl3h6+}2U2Z)NYPUG{T~!0+n+VvmutFR04@Ma+n>WJefe>!)$~*QEGw2 zD)N}EPa@D~7>K@vR0D}km|Y~}KF}`AevMKOBwogBi|ash8NuWPGrS1Y$SF*QX>l89 z2PSzIBQFAprHo>-!)2f*a+o}cya^<>W0G_EiQ;3JoMnbrfg0(>Av0?p(y;kpRC3{*xp;quHv@m8j| z)~ZtBHVgS^Oq&(1T&wK=;P4H?^;3hqdq@%C>bM{`OTNUnK+Y3~$a2Mo1^HpBwK2TQvGhbz+y+CzSGEj~CXV73# z2dc6VXiCf53zRkAdfqcX{|DR$sxRt5S$ly7OM$NL88lPHAmctzpTaFvF=#FPsLU!9 z$NT0#+KM2RK=0do2K6`&q*MZRyAO1eiouQMN9F%VuI@JRPi{=V(A4?K(cf zpJOse#$6z>4U;@ekSDv9QB0mdE(6WsGA1W!@Z|RT0Fz;w-3Agn zG5am@V0$pzfm{b_q93yjDCIqb*#&004{rqVu8b$>Yw+0*PgeVb;gZD23fZKW2GuN%G50o`lFmZ7Do^`ib&mlw_bP4iF`m@N3as3=`$A z$$#PYeENuzXH$aza*Yz@JhRjIimrAFd6~up{=nw>4J4+spE!=P#?oik zWvt^EaqMH74+oEAVkUxazD4ad{&n2Pafq3x!IzU+rqfAWS)Swet9UMz@fCJ6NL;Tl z&8MSlDtVE~ILixso4GL$FS(BwImcu?O}TH!X9fC+9@J!gs4n%W`+r~Ao|RO8Wf`?z z*^O$^XeQIBeoEE@IoqJVIvb z90FNQJFi;0;c~phSE-naC-r7nPbY(1t$u#T{WMJVlX@$)R4d1~M74@1tNq$LzJnT7 z!UB|DtHP4aXmcCxy*ZQHhO+h(-7wr$@{>};OM8!Z~d*_ z;wi}Zz!+nUF~%5Uj4{R-L%^=w!mCVW0qba@iFGVs3a@Z8yAbs3G>7vRYjIh_n;g~y zGtr0Zm``{8$2FwXb8rdESYJ!IP(1-#GmUIbW=ka-7f{WHsNx(Y6#)~-g)sz59(wa0 zx$-u>l`I5~JlQ1*lq5_b*Ty94)SM4zC+EUmO+Hj6ngNR|rP%2o<=N+$GPZr z`H6sage&WOYGw!K(bXKb>vFoTAlxbf`G{S~+~GD{usJ>g!GkC;lh8N6d^Z&~*8Tw_*$Qnta49u?4zOK4Q<<0_`RrQNr3-0<9vD zpLi#hKyS!b934xb!{jSU_%H8){v?pUxGC>}u9MFwWm#SWEuoM6#-(`;bfJ7l!1TNY znye&XOKS2GsEW;$3|x?xKxZpC7@PAzPbKan&YT8%i$KXjFW$>pptmSd@(?g4CxM=7Y`#H(HdFFtAIN_e$O2g)3uJ*Tkf$t=1+qXE$O2g)3*=D2RBmIS z1#(-*0}Ql4QIQ^GkOhkV9e9uhii-3wgDsF-Igc>d0!2l7lpz+#t%AoGVu7L}J0ehC6gG#8EjmjKuZ-j zPN0!&tmHf5H%0N{SQ_xjYl;(lQIB8lR-EX=_xR;>Ia#8eP(W`Q@M0cr}km5Uv%c*j*L_12B zDC>*7C0Z;eOEgJNmgp5ZS)w~t%t-Mqg>{OYEYS{futaT?%E1yXkdr0)P_jf>pQ0_% zuaqbwOVq@E@(n3ICmt9{YV+}uZ6{X67{{e+)Z*srjA1>qVQZ zeG4xsc0J;dnJ?aTX`{!ze|(A0FE|LDi((e|<}P5UF3ASe3;_+SSJUUd4oqHqHsZV0 z)x4KB_rFGGe_Qo;Q~3E^pF=`zUnlQ$cz&xX`SGgLHH~t!IZCom2b;z(C=_nIms|Oa zXNzENB?}i}w2ObrrxH1h=P#`;OtFz=pEYgyZHM5`B~lqx^YoavK6z;n5Is+hd8z{w 
zr^*Cu#QvD{-AaEl<=JsK9szdie?sim4x;SVP2%j;XS=7R`zw+`>(a8*SGR@$JxTdj)CFqKZetNH~DV| RtUL@7^mO%eS?83{1OPT0pql^y literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/iso_icon.png b/app/src/main/res/drawable-xxhdpi/iso_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..d1974ba4bab34af89950d09d3208881f489cfbf6 GIT binary patch literal 1445 zcmZ{kc{Cda0LGILk_aZ+L`apYps1s-2yrEjsz;~hD0L*Jv`RB6iQ>^{B<5b5hEb}q z9?m#waV12bvqf9aq70c1rAXD4RlDiizuO<*_r34m@4fGr^R2I^Doh&&0030I&bXaF z$jGmPC>{78B{aK(08-C;5&#XOn0E(7F~-%$6#!@|RM{ti4?2``<^mM}KqP(@Fg!YR z0RRBod%3xOM-N{sOL>e8(CXhXA2N%7y<>I^_NIl2JkcDhUd^^CGKRu^_zZ@gYKSrK znekbeX{hDn`$Z5?Z=KnjSbe@T+@QtX8|-76Fm`Gw&8i}0%y!Ofw<6BbdGCkWQU|KA zFhx{KT`Z@5y7dhKqW|BvHooC~`{qsH(VT}{yVKko$`L=h83S|tK!{W2)0y4#!2;a| zW=3Dz6nK;B(UU|`z#JmSyG%n%l*X8;gxN4r@2(UR++QryrmNw>1&kCY#zrqnYBb}Q z!c}9lI&Yo;OQWnXdE6Mvc3C#-bB7Ejb5Q*+9I-udJlXy3^;FugF@QPar3=6 zndUMY3Q1R5Ul$KsHfe?hXF4|2bS;h)X{Jw=+$q!(>W5wg$l2}q>6-p`LbzPJ;X281 z&(TaRb0JKUcLz$J;vzNmGSWB1wE~{!s4N=7Lap$Jxk#U*95k^wGZ-}w-up>~R}Mw> zTFh_H_=)WzviFxqYGj>%zZHX0_nI|KHUNnZ`xDGC9MVYzcP1H1bDP(@@BX`DQ80*H z8IUt^Ftv1fDRe3jhd~6xV=jfdD;WUaYlr6TL>rtp%A^Rmq7^jdG;OQLK<0}|=!+1- zr-rymY(oxlEg!80U6yI#wPs-=X#qkvbD&)oK@OXyBviqgTt$Ch1VLiDb(x;MtC1Iu z>25}|B<>{g>Za(di*B-iDf?54xlY?rx$R0s8ah#6Q7iu9#Cl4UIFQHctom@kBkK(R zgUPN5Ssj)JlRWZ5m@-zvc+JV^;G?IBAjrT@)ELcohQ>CGgyTn;9Kmz0e$%ge!&fb! 
zdKl;C!E+OoXyVnL{O-CFSX~Bh)BjAThn^&Gm^`*EqIZT`M^ZwP*eIEd92M zw{-W$EE8;ZTHd6hjE*B$xy2DKIl>FefI}X19gIR>)Mh_+y)sMM#FGYUEvRW#+}a1~`i~vjytO1(Oc%@(+AcA{gbQ_M7sT*nXL;@?*k&<7p1Y(t3?2WuOF!*q%j^T( zvzPJah^6v~-9HAZb!opQKU1x57ZWCYVGvX9$tnt*{IVX5q$bEU-asP4!O{jCQZx4D9CA zeLbOf!lvI6;>1s;*b z3=Bd=Aj}w-8vPh3C{f}XQ4*Y=R#Ki=l*&+$n3-3imzP?iV4`QJXZF{R^99fxaZeY= zkczmsvu<~ZgiEwOzodRL_Jh(h-Wo~ygAz$09~?|P4lvJ1>|hBLQBD$Z@|)mrA~Yn# zC5dS!kK4(M$ssA9)~>2Qv}J2~X{i6#8`9US4*ed#HLG0qd5_^SiDVqa49V2W z%1Y)c{<&`HVJ|JL8q8Wsbv`Xpexb9g)r#O2)6~b z)pz%%ty^g1yx_y^t6Ez=^W=89ZhG)i`as{VYJq#xe=mFTN(a8ej)sYQB~h93!(UUHjz8(4cC~BYMt3OXy+(j*eCe% zz1nf}L|tYsKh_UQcEK`TuF@CGzcR4Y&JM^^xx*JN8Bn0rQ2ip&_T~fgDKe~E)_XHF z)y@66B!bHuEEPXNZ%NUaYzi~-t(A8PYzvoksI30SC2~)dp||Ny zY2!bkRa2iEms?jwF=nvTwVb;1qv_FcHieAK&eLSX4;OElmus+=p+sqe<-T=35sm8@ zGek04gI`Pu_}ji}r`lwe1*<=5dp!TWGuUt8%Bh<#WQk zBy~QA2f4rP;}WG9x5)OKwp3kuVJq7L-i6;yZhP$Wbo_mUk;5kU;e4LkInE2KZoW*d z>v`w*pkDapzO$$6-K$EUR8P4heYG#BJo2hp8UL}x*Moz;cYfI&eaP%~zzVZNVOykM zzIl|p)wNuIi%eDe)Ll6)x{Ifmd~Y?m`}6x#*Z0RZy;oj)*yXD`nSYnR Y8oN2=?eeEnfyD@er>mdKI;Vst0AO1QNB{r; literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/raw_icon.png b/app/src/main/res/drawable-xxhdpi/raw_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..2c81a7dc4e507c58efa4dea5b2a97e10a1918361 GIT binary patch literal 1925 zcmaJ?dpOezAOFp4NVBQjWu!-i3}HGtGFdFLg9MygTpz@B7E+^L@VG=lOpA_$VpMqVZZiIH&$G@Ar6&D6oh&c5TLDcE(@?0~8V$h<%ugjMygKHaS+`*v;4f3n<% zJ{dZBER%F$2-Ui<&^8ws#0+>@AM2}bmVT82cFN&G2R@ zgGaVQmHwN@heyo0FtnPHW2yTutmN_7_};24n58-0b_)4Pp}(%Z|J3ab2XCt@(Hb-t!34Rf|E8Pp=zx_| zdAC|tgCOC2u=blszb@^9ltP2G#F2MqQvT;F#`d_q^Wy~bb?$u)nx~*W>Z($lOMw)R zM-sK5c1uHF6<|hLZi{%woiIjXI~qZ^_~D1O6w(?qsV!%0D@37$KZM( zWe)6;Ulo3Zhc|YwU)m-P8%N8gE6F;sJ_Jy>e~C?`NzS&l)%^a5A zqSJuY^(SLv!&aKgtfgTU>|hmjckiJ@Qk$%-^|-`sJupl_9OEN zHBYy3R8Y&}zz>A@-M1xzMug{?Po^15Zy&0anp`6r;YXknVlS(qtXf{%?|wV79lbpJ z{AbsYQP^_xMUsru`)iO7fwlJZ)j4r_RlbXs>11d(^>mB$;3ZRI63A=z>D3Svkurj~ 
zP;KzEg+9xyiCeG#fThSa%8D_Dt34IaR0@RJ2dYUeYQcPuQF&Xd;v_L~oQ?0zqT&%x zgw;$bySIYQ?C2Bk8r04shx9&NlQrGBlpCOXOT1xzytTSD z;V>(?Q|ANZzVQ9t`Rcl;rtmC>RnX6Cr9Oe6@W&mEJqI-^lkPt9Hnd74mH%K>NCoD? zm^wpkB-2|>DvC1#AR7gJ@G`s&ayq2m=*sIDoex;QiKiC-%zH2qdK`5uleQdXEN(>? zRM~&AMc7uMWqZe^V_gXO;EgX8HNZleSGWZh@7$Nip-#~iVm~64^bf*MBi&$6`3y`t zOJ#X-@4mwAL?93WVvxS5n8zQJxzz5tonluPe=C1dZ`_2d7Su4~I5Vqe!Wjmy>!$X& z!@H)>)HzxWW>Q=qB)&8@Q!lWLI05USj4{@Rv-fSq>nRM7gAK7{9yuH|{m{BDxp(s- z>0IFhtxC^n%aIV(KKjOE)?&p}Z!n8Bwae6b?o{zq&d0@N&@>oFzm)Pu(7-~4@K$m^ zy0+Y!Cp;!q`px0vUO2O6jW!<3l6T#Ls>1F8Sq>|IcwBT++J=hlcg+1n5ss86Y}-sY z6hVVmGdl22fzLnly%+Ash;p^mFfaT8iuIp`o$LU3PMrK3TD?ms={u=QGZ>s`OXPof z-szGOUrl*76BeMJ1q}i)U;kWU;J~8?y^0vd$FpUX=>^!-ANrQ;mDh*&FP;u>;;Ynn zQceg5MQfQEYi`oVI%3`Z%ijfV*^f9E{gpg)u*1!0@OJMDiH#QrI^4D7jBAwaXVa$Y zh1B`?+9fzVVm`pF5&Z(vSxf)Oo`@0*!;;g<$98N+CEGE#v%kH9FBBVmJu0F%^p)MK zlKa@Ivcn)GJE7+0aCPC5y#ak#ZS^On0N&kRb;?T8^+3$D_P$x^)T+6OA`?;YkY_Yg zX+3K|uEn-^%{Tm#>wNk3oZUR!ugumWKadZZz9>+sfp5sBzH|sbbBeT|x}Lo9_ky=_ z;WtP9yQ?bBe^IWSQ;74@?{fjWB|$_0PTMmE}=TbTIIQ<=cF$df6{$I%6 zhdJbANk$SXHj!A{)H1yFfjGFB@&Cd{{bP7^!U~eQ>{oV#z``jb6To48Js!G8Wc>%^ CaeRIN literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/raw_off_icon.png b/app/src/main/res/drawable-xxhdpi/raw_off_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..85a42d560b0ea8ba44e217c070f3f066035b41cf GIT binary patch literal 1777 zcmchY>sJy67{;+V9TsC=c4UxdEwnH-QjsE7s3~e*DN=N%15?UG5*1CYrjeLnkhjbX zl*~Iv(;Y*pmIW$a$m^(SdC8cUEh49=)oB02_ThP-^StNx?LFst*+Bu`AQPww0002_ z;5|e19`#vY8R_SAX)0Q8hOsz*901TmCf#H}C!?1C2L^`5A1OU|lfDz5d6L*Xmxl|UWI6}6NXIjE7XE(j( z22KrJ9(={;Q%t7ZcewjnunW6`b`W-W?4$VR^l3-Vd$vt#Omq|CTu+il_x&)^&eTt>6CNT#?(=+I0!UI3-^RkjO-OE3I zbkE+PSKHSVOgVa7kX}-GwPNUlQIS?XcinpLt{>jD^Zp=a_j)oaL>>RKt1>jVFxNZZ zBRz+|Qh@hz7Uht<+3yL{mb*cg~qw+0O@(hxd{%;feumi(-yb{>zv?yxaWNY%WgyB+I23 zkIkQlL?m2e1SDxMlGKGCQ7#V%s!LoPR_9MPf3um{Vi@sLK4fA~=Gg3d5syku{Wi(+ z&pSZgJVd>bg~F#okDU_C4)~1w#~80ww2mfb7!2|anT+O_FcBDp@Q8E$_RegGIB4O( 
z3qiV(6I68HOE@}FJ~z*NZu28Gep-(|0Dhw?t%uQ;cCt#l z+Fv>x0UlWit~;?z`%G^KwySKY1A^7u;*{X3qLb3KR{Ld1<7Sh%%4&LOQW(+kCZ@7_ z{+jr&suW^W__nQAcD_o;7Wsem)Vo!46&jO_Q_VKaK9L+$Rm|CDPuJRg>i7sNS~5=9 zi^r^&G=jMva~EkKzKnpF2~Hy&*KN0jv1ZpLJD#)o)~>rE8(k1DAgAQSiQsuQ#_x@u z&omRy82|cYb6yn|F{K zddw2V^igQ?Lu1eqa9{0-qZ8!@Z4&0=Iy)F^fTw>HE*hg`gYL>gu6` zg26hBFZP+d@i*ic1^An{B+L&LEx|pv@(NxVU~P9eNp7%%xw}lQp-HYB4&j&HtJ=dT z_RJ(%9-8#KVFvX{F^^t=tL$-wv05{39jF*v6rpzyWgzE-i!vF@-REfP07h2kVJ`TB z8cIsnUY1SQ#7C5rfIJ9+yJ*gMQko7;LUKm=T>te#G+24sF*7BnnqF=#Xg*Kf^u9A^ zOhK@}84t;G8?Mp{XVk0$NShM}Khr011 zwraeDcUCvLv^!Fb;oJJqC!yv}M?ry*%V7+ zj~A1L#BHKaV;7{=lgwuwk#*$1aXZV5;D^Ij7AEj1gtk4HE(krW#lmf+X{_qz(KF|F z`-!;H_dli-d-9~zt7T_{;-G~95ZD5v1P((((}y5*)l3fjkSkm1NGhwd4_BPmfuMR+ z8)D{m=uz+CVS!=IXh?(Ix&!yZyHcrrW_Rv#KTm%KsgL}|^*IOv(>>Xm=AhFlB7w?* zlAo|DXG^N<0<87;_|v`*16R-#2ovswXboW-k)lge`Sl<7tkm WC2$U$$k)FofX|Tt&nEY%pZ@_#Zc#e` literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/raw_only_icon.png b/app/src/main/res/drawable-xxhdpi/raw_only_icon.png new file mode 100644 index 0000000000000000000000000000000000000000..fb49ecc21de9267e7a8a368b369899a9ed2776e4 GIT binary patch literal 3052 zcmZ8jcR1VIAO2d^=(?qct+4?N47L7$MHTW_344oZ+( z{Oksng`_tbt4Pu51#Q*kY}^6Nn^C{4Zua$Rk902SqYmBUav=AQz_{E>NpiGB^KsJf zBo?oqz4$|`{$-D{srZ)B-18Lk%SxVDn0lX-`>Xv~`Nmcd%bu*(acYCZ@qPqTG16cWV97ycK<1arXnQiM-?=`@f!@n4+EJ1|`*MiX)VbTG6=wJj4dKQ^>F0J73ipk7 zV@V*EkG>=xU5gp%0HXYLRVjV|tk)o5zZUvAXrbuT@mTvP)RgC#3R+ zVLSC`Yd_{y0L?9I%Ci z`aa)*+fWcdZ*R_BDFoB1c^|oT+12swhY^LtbfQqXwSSioD#+a9C(9n45duP!nf+d+4C1NEK8$9@cT zxz_4%p6cZMSxAe%lR_>XL$kH|}>1I4fN zNDqwv)=ao*T`$@DY#yW?Z=~j?S8`m{2KyD*a)x?xEt(%cai{7y5hD}8Map>BPAMw; zd?{ws(mQ z8QSu^)`eTIKeqi?BM_YNGIAS!vfd)pI??s}(Zu7+k>&fHfJH(G{iV~uQ*28DnBfZd z`l&x?Ez|;=diC+fO9XFbY}viYhvOT}Eqz3zo2gQmrx3*mGGGA^og>O0AYuGeW;6*LP0wq|B6Ud|%V?l8*? 
zGAZBy@|yd~;;aSBp5H5QE~b2x6eK~A^IC3&>J)I|RxVUk3Y1z|2g_D(l9&OoXzXWY zvd!^lr2~xD;+59-XJ`wN_%4Y>vC6O-cA90dtZ&gLD`_fV&t?!hrS3x|pKHSoJ4UfzO343H|%pHK!u7&0-!W6xydiek|3o9eN)c zXHC{feru5A$@S!@I{x;GVDz9?NrM*s!g(D)r|_&5aVGimhWBF>F;>n?s0ym!*y~{d zilg$|lKSXPY#r@|{`eIbYOOp~H%1_;YgmCv(U|z8Q;JHNy?^WVhLI-pf*j@~yU^v< zoJ3_w@dyH<*Oayu^yN0@YeoHLzhwaxzw)5Tvck8cwOG?(Nz{HqAIWjl{3iW%)FtDMpTxbNcVYI72p6SiX8eh!7_ECvPXRH2*azTDm7_bIEYCFw$^jGhu*Ro0%H+*W(H4|NnjJ zp-~fRR|zNT6QkeR%L{J5PLJ@ZGO_HHwrIx<)xAh$QXStXiJdR?#pdGLMvDy!!&g37 zrkqB7Lri&>Q`Wk_qMDDK9zZb+E)Bs+2-yL${JdEQwfl_`8?!i4DjC|4u6qN4hCH-R zH=S4lmb%Iy2*Kg*xr{j;sp*S4V=l8s^`5%63~wyrQzACJD`uNPmT+nA?>+-hm3HX7 z|E<`jvX61Nt-C^!`)d!*896S3n^=hvk=N&2F0USwiwf|o4c5VWzHfJlsd|ZR;)IncJvDXXi-59M z&YOG9!zF8`kJMZs8mLE3SB@TE>pj?9G;6DtWiTOJoQH^TpqmKz1WhmJ`eiU9^EXcmh=ZCGfoY7nQ^dE&EF*4LO` zARrn75W-F4JYIhSn^rlIege1^w$bXv-$egEAw#7Uw#0J9+khr%-~*S1_6Du^PtR)t zUnYYjj()zA_Mw|9A@uEqG&DOiXYRm6@%Jy>AizA3ophap{FV;_)IMnb_1W&^q1jd@ zk|FmcG#Frm)80=8hXx4)l)q;ZZ5eC`Z$^RZFz`ziSx))wt<;z&-}A-VqodD4hQ+&? 
zJ+79wS~XKKbd3O8PjO(t%z)Ks0IV$b-{tnO`gY;K8K2-GrkE`cud8Q2nbmys)plqp zvqcEt!aiPcTiH`8(h)-Pa}40khys+u34g>kHAortjCkfU^Y-Aj0}%uk3>Y(_7yB5h zIev`?%#V+kMvIbxGIUA}SCG6S<|GpxP|nF0%WwNOC_w|oNdI}T5f**B@n)1H2JH=&PI$fS%hAUu#RmzIs2jqv2EM7t+Ow-ZQHhO-5OcU&Oc6$bKPB4eLFSv>&I@DJ}8PH z2!bF8f*=TjAP52ma3W{Kx{jhkg@SP;@eU;_1oR`REmR0NoTScFAz%ziU9UpGLnQUG z3IU&yRHgh!7cwL_hkVEKnCLIxu@NSAlJ7VS6Q_kqKDTS+J08Tu3-TSGV&d1rp5((s z&vKaRBcHJ$rnXnUpp>2zD;5sL)X9p4uJoi_>0>8qfm+6~iXvfN&Y~7m4GdvHMZ$^< zqZ(*nKc$QQwDZ3g*^@Hmz7b1BX~eeQ z7(#`ngB2J;HB+uz?e9Oj~48`og6U@k=N(bfqKrYpc{`*^^ zh>^H<2_^q5ATFYkT&kE`Y2X~9c#b`Jg*3dwo;*erW91%8&`iF{u!>ycR`S`?a*MUd zcbmx_I`KaF?rVzV3j5)MW90_3Q-u%em|re13}0O4<|L;TUu5Yi+qe&(yezv|7vF4W z_aymKeDeb(PBVHuaH{NJZt8F=!z@#wWP(h)hw5eRXUTJH#{=7aC-W z_pt3dmQw<3Pb0Q{EK_Vv7Ms3eNhQQ;eAfY&i>yyIw*1U8GC(()v892{WCu&~7B;le zTZY&XJ3e7~*+U7}VD30s!18i9b zH_;831B@rSMU*y{Cb|>l0H+v?f$q_R=nk3Xg;m`<_5(Vs5FgMA4$9!%{bI{2Z7fD~ zTx;1BScrE^zl7W{VWIgnW$=%KIY^bH-n^u^72aNSGwrN*Tiwc zEY6qwkv{2`LNyc5`?8EDh^K{pl?I9#M-qc9uj8}CcMZjIi$!^ZWNw#5TthOiv9R1> zZ7NAB+hGpv*OVI^M|%ee`@D{cHjZ?WME(Hg&aw2O%-uwp>|tfTz>Xbch~Bhe!+R_# zJJ^OMY-ysK46qD8V@oyb+gbD2^)0I!BvE{gP3>$YQ+$GLO>CF&faN=Edtat#z=g3C zD;Bn(9{Va~iidII1G*_9mgi2~xK*YYfGZj9W+S>NCl+P`nGRj#%MG~nBSYDM*`~X= zIF}b_!=2HxfrV+twG3}C>c3Ki3%P-}al}ER0Zzw92g?>p_yOO%PZ!z44)|s**~QEF z6;J$Q#Syu#i*M-*qsoP|aMzcPen{wc-16xT-6RexYrMp0wl zjKX!RiYM8DB29*k`2knHVm(cUGIr;AW@JYhFmHA;iJ>ergP`LOnz5~s^C_D_>F;Rj zj2hyYV?8GKG&@nG99Wp~R?BgRFrRXuoIQAv|7GZ_bg?guKqbRjPLZ&Xm5#tQoXuQ{ zgzk)`4ya*ArH^uY(p9l=Dy9xpEELm=QsoPF#MHX-8T~M|lzheC3lEft{6qIumXT-WrjP~^eK@bE%5ClOG1VIq|4%fpqQz~Zvt^fc4 M07*qoM6N<$f)fDKxBvhE literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxhdpi/share.png b/app/src/main/res/drawable-xxhdpi/share.png new file mode 100644 index 0000000000000000000000000000000000000000..22ed428fc3d954710cc108d996a6eb3debadeea1 GIT binary patch literal 969 zcmV;)12+7LP)Z zg{%|+000000000000000070NZ!C#zCr#rgLF8G01_+>#n_u6On+vc8iPx7CniZVI@;+GZ+ 
zEGB+;Swu0(xlM=Bz#%6WW5mK)9VzjpA-=Tet?Z}w-??qmWqZOcINcr2in~xa2oA%sWt1G>ghjDAOfH9FxZArYi ze~wZBae(JO|-If z89CVs(6i_FSpeM(DPzM_fc!E5`McDJ7ZX6a?J!F!qzo>AhkZ6thm=E%fXc%&9f>tn$M0c;}{56@Ilc>7{f(8*7^`Z@bSE zMR|{6Vq3~cgb2Mc>r?Vzj_3urm}f$6O>B&f1_Jy`95&~UL`_DrwP*VC(S)SFFq05D`2(AL|4dam9@>0IsVt-PW+tW>9X6ImHD^(rC3motQhrva_D@;ZtM(~&T r000000000000000000mOe*_o+wkw8z3EcqJ00000NkvXXu0mjfg|(hV literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_add_a_photo_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_add_a_photo_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..fbe202d967424bd09636f789d007451896d8f941 GIT binary patch literal 1498 zcmZ{kc~Db#5XXNn51tT6lORHT<%onpP=mw+sfs)ZLc)zw1%iqxBPM{LV8jD>f=v?+ zkAM&aB_Njv)T1C4rO_c#6eWrmDn}8qctENx3L-X6XZl}fc0ap2-`)LhR~{DPPa>j3 z004>4^IdI->aT8MWZ=gK)%gGzSMq(mBhsNacOeJ>69WB9MWg9A6jurW_;7CLOgexF zl>b40pDoth_#5Ce7Q#l5V28<~q=%l`MnVNrfzF z)z>WhOU{QCk)aBA=a;?5$7bN-&)o~k9i2yiSH2%hbqW#T%366je7JC%*q`S(|l$Cmi5lA{zK^_(HoQxKaa2|oDsGi%5|ag9IfF zqx1%2TSCWdX5+*?(4mcJw-5(?X#tj}L+MuGo9m&6@=F%4OAyw0E57nJHcxmqu%j6xw?~G&}Pw$8FLut(`%hhhCRR454@Ph-=sUXD*H#? zZGW49Uo6UbnHPN9lu2a{XCfZOzA+XT3YhTyr4#nY>xYs6=V0bQN0|x z{+POECVaHbNSRAb-fD+3V@LdP0cXIx?3GY`!X83}YFe3{22ZsobZYRq61K~1>$-KV z_OUD^?QDY!a(OHhZ=>l4$B8ySx7w=`vWyZrgcH4F-QlMCHTyH7@}BmRQ^Z-p3$K#9 z|Ng0$9JX^eF~ozE+WZMRb>3=yuFkzdKDrlhRAZAho;DwC>D#Xy{Pw{yGWH_NGy8zV z3u7g0^QMNxv7?vIQh)B+nPqaL`575};OZ)BWS+vrPk&{&R5>I0XW16;28ov%GjF-p zTugK8`3&rZ7^!t_QNhI#*PUKqzvSRm_H-6oKSy0a(``%lJTvBxFXhk&V1b{oz0Mv| ziIXXboGEl55D(-C^GOGV?N_{@4I0@p&aO{`SpG-7GuC;WcGq=MwAV-P3|8<6qj?!g zUhr>(Gxc&Rv|y-af$0`}BScpi6{QAHs62w-v@srLK}qS72s8N((E~r=>-hWJIc3q? 
z9pom;$=Tqjg0MrIDC7mNR>5sZdfoh!y1|_JeICUT^=I`Z~HsL~j(cbH5YNeMW1KCXS%77%HX3zt(808oY> z&fa#zdJFzWAQALv1+Y3ljObGss^{Q6iegLLOsE6>t<=*de{51g1M>5c*H$ugfZr#| zHJ*w?Q2McTJ{{ESm!U2AdzP@N2j@Sd1lBX)(s$gjK$)aG0000HbNklP9TAUJhin?jOA zcdl%0HE9{y+BK;y0;OnvnB8`;Wky4g#XELpN-SuRr!RC6;+*}^*<=Zl@- zN|{okmT$9>{p1*ppU9L7)A$j)8D#YSNv+Jp(ZEU$GIl?dnHZ{Q=M9D!kH1qXGk!F% zfnLV%$1>wZ3%@1DM4YBhW<0owS12-3n`Fk0Cb}qvJkw;xhO2mjK}zHIGGjr8`{}22 z?v@!nZeSm!)W>9*(P0uDp4WZ z+%9u*%%zWVIn895i(wJxD4!iN7efc-wM6ENkYN+$)*>@9D)>ol^h^>Gh~L*K^msY{1-F%lr+@JNL27A(orcR zv4nIKWF+e8BOT{tB%UBGXJnW`jqFC?&F503vP(vOUouWoD;A^<*KE;4mT(wIvU@5LD zFkRuJ6LT=oyD6*fM?Z4WAJ^y0Ro)GB10z;&w>G6ry6 zKUXMR4B@^_3Kv7TFGq_)#Q;9&R;W0MAKDZqdhx?2T%{245x&^25ODy1v?)C7#2?3L zP-u7opX{bWVWAVhJfg6$6u%VtoULvC~ zupK`=rZBJ;U#(LJ_zwPBC;w4T0e}5e{^LD-wn4t*Y5evamGT)2@ZByhm%pedkM9oh zRr!m3`0psU$yam`f;=naCuS0g9b6$F(L*Rc=9}^l9fYIElhn#DG*KiZf8|#Bg|`UJ zi!{g|v=g2)tfoqSpoV@zbcBUWk`H){P<==n6>=SowXE9)II0?&5N}gf3z;$R5@*moLc$G&4w?ioC~G9%L?8 zD@}A_2KWoF@i^;wfW^$?7OIsL>NtkE(1%nhIXr+lUy*rA5*fNNpLyENO;i8UC265E@ z4>LufqnR!WxMqlFX;3V9->iZB+Otfd&yG{L+oKW*U4N8b==FZ=%F+U{F%qOo2fEm zLmjQGVF!mPGFF2e;&-f~l_@e4MGdpKlcjXBl{a~tELl!4EJv0sSq`y_=UC4Y?xKYX p8Doqw#u#IaF~%5Uj4{SI;XjZP^n=^G+Drfd002ovPDHLkV1gv`&3*s? 
literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_bluetooth_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_bluetooth_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..975ba8a2eae2d95658abdc7cf37e3355af2a3fac GIT binary patch literal 935 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJfjQ69#WAFU@$Fs4^s9j~?H`-7 zvel}rnVp$g_O>)IGV{E!U8V@$<)Yl(V;0r!O>)bf|Q2CqKnr(qbK8#g9ztb zF635eV3GOI`d0u*6)?wa+%DkIlvLNyEy<#A@W7wX91cy5Kc5R;n9U;dZ&tk=qsIm7 z|NIR%Swd>3KiK~Bxo|+ppC=5CCI5c1h;04#fYV|XyM+1*#!D_DWFi1aQ{d@OgP}52Vn@6&LYadr;3-vIcnIGR&$ROuGE1s$ApV9y4 zo}axPE?)awf7fC^i%72R|IOPpIW>y@$u)HsGIh<_ul(&8r$&_Veir{9DjL*uHAKUF!N{0U@w-Y;ZPKjp!#pOaaxzcF9U zImh1r!JkT|=jVkj>Kh)M`nhL=_}QIPZ?3&(!O6>Mf(-xv zusoZ7p#IP~{YL#GHPKN`2iT2;fIjUEX>ZJ z^5H>Pzur0Fii+}i2Ohuv%o6utg1z%yZJ$ruIVw8OD*{9C)X(Ki1qXaTNongB75`hr zz24}|x&6;m=Y9OS_uMt^xr-~8R_f?~J^oXjiRVoHop)ebg7SYq8Dt8*>;Bms?E0C_@LZqc)X#2UfIs?syy%}&*PQy#)*rt9 zaXq)6*U`3q=I4IFFX!W*{7D9yAa40z;$Y=>pqzZ)TdT}%vFGH^{XA@0Z}FV}`Jab# z?B{%LFaB465Onunem3kWHW`re`)3f~-}XADgnaufsWh(|U?(N{@j!8%s^w1L4!_a zz?7>osp0LD);!TV|81+rv-N%o_4Q0Ye+!?uKi}bBt8*>aPj2B8@pByNghNttC*EgS z`{#P)e&M6{RQCTlp>O}`bpM}E%3x;X`)AYtr)@zK{dC%Y+1)>%&L2MoR|wHl|L>`e z^rz`5n$JD+HrZ*O`cVEkUd6M#iD&2U*FU>~R?c3_DL^Sk%t{gV|wQ9o1V zF6R@F3saeXP8SC`?7xn6<^IaHlj0}R^0I&a-t>JT=dybsmv4N!z_{_x{J9A;dK8R* z+FKutZ4>-teC#atOa-vA4BHjuT*X3v(XseDJDl=|PaVr>mdKI;Vst0QwC7ZU6uP literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_close_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_close_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..0c02f3556c447235445337c2739ecc096d46eba5 GIT binary patch literal 747 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJfoYSci(^Oyk9?SJ&!c(1SYEVUv?E^`?X_f#MRKwS8UH}4|N>UWC%Ary{`V9#dpDT z`9g*>-=F*6mVQpA-Rj+u~yywz>#%vr8TP&#OzWpTICSk#A=BsMoM+_K4eIA8%!_aO_ER-M z`h>nChmApF_N70K`A;kAI3N8~?RY+2p^iDA>4$Csi0koudcx$h`Um~ZKK>Cp^4VM9 
zyuPE&eik5S#{cGa=6s#`@_)MII6ek{ungbNGWq9cZRTIkryuxJ$@IA$Xq&1*{geZL z0+~L$gE%wJ%xANR(48LNR8hCT?T=(u9p|U*OI1(&H&k}C02|8k$y)&EsD%FK+UxD6 z3EApCXpa1NSW3tR*=C?K9vFsx-n~p=-hEeDpi_3NI|d{+o%tigQm*DDz@g-*(6+lc z_FirsXH8?X{-=ow8$5!V9Duxuvw?vm{rR}~;+gve8-c!kHeF#>d{aRkXGLZBzuJE) zpLTQXv$Z>L;_v76|1KN9pir;K@q}Ms${$^h zXJ$XS58hF$SJe3F?|lB@kvl-S7xVWEn4E~`_yyEh`$@bh`u?BR1@8Zx3Lc-{(D!iW z{k_{v3Z9&~{9(PVg#ak|2|UT)dw@ff2ayy(ERJYbvq`7iqrZSSp00i_>zopr0J@}4 APXGV_ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_delete_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_delete_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..b40da7917b111877f34a7377a38ffa69d27a43e9 GIT binary patch literal 451 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE8Azrw%`pX1t^qzFu0Z-f#UN;I@gbl=;w3?T z!3_2PKi$|j#g+B_WU*SdL!$RJCw*|)e{1FXt=S9=jAEWHjv*C{Z_l?TGCK;e9K5=3 zhUNbN*`~sCA+nC)o|9BSXnxMM{G4y6|6gwUQy(|s-+As!|0^AT+3TM8znt~w|BFp> zG7JaQnSd680f&lbP&>$56aoo;kO4ESZiT#jCYu}gQcN~C@Z~hw+{l+7v6Mon`q=mt5YdJlEfJg||z=zAGF2j{p1Aw_NH* zqW#zDW^egTw_W~vE&kfus#PD?zQ29E_?5qzt^DoZyY5W|dw0?orkd`wjrK9#K? 
ze@{|1Az&+XEPl31LZ}klsLPE>2vvc#ut1juSSzW55HP|AjY!Ha1T2pkK4=D(Ps%I= ze8)%oP7=xk1Fo8{If>Z| zmh6Qj*W&PID8`{pam>Yi_%v2+Q0$Jd^o~gKAz)Js!vy8fh6nK}EL+DoK49rRkc7s; z0GxwIl-pd~iv4RG&z;d}>2+e~B-9tyLo=qUp6Bs745&d|f2R9>mW2AiP8fw&)iwj) z&p5Ul^)Rlj*eI!;fbB6_n22xHVoiX{^xS1h$R4)9UFcA4PvTVcu2GyEhPVuKWN!{G!-h#%GW5Z@SfH9F;v4nI3&PI06Rm2m6?eut!IHP$#;c|loQB?Y zN^kgs#`%NyrcDGiVYX`N#LuyRol=^c@Phn!0Zo-orAB{TuUZ!1hI({Wz;gHwUX@?3 z;yYNbiYW|Fs*dTn5Np<{fPDmJD2^EzQHer(?2XB);iaw<+q$HF-++aRYhj9S$o#`; zn6Lbv#_8x&r*?&7&=H4!IffsgL%ziaAbarvz{RR2UV-8j2)%HHs)#r8 z290e{T%n5MAS@2TFv60BNQEdL{J;`~NQNu(K7WT4LZm|%`Gj}A&?O3DssZa`GK46J ztYTK@ljPCXake}N^Kmsci`IyB zfZiCdxZcJz<l{1HN1iWaYa%J_<~|1rW zet$!+xF)p+(5%>6vDp${?`~3lC&WeBJzMxLQfxOb<@WB?%57ZI4OkvCe9#OmpHu+Olrh)?Zz-Q^deC>MWAU?9QUw^L*nXX1Gv8;G%e?Mp zkz&mMJyojmYE@kCV1tZrqrY0Y4DUf3#%@)n;cJB~lEFyD_Kh0F<#fze4pXsAjjILU zzz>ol!9Yw`Tz^2X8pdZ+j8_g{tzn!#2bhT7QR!75Q?YiPY9hXid5Z0y=v|}4$-nAy zTv8NxL@{;Z_y(Mkk5_D`)F3V=d9VADBEV+oR80Tbz_@)1Z!4xJY7mddz1PA*EQT>e zF+Gev4UXN4_$gWy&wd%kVLu=4D@opAg5r4vtL5xlbQC%i$CwP~|8pO1e3EC_TJg-u zYe6=)!)V3Ofh{t+sixy&{*JZd^(Q;6II18Cg|}gme7i1#x!#mzRa#wPyyED@XOaTJ zI`|1@$fx;fx>93pEb#GumE;jtMw{XoloS*O;A}jIPWkd5HNF^SK9)`L24^Ua^GeoX zZ#Z5oa(V=o$IRNdQHdtE7bYczO>r)sK)c5ITGAzp+Tr|Hli`o?!=E2m5hvj){6&5J z3%%2Hk641o^Z5SXA-_8C-K4^>5)Q+Ua3B7T4z+V?4Rgm@b9FT;j+S3-_+%13Dj{!6 z9ERG=wC-HZgy(hN{&@=IQ28_qM{%cPI{*bOqUk>1}^5ucj-!&CY6DH)T`m0SowBtPVPD0t6CM-gqJWOI+ z`S3OlNkSoDZL~^f<2+2{SdBLZYbK#Eu)6dP&%;E%u5o4*x4(SAH0gY$%5Bf@t$l8I z$a(hgC3%>{9qQ{J<+xKS;AZLEl7~tBR(;IIAoNc{Dd30F`DGp^@s!$Hh@WD$B$NtH zl}<|@Ch?NmScn_3K@v&{he~H+9wsqaw*DKiXxx97oxjKUyANkc=eK1@haS?Pala!w@5J~U(oX-5 zG8Dwz9v^_pegJNlPIDPbB3t4WsIsp>Ezd02C$Gqh|MWb9v!f|3|N_Ltt#WxFD2cr{82WBeE{oer#AwoCHHCY2B{ zRyuzy<29+RSR*O5fHlx6o$JbaQTDf!P-^&g?ho!#tOMGl^Gp&-4bMoYEw{a+FuoMW z+ab?sGvo6-!6DMSBnhR2OQbg>&lB8QdauVNcjbfLcwKs1=Y66RO1a1hdDtYGP=tvN zk=~2wm4s42FT9xl*PjevGjvMtgd~&#PLN(FHY>`6ACX>55=sFr(tD&x6K;}?!;?^G zI9xWGiZtngm|lWC-;j^9q3IY{w26tME@i#t{!8u+ITo}hkBgb^RU{O 
z?7dZ90efS<+L#nsq}|TyoTN79WAEgH0!~AR+Gxl5=#zY?fIc`M?P{X~rzPQ|;RkA~ zg(bp zWWGDpp}yY0CD^v07t&pVH`G^0ZFhU(oM67jcn06X8acm&`dfHLW6ZD5K7X7TPu4iC z7>nW9AN?8_i+>!;|u1E`E<6<3Oxg!+EcX1My@09&_cxlUOea zbiwm7Yl@1S%7nMoKYLFIUW_V_9!+_LNVa;m@2I>6`xN+VN-pZ zu}J8yMQDzc@$$4|ybB$cBy`}e^0q0AmVTp!k21OfEhYnY!YH%~tr&%!l2FN%ie^k# zJ=4)FQWydT;2b=n{2swM7?8YQ2-pCX*9YPl4930qGdk5?C;p6kF&M|>;s-ti48*oL7GFm*Zp8z5 z0G0000R1Nkl5r>HuLs!<2OoK|LoXRUAnwjuw*&c48T+SVmPb!SMn~ zD87aat#F~0m|z%+3xmZ3!O^&Kl=x-HWCE^CAXEG?oQO-uiywvzM&nYxuM0~9vX7R3qvJJ(NIKXG0AZZT8nY9 z;eG}&g4vX?zBNo|2yb&49jOrIdc7a7FrEZn&0`S#*-6U88Z@m+^53g-HeWCwuhub^ zm*`DJ2@e%HnNKJom{PvsY<7_?Kv@|P*yr&*34$r)Lr$Q)Xrl-3Qb_6&d`Vl`3`)^d zD4Rhyz9T{E3V4l{nkhoR;?~!jeq2_dX_0J%w))@70lG679eFgB^)MGr(_}rg;cIk! z=QLrKYtZ!pHRVrCMAOLi@{M!Rby0i<09JD`S@Lguji%53Ec0;{1!%iKT;XiOFpND# zcn(dEim*3h2*YX0VS)-)a}3v^=@QPQfN&fluF#9{j7QT%!qdiOlC*zjKXHZK@y}Ee zcUXhbJ9kZv@A1dHFD`LE{+R2;B~HN~bBOC2h5ne@u2bfgVZ56!uJI!2p@^S&nv3W~ zBkHg{<)}?V4&ox7;wOqo508p#^df2#xsgs~Uy8Fm9l42#M6JEJMiwiH%2ICU0G}1M z?qn%ZnXi(Q$#XF9vr5LC52noX0Ml_JkY{E314&neE zm`0e!sU?MTKH+(fiVg#5-Xc7wh!)C)G_jR(rZ~nKloEy+WQrbcMBf^^>K8taX$At- zSc|T;G!Z2*;7(i^MFoit zZ7C&nyGaDtgVfDpSBVzAC?GY%Bq9tWHKVC6(W5C#NX2Cm5iTVazfeii1G_Vw;Bq7) zd#y($JU5oqM7oqD|QN+>cx=%lfU=g|sX|8wy zS%|K+%FAiaL*GiZay2W_cfIcOKaVh!az<JQf`?NsTlp{v!q!DJ@C;Fy&e^dm-1@Z63uq}oWTlI9VpO_# z30Yff@$QZ>Dc;?4>n&ej%uhV^clrAAl%vji?1C50)_~q zzPTIY&jaETUt;|EL0n=v#-BBA0}zdWduS%~Cuds3v3pDrgU~dN@brw|B6Km$xD8EL za}uiw$BFT4>R(5M*UbZP_ zTR+lGw9tchDIj$TzND>e2J6sNw1r^%FG-NP0^XyCXrmIR@F~RvvyN{$n_XlJ&{Zs3 zz^s!lAhVm{~(3wh- zUZ_BQn$m*Zs4Rgn1x=F!zjk+{B~7VM1=+gsBbvS!leC(%rZ=Q)hWpTTuawQuH_~Ra zhBjzwB_`?qLQ@q<#xMpg`I4OBakM-t$r+AC%Q2Fip)#dtD59KrVaP*6u9!%(G5d)( zhN=|fS}|26LUtnbrg&%QiEBN?J3|KfxHMXfKb(k5$BXfYOeWyUcrwLASX}8yF+ngG z7lw*aX|bc#`+Mj*iw~=TVTE%hIV~jDz7-Rkg Xx9*>RS!kZt00000NkvXXu0mjf@#jz( literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_folder_open_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_folder_open_white_48.png new file mode 100644 
index 0000000000000000000000000000000000000000..40e2ea3937d4df703ee81705ab7d6ca13693cd7e GIT binary patch literal 657 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJfhoe%#WAFU@$H?)w5f$M$3E6S z$xxb>rN~msfA)w?nrF1wqu7$;n?RaxaSn`z6Y9OjTv>+0>Rs+c%ZZ~vb6^|0Oi z=pToe=apG70u6MNBUj{&h?+9WHX6va}N6BGA%%W@WJPE|SaHDr>)_h8Q^8$VUf zPi9k9Zlp3_Otn3|bM|V>FzE)pZp&Ed2G3)bS`KU7XNoSkmZi5P^W^V0^3pSf7hF3n zBfj7oFJo5j^EnJ#-)(vLN%70oR27G{m5f<;g%(_EHVd(9`xnORF910B#Of_N%rXZY|!=>QB%g7?&z`&@+#1Y`q01S-{stpXLU?G+lECLJK z7{CVH21+|KfRr}KGP0~t1R4pnA{@aqXJpY(aadD6^19KS!?80S_MnY~VYZ&tL*H(c+T?~U14V=^vJziD&!UB$GU zWd=s4EMoul>OJnc<#;Ma?7vi_UiZxG|ITSes^2$F+4?5pcjWn^ch9`~Okr^kq#p$Q Y;;npQb3$*gbtFjG)78&qol`;+08sGs%K!iX literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_highlight_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_highlight_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..73b2c79bdbe76b1c44708dfd44138a2569621edb GIT binary patch literal 598 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJfl1cW#WAFU@$E&$NC!uO){E)_ zE(%Od99tAP1UVXRD6lvw2zsyx87i!c|7e#`Zc*Pfvzl-1G!@TDV6@7!oq<6BXbgh` z0|N&G10#b10|N^~0|Ud0=-5*B6{n{@m;V}eeJhW>8+FeJ$LTUXn~%;=g$4nwa(tkmxV7}wo_bDT zs8R+Fte7k9?v(KFzKVf;)0zGQKxdtiXZoxhQCY__Q@-)%ZJ`a6sOLD- zuK=R!n|}V`ICFn~f}K6kpXxKt1NF&rKKs17Zih2kq<~{;K|KLcNYqYJSy>-nkaSR9 SYO1 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_panorama_horizontal_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_panorama_horizontal_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..7086cdc246b65c032482d63df0dce46828ef6ae3 GIT binary patch literal 1460 zcmd5+{WsGK9R5yr>x$b8#zpFOClrgk47b&avZ<}*edy|x8DkR;YCE#aP2nas5#}wM z+t9r1+O8y)i^9k@?-Y{vm#gKisa<~Pf4HCX`Fx&pp7Z?lobxh4fTmQ=6M-Lmy4t^m!6lmBnehMtA4?1^r%N z;u2w^eLIY(*};It0Isp)N1O-WI-q+4F<0g($$y%q!(u?L=4%XYw6F$(kI+);*Q8oD zg|>6r;dJ)`>KmwHx`ZqzKh7X=m}A%oGIbgZP9qiBFNP;E7m6BAhZNbpwllK**UI;a zuz`;9p71SR=*cf 
z8~rSFCyMA;pEnAV6KMX_S9i{D@Jpc{Kd7+X%Ihhswxaq|X^8r%Kx6KTW;43FdnVzS{N$9$egTF(O}TuwE)WxHwK;r=VMkbX*|8750*FZo&HSq%v+il9h1Zt)dQlyNJ`XV{}OQvb5OY zu3hWH#pUF#rz{AqshDRYs%p_MEQTbj6k6u?i$rDf#uQ=pG|q9e=NVYFu>X?Gt{ZcT zfruS7)EV-HFKAEwL?^}Bwt)?;{RL}x>R5$BjYvZzCv`O{Nym(tB92NL*}NQ_3JM}u zS|LKKHiLen<&{@x@C}ePUt!Csn+qOUdc_{SAn#TSD>Ix4VoA%_q+JcTK1|}CEf=ZR z@u_e(9N+CE9L9o3-D36CX#j8!%e5?7Mad$jttrXRJ4Q+{YB1 zV|s3IHqf7mn2?{{;k~FWH7M>3bBE&7G6gkR4K$+y5QHWPsGA90frjw&DXGyZT_1Oj z5~c=Thdng5&p3RV&wo8R0c*Sfa}P;?wo2P@n&Qp_bUe(WpNh`saHyIc;$P_>3~v+` z7~RT0I4F*qgdc2Fyx?x|s!~>%DlWB8S@S=>lck_bY8|Dqf$?uk8S&!87Ai_AMx792 zJ!Mm0xmndvQpW>>xu|0#93waxF{e~W2%kBZ3J;&Nh2s;HOQHY*dk`FmCvmf9-H%`( zT}2f0fG;zVz4SHn<(qsn%fo>BTMMCyLudUXqt79WKLXS#h(ZFA^zSSQ}utMUH< D;Ix(< literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_photo_library_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_photo_library_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..7bb86e4a99e423d256264f64c29d36a546555cfd GIT binary patch literal 1077 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJf%%W8i(^Oya z-Iu$5dij-UTnUOCE(#1W4Xg^&cW8<-N-$(FB)ooRwSb3%LqSAcv2&!-r8s!HhMa z;f9CzvT~<}vYvQu8SL{hhVj#l)N}ii`(jyF+*6$XhF!041w-iW zJm(i$4qJbH6aK{&Fqe1P8})w@+6Q*LDZ1tMGnq%j!O?KrN7;wH8=3Ybw@I3L79OrU zDw)h`@nBEw@k%z%$FCR2n>MUZwlB!7c|69Yv{Awaq|7$O)MqJ>>6jD>Kt_TcCq(h`2Slzk<;wQ@tS`U$GIQW z*Vl9Yc+!)9NWz`t!bbZ8vTnz}Z(92K_>P5)T*a2&1-FlN?-8G&v@##H_Szg=_EHtkKI;l@!!3Gu^Px<)zp4l=7e*fe9_k7h~*OJ+?2mb#1 zc`?7Jq3`J=yJNf@0TFDR$#)unCZ0bf`IwExi*X;AYl8hFqoW=lmBQHLUQ8 z3S}uMn7^Vbt(3#yd_xD@nNZOK2aN=EXJ1u4V3OaF*8Eb7*}~$e*~B*mht}L;;5+xR zQDa+#!D_pP=EyjKgWhRpKd}^CNa~!VrX9H`b((QQb7d88Z@{JGdo`jg1qNr8viCev zo$$ow>4%7w& Y2|+b89ZYuoS_0yDy85}Sb4q9e0M_!^l>h($ literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_portrait_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_portrait_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..3c0064bd1e3410f270ee7b9c845c7d53dade4b16 
GIT binary patch literal 1038 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE8Azrw%`pX1KLdP1T!Hle5eXdUWG@7IKDZ>v zFPMSVW%B-8pZ<$Z`oLCuNb|nfd)F!ZZV1H8{p`2rm*rvR9=`4Tm$&u)^B0U>C}fw` zzWgzZS>gKU<_&wl*i3&SUUOG_{tu-^e}C#-3C)aB`F6l!=J~#cnHv}wm`RYw%37_1va=b@zc6{JYn7z`(w1FTed_?yX z;#<;v$0}xg%vf=2rjMzdm%sX|)`vX&=F(c}pIKQV&P-kWkV#3W?bOK>HpMjIr%7Cf z%T65(a^~3^@~ui>$@33?V;v_<{cB#y8+h#+BcluF8fK>sp(u720o6@>7X)0R*&i@} zJs;D5o`KDvfsrSHfmz}Jf+?V$SwA7Qxo*}GIkqRRo#LqztHlDY{#dZihAYGD$Dw5( zRTFj}o?aGh&-9JG=JAH_iVG|sMIS%b+#p<6FiAdO?#Je0Rl5bp`mCb)G0000N)Pd8muN?8Euo#cU#!Q9%{! z*iIuUQZ!M`da9_ToCveIn+w>NK9ZddggKUR%;9IYlZoy8#9YR69D~HE4)o+SX7B}D zuxTq1rgH{8B_I?~$nAVg3{5dUV;qIj;U9!Jgc&R)m#Qe?FzLX7%wQ94*}zN=miCD~ zn92scU@gj1$7BGUxsY62>_llbc@gf@9c$vXs z5qZ3fj(LQ{EzC#55}kSl4r2)#X1nwh@EjUGq^BsM7w@5Ac2FMvHXamJgm{QH?0Z^r zi&@wgqb=r;A+-qaORN7RTt#BsSy3CMMBa9OG*2t7DkhLx>-+@8>ig{JW?o9o5`LFL8*U zu&v_ z`;z2~1Kda}*=Xffaex9^vF|TBdo|_wsyM*)*l@i#z^iDuEBhkbI2v|wbn$J)hE`5< z6lBp5r$Dm74`}$_(F4+iElup=+W-2UWR9EAag7+zon_dxjP7E7W!XUx@(~XsHwfzCuf> zILEH&dER*U(lpo{6H>FAIK>?FOf;N*5t=SEoIVjfbDZD*W9T_u?FMAwCdBFJi7`l= z;C>XDLbQB_rq7I_cTM+;8C}?lrb5H{tI)LCc>B||l`dk!<>(3<&aXvNt+Db+*X3fu zU#K$0foMZht1$?u>n|~(+~p@Afu@AZPe8eta0$Bd4CmLQsoq#6r0Wvbzl2TIYpizl zOSoA5OW2J2(6q1N^iR?Bsp0f}(R7~|eu$mo>JPD*#_@vg{}59SMT~bf6mgMp-iw|& z-U!A$XE=K#nnoCVKlSV?&hdte;poahhNIt*bWlVJ9d%;B9ca1TaB?S_(2=50(!&d` zr6l{hmXh2DEiYu!5fD}T8a802(hbOf3FwG2)JtV4?}`CqRLWAuhym}RW4dI5!D@wB zJ;Z=VT`0`ztyY*dNHW9C=(slLrHNH^7Xz+E$4!zszC*)LV#F@A9 zTNMJFp40O5UC|IHU$VnJXn5Ifb+&~DjhDfbePVQG5%wi15C^y}>lUT!95$jPv9E&8 zl0Ehz?rM`>lTVxU;_M}MFd6$2j+;n#VU2p5Y7+YLqJ-Xj2r3W1JD81zrD`iomZD+4xP=hUt1a=#6N?za%jkH4khqDv(NWK1 z3=<0&&ZE?$p^dx6eaeg64$X3r+nXfO5jE%Ag0sr7ucV2-+`?k?RI|UB1!kFNGjDP| zV;q*!Tt_MY3H~xqch}h-#Lf(*JKY(|&c0Y@tB(mJ3AzLm#Jf6Hn(OOW>hE=g7)J|1 z)517H;uqxeH9_(<`QkTRK^;L*!&Tx>gn13Wd!4ZO83#~?UzKry1Ocb80KfT*(ngT)_$-Jos18MS_YDXY(6gRn9qtB=|UrPw<*gI9l2h_FyU-@Pai= zWe;heIEWH9;g_Z<>_22g3KmY&$007AUwGmWRRaI40RaI40RaI3L${Og<4Ozr0 P00000NkvXXu0mjf@O1hZ literal 0 HcmV?d00001 diff --git 
a/app/src/main/res/drawable-xxxhdpi/baseline_rotate_left_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_rotate_left_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..2b70ddacdddc3e774fe215fa376f7db963999b1e GIT binary patch literal 1845 zcmV-52g>+~P)G0000L3NklYuc0Q~&`{QUs@1_b=+ z0Q~6y{OJJv*#Paor33J%1MsH<@TUXtrvvb(1MnM=@*9xy8<6rFkn$Ul@*9wKv3Cy) zj$~aN#{cOa+n%*;Ydo)Q+qP|EZQHhYjI~E=ji=pt*50jCmDKIro4UVmkn_a%RKi!t z>Z|O}Ah{ODZ+Mu?_y_N0pxlb%H*_+aKQMuwawv}9wwj|jfRY@F|34h9W0ur#kmhy^H9jqT+3|Q3Z*ye1wCz zn`+Eqh>D6Sv{Vz|MQp*3m_eD!+`>Q=6$j8%ZGaaso?ozpO3W{Eg1stDpf^B?Nt|C% zf__C#uqK}XFJf=zkkt%&s;JnNRb-|_yr28Y<|GvtJFuLLWS|E>U^Q8MQ^m#BJdr(s zg<+gPdL8Vi@X(D**j!a(^K1!}5}&7qw4P#&Lc`yHCvr;gsEgG^wq+4%U9Ye(0U);o z8}Xov-B1RzfV94@a4?)FA=d=!v(&|ME(4fNI!&yvP;d-D&I!hHKQ@#R{Zc2mo{~bq zo&XLKj7*b2nb9Y8f^UWK?&mIeg9KaWlHe{3ssMwyjWkv>T)yLbys_oEBp7CmN)=%Q zi%8=b`Hcar0;dU#cP>?|1*{>B-Ti;}kJALkJC~}Cg3?$Ze=(LeaGbz+=b|d`7t;8+ ze8nNyPE!u($BhmGG;mYEj{%0!ie)Whl?!@vb=CkjHl-EY1o?=+WBZ13Lr*Tt8o{~WN zK41~aeyKq5J(9Jk_UM6SsshDsSVpVX*a=I21&RimNw%+QjgOP;b_I; zd`h*(?pOu{d?eXIvb|Mn3@G%W{3z8PkCE(G3KZYB*HS#dWhA>)f#O_}-6$XM%Q{_6 z>_HRB{vsbR5zBiNAl`=M9lormcarRM1&G6gSvliIk~M|59Y={LgIPJ_M_4|oz%V_e zOO&?6a-9OhrC6rRA1uPMmjc4(L0zVr>O$=+>+A*iNN*RI!hZ}fjy5dQuOHvVJ$$|F55ezUq_B=?HZETd;;S z_Ltu{G|2G}=04K6P`+abPuYLq;1_x@gEY!)BLDFL`!7s=XgJlk;NZW-`Iy1UbKV>a)|Z0fNajERPW2I z;KD8ma3QG0000K=NklDoUE zZxN|l!nsB|F`pGlJKBxr!X&BqlpG@g45XH!^201@i0$}}RQy1mWJE20bz|iRcwX$r9;9Ld zm5tnx>0`M3P{IacGX|k6WqTu6l)6(zvc{IoCdg;*Z$8{+}_W?I4lzTM@^LXA}K&y;%>hkt&6gQtb@2qZ#g@~hWQu4mNSOJ&C**vXY4KBLemPtHd`AL8?txPq-I zMyC&5FwfaBRtj!Ip^P1|FM@r>Ii-&&&_$NC8X|M zYL(%MZ%=lW^&gj!j(J>1zWFs8Pm%-{#Uq0ENzW*bHM~_a764)r0flrST|L;t(2Um~ zSuQ>iRHZlR`-Xi;gt#UD-;=%3qF@Z`;3LT4d9o%703X3RalN$xz@k_@ zEW#nov@`%1M}e&WsKc*T2GE;^5)UM}oMn~;@FE2gCu~GlYXX?UQ4&8SxPUp42T;P( zRFb%&Hg856KwCDKxMMfEL=r$R4iInoKK*ZH8bE(eCL!L+*op7V2SAeV*jr*tK`ZWN zys-eL@tE2c?2%wk-e6Iv0W9TH4kuSqG0000gNNkln918*jAV2^D1Ry{F0t6sH00IQm6951JEPM74U`G}J 
zQ1rDiwr$(CZQHXN+qM~F6K!JKwr$%u*~a%8eb~dD1Up~)=WW&2Z!h-4!S!i;rvh|#DQzg6&Lf|5`r@ACFbK0ZSs9& zop||0SBbkTM3>#PkQsBe_p4`I?rbMG)Lse|+0!A8ceX1$<5%r1kZrx}9-_*jhDgr} zU3}vfha|Du-0Ewct;|yXT|-nk#dN6~>K&IT4k=RXV(%Kd^X9+SBB@#AKNo}yalZd7 zQ4zXEIsYTpNX1akDi0Z^+_Q#eJbzh;D)&i6TQ`OZQS} z?W3IkREs2Mh;u>~x!Axo^OuCEaG{YLJ%rnU_y|nOy zN1UP{BtX8G&9994_cTH>>e@Sj7vtyEG|*~^Snk=75c{Yjd99+HKhJvR?lDe|r3=9L ziIaDeYumPM#JIL?+qP}nwr$3?t-;Roe%#;oxznesYNosYUz2suRMk;^N^2r( z6Inc;A?NQ%>T0QiEdxV$k4prrQl!bK>d!CNkkp5gq4012jH<7oCkQiBKbyRR-ghk~ z|A{k5>iGQC_>{xGb$T$3Q0M4yAfQdHmO$Znl9`pWk`%rrnNvhGI`RnN2Gd3rlnkbt zJ7_JUaSF-2ny3LfWS>Xw6=Su{w_yT|_Iu*l#xO!3|Sb-uwe%2cW zF39m~fE6fX>95b5+}ISA5rwyLF-J##cmrQI>E;ajV<~0Sti<)$ww4ICqC%5yIwZ#| zlGCrR?eN#g7CL>8?ZRx5k7KYrqf;M@1e#1>L5bklfap&v`VwN0E`;Q1EF)+sX=4v8 z71odl9wX`zQTUQ%dPoGP5$YC+U=1o*x=Y%47Ry^Y-2!Oa=}jtgy`n!0Gm}tdy|d>n zEbmFWXkZ=&-2%<;fHqtQwKx+?86Ew3fLeH8&#=H!PPs#XW3YTB5gbG{cj*;|ABcCW zcl5_2?ao(N4v{qRIhJ0kLvke5RM5MR)hT&Kf5HulR@c!pR|y*0Q8L40Esf{j-hEs` zbw^7C`%_DYs*;0==83>{EIlPHT!&>X z=Y&fm8%YEkQ%)HDxsqC2SDIlhEDuRq%mwmSudKnp=&;2NiH>(GFrQkxPa^o0c>kog z1+5V8T$S@JlEh`YAf16K`m?k|@G`YGCtBjmxmeQKiqaL!rfLCBro=k>@D}-VUzFiQ zJIn6*2*S;xrFVSt58YnDJv13Xn+%IDXPK|y6xhGcx7Ax#F{rw&Bct`{A`85ei+}`u z-7>&*!9{@;t8RP0mQ0yh;z?3}w4&@51L6kUd|f5?R%Y$Y=bBnNq>%@3kx-AQl_=g<+4sV1qyN9R{Ur#u5b86>B9-5y-uR1nk>+du!HcS$^S}bkAcTr= zvH|f1NWGc8EGtVhj3C5w5`^;!GeS*!k2gl@jVZ)iO#S~b-6R0*8BK_((hQ3cZ@SbQ z72>tG|G!Pw&iUdRysswb*9J|bQhyXD!xa^DwgP{wAqAJLh3A4$FOs@pUa zE~Zv)5MTb4(^oo9Q+YLi;pl@1r)%d7bf=|ieVT{6gB-)r_h+Awd0#Te!c5hpE4dnX zn>i+A&Xjti8}Wuo=C~Iaug>HROuP^)+INLs{q<}71HkVAWICIq5VSS0< zLz3Cgxx%T7q=Al9+$K2iKju(tMOH{ui1MDU7$S*T>J8{T;#FwKON-Jky2hVcyIvyL z$OsP_r84Fa?LKN4wSPmea$FE9k&5pXEg z?dv>h^IM7Fwj2#|rM*X^_CD0kkB(E~hN{l@MU0A}y;Wv;67fp3OZl{E=&gQ)zo-@6 zQG6}&PLv2PqPo%Q@#%%-D@hZ_VfkDl=uB-q;n62No(AM zs$3m0hGiszm6?yFk<}$TyyQ8)etD+g=6H#qCqDiX(G^3?WKGYDNneyKv7YbkNo+@% z0FPCnx2Ac^2|w)4JCXy(W8L~3JQQcVDS;$Eyfa z;7ZyCU+8?Jc*SzBVz+9&y_ z%;X+cH_q|{N$y6;Gg|W<$<5`+@KPA%|Lnm7tepoK0MM_t+hDeB+gq&JMm#H+Z7&?^ 
zhP7?mwvGGVsiOM+|Kr6u0NeENswQz%p0Qo_hN>Oan-*qE;$N*I3u+oAJ8Sgvu{+g^ zW6;vSl9;1;RDVvkN)l_mq-JD6W7m1fZ~7Z;x<%H?*lxM$hUwxPueskTkrg$)WW6L- zJ1KI5%WRg+1lL6+a<$2l+3eED9qy2kR0kJEC2*k*lFGO(a*HRV(c4{7f!wXPG#-uI z<7pXbO!2ISQO-2EJ8Y58Mt`_E%89G}VY76$xFd3xORSXM6mPj+-AGAY z*Lm9%>8*50@)W}KCCMm}+`^>W2}_eTD4lCg5ZSSLk(QrB-v zRcEDsQ#bON%Z*oEjdywEKQ%pXt?FjA7u1MCp@pB6s4g;o)+`E*3$#_Ds+DN#f+$qZ z^p{<#%1(bfBMO}pd~ch|w$=Aei2aJR@rLOt%QSCl69+4Dwco5)k=Fa&wQ*!c>U&6c zTa~x1x_MZ=$VQR6ZuEhEwkxac`uosL>PAs=>bup4I+>|d5+!Eo>;t!|ALXG)1DAW! z*LoOcyy+HLX`_scO;%ZGh6#r2?i)|Ja(6p)EC2ui05HhkdKx0#LT=By}Z;C1rt33 zJ>#Bd(*qb7SnWMs978JN-d@_+tKcreeDQ3?=l;h`ljbnpTKM%KqeiUlf;)+etLs!e zClNxI>d()=f4@G*e4b~#=kz@5k3PS=c9q=jOurOD?`RH1o^Ul9$_r z&o5cO#C!YP3bk6**j;ar7{Anhc_~+&?ppJw)^(+w@tA$d|8|dLX7I1f*@nuwUiP`h zX-j-xF3&yIvqkgoma{V^nk}8bP15+%vM-m-lHG1isk=p{ra4RVZ*^y13f*-j?aU|} zGaSx%rRN>%30vx(eeBGdl4ogWGD54iq@5WR9t{&ZBnS_Jg&ab`lK*?>&#z}p=XHO_ US|@A=EN&S*UHx3vIVCg!0KobS^8f$< literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/baseline_text_fields_white_48.png b/app/src/main/res/drawable-xxxhdpi/baseline_text_fields_white_48.png new file mode 100644 index 0000000000000000000000000000000000000000..a28e05ff8159c8e37ee974f2eb538d1ce63f97b4 GIT binary patch literal 129 zcmeAS@N?(olHy`uVBq!ia0vp^2SAt+NHA0_4_pPLm`Z~Df*BafCZDwc@~k{v978G? 
z-=1V;JfOh9aL{x?y|>VryM=#`?`+$rBy?Aili}Nj*MIptco#)aVtBy!O&F-@1CL8c ap6$KpD`6+KURwhN7(8A5T-G@yGywn!K`BrG literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/flash_auto.png b/app/src/main/res/drawable-xxxhdpi/flash_auto.png new file mode 100644 index 0000000000000000000000000000000000000000..60e6515190dfee513b261b8da911e88b1611f929 GIT binary patch literal 1437 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE4M+yv$zf+;U`_FKaSW+od~;Bd5h%!{@PGcb zcWeqkCLFBq*=ya-PXPgq91oX6a8ukV>Avk3fuz9O4v|JM7aLJn=~ z-3pS*_=Frp+SVsNSa@^Sp}i~t6CP%N+mQUtOxVG(ZT-!|GjIN$!tiK$wAaQoiR%%5 z46Ks-q~`uvbDA@-RB|6%%gWo_gDm-S6aoEO_S0bmGHR5x%Qyz8BO~nKN#g^6<6Yx;yu_ z{1(5Xz3-g456FFL8x3R@_;IRfk!CiVP}jy4j;P%o1M2xxk=?mplJz&7OnV^?{qg zDw%>!qm6rBs{qZOWx3}3r(~{cSJtXD@Wk7{K6!pJ<`rs@81qL_65nc$6sB``={{r zbf8OChg$>{-+SO~_D#rPzEO?sfvvOdviZ-BP0|Ef{)%iQ-?TyE8YqUO54v|h2c;UG_|rSKRp=Rj{K@6cziG3{3Vqf&${T-nq=*vtN0I2?Jx{tM>WZ z_m6h+zXC^tonPI$voCY6KIdHU^TSoSxK-BK^6R}z{e>JZKU{4qv+DND&;9wkITP+2 zzW%rJzoU@!t9^+^ml!^I$ZdP}H@tI3!m2!FMiXGT9NxI=Evw|mZ^8#G+SiMgUXPf$ zoqG@0@`&4~KbT5aY);_+G-q}7R0grjiRSm6LShrQcC$>-zbP@DMIiXjYd0;%BWdPA zoCU|?DJZ{>Za zb$W779t1D`Q!DZG{b7xIdAFax4Nl!}5Bz7>^XYry54+UE-!-N8`QQ9dd+J-?d**%o unNZ{}wU0amH-RpUm+zSgqV4aQA%!S==LeptTF>Vsg1DZpelF{r5}E+is+WWS literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/flash_off.png b/app/src/main/res/drawable-xxxhdpi/flash_off.png new file mode 100644 index 0000000000000000000000000000000000000000..2e8637c8f6524eefa94327c30430be2bc7dc0eb3 GIT binary patch literal 957 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE8Azrw%`pX1)d4;su0Z<#0D-ejZXbZwYnBB0 z1vC8L?-1PmyZ^Zc&*AmoRhl11o6LSMl7F4WnZtXzLjC=4o#_JUmscBPANJVS$-uyL z!qdeuq=ND7t=8TIM;?}oo1Q1koLOo9pF_VX_pVHtcd+1(lT7sqTcg&7t-hKCq|#Q~ zM=xh?vpo3;Dne-3OC!$rnVdk6+ci}k?EKm>dWoM;J`VH zr9q&Fp+U%mkwI00kwN#uTM33b_YJ$BGyLE^Xy?~ZdGO#g(S*2y4&j8j4ZT3(!`l;F z2KpAqIDv%aR;DxD=L$fC{u+kQP2O{Wgscr?ltP~Z^MNS|^5qR;GC4Pyw>+4@&>)Z_ z%plN|?+&!cLXAOz<@s_TYljkp0&5_Hu7Oruz{@5t@sM2$F0oG4TT$Y;lv^v7kWfw| 
zTTDK;Bg2C>U0nnAH}{lS7#plFFcly8E9xY`PysZS-6W&Y^1*FoMkdAzrs77nIX1!y z0uAB@;u!~CvjPQou+%p)>~ZXSFj-l{eL;QCH&KoQ2Sq)b&i5}6X#8h*D^!8u{=Jhz zCzLfDfwE>mS#~AGA6ZNldml_^xxa4)gI=Orf`R=nj!R7KKQFpZ5VJVbZ`ZWtKw{k& z!6mGGpBMKl=oZY7uVu+>JpAW{GKa1Kh_jIOFRS!{lbSPMDr>NRs1oGcp(6dkE$0`9 z7vr1BycRpS*li~8-cXbN@YYu5gNxaNCnho<9Lv^=KWy;+=L<=ZpoICA`5v3`^N+JV Rl7Jb3!PC{xWt~$(696tMyCVPq literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/flash_on.png b/app/src/main/res/drawable-xxxhdpi/flash_on.png new file mode 100644 index 0000000000000000000000000000000000000000..8d8f1c9b81df59ca6b48e7b3753e287c0cfe509f GIT binary patch literal 629 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE8Azrw%`pX1P60k4u0Z-fWq>38QX9||v63La zV21zu9k%>b>w3-`e}eI^+5C?Zr8n8UUWV2AGcYio@pN$vsbG9_v7M8NLEylF8`b6i zBj-8_JbbX(amTX!*~>C#fzij?(|#RTRll5h$$-HvKXS8c5;h-YFT2V7o7t}5jGV>X zw~ePws~=3WEvSFMeEv=2b!qt@TcX)b84frIw1iyQfA!ym4U=WGDjKaG8^7hM&NJ5@e|mVHZ=!0U$gArm}>3;vzzYu9Tm% ed_Jf6+)u@GDvSXhx&gqLXYh3Ob6Mw<&;$TI6#dZv literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_burst_mode_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_burst_mode_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..141ce4a3427397f198b0ce727f4bcaa436fc70c8 GIT binary patch literal 871 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJfmzSf#WAFU@$F6H1*+*X4j=#D zN)x`lZPkrcNw?o6MVoJ%{3&$fWW8&*ZXcO*ZGS)w}#Bsd!={ zA=52?fB)K`D|&2VKndsL6&rFFGF_PN?x8t(iu;S`2@GGJ)}DG%`%6sufbx{9%B&m* zwk??wdTQHG{(yyNw0`m%^chJ{|IfggI_-(Q!%4{nXL%ABm?aJ{uo*Nk(wuo9-zKqP z-Q*$-+luQ9u}_ypbQqp7atN>Jez#HLx50Uaxw%rx$(HNP9G-jKPz`f3pT%=w=DFUh z26HsoUg(^67k%TB)S&asX!^Auv;GB&K9^SL?zZfIQmRve%HrN8yXN$Zrt=qm@p{$w{q)g|to>ZkvB<@McPI%ZkiWYC>`zHXzC+0*h@ zhRqiP#pjtgc(3cby21YQid4Scu5(`;4`<6bCmqNt9(=lWBBS)jcE(jd`x*m9`!7$L zuB_Z=cwkmt`r{0?y+0bB)tqiP(es;U!?E|1GZ>X;x|r5)@=V!g@$xxa#Ldz`=I9UV z*;ZCjom+n%WjJkeQ8(*R!n4h*tQYfIbX;Lzcq}1Q`j9K^8>8)F-yJ6zK8Q!V+^uL! 
zO*1sR)giYhy&+kdx#}|~L-9@HTa3vw@7`d{>+X>_EAn7c#^zi0Z%@lV(5-Sw%Q+i- zv2Vw+KxUsbmnRH&GqR2_{NEJ&gyrzeOBIZL{W6K^la6r}#6RH?HwkLse<8W^@akTn z1h!C7Rp6E!CnFjQg{Mpqoj!m0coE9n0pqVIrh}vq19;h z6MJal+Q~0)5R}>kT{$jH|E{_C`^)GKhHhaSC$E`HY@F_QB(X&FiP`k`g@%J9!_7fHH(w!>W=7 zso7t4Kb{0qFY`n7W$vN)fQ)@iM>eud1oHfLF&^6*$e8p|!_lSgW#}pKg;|_&@@gHY z^lQB=9Okl2{JC_#C;N&y5B4U#SNO;k<;Dp;M{95xJ4!BiX$A`8`w9osK5;(!Fni|9y0y}mwg|gC*}rVP z>3mMJQn@<&e(!QCcjV1Cp#F+KJ{02T>f+S{Dh33rdXFv^1&9QBi+3 zU;MG&?76aW-ye%tiD@CC=k9soDZhk z|7RG(!l~Z)pZ16OE|qMsP=khF{cgqw?>!iP#4;avFQuUNKPT+ZQYIjiq3$_@x%9{P zhwqylu#RoGZ|rcEW!#_Mz{vTaUwnZ-(~rk&7SkC1{a{?O)}Hghcg_HnhWq*rTb4#o{qg(*pY(!T z56XEN_Gd%gz)*Lc;lpmWhWn-rf7UVLl){pIl6y9?e3zcFuc>FdMCm7B4rcIl^>bP0 Hl+XkK(PQM0 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_exposure_red_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_exposure_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..1d841672efbe3604fea36d63d0f4880c3e8ce521 GIT binary patch literal 2165 zcmZXVdpK0<9>?FAOvZ=?BO$qsLhVwWiAY8zZX&A4^sPMtw$W?XU!p>i*bK^aA^ zwNc|P#MnFg*vh3T6p=v~ESxoep69IRc|Y&_`>x-2ec$yy&w5spjkTEsMgao=kho}W zVh7JH!hzlm({9vX6rK>b?99%C%7Ftja3SJtY-J2Ub(%Qjj)H5kU~`vS07$kA2ckRZ znHKir^f@c{o$NJu5gcu>bv7Q$@^- z&x$PHmg?zQ`If~>uPVQt+xP8o`+0XgXGd!ISh;=JPuXhX^f)~}o)%8s zT-`0T+JO-bbJQi=l^7diNnE=odSzp?irrqF_yW-9#xe-=wbGePaE3?aS${`Q? 
zR6`EwRh4~A!4b)`kDMciaEX6a@OV~bWSpWFqj`Y)*Fn4~St(CmUf#z3w~wgsaQ^3~ zOIF7SC~4IJq|)Big$270@9M;{0|T^fgJ$z9I)qFvs@0Z|dZ`I4M@BMmiF4Encdv~q zMB7W#REC%QyMk@$8-anT3dkf(tlJ9(Qkf%zSx^xAd#Q_6v`K$T%24kfwhzA1n?YV( zR%d@)T}|-apVUiUfbeQt!++632QG;K+-7UoWBIe1c5!0-+tWZj++0or`}bs4eTFxPP>NWYf3)( zpdHwK@50V=9PPsTjLgn<`&KmzI4jUHcYk!&E45TZ_uI^Y-*uLiNm=% z+-{*Z9uG$pEDQ7aISX(Q<0+ahh2mp*JIW~j9v)p0#Ygk#@LGgNQ9$tl?|3TG>i~~( z5cN){jdK!{ZX3rri&Ke3)7LeLUD)kj8^#`0D3b-`jRhBfS5WI$^>F+&?-z=F9u?;-kae5 zEIw0lc4p+r&jFnGWs5wX9H%feWZ3nwX{bhJcc7-xoiB?L?2t}PV#=Si=~t?5ebv0h zYLR%Qglw{VsuU^|9y~IOO|K**S<;VL_X_k?h(K<(>Un}+R`kp3+=XELc8r53TWG_ zWG%Kw{4=IlF7+1zz zV{Q3Ijk`fzJ?tLo{)Vtnfz0nw;lliZqmX$7A!B8Qw48O0_WkrpS@_{5^J>3Hg?Drm z23%Y@c_WhQ`L(e1SDii*Z{~IUaLroAK(Dc%7=GvL#{K&-dB`O8!ramC$MWkbyC(g7 z$!yj1XHnFZTZEKLJEpSsl_cz2DzjVT}vT9=|h6r12P*+0~%`PBF&*O_#?wvYMC z!!BEATuEKp9{9y<9m`Q>7o|A$3B3_}oIOh=TYI{+xh%5d7r}i zS;glppU;64MyDc|&3ta@`|rDHzf&14H!AbJ2G5 zvWupkUC6-qNn(2)za&572iyIR?6$l1{bI8)sP}FvoX-&dX+~Kw|IIi253Jrk*Za+6 zaVGlKc~i;n%mw1%+cV`E;y+7;Z%-}T#XRTe)%Q!R516d0oBWR9^NwFznXth zc0uRr`>U)Qe&0T~IgN>D_I{27&%_zd=`+lUXRz4MP*BJC;Lm>r2gx5l{xE&`$@<_k z_kri)4d+qhnGXD9ZTQU1@El2FN}YU4!yhJwpR5d@xEY>^Go00Dm=(`pvY#QNj`6}D zrUgG)9X@k60PUiK#DPBy4L=nbK7VF-CjMX!kJ^FCE~qmuAbV+kJcHeShMGD?bidB8 zf56C4f<+vs29N~55t>m9z+l4}22PVQ!!veg4Upem!jMim!?{-)p(9mBwJ34K*wZqBWSI_c-elgte zpZu+O^_#EE0Yy{46^m=uGUhyWzjgL*`eOTrNrhrR|J*tMX4x0cfFc#g?mzc~ez9pt zK39DG{N3i+Ml22|)ATMsdAu`JZ_~se{tf?*%}ks5z4-Kcr4L z&%|e@PI{bkeT$CdFB?Xd4=e%)gdG_C85of{zv`Hqr}`*us(N(~n4=gxUHx3vIVCg! 
E0D_*B6951J literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_face_red_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_face_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..70410a33d1fa1bb0d0a26f00422f5b99b9406ed3 GIT binary patch literal 4693 zcmb7I`9IX(_n$GwGPcT$NR~wQElWa-DEsmrd$LT{q6kAm)-ef@2!)xciKJmHjY=3} z-TRH?lSY0ID*1_K{$g%pGFp8U_IHw;VNKNAMdT zmQpzUic7eCuwQs2GSn9kg1mVrT;+~$xTL0vnuM|JNX;@T#R{ z=HKFr8&u}b20i2Z2(u=P-f%&Q(!lJCUZ3m*Ri`C<6uod~V)m;kM2rt~=Tyw>=BDfx zxo+wToGEblEJ0Ls-dgMp5A#p%OAZ9>G{x&V#|Wb}Rkry8!?f-N)bqTeiFUHLZzXc-)K7YS z-9^e`A&DSW&stn|8I*ZJy8^~XCxBu!s9HyVm%rQ+3iBSCwz*F+ozTp&mdT>#;M9S6 zN|EHv!Ls&%s2$x3SN+qLNQJ@p>6@p%b!C^rhjZZ&`}mac0&HA~mzAvTXuOOYH-Quw zpeDfdcoM{Np@PRJHs_oviZKt>kw&|-on4zT7a8orrgbm4OQGF?puU|T+c@qTWAjZm z#3)!ZePCl7`*l*#3^_h1$xMmVA^dB1fs7}otiHe0ayIw9F=A9mGrn@^v>Nw2saz@< zK2uY$CL(6YOv%>ESPb_wgNy^f3pN!>+BNA;P9AM29&cwuyB^_IKqg`3-N`KKD2XDx z-Q=lh^2_R){PnWBF^p;HQVer5hyDs+YRWkINA6 zMC!Z{ye{LvwP3>pDYjR0bBDiv+*g%ES9t+DL}J6)h)w#OnCg%l&-e98#pfo;d;~Qi z{VL*hTIeqMQ(II+7LOqEiN@+>`d`_eH>yo{X5$d_VA z%G7#6yr;{5+o-pK4JWfME7}3bkH%u2$@MO2-a;|9@tfIEIyHjlgkNs4Vgct2lx#{= zR)nS|rB+2@hq@oB7T27)#oE6R{4SY!QCVE0D6airepT(@hP*(j6w*wD$a2bq3pXGh z@`f={)0YykAwz`$4e?NIFeL1BVVmijA!-aduvRE#n`7T(d8#x@_uIp7@Z<79ket4o z^QE~ve!p8T%mhDMkMNUyge8#VS%=^bG%L2`aE zunUcr=<##CTO`vZmT%-o7lTGIv0uH%yY~gITgND(9TU!X@y1<-|2w}5Q&Rl!(Be?1 z+kkK;yGzXsQhwRjq8oMD#X~KBTD7G*DNR3)n@rm?hb=k)(z}X}LCDHY$R_)YhxGsY)skhs@q&SI{=Fv?SI6Bsd+TEi zDvPQBFX(Sv5pLhRkg@y~qs>bW>UNK9eu`U*9xko9v1tH@lmld30?K@>5IgNT_F{}a z1;0~U080S34A!>_8Y zKmHU6BZN5NOlmXRGXx$xt~oW8vQJq|4M1*GuGO)#rm{ZnfYeg-ChVsZros|4eR&-S zLrElPKSV=S-d?npu`iy-$%NyurizN+OPRs&NfgTf3|% zaDYpEiR7VqTI%%l_cOTOI*DC-{(R$^H zpQCbmdRtOnakZq5xcxU@P{V8uP>q^|P5+49KCt+yr#*wTTXHC9Yr{B*t9QA_{rZB# zrJY1}<`GNBM0967i0@zaD*7Lkt`Z(qt7vuBv#s2|-B9sYOz?N0dW6cCA1=?Ma@WZ? 
zt%K5)+re%zR{C@ZDUP{sx-_Y7)aU`^|8gHZ+7vQlk2iH>g*4-Rj2O|>IiN^EULyiJ zgZpWt+n*KTxZAVQU>&Op16nOxh^4gc4W~zr&5CevzO#HVPxXe;f}2$8P|3YY0L;td z^=IYIeomo|vld90(nxriBkohAQda4vrc_f#)`n(4`R>!T-QBrramOQ&@%UK}LAZd^ zlB%}IWRL&0AE!2PDGnwuS9i)0c?uMcrAf!5>;Gil`=WT{XedwX{^0suk@&ASokB8g z3_6?j!@gm6&|`*W_TWFLe|_N-iKe8 z2wDF(Hz-ES)E9M*_jzn8DGp@)jUwd`9NeTCyCiR{tK>DUSQ0EA*h9))psZ@ ztFI&!e028+VFyX#-P?}psW8@TuDrZ6&zAHO(a%IINl*Zk{(erkx#qc`p4wGc zZF7m{#>nqENjUTeSNUn96wxTsJ$|lm1K!YI@Tq4p?mRuWYme~L520fyUxB66UDw%h z8k^Em50rjQ;>IsM_b^=!EYumyYc3t0f~m(|&0(o8_Efg-wS?H>R4p5c=*?A(8B#Ff!Ue3dq6cQJ zz1{QfriWx0&6pAD@b9*Jgl*CNYjZ>FtONwrD*FSZgU@3Oly+*l=hr0B*74bX^|^zg z`+UwUJ-RVTpb{2)EpKi2evW$}VicO@egpjZWG8gD;&;!k@Cp-QhP20#T$^LB%GuI1 z7>|kMe}Q~2HIrGUa(Wj$++*t<;&+vlRu2-L6RmKSSJTpj79Q9LR}vdi1EGhOc(C2* z`V59)CnrcN&!#^=(YK8V0Orht2K`?re&rA_l9*j;fd3ja72b_O{Bsu!Q7x7@)kkOL zcxc^i?wC}QVHN>=wLfvDN$c#b0adgUJc`UAgl)kdo3+MybH;f8m_`>%zioQW;u(|? zIZ;vU>{70zI!%x<-}ADP_aSpa7X1u&Yy$ibS>}}?%;IJzDm=JPBs9Y5`=LXo)*`Gx zGXujVEb&@$4>#WdqE6kDp$YFw7OE0rz9u{Q9UL*6_RnZ8#@B$3K~5R0W=Ws82gIAg z<{I=+<;0}sebvF^wU=VNA}q9w(ql3M0}XPiE;uhCYpb z>Op;V>%#)5#U-&!XuF?3a;CW&sI+Qv?BoyatE;y8*f{u{h^TxzAEZ~~hEk()1eXMV zTY`3m`pTI%4$gP~wE81PIh0lzt=!^hxr||UooP1rY3I{n%=Ql)wXTE+2j}%<;yY*5 zB*$NDZ-MO*!8UXGA(`RV>{GEboo-RdB@^(J>xC;nP-4T)uWQs3QlFT@t!hG8dlxVG zVTdY`1-uP+V}D^5?W%sw{eb&BAxMQq;z3tli0HObY+rbdDcc6;C;WDzTOji&Wa@KEB=|gA~P5@ejKuiQ05b`MgjzVm+r8Zl-kM@AHd@9RH~vsJwE>QQr(@+1#&&-eG5 z!UnbJwYdB7le1DcsLz)c_( zD?e*W-O<~3XIcwe_UxT&tMYDY)2g4AMdX7tb-cV)h>2?k+uWK&&VK5$E`eo-8iw;oTtVN__K?5dJq+Ui8DZ%gx1j7YN09Mt1+zFEK zd{bprUuttVa%4KG@gbC7jDrj0yZvub@uG8z`QR&F##j=Qi!k^R48j1DkuAQwPd|(q zbGRK<3e+mFDqUN+CLRPWK-lgKi2*oZFt@h8sYG+JiVP4S-N^H}e7GM57^%S!Iw>)q z1v1*h{}ZUG5NpAZR+qMi4Q7 zIWzn$FIYaj5HE>-tN|qq-c3pTKa6!Tuh^JWT=q)*A2^ub Ai2wiq literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_face_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_face_white_48dp.png new file mode 100644 index 
0000000000000000000000000000000000000000..6d1243498026c8fc802844e2e3c83506fa41befa GIT binary patch literal 2752 zcmV;x3P1IUP)G0000VyNkl-OengIJi>;Wm5;@kSgL-R^tO8g;LQ(Sl|vuiae-TyOyHZ zUXSnnb7#rO{GD@M=XcKeopX47{{Q{KJGt6(`n$pfW}t|%OyhnU*hDk?X{Uo8ddZNX zmmWH3XFtt6PXh~?MkPf^5GI(s!Q4t6FYz%Ma+To-P1JDLK?rN_zRPw7vL(kW0KdH7@Yu>Y{NWRUZyJXaVjbL74PW6Qlrjgin^m!3Ypz6=Bt2etF4lp0x68sQhG6{lVo}U3m@eR5?had>Q zA{fqcxnBYLvmf0ulq3JYX9O$@QQL2SWn6WtB)BD$Snc#d~3>d9iRO>PDl zK^om|Lvr3v>BLkihPxMFl^MS~BMjp`OtsRj0E0M#UVG_}EBA^2)O7sP&!MO~-L_A+1GuTQWeyG+LU_DOtG9B@4 z#mr$7op|d>eE|;R%qb=#zAw(z)bkE!(IHD8Y6o=%7)%x?_V8bbL5nkvdsxq2y2w?U zBQ&v?pHR#S)G`$732-Y;tYH9R?0&!v{DP%4@)EoGKR)F&JtWAI=36@Xl#h6u7g)&y z{ESKlBLCX8SedFPKppw&<~Ag>>&MrqEzuL;CGyik84~I>qqbR3fRD-DIUXR6gnY|T zJFFu>5gBr|lPi!2Y$|GL209NgmfU>KY$PK4K32+|2bhL^2^zQO4RPYD19-ZzyV~od@Va?II+?y9Bjv=K*?A>xXFA zK<2WQ<0MIPoR^tn{Sa@&`TbEl=R80LHPNt{e8%}saR-7JetrzKwEF=H5J!u%mhaL? z95MR*0`~*-qE>`B+FDc^5u?u^UMXp@u$)&Bcz{#-1;XcmF|bKWaB3d2cIrG#PuoaFhG5BAQW~f#kj8*m2C*^MyN6 zd&7Bv=TUnQ$$LrcNEmy*u*m(_(G93IMmoS+_x~WX5Vbd4{|IU~_I#n){Xa=gbMX`O zW$f5u?D@h;r7D50^EX?3dGT>Im!8st3MnJIDfoXU(hHei`s9re6zQSV&K7E{el!|z{v)Vm zxI|9?;Sg$XArah8)D9rJ0z8dcl2RmM`yMIO*60f`6DvPQBDMusnV~PhU^1vRU$~fl zo)pFU0tl_Br6YDj?lLl{?M8G4Sca8lNQ738m0G<4#$x4D;z&dm=LA;D^#%|QVP!fJ zk|gMsb@5T3x=?MR4MgS7_4tpH`DvC>W<64DLg1Xhv^b1#6f z8EY$$knS<8ZA9D*FbQi3Dv=OwJSnVAa5sSPI@b1v@U=t%?_=#1#O(kR$zrWGg0J5o z%Qy`Iv|RX0F&PQbt|g7N&4?EP%1L4E6H1VfY%s^LmSmJS0fg1q@fxv^ynJrPjt0c5 z0Ht(d$8sbD`ycG+pxCu$#LECN_FzXJRRMqXmcWjk#Jmk4RFJ@q zZmvWEx=EbDj$TG1UI!4CVNVAYNB~z!C-&4Legx>pKJ58CfC(!30()90@F##UlCQC+ zBVZpyljy{r9!4O31rYAWu5NA!(2w3T*fksRGl1|ccJ(nUAisDfvFjFvz7B>VCIk?w=)=CFTy5wZ&turv%Qc880fZW| z*q34{al`)HQcoKDvQ#4`1rTb<%|6B&^vA~c$<4!vX#s==a+Bn@6dG{T;W1L==J$w+ z0fcqr>KN5N-}5;^t{M?j1H}0=xqHv2N3<#IA$QLZGdF+`V;%W;i|Y^%cLVQ|k2SiV zV+8$MNq+WnJ8=&lwPw(U6~$-2`2mDlvgGGD%Ng$CSL^CIL4LA4j07csP(vU2%J4S# zFhuj$su%DM8S>RjH4>x%LKU4jkzxmn7^mTP{72YH3MV?a1_@dKp^Oi4?n_?bL9Wm7 z&Or%PJj6CSaITHvNDu=Eg*<~kC-@s{S;U>(#CR^HltC0wz#vK)%6M+#P8PA2W=^8d 
zQw&7R4Pd=_oF%Awn2jL84j_!Al^|+i1QLb-LO+($N09Wgm;xj?0jyU+3qi4q(FhW* z078rfe1+*cm`@A|W&rDzu!O42|v;^hSIS56aIOp|3ZqY$qr__Hf{6@&Qq{~-(h zZ6Ys_!f#17G6C^xg1@_rm2}~kE*cny_&LG<6*7licq_{eW|{Dve!-NDVkv*e3kRrW zBx1${vsS?pT1exD6uYUV95HEvi7TO+Cpkz49WoqX4YyN_m^Z=X6*HA3yvSkF zHdD(~iV<@s1Y-j!=VtEXaW=4>HjZ+dZqAV=%{jU`%~9HTgAF{+eN1JH-naM{0{pW* zIv4@~000=|Z#{>-yAb*WAV2^D1Ry{F0t6sH00IOcKmY+}ggbKJ*|MSl0000Kn_z$Z~;+V*#sx1Ghqhm;-}u149%8;}(X0N3n@AW#CW_k|?My+Ff^g zZ{;D|^6Kv0{}_J9Y&cyzv%dcW=dC9_%GW+TzrUE_K-2Y_MDE;ww*TgTFpH0Tbbh}( zQ$xGV<|O9&f3{o<2juSGI6S|;pM^nsjm0y@e}Ae)7#dWc+Z+XpMb|u=zzPzUP5xecC-^`Glj&c4zja+a1WCw&zLFGuzLM=Wagxd#3ss z^PPeteD)wE57f_B9R3Otn)9i}A7oy^QAYL{-W~1?E51lDOqCN|aA`49NR>;2S3g(4 zOF>30TP=sB?kp?5tbj-=10_R%k{kgq0}+xZfRgeHE-Qaa&H71DHY?7=G?IJJiI+lX0Md9D?Jut;l^}?FY#qqdZ;xT-G@y GGywo6#y?L0 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_gps_fixed_red_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_gps_fixed_red_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..650176299b370b060d1f2bfa2665d606eb27e234 GIT binary patch literal 4851 zcmdT|S5#A7w+(>=2)#)N1OgU7il~SX1PMiY5k!Kt1Pn!5geV{_0TWcZbSZ*Ah>9R3 zA|NGzh*I-`^cqpRNK-oe;d{F;_w7F1hrRb%YtOOP*yo(F*PQFIm4zV>1O@>B06fM< z`qs>qc04%1%-MR}hn=~w1X>&F0?K+M7MKQ`2g)1;08}J$?PEbqn-gzj9|!;l5{?JU zq`Oow(+MRQpb0kq?t~z001kl1difEQ{cr??s`7c|OScWt%rt_xjrCDCf}MZmjwQm| z1U~P0?|1A**zYx0hBf~^y6unzfEZ7c49f&`ADIHBSx7uABm^5dc58SE6ZZn~f7Xn&Xe!*zxM#fP z?o1htU7p(~f1^65mQMhXV1&i6=6?9u75)f-*N3o*&(|w?tmv^3Pg+BVb>@6E0;OaC z63HzzFw(y>Ymc90`jIeLjUM9AGakbMFf3FlC-sa4%xdcc)Mdyd9>Qe$#Vu(dP2m43 zMe`sj+V4LplegOw_LQYG);H7oi>K_zK5iL)Nwx78d)Z9QoNY@>3zhaJ_}c7n)=86_ zXX$G&S;w)4J<>>XbjGR!@|`UGVCqkbtV{br#C{N)fn}jejR^7t+{d>!kDak<$AtW( ztN3%9>?dG`Wvro!>RJvv0|QO{&3B?goaf;(P&E{!m^+s|6HI;Sa;fKrrd;Qxbk3%e z{I`r4-}2pv7xAfv8r6Oh9zGb3EvZfJ#Oa_-2&a4wgaAac=6q*Gen~aL<%J=P7{l12 zs`Y=qAEouE;4slm)7m3CNi7vEChoZ2zrQ16(xmp;eAd-gQn=XUB~HE_ejmPfID8{G z|Mz^0ZwiDS&%xoGZ3ql5jdV4^MPt1)4>J*i)k6uv?~Lxl!-CjH~ImGRKB2$&r5A 
zVqHKA)V#V`aZjOd;c|2cE-!h@(w^e@4a&?RqpHZgKk}kkOWrH9UvxGX_eJB13D31l zYAG#!`KUmx)fd2!4!wbdDGCmoWj%7cR_XOZGa6zVMd>;}P+RRk)zseTpN^X$+)21( z{oX9u1S?M`YB&ePW3@kWDSF=;JNXOSHd^L%j^RvoDk^95RR8^xXQ8O?6-Cp5j01bj zX+1BGm-a|Iv`%UfBr@hRJMzzb`&&K>@}1VGO>+Jw;_V?X0fTh~+RmW|E6^y1Q`ttL zN?gFV!wc8tB?Ms*w}2J>r=K04#ba&NBe_OD1%9z#i~!0@Jl(9eh20hFyD({-u=N4r z%PLvi;E1-@DBk3KEX)gvqjtMJQC3poZJz>Cg{J>GKXv(U6|e!ocSi1wWFelz6RN)W zF&12fsjl1Q8*kmw)Xi?b?{aRmFKW}GICD3tMH;g;tTtGQ$Zu@Z%4w3N!X*aX+$_$! zG$gl-(-5lEg<37IE~%cTQ{@Fa1cEV7ity}13a#ddFBW`3rm++USe{?U#eNwQgI z3PGC>$#!PdF4m+flmm1A^Gy3f{3iZr2mcKHn92hlCh-XD5t?O`lX6GT(?h%Bt{!9LY}l|O86Y{Uduu{m+(_1%2(0hemz1pq*$4D@qVM%x)tn;xLu=r zxLUsQH~FjY&u)AJ6}Ia2R)Q;pACOu`-k%V2h@5}(Yme13DhFkSYY>#*1iBru>ytF2 z(`ClQT^4I=!uKx5?v`Lq_Kx^GL{^08#j$%AwQ|XC0?0Gtk#nND*-1rd#5`nfle9m7 z<|c4K<)Kdl^Gy+TN1&cFKiq;?uN&f8^SZxWka)rSt@}Nwp)<)BdWtOh_SeWpzMN;! zg>GS;jGs04J=ZrgZ(a=33_5fy`b`qP6|GxwZ+b+fjy8dRXEr~LFC8AqD=Qo+kZ2&< zo_qf5Ab|;KL5t1y)_jCf7BbX(n-n!Bg;c`7{nH-%AOm6g?lgI@T&1+-&uPOY|AxMb zQ{%l04-@VzJ~Jw~bfW)xFa>|r?dd{Bd3|#%)u&~Ga>mr@f_aQd!D%_}k8MRmv8 zXTufp-RNZB+AkGEzk|Hch^fIbk;1h%CSx#pDjT_CK`_@NGQaiBQW?t-gWTq0l_2@t zh#5d%#7(zsP84a#xg{hgj%I-mTw!PW74=K3a@Qt*cTxj=+~q-jZ<;5D^2CvahrDqJ z0C$~U+|hG8F9#O54eU_01}VC@Y*0|8uu^r?+VktrlZJUGKueUFI6&W=Ufqu5WFIQ4 z_TfmWD4JO*537619Fbw$=hLcBGr`OntHj$F;9Z(|-*hNdZnVtKBDXHTuUa1>p7c1o zJ#@2ZvjWlYxA08RVC+)3<@AZj#b)174S63U&M#fK&!sO^gv-bBUBs#e1`Jcp6?JxP zw-bcbRudf_<76AM?unILaB9_p43HQLdmYLr5aI9qxd=ro05r3Z)OTcqjG9t!-r?_i z3i`l~jblvRS&|@juugw^oMAi3jFrMmd^ooHv{US1GdA<}q~%(Fc+%zx4EN%hjSxU1qLrDOvesuTN^pcl&L$vGkJNbQ!1Uo7fs zZL)C*1|g^-Ul75pLKN?PU@Q^)R7X1fewiY>+sKuY8z=+z_*$@hvaTum(anP0?{~P_ zXnMMHawz-A0v`;BaO3)%K12y6?UT^?*~+BIpBcv$`lEp_6ADiVg$E6V3CdI54}N?d z8UrmqnVZA_;VJ5p;nk5x?{4fURiJ^*CkJgu2bn*==e;|-I^e4^(i>Kq%ugKoeZ{X8 z_Fx|@PX&YCxSMgn-46AwdG~B>hV&t&SnMtG5_xO<5_WeXI^Y7|`=|>I@gJLhaZkSf z5p3a=r0b1_LSI(*7OwZdFp0dEujsfsw_-)d_R+EXN6$>ASeX}vTW~fX&I2Z{JCx~8 zuH=?gbdQ{k`=vL+2C5PR!>mBZeCfveH)`pzU{fR_s6b(QRy5hY)1vX@>g($eQiiuJD$lh=T{e&kv34cq`fG 
zQ!kp%Wl?H)tmi=xPmQvkL33R@-_`=f);#e1n@Z=$$M;`F(e@kLY3CoAf=&xJYuKl!}GM87Qsk$PmNgnnes+36HJEj3jJSS+mS zL$pw++F42`j4FU6o|q9_(Yt{RjjlSan5|RN`;*7s;Q+j7R$ATVAletOxr|HF^|$i6 zZ#avK@Vv#g%kH4l{-MZmm@+kz`?7g`*;4gP(@kW3G82TU@)J@rOvhDaGfPeOk(5bk zW>>Mucvs7x{cSCD7Uv&_KR+HXx7y2)C-ck0WLHt0fCiN5_L-GvBpX(eCdSRREpR7m zsFCIzHZOJiRX-&}CpNs`>sTeCH0Z8Wp`&#stMKp&!;5w6;6DzakSul}1E1*Pwp^*7 zsieSHYNFQ`g2dq4{dJt*+I8{v?Fz|vu|Ry)!mss7esj$!r}m zseLx4SU*n=Ld+Gg6U>_v?2o+@qvkV0@ahxal!3|0O8}vt!Y)35yqO0*&>d^0NP6sx z_Ny2(_-S&=v0d&ZB`!0PD7wHXx6+@>vDH)zlhTEZ$G+y1mw>=NyJ?^nIiKXYoCA$M zs$Tcpv3X`NKoaKtdaP2$C>Kvj#`&}QruY>4c%aVvFNqi8jWDPB@0~d^WD-G7q2kv) z8lDGJvvJCvW(K2hapeiQ7%ImfQzvs}-dF_2kUqrc&q8sIA({RkBnl&1x z`eT_!+7jhiHh6#$AJAk?C*yQS-tx*dD!nS%!oO>YI3~z)XIE1U^g@A75X(87peG?SMW$1RxggJL3>{mLiHkzn2 z_nz>|F1eX@rF(;NrDx-8$Fy;Lk7Z#w!P66ZWeop{hftM$PKG~|eHIh~TlU`iDmkEW zHMs?r-yEm~^#{9rTl1!9gn}0{m!d2p;{VyeF>bzEAlXTi-|Mu_Wy(yPb^KZvvA-R= z@JSV{ujj$em{NSnV%YTDOFW!3@$lA5q<@U?Ae#cB0iT?91Bg%kU6zCVbHZV{vpqp8 zXknEhJL_?%Vg-n_`FzwK*%}$!v^M<6^%~E)+Ulrd*#!I%TzWO+b9IBnHUf-qWkCAOV{WzR)@9mJm>xo@1@CZ57TP@)nljGpHYuoHxs51 zWH+)1S5H29V~pS;4?Vk{KWa?fjXl<%r>iPGN=l?E*Ne@N&m#klu{NZ%o({_;`@-lr zk<_f`r=Pu@xqK{ah3I0_!h9Lj!OyTV+ZL-Bm-L+aNNSlbb|z(i@rdCKA5phaqAk2juO}4?|Esp?c(;erw=Q(enRWYvX@*Ply|b)ax&=R;;x#j~?|JK}M;F z6smB%j1mSn-$-gfWJ0W=Y&IxJGzKG0000VuNklnFd|2Y2LyX;*tGFV!ixj@>2pdlPA2P?%1N@Ae`juJG^T$o^j27?JoI~?0e z7%nCX=wQ@Dg9k)KqYT*6;!ji*^%wO*1}MWwI1MsFPxgo{-TOUPbGz5Gp2K?9Q}*lq zANa}MuDzb81lkbBvye(uDp|-laGDFDr=;)&a)w9Z~QNg@bW`1+)_tEzBKU1H{=# znkY$8Lu{}Nkk5YnFu;%0v6)9H=XNGi$Ow`o89^ZvnMpa1v4wh0GJqfI$Quj;6mt@9 zI{1huxs@d1up}isMID`ZbAr)PA7By}aPJ?iE=5QQ6Elh~Y0L5HDM>ka=Jf%&tnjXy)(dZ}) zkk3hU)NvycQiU|2}j0(nz(%ftiqWT zk0F-!I4PW2VcP)Xxr`Gj9z-neA%_$6Fvg|<;vB-6M-j_ghBF6j8sLctefO)|fHO;N z8Q`~c;lws1BE5_gzmRXo059Ui9^yzuic^mhTWuI%99K{~Lmm>L@;QfEA4TC0@E&UY zlp+zUj4P<^2z!7Tq)~eciD0WxOED$f0X{~p4)L)YSWGQ%^F3#1rJsIUIm7q7O)ZPL z!PlG82Gr`p9AGkPU5r9}={B}SdtT6kN}5~38(=$XRfy+)Llwu-(L^P~ z5YMea?ZvPL7|E}w93|oTs(2&qxY5Q2&u6csdU(3Y3ul08)Mk797G6pVUNrLvF^`Y$ 
z)vCf6Ai)Jx-uHOH_YHpdim`}?K1Ag#Ipznr50x~N5!d~ZPW;m0=?-=(Y8B=Ocn6ie z93O}Qwfg69a@W|uFwUm~*(a?K4e7nLn8 zPd+9*fVk*+RPHo4z$R2~b#?CV#Dor4zYC_JQfqF2Lu7Us(JsdSW6B|7h<3+?X61T-^7!V_GZ2e7};|WoCdTGJ6uytcozKL^P`;v%@9^7($=#N6_Ph;i&IN z(OzP~4=|3*21sf@>kZ3PM5}8^ky%mj11u!79}&&A5|(EX%}$Zoyx<3@^!+uBql88O zJG+6*R(Kx3_+{4(geOgb@yo)%2Y6Kf$wG)0AzCf7e}HoRpLHRsMzpH1e}LN&t==X~ zZy{REvVVYyh*sYdrf(6gCfh$iA)?h8!gLDJs@VPkMj%?X5~gNEs{;E6NFrMG6Q({y ztK94W*#WX&LG})i{hx657@9qX&dmN0%U(q|w|?+lMLZU%%eeaytsb|3fGv@_j?Q=a zf}(qYKai=9&;ywt5X~CNY(@BwBu^rmZ6PerAex;fvw2Z`C_jU+ltl5Ny!y425?5~; znRg_bucZi0zW>u4l^9^)1+v3-yqZ{tXf}*COl#pfM6(7vUQMhavrdK}nr*QDX|Rx> zi_F&8@_KqHqFElzm~@fhi0{`BWw!i*#yUj1M=B+LvTeChl7LWuDdrW_=Q z!^Z)e{z$I`(e5TXFrl4dM7tTN)R-HfJfMHj@xd@J;1vB0mDwi$P`jJVer5>bqRj;D zuZW8hcKxBYunU#>h>K$EBuH-(LtJz(DsLbr2Uvv4r--W()Dx6Ch9Iswh{}Ak16)HF zDl=VAu#+IXNdj@z9jLUCFff4Tv)p>bWid7f@ZBFnT-J!ncEt1mfAD(UtLOnb@Jl=Q zBd(i{+FbJkB=`xH4-gNH2NHOA}J*b=}Zhin^9cptB55-tQGhSTeVZ>8+qqY(W zV}JrKqjH=C;;B5IrVTe*sAf3gsa*brN*C9MGl1|aYO4{?4Pzz8&~bz(xeoE%demM( z!Wv*AYCVibd}%7ra+oxkr1^noC_#Lwh+b6Ej1O-BVLxj7h#|gJz#^)7i*GqaGkx^Y z%qhO*E!J~C`G{}D*pJ#?B+LP(kw&cwiC}9{OED?j0fcu^yTVK)V*Q>0)Luuz9$+l} zsGVa35}^t>kJ@E!41WM&8&1>_M?zJp%|g(Tfu)9zrbcPo!|7i$c2w5LV($%HmJKa#A?6 z9IUn-l11=4K=+0|=#bqw7Pa zA_khy9&~jw9f{HaLOFft+ZXW3=nfju_iJ#UPa((&x^d$(7BU3!xdiue5H~uRgG6xv zp_EqKxybWOLwsrm+i1p}OH4u@jVc=G!kZ>WAyFSdxSl%vkm3{#JjXIBn8jp@DIk|z3MghWv#8*4o~M!1 zr0~OTh9iSv03pVDQbfrBtIb@J2&O8hg{Zh>?q{K3u0p;h3cg?zG8hLC;;f(tn{~0= z?ES1@x-lHURv(+cnGNR-eks3Tn>Lmp<`cpxpRM#^i(h$$JS2>S@G4>lDZ-ZGbw(p$ zC4}1)>PQooG<%tZgqaY2x9}p}nAXh;j7P#wu%kSxIEyK#S;=t3h6H=cp@P?FCrE8< zXD&I2EeW=jWC1(qz;7MA!F&>kT?uxU%bnEnDZO~r!vSiTO)g?%g54#V#aec8SaXVA z4zr6jl#xJePKZizikQa=R8}yLB4S815~7xhM63U{M+ZXy0001k{H;f^Ul&3G1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+fPnx62ta@U1PCC1eyu2SUFt6Y0000G0000S$NklB#w*7%|?Y+}L)8H>@DC7uIEV_FH3oXFR_%d*=MlIe*Ug zdGCw!;ap~Bohd1Lm+LZJqY=g1Nt&fo)gr4K)T=kOM^$B_0?`kDhe~=}KaYiAk`{ly zh5DgRQ7s+Q+v?T7)uR{nnbI85b~zOQU@Ik5w>5DAf8sw>iP|td+{tprqcpBwhrfOV 
z6S;~f=%?Nd(8(&sWf;IX{Lu-DTtEkd#92R&@>3c!3Sb=mCE-7hIoj1=?Ab6e>8$QbWkO^o?$`y0gS^RfnXwUk#wVUFf;7{#^H}ZFrBwa zzGK|Sw5$Mh0C-0LeOyN&3jiga@RR_a=d@%2a)P6*p_n8{`oGd_2aF7eq4j@Ww z^O&H@1~<3l0vdBAb!wHu6!y*P>DsB8`noE5((_othhs@{Ve#HPJYdqlu^i)Y2&wa(MPm;nU3O}NHf7}{GPKSTnb7oVm-SU zd+#zg-2kF77J>uZUB|b|+StTLjIBc~at)9?!Q>d;u`jWL9gL*`mb(FPPvCw_=%)6D zIX`&-i(q?32t*s(sI>tWxdZT@KrCZ7wRMoWE&==}D6@`1YUy33y9Dr`Ks56-we%(@ zryD?w=i^Kf6u5~B;rlyz17wh3GF^nN%JQ@V%-{%Ql%U81 zgzX^BX#{BCC4kHlh}#L<3l!4_a0@`j34TSDa9y7^fL4xy2LxA!Bp767ngEKt4B!R9 zb%g7=Gy(jE=yj%sU?btWHZ1_(c|q_LRu8)dn9ML% z3y~)T?N}YA>=xh`;omR!f?y|B7rO=M!fGY*h~N^eHoF9vz-I*aE)B>df)f8DxFa;V z16YC89}x12;9jhjx&!zNRCzcKpqm5=qI?}p;}i|?VYLMb_$KHihiai6{gjCR3*7> zL_G?w)y;QD&sT6gGRiE$4h1*c#a9)ah*cXhmemWfj_kgHvM8>jwA^I~>voApr z+bg^5o&~+>{|S@#C65XArXS#dlo&yS;;t|K0EeZ-2p)@_pg;WpM%Zu$XcNgfl-P;jVfEb?cDg;QD!eh zOUw(?1b2i+cL1Ujs};y2f)m+o{+HwuU=>z7kXHoXHV9U`1sF#^R!fc} zf&nJD1rU#6)s2oDf=7^RfJ?D!J8lTha}UtS`&ey5#|6P23hn{KdhC`R7X<525&%;f z!fH2--V+QlB^dzm5O(WOh6uKdf#5-u6u>;}2AS?PL6up_0f=X@dy0a`1Y1#(01Fw# z?ndM(!RyRR7C>yluEG*e2_~SV0Z!!@cKey^DM8`@;y%LEMKLFc2Vfd~gz2v+C$ItH z{Qbq-bAosP3OrA^uFDDH01&5flrUAfGAD=wK&&BL)%+}o2cUr$3ERe;p#A`&g+qkx zf&8Jg{s7`as)Vg8e`!{K0I`Aa?WY~3VCPeHMBo6RKsVv5u$BT!a3wZU1xSiO)O(ZY zrIwzi86~tiyn<2O1fu?H<~?d@keewZ$7^CE!vM(=h&VQvL)6x8#~@{Fv;Lv)D@p_E z(GKm@YZ_L##_0^T>ufF6WW`;lu2xlJu7G0iWIdu4ir%R9srQTRRiFC5+=$9*(#e{n z@8}dYYu4wRqmrVxPt6VpaIpfnP$f%^u$B^!#(yCauC|)#(AS*TQD1Bwdxi(SppC*h+8}R-<$Q*lVSW zQIe`k2R}gAvk59(fzk+IZ#MTcfa`{MkXfjvm3aJX5C7#T)d0pOaU=V1)gEqU8nR~- zL_7RQq-PCW#KRmW&IZ`b`4rHYSK^7U9RABth5(F}xtPryq2Beeg)8_5io6or>UH>$ zNavbZ%5Ci6C{e5MGIz0*MpXMuf@7?5`FVi*$~sFwR;y;~bd6W|Mzu$;>R)IY17OF9*m0^5pk4ybhqT#k5Q7_?^DmaZJ7e?eT6Bsb5V zUTJMT-~Vsg|Ia@TeJCrN%?30b2Z*T14E(k0%xi<+H_xuVV{yCA>%63Y!+yJ`-z?c? 
z6rPh{R&lSq=G+i`Rx-UorSdM2bw)D3;Yp@G<3Ho{GmL#jtEV%}Ej}ir@biXu!#T^x zd=tJMPKqx+CYP}J+nkN;KVquS-C(X+Z=3#xaoN`AI=&4H&j~s(FtS8EWB4otVhA`i zsQx@uuHkT(p_=Im?+3?*U5xWM0*nthhOw-WOj!NMe-cxQeK^Ys@dxGaOnr|soc?B% z=T`q|M<&Y((FY3rv)vk^826=Z1#{E&%zGJZ&zzh+ZD~Kt3-tq8r!VC*eKAqrt=s*c z>wjkp$KKelh=)r?b4+ z6!>P}y)e5TK9d7qlfIWO|CCrVfpMGnJN5V|y-48&Pv>*(j5ImWysG^oBU^<2?)foi zmOM%b<&tl-@0o2lw00|EheW9&Wt0OhPuG;qAKg|2Mx)F2BmRm|H$!mS@?f qi*+A#gc5QU@AWX@h##PEu_c?q#RWT*IIM4i6nVP(xvXG0000b?Nklu>6*35 za$HMVqgq#O77zpCQyl_A;ITrxS0p}1s{^9PG zA>e&pU^X?S0rb_;!anlEp^v>RyHJfNuhRPHpiIP{WVN;j1jG zs4m_CGJK0u_^y-rWQuWsakP;ZJGipA2Dpf2^pGaqG*ezI1B_uGY1GCQ#W29#oF}c$ zac8;*sN_YW5^$6^S53^kJ);hBOV(On{wsmNF8FsZq3W3_AghX&K-t?Cj=N%8-~V<7@1}&SU-$ zkl|TuY~nh^VN>}PHhxUT=K(VOjKXbW3X(w6*+t=gO2*#-o~2NSnS~@+1ML>S1rl9J zzV`RfLWKucq#vS}!aVNd08JF8jd6%4O<*sDY4mM?IRu!?vV=0kqsmxH4s!twJ`FIP z9?X5jwTOp(g~OQZW{NKZT+T_%ZDcUwX+wAea~+KKVSq{wVk%%68N}l5ynaG331%@ z*M`$~^H;71$k2v~Zl;EKWMdyu*u$4YydC#qVmleP1AGe;L5Ks-xtu2|=R*8<%p<_W zJuU~R;S?s8S)ZBtAyHdp{Zh076CDh8H^7fD@w(+dxq|h?VJ#P1zTCcviKko*FpeC0 zA6cIBsbDj4dCPKwAsj)kms&Ri`~tnKW9Q_ z6=GEjdI9zE5AaL$+94* z&}*?g*4U3LZIToC91&4xPi67bzCt(mz^Wd^(0smQnEyunN6dmZ#jhFn69; zn9pQ}Q9%X6n9Ks!^S_uoX?dFNcJ!W%Yk+~AMrRvh)gzetnE6y8f2P0SpE-i5<%m_g z(K*F{cn0_edQ@)Sxrk}eZodh!>id{D!X(3!$nBU| zgjlr&oo#Upa3y-zAy)k!z5nKmh+!jm554yhtEQn7Fedr|9z~Imt8PQ*-KYnsL#LfG#H#mWy5_nRo!?ns zulNHxW1}6Q1)ZhV_xb32Yhh13L2&%kD+z>xfmC zl1FV zbZ(1ofS;h&8{)C*gW+F*Lc9pQjac>ta;QBU-2jJB`!!u%>XxtJLc7tMXdq}(E!wX5$o#FnHkLh52E%7 z;%%3p_Bj%w&hVd&qPPv^Rm9tBQ9F)=Xd`OtqZ!~s)b2;T?FQ6-i-c$qY9B-~z(De- zO+~z|CEN!{P18~9V?g);>QD<9gm~M3qP8OBW34*l!VhqZ{z%%}CZcl_5~|~<%?v-l zw^4f&@wQgfdZ|J}wHdXA;RjfW+G@npZbfG;60+w|douh0Yf)R|?E&anbfzI8Ta4OE z;Rk3%tpV|{;e3G3K_qmuQQH)LfOk;40r9RcatOWYNa$`t?Lhbej-pnFc-B}xM(<@O z$5}_h573F)2*j)Yjx*?e!f+&lMxypv_yNwLR*iU5h9>&Z%P|d!q#D%Dgddk1_Ko5?SS__4StlW^(~E&misxDDIb_43A>!2Z$!e zFG1;l6TXPGn0g4&=kiTB{fD8Iyn&fK3z5hhh1zGu{Xy&^Heu#-W+igM`*3leig*z- zC-sj`xjz+={*!Udm^n;sj7MK;Z-pOVji;xh|ArhUeye%5#{1LJEA_|K2{e%Z!NjhZ 
zkJ6q)?TPRM+^7E~OQ04^v~fwo7d;ooe^I_2ab68)(cACfevLoIe^Gt{Y5{{0$32SP zF@`(%wsoDcE^f@4hB)nFy3qL(6Oq`v9<>|;T-=)Y9mHvWkKVnAgBnpgghVmG2Glkn zPJ7wxYg8TddickyqB!FC6meKN=TOTt4sp^+)E;p4fW~;lSrgEC&6^t^zv}7{rSBom z`UmjjZtW$g_;$)Q%{=FG4Ws1=Oanwip=Zc;58`QF9X9%MDilRP7Ax`Q*?Fz&}myt(p zClciV%g|{-oYaF_72=>3=rlz;z*XoRqYUXMm2n)M+Gq#RABw`)kbY7FMD_SNU===)l2W>-VD`Hx-XJKGEQsmu;-kf*_sN^&{ zJCP!9PjX-0Ej)$ZZAcL}7riHtxCR(a7M-J1A~B{TF;>NKbh3<$ZvbHpdJh%t0FR;f z??{{jOhT`h2}L=;L~_ypA*<-WB?@3KWkokYId7x40f~EndIIzwD7pbwpqJ;W_y-Vv zj$V$*MK!=w`p{d8xEP?8EP96-QZxe$r5(K<#<&?kScQo<$P~o@Wwc`AUl3OVRMUxx zin%r}PJRGkG#!|GlOc$w4W$)xCm4bFFn}f`K(vGdYXBOhq074^$*-stfNfUV+y;%NiOMI8C^|CxnC6aJeJVuVfJdF4_ zfbe4q)z0k+-{*6jLOqT6IzWb>Qn+2rNa!ZRJrwQ*#NPpg46CuRg=vVxX0Qz#Pa{bW zpzm?)?BzBG$3|r|Z~!|GBWV~wXe7YSA6U+%F+N(ig5%i9b3c-n0fYv+v6W{l3;06B zk5%8xZ^&co0<(}b4IoURgD`SzXEF5=9OExx2RXtx!B>#94IqqWKjC$-kwr|W8o|6F zTu&pf(@A)H7>T5D0HK2CiOz90^Bjwr%}q>XBsEk~PB~T7Fp7!X#B3Jx9IgD3=&a^q z#NPq*&EqU-b(%YncoX993U-r5JGc}nh5>|fn&>7?KBv*m`C7qMwQMIXwlD^9li;ol z|3nAAJHcIuy9BolW+hpC)x+ZqLR==eua@-$_$JR9#vrZ}lBu58@JGM~u0oPXNUlk| zOqQhe@^925NhKuPD4wK?q@3ajMj}ZjB;Nq$vW)*EMz8QwQsPc0ZNqy zC=F0bC{-GuG(f4+0Hpy+l?EscP^vUQX@F9t0ZIduDh*H?pp=vT4>bGa;dHI5$^ZZW M07*qoM6N<$g0(yY)Bpeg literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_info_outline_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_info_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..939ee3a9660ec99f411c4e417aca9b47a6bdaa37 GIT binary patch literal 2633 zcmV-P3byr$P)G0000UQNklG~8%QKT6(ftKw%{eb z%*=W2fAINyX6DSfea_?cAIvj3GoLeO+F+oB8gAoZ+IWk0j&g<`E-^rc3$!+u`gwtg_6E3t zcSz&66#rtneF3g#H$n0N)9eXQ%?^Uc;XNv&A7CsSNfT5lUZgy_0g|kskD%-2Aresx zP|ab&qLZp<1-Oh>h6s}(ni&z508=?i7#(496ap;eSHkKSmV`S%30rWB!x?t+5-Vxo zMryc{Qc5VIh!RSp<0%tAuExeQ^VgM_yMR$#*6 zE+phzgoBA?UIrLTpYgRZ{A|4CmH^33pQjD`Izyva=b-Ec~EE&}HAeQZ8)Y6o>7oZWHCDsLa5S@kY1=x(*5M|Z{ z7)uJZm)#3+0=1723%4J&uiXkT1_!mrtPHRUwG5^11h^HQ3M&KDqBGx}04=EffLOX7 z)K)d6lott<$?Kt8_fNiL4vo^pE 
zZywHVLG7Hi0WP4{=-;qLXqk94FLFWPM0xUzP!My-u$e`A4U4TzeOHt}x0O2rdX~tU>;A%3c zbs%mAXhx^mssOF%G`Sm~2AyxM`h!hp(W!JdfN&h0JFN-OfX*?*?Eo$49JD4t7dlPu z2bhZ9Oe+G^qc_?8073^kA6gM$A3FPyLIk)Uy*ZH&Fb};&g$YnXKRO2@AK)N5KQXc} z0fZOOTM+30jp(%@g$gi{G&*M}j%?k zP)7>AYYQ7d*ooeuNd6nE&(YhC6gt2(9Q2wZ7oZhAhuXpi5Z*&C#SM`PFpD<M;h; zRFXpP6lIYKFotgQhMD3`0O3VUd_W=+0g|+1q7CsXKsmjbSc^npf5t=)rQQV)9>Rn} zBNFo6kC~;2mjMzS#Ka)=NQgI&AxwNq!rK5s6+@Wl=Q<>$o6bc{3@{b(I)KoOnI5W; z5U!eD%rqf>1Q@{)%$y5hf@=PYnNEuQ2_Q`77tHj8>_Rl1Ud&u#65>|?VF{-CnH!?F z-iw&}BjRTO;cu84WKl>ycn)LgRmATA#e9h=hgK3ufS2U|FxSQ9{s#~y(u=vBlpz7# z7}_z{!&OKS0todCV(v6EkN|EL-IyC-CK8kYLL(05Qmi3~1ZYLHlE$3FLL^85geJ0c zgc>9utK)OB^CS|q074tt8Rl7v5uZ1T=SY#A|3`utKzN00b+ZuhagCfMTN{v|21xP` zviBKteL8L(Bzv!u2yOr&!7JoqFS8L3H975P z4Inh(kelyV%Y*{{w62x2M8Xz8n8=sp??<+?k~vvEb5KS-Pq2d?^4G-#B#Z%sVqV8R zXW7FhR&h5sQO7vS8ATCAjG~3nSyvd!Jl2n`vl3qsYU#l;NPb4Pg3}8m@Qn3_%*@bUClcB z@Jk{(~2e(ZpmV7!!iEDpu1;nnFm?K@*io zkR}9iWh~@nz9xekG8|(g3n)c`HzCL?Wj?EUn-iqTR*K_nrHT3a7d{T4-$*LCndLms z8|pd<)o)9Qsk>ha$8oM+}azjxke=C7H_ z3J%<5X>Ml@0I>As`h2nMs+EC7FV73wvTOj%guXuBdy=5%4-N?KKPIW7agZ9nKr(RF_dY+sdK6<2S@=e5r`T0MDuU~oo8WHQ%Cm6XLQ&9dtvWx1bX{-j= zyWIaHFFp05EJ-?@r;E;vp)#V#`Ab>UvvlN4NX{ci=22xqyeT){DQqX^L4w$w{sr>w z-rL@0eb7%t_*vohtv|TjK4?lBSkv1UJP}ol!arV=2}aIYu`c{hvjKbz~**nLw9 zJ-KIcFhH&-NR)28T}Fb2E?sKHon&*RFRrjeU~(pz61MYRqrDgMc=2<>p1a=nYBxI^ z23<4B4&0H_Gdwe;N7Gt~&1c45(_0#ISSSiY`%h~hu^~o};S@~1IbiFwoNQcmcBD$= zH;M_f*5|^Wp2;2}WZM=Yl{-?S&4sXp=PLf7qX~f`Q1e|@Sgz*tSNPd2{?{wKz+?hQ zvez_+dbKPB8p9DeMVxnr-6#kFLX>F^E%-enY!K{z4|Ax&zze-@XgPXbAz$-<%_~uF zN%Y@TSN3(x{>ItQGt*7deZeVQoxJA+i(hqUEr@98Br;4l_ zn40|~!sw&F`TfC*mSjDlP87Q&`Ve2B0J8SP$jNylGzVYr7=L9)pb0ehhmFugwf4oV zlbhuUTzX&HjU^-Gsfwso&ubDaYy(;&Hl`qJmGf8o(vEP>!e(>lPguvg&JD*m7>SuS z{jj0f2z4rd(4%TJP7;Xm=E<^Az*^rne6Veudt3yveCk(j z)u5tzIP^}aeKGAR`kVa*brLA&Rb5zj(coe6DV46UOg`3X0<$HJL2h@Fi-zSkL#kEQ zUK|uK@TxL4X3s=Rtt7B89LUDoh8rplhs1H@iVR#FM5=?U&TOq|2x%=0Cs1`rmZhG# 
zN!!}aFf8Ad5>oSvX^504yLK+GBDC|Z&Xsc)$yx8IEb{$K6=xK7&zgW9pXX}EkgHyHJm)eq=#C1$LDw{vc2#$3hCZ|gP%*Akytih=5=oTsq+)!7S&uh-$tDzkn{ z*}xi$Hf)Yfoe3`@U;%#rX6B*D6wxYDd@avPfA52c5=tr-U;htkTij%5`BMwA!-c|b zT=-xIBYnS(j&7CVpL1Y(f`L_}X|gr=$kC*<=%H5lrnC-Qr=c?|b-VTvzp zZVcxG@G^n(AhWl9(1Tm8CihH5etIflq=+K+=8Z0#Zq{zM801!;pyk~;KGZ@yJSuu( z-1PB-?#S<}iOyp0=pFqI%o@qd6yAKRaWRM`Qn9VXF9U2R88YrBqOk ztL(9c5q*E-UNW*zGC5;HqUerZ*J9xF@O(x+=(bH+TI>TTzySaY00bxipaB8^0lX^$ azd4g$#E(>(E2l31MZkAQpikvCSn^*N(BhE* literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_more_horiz_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_more_horiz_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..00c036848f9b44980f5ac1a001b1b007ce41dee6 GIT binary patch literal 533 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJf$@%~i(^Q|t+&@5{X`rkSQE_c{@y?DKb!jANefD@d-?SLl>E%BnTKzEI=%nzS^59ZIVR8gL%Scd4wzB8oH5M#eE5L}+&lFDbl&(>WB>5r{}x&2 z|HX{zbJ;AUK-!O0zABhnZ~Wztoz{*#+XJDzKc8}2WK>Rxej3aBvo@sQVphZEs86TG zH_XT7vYdKK^`{ zx*ty34uzP#d5o~lG`&`uJ$RRX{Uz|io9hKg+I~a3tWJ;2g>b=_s@}GjhE=Z%zGg8q zyCuf&<~K$7R4vhFEN8+`Ti#bI^lsouuF%HCf?BaFt3F+C817VB>^BU_9t7pAoLRwI z*_Um7b9YXL&S%6{e6o5TNoMF?Oa8wLlSH45nFeLhx9|+S648x#r<(dic$LDQD0u}? 
zb&WW`OA0w*Kf4)H%uIBjp(jaKfle5LQx||v9Cie5Ybp}mBwa9VdVhY`5Q%bHbp51)g=`S5US zTD)EI(G#i!hxP`lKvp8uN{lcY8n0st+RLVohSSqn-Q1*52JKbeCYo7`jjCi)66lB) zhCp=_WfCKft7P&Lh0|qBE5$h->7BWEHc$_LInNgL4N)7{!1{~gT15zTa%hx=#nl11 z2t|H*%-V=;+WKvxWuC2>DY=jJ{He2byxe$nyv97pNAb(G0Xh5tW|TS@touX`tOS|6 zfPCk|;WQx=IJ3Z)&{H+39(c=8qRI?wR&XML2TU-DKWDb<8(yjhps1xH;Vw*ZXius_ z5l#>1FDWYH>U#VG9CrMqTjKWRrfq?F>sW^bhuF6K6FojC7y`-ty}7A8H9b8YxIR{@ zDK96N%B2+O*FM+$%u)LG?O^M*kwWRgT;=n!Qj=oKxvyW*>25~!Vi8nmds`uTTQv~Ji0hGL`l^P@MU#e)T=;@axIQ-D1Z18a>KoufxY#Ss-Kq~Lo z5ZqC39eT=8D+cVYHv7)` zulPWj?W_m0WWN=}Slu-X36orrAW75U9wWvrUL#<}GP#N-{IB$LWISvSPY>opqP219 zW*;68S6n}qCUGZKC8v;bw(axQ#pST<>|CKgUN7l(IyU`@9yr-fF`bo78Mmuk>YN#-;M~O zFfzzba1azf4EOcAW_nUL@J4#bSj!~db@tQVB59CX(wT-+^+T*mCUYBxvUPj$5H~h5 z@?J$qi}|se+i7IX9Piy!J8(P&~u$_-O}qaMRY_o^%veI7r)zI*fgYCt45LtsaRX@O39W@_rt zP;vKHL|BbYy_g#7`Vz(hYZQAC0TY(#;Cv*Q+|cT=*gcIZDTBHYKsx@X&nkQJB z!3DZU`hkPLX(gL&tgZ1q2|HfvtZ{wAP#)TJOVXAbcu-hu)FKKqB<7BfjBG&12b--AQDsJoZf;1b>yX=@g-dVsy7%OAYYD9CbZqKCC{k{yCOTOAy-2M;3r2S8{G6ybt}eSS zz{PpnHT}ipaiB1sq_R9Mp$+59T3B<#akGlIiL{i3#SjI>x|NQRCdj3%;NfdDR35KAcfS{jepYXDOY;`(h+C2v&NTm(bWXjp^mzM8KHl zsV(8+;zak`9h{JN^kY4P*nq&tLWZ(GxSgmloD%_4g@F2L{WsGWd9&mrJ}vOiD|PC# z@Z2l$?71<*fJysDgb8o+W}IEj+#nXZEtQf<9a?U_9R*kX$L>{ANo}oH23->~#M~(j znO07+4Z3J3y99O$={LDe>$LV1uI(Hl^BT8ThM!uwP0*es+4nbDR z^6P2J6i;h4*>=9ysg(lw;xwj5Ev#7#Wz#sE$7HLNmu%F%qaE2H4(i%431sFD@(K*Sr3GABCo4$nAa3dVSZM@NJV!^!Y zq;agDQx}g$$}=)+V>4KaVa;e4uBa_0XBR+lhP3-!Y~Yw)RQf#^OmBNx zqJwtUC*>{DjtOgZQw>76utKxZXyoCUXJXso1Grzz=Yt!>my5pX!;aMgd&_Hj}lEL;cuvr!HWPG9^^_a}2yZ z;`p?9f=DDzrPs@SpkktmJM{BoeyU`kl8GXWB^@6h80ygE)^#QDK2BT_$( z-RWLW5ZSck*t@Wc^yC4zH&|&Ijjr?Ld3yFEYPDOy#y`hOP2$V&oqt5VeSO^#QJMGR z0V&CL8kwk$>_+40W*}6ihCC%M9*u|)z<)}45*|cCcn5tJ%XMtXO`36B9*WnqImODe zxL_miSc-Ro4>o2w_xOFuHit(>JT;BSJ`mKN{Z&iwh9d0j7O9bF+jb5Wnas@0B0+V% zaSmk>`ZKOTzQ2vO zVCxHJpexw{PJn$Hc-T8&jDE8M#gq<_<~^0T+}<{e>ujFD7ml{on^r9+%h(bpR(0tT za6~Mh3!uP63}kbg)%LG^2Axgvf<}TAW_U}S{fY2@^NQvB=qk6nI_3o`Nn87>PjCCF 
zv60uL{S~DPV4)<9QsN+N44T$gC+K-;y#biF5t4LCs^aZ^mbDMpTgV=l_jkUbpUzR_ zF?-JO7-gb7xe>RfFWY;se+2q{zr4ip->zm!cYkkwcEG@jy+U5V7t71b@j_j?H|lb( z-|oKJ3MlnCA#G736KvZ!RIbR$$z|O}zQ_Unb?-?E4Jk2zAlUuMm%rOK)6e{yh~-t7KMOOj;cCu**_jHLzy7+w#i@=GEwpHU?D)B zZ}E5A+Dq(x&4>yhskpXwCm;LLsY^eto+2e0#Xs$1_B2MO2^hbEbIZ^e)7N*; zi#_Rky@WzyGpy2;PwZhia?drx!Lj_gidKBal3;9qpV#xBAxuU z;fv1sxj%LdGwN@1sgg-pST~&ylukVXIBAnppUu&ZTE4pC)h>EBGfu`Q()bVGD&<=a zl>#WE*IbNml0oQ9As)`{k^RG)QA|4$$+Xi(hF1Mtr656()aWLqGbS=nf0`P0F%h`! zKBu5VDe3YKh^uv*u4~;9-wXZ3Qj(<;OO622KEh=1yv*AFj=VmM8>?u8WdzV3IigtVnALrJ{`1nW3QYBM2 z3TS;3pI-b7Mb`5hJzv+y%ZvYn-F8;15}4SK5|cp^vSEqRDPUOy|1Lj2KU_jDQ5=3z zSU*sDz1Qh8k$(5W-(c?`iGCpJZ%Kx6D-%^|Li=chbd;f-Na^y#0bNMdVP;shBcoBvmF~0yT-n^ zkB@9Nvqz%e>f3su_%yXl;GqKX>dccNPplE@Wg<5U069Zy4V|n>l?GC7AVVYw+n&dBrOqct;;uu|a>R zyIfe%N@^9RZ4~n;?^LFMnuc8+h{12y?IzDY_Y8F>&E_9a zBlRD>fWsUVNMd+YNmXrDy1hX6+5(Ncsp&Vmq0^IoR$Hm@rS{6fqEbi zNZxJ3a1Z??eq{~J7FE#Pe$O8j4)-FQS~K~vZBSXn#>T=>>YVO^8RKg%w$ojO>n}w?L5GD{*jvg#o+=6Y>|~=g*(= zhS+u(qqE27I+fWsY%ZWL-Y*=b5U{IPaCl#esGhch2lcK~k`IOS6x%cgW1V5zfl z8gEYSW-`C0c=Yy1+*ihdsz@GzU}_%y_R~X{ZXmSR@hm#g0bA{R9l^GS=-An!HxFp= z+aes4{)l4dL8N&*Ic&v*BO!AJ zB9n8@Khf_u95a#$jv|bn>f=KpB3O1=E(u0SH`|+Pw&YY*ah9BKjsKgT2-&7qpYz!J z)7T%e_oZHfK|BKPzy+G@)H9_3D~$fRDy9`AU5ip=FPR4!_;`EYochCP%xWD@3$>BF zXnhkeubt0CGplyTjZVh{`xA5JB$=Au7AoVYF+{Bf{-*KM8udb~k#`!I9nQ|qa{3j^ zc>eg(k(!z+|A#Kzd|3CZ=s7)`rJ~#S4>veCI79_bq??!e(&qbf7lyWdNsT9*3aY#z zJEn<($6bWi%Y2chk3!)tF^z5R7?Fn_LZ+3`W`TSI91(ptAQUFI2^@1XsS-zbGo|R* zzAMv6ZVEvYCi`~m6Z$CjnGm(z3oUn=)G|(-mA?ee=X^!fGUAEHvw4v8na&NpC#F~S zUcdN2z8vx6B#FL!e=O+f!K_eR=vZgu#zXG9aCqUNN|A|(Gctx5QvBsn$XQdO1Gg8< zk0t#>T{!+TOIHM(9x!xkn&H>*DkNS`Aqu|g6jO6`fMSacV^R?Uj=C1?sI;hAqo`$x zsKuZkbU6YVkJilsTihwVfUUJr>g3$KeW8z+dMqw9V0Z}l%>><8+AYXI$1+idDp$;X;AhSSRd@9m3%k4|yE50!1+3DNvL{B-HXN zB$#^An&wDF>iTyp)vUw*2SP6Zod5s; literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_photo_camera_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_photo_camera_white_48dp.png new file mode 100644 
index 0000000000000000000000000000000000000000..f2fe54bd511e7b6c0303ab3392f12d0fb20359bf GIT binary patch literal 1837 zcmZ`)c{JOJ7XJN0EE$=gh^$-KYb`{Uhn?)RPho%_dk?m73~Tu%=tI7}V}00124 z?C=wxX@94PARoE!>+=9WXcy;Tdoc-IpJhdtalrnq=+%iH$`tmhQ68m=7}Ca^2t

MC;gY#}3%ZI!JtL3E5>d9pU7yT7ajPn?IZGir-_z)sHnQhRDwbzY8 zZg_K_Ky)5T8mn*{(z#8F3p#I;3q9c1X_p zG+Wh|8e^~+bJL=gw~OPVn`p_ABze%s%VGZaqMPLf(rsWnitR(B=Ht@_j z#b`jFl)!Zv5Ku+KiXK2TGc+?GcmF1ux4P`L4EOkPTLpLQ=sDrp$dC*#_4Y@Ao|IYN zE&O$w?}UxMJ@uRyIOY{x96+JBbH3OzJiRhhWsuP%!b<3o6p`2I$+$);vwy4*O=Jo*A|H5~9PA zV9~Bp;3*`adf#*+#L(XyDaAHI`5hn3s_Oi?E)s-0K#COX8 zss$E`g73x=H1(PuW*0pz(Ep~u&QXqrTe~YpXHlc@e?A2Zg4Y%_hE7a2u8H056GaI~ ztu$G0>|k;E%YEGI?O zv!=l%Q9b$xi+vv%sz^{WxTmH!|Msvv++Kqx($sM;@$){Ec+`Xf@sN=_!oM*DiK&eU zkSufsHo0)giM^EGb}Zl~NZb=4ufr|AUpx|)gfPm*M50ak?q z={ryfVZ6-Ii5KRHnoPz4hv~-x?bp(^rZ10xYa@1h;6;vM)t}#iC@}owsUHfVJW-#? z)C`&{TcRaKb0wlxiuP*s-$G1#W}Puly#9U@>%9FT2TOUWPv0PaM~2|F{|HLf?%6S2 zP>+r3+2<3+`;{2t@~4759?~$`KVts~X~f|BATVI?RFv-q0kKP{iMr(_MG zhJEwgfchtoY&Vyh@y^Zxz z1-HvDu=X@;vKT{MFTyH~@%(vjk~lLGW4rAf>T-7@aG%s%_3_PM{gT?MJTaLm|UN{RBTiJe(t_i003&dU?Axv@1zvFOYdq`O(wzyJf2(nX`k6nzVnqhy_=X ze-88#f^xrKX^>vH9z`w4g3#33LKI1bgEhdMqDBrbHrYe+XNV1<(KnO7wP)xMO`G`p zd4zsdU*yOIv_xvzoS)LYtznmc@v5G4yt8lku>}!v;g9uqsP`?Y-Dne-_-ma;v!PR! 
zU)1_RjQ&Tj!L9MUbq#HOxOt`E=7V3!0fE z%Xb?vCve*|5KKETH=I~mB8;JC(03Ho^MQa;!ir0LAEJm?T~d1VfC3>c>N{hkXc{;d WUx-xwt%d&v0i2_U1KBRaKbD-meOJQ7sLVr0$TbM5z-D>sQ70Pj@BkFtNH^(oH z!4|APvy~sc=y^V;`h4NLIlpbP>(lRjwmVmQes8#r2+%TAps@T!!>`b5TUH&LZz52; zZ&l{htLvHn&8iQ2Y?-xS|DPRxOY;B8^*-CyX!mLHt;x&&riYxYWWM}d`<3Udf3Hn0 z1n;{Z{abK?!Tr~D9~U;(eYVY8yxob}7AU_#+`x;O`HB0R^M6eYeD>Y;J7B@fGev%L z=6^>i34{BYVi)|GnVBu<%Ql;bBtrzbHQcN9f!laYp0}Qvv#a;@ zx{HrfckrpNT z{+up;eRa3kIcNU8Au`(o_A>a(mWAyZo>)3f%oOz#vG684?fd7uO2TAyVydo9ky^z`_fbB^VL0vh*o)fYI|3kWFK zC)Mj0a5LWfY`kp$oTxn~s%+gGuK$_ixBD*t`)lqj6=%NhvY+&#C)iP#<&0{^|3BLk zm>5@nHtrLjuev!wVVm$pEpCZIiM_mHIoIb3{I}3#XF8^H{;!zOtt*SIrLIrE_TP?0 o++ejP@9Oi97BV9xT?o_VRK57!g~`n#f$<;#Pgg&ebxsLQ0A{$vvj6}9 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_play_circle_outline_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_play_circle_outline_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..81a2222356272d2ceca67f625d85caa5f59b86ad GIT binary patch literal 5581 zcma)AXIxWDuuljbLa(9srXUhPdgw)^Xy^fvA_fpb2+~wSuc0^T0t!f1kzRy=bPS@R zw1DJ-pmY#{3ol&X_xC>R*|TTLZ+3QP&VMGs(%cwK51|JD0AN!SgInae_hQgelb^Td zd}zoMFyNN49-w}L{|9+N>8X2D7XWBVWjMV{MPAdPOzZ;y05;6U0504U3MPN#!5G?M zZu{NC1l>h@08n?4z8HC54~#HW{))U(@;|wj005`9sevv$*!fq{gESk@koSadIfwc? 
zB#=<#O&^n~JX^`?q6q_r^>`peD#JF-0bfv|!*uFM8IO~1e zel-wy*A4p|^Foi!Cf8HnZq&C#;+5j~PkElTbD@b$P2aQVRkh!bCjv?W>d(jgV%uis zR|_}VH}?>$PXs)!KD{BCIqd#_PjL4ljiAlc-ewm_E6pPsP3nh{=aloMjLsBM#80$m z1uADk;jucSi%5!7z97)qxHmv2s>CB02EpYAtLD*nQ>#SWp^yxiTJ}7p@S!~YDMNIE zIhFgfRuIxUX&!NDQWS$cb)}b^H5mk{OMpIw>w?LwCgnToUm^@$bWFbg*u36Db<(TX z`^t2_09esPL{N1z9c4`>9sTM>|1c}vY-==e11=?z1c;u(TV6CJG>2Eg6-_V%bvF$) zjDVqk#EqcO@}rM{a$$P_=k#`R+)kb_rB*uWJ0u8lOi?xJS8v6^mJ5Yt_o&2Fp8gE` zE6zN5`G?iM)Zt873IQ?De`pWq8vr)z(+W98i!^P5?wN!9+YftxfO@DURf{O^Q2K#J zzC1O8C^3x+Fi|``O?1NBf=GK~qBvgCZy-%$Y=jIip@rY+35})V5dT!3?zO&Q-mDiG ze6>g}h%@Pi2#(iH@JPqS%vK>v4K^UIWa4H-v^)!C$ucj|etbAxef$0K@v%dSkj@#( z({r^CM8h;T-|YP*`NfMD%$FVBtR1Tb{DV+LPE}lgkK+rXTk1i6Ki}~@dVMNz=4)Xw zDMmuKdAPU`sm^|_x#>+gT!B>aP^CieVRb;~V8}})&xt~P*W$HbzaF0srHe7V^;^`Y zY7d>RvdWbJo*$+NXuMc{z1i>SJK*_y7eettH*HvkbBgC$Cak|*b%#T6ko`}kN#9AX zywmX)S*wrke|Aaie{M#2d-XRLrO3Z#bq)GK6Om|7KBZn1m)m-m=1`52NuyI7lR*2) z;jCYek%TMlSfSdU1LAwGf1S=dWmVU0KUx$aEzv2>#H|#i9rjX>O+X`be=Q?ytLwL9 zHcCV({Oq*A|JUM*mO)Eq&QbBMb*U>4{}c}of5b`_369Ti@97C&Jp1=VN9W!c&r^hb zykaIOw32oFF@Q@J<*6Q;4`YJ(|H*qYHQ%(}h%q*pqq|&+|g7gu7tt;BczKOB5 z{Q?SE`)W#m96R5Qvc9E2^HUV;RS1LdQ5pxc^A5XjHaNCy+HhjNkJsMmc_RDBCUI`n zk^d6IJ-i)+53qVDezof^3bmaezPh@?nG)O|WMSmHf7!f%)^#%t`hw9jFtlVQR%*un zQBkXW>gK7QFfO^{P21y!^?x+1Vb- z5=p}K{YeGCeU+ zf}3K$r^cr4IG(u&U@-_(A~nTr`ZT4hEbgxVH!H)&*T*$g4sD<2d&AZt&dqD zZ?O2Q3t z-~BkgNL;hll9XXvp$P2cHwV&`ytOY{>N}XGO>d~4)}%#TkO)eeIYUY9?d@E0@A;N` zvmaWuA_llO_^=}~ekSFforS{#*$5H&dm(3kR>Lu)ms=3kXASYm0vd;p`}_L^tY#1g zl$L5FFmj*P!VSaKZ4;;GGXLR%`gH`Jmoni}NtO{aDmyjvXl1DHVGQ?Q zy3|<6G1n2lW!pc|&`_+gmwE!9Dnx)DL}01VS869A zJ=-bY8wr4?Ul*+>9PVGfB3MEJwS?Pk#eciRK!_SEs&H+^QL@Af&Hi zGlQ?ipk3pqKLTWBT}`J6gm+@rPJy(4B~bz>wv6v~Ifq-x9|2Wm*#^lnn}1@~4xKQ{ zmTlY&NQ?_BicQtinu#&H6^xa2FvUClQ|1^%cnl*sU#cJg1)m8y2J00Le~ZA#gGsd1_K;V4Rs7 z;{v$wFjft{GW73={j*FssC3tXEif>!jVWi>DS*+k&5nDDC#qWbEzer*=C7yjpekuU z3D%(Pdt(9z{-x+9`3gm9b14lbM#e(Rlo1Lf6d+5cyKc6()#vU2kjd{PSsc5%x>Dt& zJR;se0NIrpRSgZ32d7O!!0eauCj&GF(W@jpk;R2U2n6om&qCvVR;}a1Q45a 
zj2#)yPajkdp~HVJR12uLyx<_m&3y?%XUJZCvi#-xV9S=jJiL@m%495ZqvSB}Iklyg zv;<*gII}ILJINXNcdK}$;D=WK@A6g`N_+rtS_xJ+At!T$zjoSR4NcT(x`trl=z8&# zr~nfG9m=&jAvVXNQxVF128}Dn$ zN6UbCmZd2+JPOOZ8AA(ymVNOKr!8RQF4emfVzhks>YPgHU$5lDe7<7tpVK$5@R~Zm z(AT&e|KrDx5QB@8oCfGAgcxPyQYi*S&B*4Cz7?N}dS$trOxbj9iuvQuiO(>pG~5A! zKnl{U?lCM7m<`zu;lrLpUy=I47UQfz^)_leSET1`86N)Gtk_HQK@ytNQq5jz%1&y6 z&+XStliN%5Ew1};(2(o!?VH!Hk4lv}Cdj=l7FHP)?Ey3et_-E0TwHE3vg+%_Ug{be zMJn*vVk%kg4LyikBP-s<&u`0yRBY_3=VtuA6Sj}wBPnf_!EtYv#MAJu-#a|V9+t0<{j*!p=Xqz?hPk>#UUHHev#mWA9 zr~QCZbfa&Dq9k+Fj9#zBiQ5frP$6dR z!?Q81okuFz&K%88ntNkP=PdK}N!nC!rq4L&V6^dbb#T1DZ~Wg}79t<=QuohmsdiH9 za!XvU*sTPpdLuKQD}&gww>sHluO<(F1jTCRYJR$eXx&!uZ!l*zjgRxGnM;a+suM%E zzqXYdr#`u@rT|AkV65bGF?&AMLQcWyxo79}Q)}3dA$K{7S>6vO^NI=Zi2C!vtjvn( zFS2+r=0qN?&0PyYlA*gs*8197tG=3|!7>zEmZkX#^~0M?-~Ifry)Hz+zi0)+q+oWE z&8}ZjOl|olqcT5J%0e)CjwsqdVootoZ)^?3(Sz^5}#lXUd71 z9*(KCge?lGongY~&ngybDjaZ-bm;KyXI)+Y%3D~3`*xM1$9R$Z_~+P@77e=L{nKbV z_CYzCI9MR4F=G|H2EtDm_nafIFjesf7(ul9JiI4lv(1=gq~c)Dfp)`t9hjgXg|KnO zO$TPUZ-_2L3(bz=%J9B6oe@cn?c&84xo#r6LNhZyQ{4JL`kpxI^Y-?3H%p}n(e#Ut z#3bK;*d;#uW6F|sD1TMqZY(tP9tFxl-rkCggmq*q`Id;tKE8p%Hm3iGlD+qcm!8yV zAFq|)&ialU{7BAoAVu`%QT&R>Y-W-Oj;@#Yk+YZHwO0It12`agP|^Ey?FLJMn^hiS zptwX1J0j6QhHsCA{)2Cs++O1} z)>o4Ee{60(R2OrN4qhv3kx1p%hp?|C^(JixhUJLiWQC1Ff6jOO7PKhu;@p`L&(=r~ z#?kGnY(uw2p^+ly&DIf$8FB3MEKy6+{Mv8$t{B5N#Ur7TW1o5pKC)nMlP&y{N1v9? 
zPd?oa&TgUjKB9gU*4q?5As!2Tf!$mF<0ZmEs@=StZ+LATToM*mlC<);+4i!D8+|9} zC{OaFoh6O^Gi`>H+1bL-uWo!DTR3wS!juDfgHGPBW7{i(XIV8OTYs1}TdTX|R3Ty% z`xkaU1$EK`)tfy=b7tWH+>wDor?&T5;u%-foCy{3#?x@1a|{gJ+|m+mN;Op_^f0im zlE3Jn?$clRKaxy+PSr`jqduBouj0xtl(PMhd>0PpChu6$9Dz#FqRJIbK-59r*0)_{ zFcXvS>o)y^{x+@RN>zALc~?T+?n@e^4~r0`es9P2gAeo5)6kn#--4A0wdQx&XN?m7 z{WqMPA}Ug#Dru7x3f?qE^gyiua=GH?j$94_FMV3ye9y7GG7Xmx^MPfxv*7J zQUkdR^-HIoyjC?JFw3^Sz+c@g{E4^tKo|$qeJ^o#6Q;-;4Br^U@p)EN41;`D=BiYl z<12&n-5Q52DimQkk#LU3Vi;J!?guw~7;%YXu0&@5Um02Z$5<2>(YNr-pxs14eEPit z>IW&3ib11NosYE~Rr{zWjE-XjYJW zN9G0EkB?8R%Z{-D`3?>a+7Xk-5bXdy?G7DM`)2E zGGu5Q8XEd}w_ut0E^5!g(b3P_*SEMecxRdKpYehq5*bn`@TvOFNkpH&crctHp}PCG z-ywfaZSmp#;;FUpu=cIOsluTpPNXo7_dC-zAGyDR82< zisVB-bVpxlB@}N63BYT-`wdH*mx@SABC1{1YQ}l-ML_>yDYj^2tx(SvEiX$AzGf(U zvZwlAzbR4*YwFEla%xwWmNrPy8!)N@ki$*ns%s`f2_Jd%4IvEp5(@L?GiB-g;K|yZ zsF~sltBVfTo5V@r!vm6i?bdAXrQR@`ZfTAwJJiwcdth_Fu)?t#F_b|X($xJ< zCYn#1Abz{+UJZrf_@IJHKxVO)%lZPNFfKOQjUMr{iH)BEXNN+I>^K}{twQEzb?Z+y zAB*CQZeAJIW;z1zxNY;Be0Pkr`0WWiS^pEgs%+Qy-T;D|K{;z)PjP(bL=@T9Zxlcg zOz8F_ZWB3ssr8Tiz8!_1@}B)v>0Kl(5*J;|?oeIT_^oYFLAa6_356J0r5W{0(@)*! 
zG1P^C%cV`q?{ZKfp~+=ro2)qgKTM1hxZ`k1D}7AlZY#jl(A=P2&n5DI0Lhea AtpET3 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_power_settings_new_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_power_settings_new_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..4e42d065f6e55da020b5086b4a8d08d111e2c1ab GIT binary patch literal 2167 zcmV--2#EKIP)G0000O+NklX(*<9jCSe9iA0UZk~WkX7Jb0(1RMM1vE0H<49zD%I)dfVQk01bTLzs&JDfaUlt3H%Ju0ANYrd4Rc`2Z$hnMFhux zIYCvL0^G`JK3u)`0UG#ltxQczfEzi%N2~WbKm#A`7!%SEU?h7ntM@iQ17^Fq#{U2t zv8(qoKm&Ha^F6?7+|+v)paC~4eGagg0YEz zJ_g9J9am3y`FFq?Ty6C+z;|)e&o{k&3VMuw+${Gkz%;sWQQ%>(-vhqkn4p8nJ_X3o zjGITje-nNTH`{#*@O9j*Lf!`uPvYibUjkguX|P|3$^gT78M{qM2Dlx&i%dbO3?Qb{iA_HWH#wn}BQTm_|BpB|UZa4k#XpC)Gc4>o^9$nz_}zD8{R#jyBq zL0`nCz$bzTFq?jC7RMi8lgmp=5CL31>+j7L5p;a%r>ZBA?ed^ zWHwTD@!f*zBx``s*gw(2_-;W5Fx!hFn~fsd>$XoF*Da`AGiA0};jw?JJ5(0WEvQBr znf*y2+9I=z=EW0Wj?CKinnJWsXJt0qj{wa=A?jDN?PUB2(4tW7@*zN(Zjjk-g=&w? zKBnuU4=`P3JvyLJ?N_h;bdnBmP(2D&w+<&CV4BR1Dr85K4{)>0j$PFOZjL^{1eqOI z$lkP{PSOEd6|y#&l}8=G{ztScWbMgc0i&<#0ByF@i9Wz^nRO~$_D?q=`2cx^EN?%Z zp#V|pPW}qW$gD&mE0IO?0eWOMN+BC9vwZRaMk`W70lJ3*BpcxTPymB>qYrTAstz#5 zb~@1qI4iRXg{;DMI?)GclUb!gR(Yj=5R+B0P9+>5<8^?O(O&^4hpvER1DufA6qPDe zBQ(i&I?)F>EVB~LP^e~D&+AAWkD;wH`>aA$C$o2SF8Tn{0h!fZwE+%DaRk^Wvrj9l z5am=Wv;FY|_=ojfc#cAJr$))_m3RW!|CmJz(L$LqkN{#JJBrzBD8L=U>`;8aCAm{( zGgYO~%+<&3rxMq1Np6$PLki7Wc3Q5RYQB69u#uY#xdCzpEKXFE%wOkkw+-!`OHOh$d|Qi~?B`Hd|5L0lth~ z6$)V0*xeU@fKpClvl#`jE!eb?i$8$)DRy-z<+}^JwJ3=I(_@m+fqV&&Wgl*qA>%FHYTHlR>5B=#xcer-vfwB&QXj`zE3HNd8MqOi(;JN7LNOV*s&)E(WHJpECztq0QwPPBSo_EJA4wAQo_zfjh_#m^X;4 zA;)~4^MQ_Xr^dAS*(8LE44wHl?tcvGL(F-0naB3G^# zG+%?)uTwg5X(x3-7vG;W`u&Yjg(@#?l7?yUUediftEdsjay&!YtRst}pNMN;rGvD% z$O8z46+qm|ZqndYrlAl6h#XJThtGO>iYy8>fDp6Ui?8-D3!$(Bh%8TV5#L;31tlnq z0QTL;2Kw$nn-&V%g*iN1+t(ztuVJz}%8RVsrs?!qH z={5}(I;@@Are<}?LmA|?@tUU^&C>0oxq+r2M75t1~*QcAb20Cv-&Tlnx9n(Fxx+6yX2u(SZyA006)sf9rdEErbvt009CJ tAOHaZ5Fh{n0uUfzKmY;+AV2^D1Q1=IC$7%hFKz$;002ovPDHLkV1nx7(;NT* 
literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_save_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_save_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..4243804ce3af8e69c453374306fbae377cc1ff5d GIT binary patch literal 1031 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJf%%-Li(^Oyvy59lpw4ROs2E_L?aqd#XQ&sM*0>Mz{Q_0kw;TI6icAYA^_| z;Wnr_pCO>M_T)5%!&e{kC_XxOQ9NPBG>(biS2H}VcC@u}XwGFkwavq0!)m4}+5IO~ z7=qt$2P~W2;u0;kz-RMG20e~|J7$VE`!*^&1b*k>{O-`ONNf_54p+dHn4pGA#;z_6xmN&^V`Enogv1P>4UAR@B`K0mJ`{R1Q*oUyR+{&D8PB)xsKwav`Xy*1%DkU z3uMm!EW;wP?W@Q7<@KkM1rny@xijwIX6o@X2>f`tUYupcC9AVs$NwEyaJah7!u-+9 z^Th%RZnbp_4WFc5=e^g&@bT8Otdp-FYt}7bR&aRpE%F)Dal3;3|CLU^Vdj{yPF8Ni z37g!s6@kfZW}kIFO^CfTU3S6o2Ww`_B}_jUscw5ft$B0r7lv$}b77a5Yfe9S6I9i} zR{!bWn}#Z@hDY=3H-;XvVA!|d`0mK*!H*d^{v6qT)AY~|2EPy1|If|~kd}8?;M!il zIed;vkFdgrsVx7$bm?w95u&;Z2O90*99u8uxdnfoAOz7$!<8M z>9qA|)%zdPVK){yPw(HWCu>rBp1S}5 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_slow_motion_video_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_slow_motion_video_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e15b40b55fdd964675b0cefd5c5e2ab61f0f0738 GIT binary patch literal 2541 zcmV{+P)G0000TJNklr2P)~P!~`}2L@)qs;~{D&xwwFZ03il2 zTh3J!D0yhkrvQ-;0Pr)1DM?6iGer=a0I-?&8WF5zoWlRrs0#S^vpn%u48eFo8+T5^pP^*N5dyvb^E_bZJQN3>)$ z#h2y>2GWuO<$#iOWCVp|$i)&mDUN7UtOOZ;;CkxFL)73prc->IIY)6sTh@?`#f+u7 ze8OQ&WD&)62Pwr5?OZ4Ljf*HHpHY&tnL+VPjAw%Oti|LzdMhxbIGO1b$Ndy2e(1nD z_U$+NC~&0cO(EHPES?EE?IXbgE+M6mP{3s@C0qBCQvA?)j|5wpLM4TaYTQqn?2J_$ z(S`NwFoPC~2#)4AvU8W>h^`a?kY*gE6d{ymBH1}haYT2DSU^`r5PevTdmCx5XrMMN zn)G5r$!19$6e)@ zn{erAh-*Ys`3ISVscER0zh8{fYt$Bnq(->R#FhL0Dyc)N!H_Ru)M%K zID1b%qWBuhXMBOPH{=EWinD>?2MFh=h+zTjhAB%LXT2gD0FaO9fwQfYR<1Y_lbYcK z2!B&Vl>ly1;S8KDmap)_0QrQaI6G0f;x?Rp8-oBqNGD@AC|69u*^|)>5X@pe&*5ym z;tnqjmd6;H&Hd6g01)u4Iv?8rdukluf#`G_U51G_AoS00}KrFM9cY=GP4D~1R1gaB{j?1P8~s4JiGCC*-# 
z7Z`%G)p>e*dMSzm**0(jCaoeC;1&gdHUYgmgBoOTHaJoN>L?KO#aV_b@&Ur z-Uko+hZx{?1&pQ;=wFiln2ZlCKs^PHQJCB7Qc$SCe~jaAp@iTO+*n9u1&4F+K! zz9yw$(UkRxAAdJC;zkksDR^9;@bPzGJ?`A9aPVn@$KRSYxbqDq6b|aMETQ9X&q~}` zNh5`a6UZcR{AaQex6V|!n2@aTlj06Cxb=X-MhU)1(D+O60Pg)jX@!rP94*)ANL@t~ z_4xw#7Ew=eLoYV4m|lt?PGULkZJ@Q{h8}DHAj4G3C_*U9ePkoUX^I=VQ^XE)X{(5! z4KvBkZE-xUFJf0|9-xXsMok_kLv}_eZsC?lUyhKrd;@r;V!_&czc zY%FCW2g(<(l45u9>bKKzc zWCAnD%>vpeeyGXk!pQxvJq6Fi~;FT5p%yTl}a(-Yf z-g$^JiZhO5DISsmPvD(ZoT>Pu9v{at!1pv&Qg99HVijNmw^2e#LSx>KO@MDXP)Wp@ z{44SS7IP6PB^Biu&W6Ya*uqq*D(PszV{DCJfR|{le8GV{&$b8!c$*f=H|)n_Yz#fX zHeTikIfxUu zp7Ao^T<+v_j;5k4%d#xXvMkH8EX%Si%d#xXvLfNXNt`xl+VOoe00000NkvXXu0mjf D!q|( zMg{=BwEM&=@X+cErPF}o5lX=%<0)xTq*y>oR05G=L5!uGv$lX)z>C30aR7kEM})fz zHa%kP_m_9|Jq$*p){m(!ye|=}h&nxEz!x8@Ic|D-uK=vo&x09ncRYV^*3oj&x@RE4 zwrs}cu7)=>cgJxEiaqegsVk9bAz70*rzdZoRa55vUo@{Sh)&Iv?#r{uz$fwc0d}HS zrn4zE5ffu8rG)h|eg{-dIA(Lx53A8MJ;lUS^BSg*O|>NFaQgd0-9_Q)1Uwr8)fA4{ zP!QOYO;pE5BFN`>I|zni>qTH4_N`+2j+_YA8fS^`&QGxTe6Wg^HY;X6SoIvk2iv|c_!hX zpRhbwJJrRM49;o^xP1c-n62F&2lFMz7r|Pu;`3e~39RwRU3!|7Pc0D)v`?e(*5Px%?~W0Dy@;S9$OeR^_-oF zX;d{;xWYSfu`0x;{*+JHI=Y(!eBkP7kRt-J^wDM5DEBWZc=K8+y2$v zIG7ZJ0a`HHTht$%Vad^@+qbf=UkkZl3{|06TEF%XQA`7=*I1s&Wv6EKnYPvR!xBPw zP(s689Q2-9k*T5}LF`2XbhF7@D~&7(8okwQ$g=4tJt_rS)CXz2@r-}!)wKX*02gp= zWqDag;l?>cuBQ)z9V!l?UYGY}g}{}WI{yNa|3r9LpNE0a#(w)(C&6ieY2|#0^%z|T z9Ki{MP3i#_y^8EL`d7wsH#p7oi>{A#Emfu54X9dK7%{1jBA@|j@q*`mVt+i4NCKU)T=t0}u*l7uk^_m{L=wLQ9gBD2-X`HU!l8AzStCe?cy=TIa; zdtHQvvb+x6pa!^n*qqZ=#*Ya+eXf{LztSq{?OAi<1ZjjOIo!vQP-P&Rta33paT&(c z*^FZ4qV@djh)iFp>sqzg7E@<6_*ikPS<-dCGUYl{EnO1Lesk*xHX%=6<3LQ0D)N5D z+h#JXa-<-LP0!h^o#OFU!TNpp%kHE`&)A&3eDd3033Or8qlw`^jc6)Ms~{5_ ziBNWLyfwVSzbOW*k**wM%N{&p>dT0$qma?XEN|b-!8FwmQjjqON{$yht+>bLY<~YW zsEUj#rQ@x_zMU9m<%h@*h>>W$>^n{R1s0WU!)**AFDaHv)M z9AbE|^dl4&z=9R%qtps6;=ChLQLuWx!wltWJkCuAMn9=DM!M{@L60h=l4}zl2joQ_ zd89=y@%g-&&a20U7|9#Ss-*9(%>4r;(swE({>aW$>^P!yZL0`?p^?pV*6alv&i=}C z(S_3Lm;xj(ACk;S7&1w0nA0ZCj#cm-o>8LaI=jbOfqr@)dNX(5PG|Xw?Ksm8RC2v+ 
z$b{;jkf#01Uul1sbEX#%=?q8coY@z;YteDw$}_ki-qP`d{l3M$$~<014QXDVSWB)$ z_epmh?V$On8RguI&r88eRbi_c?SiWMNC?g!7vdnCGEcb)&Df_@Ko)zAXl{a`uNFKw zTwWkAksI?ARe6c#*7ic|vwb76_fb06;KP)WkKeMGX_#oegHcH{TVTEs>K^bHKY!sN z@Meb^Djc)BfddlRT~mln_+iXhPR(b`-OR1jmwQG&f!LEaTJ2(vIQliXQZ3};qZ|uX z?oe`W1@!he-26#Y=>F6%?=rDN+AeZ(2pirmneAv;ox?=C7RNz3FuH7qcj>pvJObkh z^macFu>!IX2(x1<~ec?f!4%)x7=;EpgO{iyn0m(xczjL1`L=({n t$5|^zSq1k07FPXpJ^$5{Q%rt4pS}3~HB=!8{GXSF@bGhIxnBMIzX4B-gP8yT literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_text_format_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_text_format_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..dc2b2f9fc130d8e1afb3154ba2a8322e11212dbc GIT binary patch literal 1297 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE1xWt5x}=AJf#rs$i(^Oyk>CXv{%(fU}nO`H4$$V(s!@%_)EU9 z%3f64_@wO5QHwZcGEw`BZJkrftkg~CHqJhA+lu+UK&8_)O*To@O|Iz={FD~+oU%G> zrj$Hqy|KXwbC-A1OPk{Ogl{e`{-8AR{EMku`)7CDT~NHC??zSR?~3LV)4#luIrCUz z;`P+GkF=7kuQKdc+ZJ<8Zk}4%QmF*XjmuNsCMYJm@M#>l-_f%%;F?ui=iRjo(Ve`f zqmI_>Q!qNm%2l9vU&!*1O=R{BkDT>J6*F(JMP^rY-{?J+V7t*gb!`Ic#{I_el3xnv z+%VnrtYX>?<*cUTowmx^ZQYXUWm{)GSbpSKq{`{Xj!EoSneM57YtU)8>l57VubUvf zaq*XvNjXYp2VJumryF>_F^lYBJ`xw${eyX9d8%OBiJg(UNz9hcn~vRZek1>7rRx`C zr#GyT$~uWnH{M@iD-)ibu<_0Rjm>jJvzV_>+8kS^_si1n4fEE{{*GGL{cmz|)SioH z3GY{WUUu`Qg-6aL`BlC#r$jb>Pf1S_wX6?eujrjKb<_Wcszqk2m~)iU*T31nV@~jq zf1dlT4lw<5&6#vM<^MswqIn_hKWr_nQ~w{U?3rM^gm2@dIf7y*RNf`5%bDmYleDfz zBbm22bL*?V@R{DfL-KV}yLOr!4Z14XZ+KlUwASzd=d)X!Y$4$}OL^V}+Fm@YKIP!? 
zFDv>Y+#}b7%5I#$>BjnBiISJ695p!|V*X~C&%A)W1}8L9jZQ6i)sp+9)>mnwhl)_A z3m)po^(S}N|5`Bp@RPr8vB&aH{_csp%4$B*{H*9Jqq;LDm9Im^_c-30AHwj@=icj8 z?CTcIj1FPQTRQXUD&~1=@4cU`WH|F&qtbunuZR;8mHaCea@6yCT31=Mot&N0$#tmT z!u8&URlPrJ9Pgdh68L?>`jAbiq;%%9D$$OuCYv2Yk3~PJ_3Gzx;cq`#9enjD*Q}pQ zxsDl}jaj_vQRJt651mANTx-1V8Lv{)OPjOjxheMz=X)02A+9za#@92&J{Zrcbhzp) zfBZys{N@(tC$dZZLl3LX+UdQyr8@JOStu|23F)Wovs8JCChaLtO+$dNu#h>Y`nrN| z1{P{rYx$+RA6@xog~e1^(dC;KKMEG0000aWNkl&taglFmBu<=0zTqOLILLNZvxFJs zlS?KNgb99LC&sdvjT|SAS#gfBfyLZS$5a5b>r4SFsU(3N5*+0fCekStK)pdc&3@uI zAx1fi$PH%z>+4T3mAK(3i|LmN@ZV)Jg?+ef50lAE1?b5#F5{J2o})nOg{i>~kjWd^B|#PM^8$rT;0|)> zLKc~{q7|8B(S?EB!2}9pcyX{=20B5Vla;)e1L$@n7?KL<$T z4b0o((N*v;wqxF(iTE`@gq4`}IX^`_+!(&VtQCF@upD#hm`yt3@zR+?J?1R+V}Qk& zvxA<9Pu7bMF=xKt0z7~j4NOOTwwW}dFF}Ey0^CUp`YOmle7IZ=qpyjPehJWvOXz#u z$IoWlvIc!M^zcJ~Oe)Y7V+P{$Jx&Z=pOE2wfH%-}g)v9~mQNkJ{^WIlhtPG78xRK; zr-@psIKbOH&vb4l3$eHxsYcfnF9Y?PuEfEnnBl7~1vbB>X3 z`a@|99S^%1pfi`yF#~aY=Dkff#KdN!<14b=3-A&;UPpWx0BZRqVq$Ni<9W9N^-CBjFjz2|g*;i%QoGcW-l&TkN{^`-%}q0RRyZLjo57>bV8c*jUt-Iu)QTmr>b* z1Tp~BXnrXD0F_H*I1^wBDhY-LHUL!6#-@)Ulbi{#50yMU>lx%0l}CRH8CE1OZ^REuV!B+Z|vKD#wse1c2KR z#Uh+QWq`c_oIn{GY85{$hN;s7uVF_b-AdazCAr7EEz*ZQGg*`xV=mV(#3JUS@L$%evg2MbmxELVxKZMmELqmN0@TvD7 zLr-CBV2F<&{$lZeh@A}QRYWmvdCmG&MEEY_ig8bo#ml%Y)&s1@SYe2dU;Vywp7Yny zOEC5#;^KabyKb=fh0L?o1I)l!S%{CnnGEm0lFY~0sn8xj?6v%*e4h0Hxfn~(KBUJF z&sctz(%*xJS)n<8_3yp74P#L|9%c#0Fg7cs$KRrSa#o13qlg^=HehUhD32dDBZ_Up z*qiP=Pd^*VWQ(2)ej9t+*^!&*eKComSL zgFOL+qZr#By5omYh+;d{FDmQ`@CwG7=n%5wKVZZEP4#8_0!&0@a;T0U#v_VNLuI^u z0Xh-KSXro!{}^eOuj!+7urGj6j4hXKv;*`oPZsFobia(icnjHI2m9VYW3P5!ln_$YfapXXss`es3aKd zYyhDQZ2Udy`843A%Zx&_7GW1^8xf}i3?*UfKjS%qukp$`6MtR{QHzu7d;nn+YK>fv zXs#ch;f4KlM>IEtCe+p;Z) z7uczS0$L^UpYb9(8tLPH0AUe&&d?Jvy$HWx3sJ0U;vMpkB=#fDqUTA(iva2DM^BYO z0^@b&5q6Ux88JTOL9&q~kN=Gp)1C0#OIqs99`wK_CA2ngBtX$r7hyqwc{=H zU7#D{hXBGzn$UNIfrt+`gi7=^a0}v>073x?^ffYz2;!qfD5S~4*O;CM5Q;G8BYGn~ zSzmTxP9fsA0KyW?X=6 zc(9wC)i0dR$Sm_BxnJI9vs5Db8KWbx0-x&ki~5jvWfFpS57x1hyjEQUd2AA*+waI 
zxu3fjOjkP6mR7W)Egk8~VD936=26Nv{*8S~X^r?dfO=E-mY}L-BI4`>=hvHZf@lxj zkuU@h(pg9oLDIlH(vaW;P;UVH2#SyCgSazy@7y2eEBvmSX^49h++8-yh~ihxl+XbQ zb^!Hq*gyimiL;hIh}#pq*$uphe8@p^(^)|Z7G!3`D6rx)Un3I5su7O;;PPKdIH`SeHpG{KL{ z<~~X}OdLDJ`HYvjmn_786a2g^@>sxnDv4oMl%uSph&=Vdrvj+gh8)H)of1~Dl><~! zO)d4rh*3{1)l_kSEv%x1>EzL$Hb@XAq?QVh3XobVKq^3LsQ{?}sigv>0;HA-kP47m nDnKegYN-IJ0I7vNKx+R1h8yytB%K}900000NkvXXu0mjfOR}y? literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_timer_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_timer_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..e9b65514dfb9c69a8c5e9a6c84335fb25686072a GIT binary patch literal 2473 zcmV;a30C%rP)G0000SaNkl(#1|% zxt?0)L;+@I=ki_Jc!uL-YM5TO@@-0@0NHFd-{Vo5&+7PJ1GZD(SAb>oFokU? z9%oS&#}ClKEMEeAi&qI!9}O8FKfp`O4u629yo3unxj4h)2l#(V!yKTTCvZoa-IQi@ z`~ZhJC!7Hi{E`V=bCREupTY419HumU0hTa`t6rfZgX0HyojG9&P((XZxNMTm6c`^r zz#+~GOMoi+34<3n-}v|eIw=T4fDKF#7AIL{eEa}ATn|vh9|)T^Uc=nAE(bV||6?Yk zI6@a~tYrnYR8U4SapDwHMg?`OU>*0<&AX(qYag@R4N%1~re}msZe{^-#BgyIvXL%M z1%3R%2Usz!L-+@qSUioFxEM9u!J9!HfA9gW<_y^xqKztKhPIFgIBxs+gAecnl4Ro< zuAl&!;l;R;{jME9lFQ>ylerFRkQ}a#ZX3rRe1ImBWa@FM5DQz#6Lyc^et;F6Av66f zwR{t3(AM$W4^YV|GVuv56e5-v=QhSH9KZDd=kaec(Zzfupq0^U;rOivDB|y!n`9#r z=<@hENejnsEx_+F_dYd<1FPpK=I%z^4X_?_dmQ?bR54wcnqsA^0WM|&QxB1kIJA6r zVCp00yBMH|H!;3j9wQK$^d!vqSqN@fTie-@`aEF5Xu-sZ$WSYI?!uDLK{H19lbvW72wNE zq0>)cr~||qL}!xE2NPf$dP|W|2N0H_cXtp05`2Wt<4EWO2v4FjOw4|OHRw!H=|zA` z&|76cz|-h-AYKFzdeC{wc7S=L(5dkzKs`EVm~A&eGdj;8-UJX1pmT%W0Q=Fo!m9v{ z=ycl*a5hQQhAHqWK#UWpjWf$$fbXDlKjKvYp&gwo?FHC{&I0cORHO5-y#W72?G41c z0K#F^`fUX`2c1n`253cXin(?IT#e3RF9X!0v&v3@`%oL9z{>zJMp65fodEmv_lUd< zpuZ&1Z70AmYBzfu;1<+|Yy>DpXMwi?s?j++_W_ommLl$LfFjbUEzW&_>rgv_cpE@? 
z54G!ZAK*^Zy1Wk1i`pHz5AY~zZC(f1hT8Vr2k1m?t=9pXQR~QkfJ3OQ@H)Us)DGo7 zz&ohbdL5tvwZYs6IDuM)*8wU~8_s=z5!A}O4p5HTNbUoSqgL#7fD+Wka~~j$THNaZ z3DnY2fM5fpf_(~#o`S}LeG!gcgg?%GfaB5IP=%r3KZw;uAH;&Y6cJsDcp>)Yf=@Q(nJ-Z_3x@K0paN z3%w0cjZSgy0|-N?ZS*!k3u?y@8v*vB*5z%0UetQ*1h^NqQ^XJtSI8)8zqS*g37r}* z12mx1XeYotbnfsnz@4uAKtni!+M8Ym7(i_Ru@&HvPvKk9Z@Ba$y(V-X@GiiEF8xgP zvrL-&0~e20$O+UYILlrD;aQW%gGajxonFLdfc5C?_bR|abbf3%Kq)D7>JU%1#Jzu4 zBkV(``%`ll06G!d0j`Bl$(zwd=v{3;K!X3E^Mn@x_MkINq5S~D1L!S7LcYt!dF@SI*=DH|$=wnwO=Dv@(8bH{ExueuOa?#-!=GqXK z10;AAb4hL`FW@Vln@C~qIbv=H5YFWTGSN#J641)oM<(88F5-Fsp@I=IF~;r05zC9S znbTzALoP(Z5I|VY1eqCRndMvUZ#mPuyXINjX9 zY{abzZtgQQv6CZA-xU2k$Vz4*E>3WJ^JwBe_A*4Kj?&Fmu4E44@`SLglm>ptW_GiO z7kP_ePBKP{Bx8Kc5Cc5VU)ag*tfrRPNH``$^F2VcC_oe-S`;7(5G@K21&9^}hyp~5 n0z?6#MFFA!(V_rRfN1{%1#L(3hf-e?00000NkvXXu0mjfTX9~2 literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_touch_app_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_touch_app_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..773124641ab9c61a25d4944d14867a7e4ec3dcdc GIT binary patch literal 1516 zcmZ`(c{J1u6#k8E3=JWXd6t;$Co{6lv+wI<-)1VuHq>jV5xpVVO_^jHvSf)=lw@!e zdC~B*JHyK^87iVtBay-3jou%f^UitqoO{3T-ur#${&&;eTV`PyvvG#NJ&dW|^K96qx-INcl`J(ZvD zz8~aAy;=Ks5TXAgj%irFce9t4UG(3;$zoa2!~u=BU=~+pBTwC(smMkDNz$F+;8m;T zGHls>rh2po=`W{Bm-_8Q-%NIeIab>+I*B2Vi6Mm`WcSwmHgpH=*%^!tN|2MSiQNpW z)Fia!m!t9VjJtW|==}9#;Y%pn7}+OW$QZ}er@w)75?_5M^xbjiPB|wpPs8$Dc$)n3 zgK5d;0ystgyN4*OJa?n#rG~rKY}>irkNVMiTIt9G(y%woE7HGrg>X_ zKQb=m{g$jwpc$=bFwURR^LyE)T(@-pck02C<|^&xR{-YNVZW}V-TdA7K^y2~d7D9_ zDJ?IlkL@5Iv-&R@8n}YJ$gA zn)zXMmYY3@n6#)lH}khxf5U3USEpO0q!C|~XG#2&Fe`t@ebX*j)k|yod{5Pm-`)~= za|c?EoE4Ww>^Za+REsNANQ&2p``fk7X&H>tB3^B?zIrX_tfZt>&D2i1DbIdjuWPi9 zK|LFo_7vTkQSoDTTiXgCWz$OVygPBrw}Nl?9DUwCO@wnDq(DWaS^glUkq1#AMP5`o zz&Jr(-k{^j5rpdB{)NC(tXmwp9tHX0nKAhn3Ubva!_Vr{aXzGpD`~+wO_+n**GIbn z`}!o2eGvuLSFA@HMvt7xFMTT1T~V(iG*mN!Ev45{MwO>+5$XYRLS@GDVFc2Gf!Hwb 
zp0eaCsk1-k$qgLhk1NoKEumApi-oGgTFUy##t(`GKP?WeF#YU3XaK?N{%hGYIbNm~ z3_Ccub!gTbJ`AdbTp$~7c#GB-Jy*X)FH!V5(eVSPI*`&=Vz2^nAv9>WkB9|`v4BP3 ze*hE<$gnADUjwdju>Xtj8!au|KK}Sg3`iWw3xOJIvjs{%@6Xt(Y52 zeLwdt+8#&m7q?<`RFd7pXG@qtxO$%KMdFEu0Eoiv%Ogq>8F)<++L`d056F9wN{B6y zuu6Bv)$OCLmgjYSdWCCl4B(alfy~Q-TJlO!sam*?Kwy3gbxiEA4+E|pomh{@Eddp) zt3bt560h-N1A*t3coN(S5Gel%2)sTHVQd%hr;ZJfp~4rFyM-Q}TDu?{&`8akV!!r1 z50#7`xXm8ATe#K)6$O^mvL~?|qlb<=e z@En>4j@le$9$yi-f>h%O!)YBVqp-T%e@7R8jrN}2D`9=kl_IB7oDS3%kK{?U+5Nh8 zMWi1l=IiFdQ5O6lAg-mProR3MkJ)RUWV_~^?ZJSQ=q8UtVJJ&<5X><9@VIMCK~Q(3 o*t{t5;kXf3LuM_ayN0^Q-%`4Heb$L;&HLhjqpgcgomD{cKX}Q%;Q#;t literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_videocam_white_48dp.png b/app/src/main/res/drawable-xxxhdpi/ic_videocam_white_48dp.png new file mode 100644 index 0000000000000000000000000000000000000000..c384d594f86618adb52610016a6e61128be4ead7 GIT binary patch literal 591 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE8Azrw%`pX1Apt%iuK)l42Qq;q6~PmMV1A&X z(j`HD!3+!y|2Tf!{5e%me>(fMy}C1}T%V>h`7hg~-TMF!p=8IEGZ*dV9yS z=&%8gOCZ~()XI77OWxO?*V){?Xo6Gd^#AsZ_ouN$*qs46o(SL|eIV`jdXwVS|90tp z-eOVewNkol^PKcFraa5FQoSa7`u)+A17R!Yx}RHrvLS9|tlK$z*$qKi zrzd{)XV=&{@v}9v)4hfX<_b@EIVwqDK4p$#!qAw)CxGejhW|&8iiLN7oxVF}{pSOL z|BoC|mOsDdEC1adDT6nG_IZa72rH+5b*}G;HTn@{Uv=h#`Qz_jxdqlN*uWC9zwM}! 
z%<7K!yLo;oR~&DB5x4T~?Db3Hr{3E8b2-CnzfRk{v-?vunuV&TetXO3_4;#UT83eX v*UOJp(<-gz>3g5OQ&hiSR0$m{vj4)nQNnfM*{!{x7-aBt^>bP0l+XkK0L~Bz literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable-xxxhdpi/settings.png b/app/src/main/res/drawable-xxxhdpi/settings.png new file mode 100644 index 0000000000000000000000000000000000000000..82d679bdc9a626c071238bbe60967f2954d75302 GIT binary patch literal 1962 zcmV;b2UYlqP)G0000MZNklqZQF=FIX)+PZstxf?*CPs%$@4$+Iv?|uU~zG z=KEB=Yi&_VDW#NBN-3q3Qc5YMlu}BmBt;bl@HQXEalOqzs*Cc22CN|n>uDw`c6>lk z7K(}$HHZ?NRuUUUAA)p-#71!`LApm`qZmSv=16Q5FA${9B{qur1Zj=LMv+ahBBH#b zEIMSqKIAGZ|_m19_bZ98L-Gjmm7sjCv9WdNGnW7)~7t zCz`N^?b5k}suDArGnuWJlEb64l9*A6>sZEi8FZHLA(hqaE{CV+Bc9QUx3J+ux`=0V zW)@r7-3BU1*l;KNwv-#G15c$|v zz!=I&A!yAcHdDBK4wNuq7Db!KRg{%I(2sOn`JH2A50vJ7zNBdHN_fx~uQoA-=CTcG zjKQ^;l$LF1!YDHE>L>{dz9BYWb0HBLGmkJ>Mhnt7laGkaN=iu>a0Y&4kV#l%vkpIQ z5ci1i7s)D^>29=mMzY#T9-6)UTw93!KaUs9VK8 z%JLWgqcW)>w(&4KscH7(XEr;jsO31y)9g-t;yQk+Q^XER@DqDezY-A}xQKmIciD-b z>)1E7#X=lqFojdI#Vi_7D9JUoOfQZC_2m>gQcU1zY+6Pw2?(`VhD}|>4DO`> zJC;yg0z`F|6pQC$^k)O+t)`{~jGC;*yv>{}W>KGcn6-um?ia!OvWg~R9uXeJoG3lS z5PA~DoY$x*wr~YGnDLMp#3Ptdz};dO9r&Bz{X{7-h*JDO@HWuj%~;Z>1S_8wVi+yS zCs^~SFK$qZ5d>+xSil5=^e7Q=iIdq%F!HD)7EqTwf|0`&;vOwnMG&ToB}^v>ztcg& zfr`9F{5pyybS8eEQcc2x65K-p@kkeoSV??_Q%b^y{%pj*0b&sY+~006fJWrvSMP*b ziq=lTiU@vXlOh(8LN6a4BYVZ}-KHAyUEGJahkVZ}B0b)8to4fu73 zgcWx?s&@Opso}g|>#8?;gjmLr_%%Ypii!BuLoA~weoZQlZ$LM(jPCd~LBfjR3G@?m zL~;BHdN^?wYjYZYJtASn9r$&90>#jKB&@gqzov*~OvSIuB&_I1gOIUGm zQrwK|EMY}QNP^qZM@u+yGFy}4flLmUOW2UYDB`nHEFzuwJW86F<)EU~SuCL|@%x@S z5)O3b@5I`1iVFIuP>`u$e_!|>`5akV@VZzg*ykQn53=rkCdv}2-Z#W$jdWiCd zc9>98ls7~OUZyDTSnD)|Dn54_LKSnIhET=5PD7~T45uMf(ZOj5RaA5wLKW+rhET=cbzLlD-{M3f&?WdLvUaU9p% w45W%Ay(FcSQc5YMlu}A5rIb=iDW#P97v1F?2}4-q5C8xG07*qoM6N<$f(CtqCIA2c literal 0 HcmV?d00001 diff --git a/app/src/main/res/drawable/circle_background.xml b/app/src/main/res/drawable/circle_background.xml new file mode 100644 index 0000000..b5722af --- /dev/null +++ 
b/app/src/main/res/drawable/circle_background.xml @@ -0,0 +1,8 @@ + + + + + diff --git a/app/src/main/res/drawable/key_visualizer.xml b/app/src/main/res/drawable/key_visualizer.xml new file mode 100644 index 0000000..94e299a --- /dev/null +++ b/app/src/main/res/drawable/key_visualizer.xml @@ -0,0 +1,5 @@ + + + diff --git a/app/src/main/res/drawable/key_visualizer_red.xml b/app/src/main/res/drawable/key_visualizer_red.xml new file mode 100644 index 0000000..38c4c91 --- /dev/null +++ b/app/src/main/res/drawable/key_visualizer_red.xml @@ -0,0 +1,5 @@ + + + diff --git a/app/src/main/res/drawable/shortcut_gallery.xml b/app/src/main/res/drawable/shortcut_gallery.xml new file mode 100644 index 0000000..bfe1492 --- /dev/null +++ b/app/src/main/res/drawable/shortcut_gallery.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/shortcut_ic_face_white_48dp.xml b/app/src/main/res/drawable/shortcut_ic_face_white_48dp.xml new file mode 100644 index 0000000..61087dd --- /dev/null +++ b/app/src/main/res/drawable/shortcut_ic_face_white_48dp.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/shortcut_ic_photo_camera_white_48dp.xml b/app/src/main/res/drawable/shortcut_ic_photo_camera_white_48dp.xml new file mode 100644 index 0000000..aa0ca23 --- /dev/null +++ b/app/src/main/res/drawable/shortcut_ic_photo_camera_white_48dp.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/shortcut_ic_videocam_white_48dp.xml b/app/src/main/res/drawable/shortcut_ic_videocam_white_48dp.xml new file mode 100644 index 0000000..abc44e2 --- /dev/null +++ b/app/src/main/res/drawable/shortcut_ic_videocam_white_48dp.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/shortcut_settings.xml b/app/src/main/res/drawable/shortcut_settings.xml new file mode 100644 index 0000000..244cf65 --- /dev/null +++ b/app/src/main/res/drawable/shortcut_settings.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git 
a/app/src/main/res/drawable/take_photo_selector.xml b/app/src/main/res/drawable/take_photo_selector.xml new file mode 100644 index 0000000..09f4127 --- /dev/null +++ b/app/src/main/res/drawable/take_photo_selector.xml @@ -0,0 +1,6 @@ + + + + + diff --git a/app/src/main/res/drawable/take_photo_shutter.xml b/app/src/main/res/drawable/take_photo_shutter.xml new file mode 100644 index 0000000..f31dc3b --- /dev/null +++ b/app/src/main/res/drawable/take_photo_shutter.xml @@ -0,0 +1,9 @@ + + + + + diff --git a/app/src/main/res/drawable/take_photo_shutter_pressed.xml b/app/src/main/res/drawable/take_photo_shutter_pressed.xml new file mode 100644 index 0000000..22c8536 --- /dev/null +++ b/app/src/main/res/drawable/take_photo_shutter_pressed.xml @@ -0,0 +1,5 @@ + + + + diff --git a/app/src/main/res/drawable/take_video_recording.xml b/app/src/main/res/drawable/take_video_recording.xml new file mode 100644 index 0000000..d5aa097 --- /dev/null +++ b/app/src/main/res/drawable/take_video_recording.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + diff --git a/app/src/main/res/drawable/take_video_selector.xml b/app/src/main/res/drawable/take_video_selector.xml new file mode 100644 index 0000000..e49fd08 --- /dev/null +++ b/app/src/main/res/drawable/take_video_selector.xml @@ -0,0 +1,6 @@ + + + + + diff --git a/app/src/main/res/drawable/take_video_shutter.xml b/app/src/main/res/drawable/take_video_shutter.xml new file mode 100644 index 0000000..1f4abf6 --- /dev/null +++ b/app/src/main/res/drawable/take_video_shutter.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/app/src/main/res/drawable/take_video_shutter_pressed.xml b/app/src/main/res/drawable/take_video_shutter_pressed.xml new file mode 100644 index 0000000..4554f92 --- /dev/null +++ b/app/src/main/res/drawable/take_video_shutter_pressed.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/app/src/main/res/layout/activity_device_select.xml b/app/src/main/res/layout/activity_device_select.xml new file mode 100644 index 0000000..b8d79cf 
--- /dev/null +++ b/app/src/main/res/layout/activity_device_select.xml @@ -0,0 +1,42 @@ + + + + + + + +