Repo created

This commit is contained in:
Fr4nz D13trich 2025-11-22 13:52:14 +01:00
parent cc5fdc9b3a
commit 1ab535ea8c
589 changed files with 130568 additions and 0 deletions

67
app/build.gradle Normal file
View file

@ -0,0 +1,67 @@
// Gradle build script for the Open Camera Android application module.
apply plugin: 'com.android.application'
android {
// Compile against Android SDK 35; all source files are UTF-8.
compileSdk 35
compileOptions.encoding = 'UTF-8'
defaultConfig {
applicationId "net.sourceforge.opencamera"
// Minimum supported device: Android 5.0 (API 21); target API 35.
minSdkVersion 21
targetSdkVersion 35
//compileSdkVersion 31 // needed to support appcompat:1.4.0 (which we need for emoji policy support, and not yet ready to target SDK 30)
testApplicationId "net.sourceforge.opencamera.test"
//testInstrumentationRunner "android.test.InstrumentationTestRunner"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
// Release builds are not minified; ProGuard config kept for reference.
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
// needed to use android.test package (ActivityInstrumentationTestCase2 etc) when targeting sdk 28 (Android 9) -
// see https://developer.android.com/training/testing/set-up-project
useLibrary 'android.test.runner'
useLibrary 'android.test.base'
lint {
// Don't fail the build on lint errors, and skip lint for release builds.
abortOnError false
checkReleaseBuilds false
}
namespace 'net.sourceforge.opencamera'
buildFeatures {
}
//useLibrary 'android.test.mock'
}
dependencies {
androidTestImplementation 'androidx.test.ext:junit:1.3.0'
//implementation 'androidx.activity:activity:1.9.3' // needed for EdgeToEdge.enable(this)
// appcompat version must be 1.4.0 or later to satisfy emoji policy!
implementation 'androidx.appcompat:appcompat:1.7.1'
// needed to fix errors since upgrading to appcompat:1.7.0, see https://stackoverflow.com/questions/75263047/duplicate-class-in-kotlin-android
implementation(platform("org.jetbrains.kotlin:kotlin-bom:1.9.0"))
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.exifinterface:exifinterface:1.4.1'
testImplementation 'junit:junit:4.13.2'
// newer AndroidJUnit4 InstrumentedTest
androidTestImplementation "androidx.test:runner:1.7.0"
androidTestImplementation "androidx.test:rules:1.7.0"
androidTestImplementation "androidx.test.espresso:espresso-core:3.7.0"
}
// Build with a Java 17 toolchain.
java {
toolchain {
languageVersion = JavaLanguageVersion.of(17)
}
}

View file

@ -0,0 +1,17 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests for Avg algorithm - only need to run on a single device.
 * Runs InstrumentedTest filtered down to the tests categorised as AvgTests.
 * Should manually look over the images dumped onto DCIM/
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device every time we run the tests.
 * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(AvgTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class AvgInstrumentedTests {}

View file

@ -0,0 +1,17 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests for HDR algorithm - only need to run on a single device.
 * Runs InstrumentedTest filtered down to the tests categorised as HDRTests.
 * Should manually look over the images dumped onto DCIM/
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device every time we run the tests.
 * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(HDRTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class HDRInstrumentedTests {}

View file

@ -0,0 +1,17 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests for HDR algorithm with more than 3 images - only need to run on a single device.
 * Runs InstrumentedTest filtered down to the tests categorised as HDRNTests.
 * Should manually look over the images dumped onto DCIM/
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device every time we run the tests.
 * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(HDRNTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class HDRNInstrumentedTests {}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,13 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests that don't fit into another of the Test suites.
 * Runs InstrumentedTest filtered down to the tests categorised as MainTests.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(MainTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class MainInstrumentedTests {}

View file

@ -0,0 +1,17 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests for Panorama algorithm - only need to run on a single device.
 * Runs InstrumentedTest filtered down to the tests categorised as PanoramaTests.
 * Should manually look over the images dumped onto DCIM/
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device every time we run the tests.
 * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(PanoramaTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class PanoramaInstrumentedTests {}

View file

@ -0,0 +1,13 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests related to taking photos; note that tests to do with photo mode that don't take photos are still part of MainInstrumentedTests.
 * Runs InstrumentedTest filtered down to the tests categorised as PhotoTests.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(PhotoTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class PhotoInstrumentedTests {}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,13 @@
package net.sourceforge.opencamera;
import org.junit.experimental.categories.Categories;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/** Tests related to video recording; note that tests to do with video mode that don't record are still part of MainTests.
 * Runs InstrumentedTest filtered down to the tests categorised as VideoTests.
 */
@RunWith(Categories.class)
@Categories.IncludeCategory(VideoTests.class)
@Suite.SuiteClasses({InstrumentedTest.class})
public class VideoInstrumentedTests {}

View file

@ -0,0 +1,71 @@
package net.sourceforge.opencamera.test;

import junit.framework.Test;
import junit.framework.TestSuite;

public class AvgTests {
    /** Tests for Avg algorithm - only need to run on a single device
     * Should manually look over the images dumped onto DCIM/
     * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
     * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
     * time to transfer to the device every time we run the tests.
     * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
     * UPDATE: now deprecated, replaced with AvgInstrumentedTests.
     * @return a suite containing testAvg1 through testAvg52, in numeric order.
     */
    public static Test suite() {
        // NOTE(review): suite is named after MainTests (likely copy/paste) — kept for compatibility.
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // testAvg1..testAvg52 were previously listed as 52 identical lines; a loop
        // adds the same tests in the same order.
        for(int i = 1; i <= 52; i++) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAvg" + i));
        }
        return suite;
    }
}

View file

@ -0,0 +1,47 @@
package net.sourceforge.opencamera.test;

import junit.framework.Test;
import junit.framework.TestSuite;

public class HDRNTests {
    /** Tests for HDR algorithm with more than 3 images - only need to run on a single device
     * Should manually look over the images dumped onto DCIM/
     * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
     * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
     * time to transfer to the device every time we run the tests.
     * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
     * UPDATE: now deprecated, replaced with HDRNInstrumentedTests.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // Test names listed in the order they should run.
        final String[] testNames = {
                "testHDR23_exp2", "testHDR23_exp2b", "testHDR47_exp2", "testHDR49_exp2",
                "testHDR45", "testHDR46", "testHDR47", "testHDR48", "testHDR49",
                "testHDR23_exp4", "testHDR49_exp4",
                "testHDR1_exp5", "testHDR23_exp5", "testHDR45_exp5", "testHDR46_exp5",
                "testHDR47_exp5", "testHDR48_exp5", "testHDR49_exp5",
                "testHDR23_exp6",
                "testHDR23_exp7", "testHDR45_exp7", "testHDR47_exp7"
        };
        for(String testName : testNames) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, testName));
        }
        return suite;
    }
}

View file

@ -0,0 +1,85 @@
package net.sourceforge.opencamera.test;

import junit.framework.Test;
import junit.framework.TestSuite;

public class HDRTests {
    /** Tests for HDR algorithm - only need to run on a single device
     * Should manually look over the images dumped onto DCIM/
     * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
     * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
     * time to transfer to the device every time we run the tests.
     * On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
     * UPDATE: now deprecated, replaced with HDRInstrumentedTests.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // DRO tests first.
        for(String testName : new String[]{"testDROZero", "testDRODark0", "testDRODark1"}) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, testName));
        }
        // testHDR1 through testHDR37.
        for(int i = 1; i <= 37; i++) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR" + i));
        }
        // Irregularly-named tests, in their original order.
        for(String testName : new String[]{
                "testHDR38Filmic", "testHDR39",
                "testHDR40", "testHDR40Exponential", "testHDR40Filmic",
                "testHDR41", "testHDR42", "testHDR43", "testHDR44"}) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, testName));
        }
        // don't include testHDR45..testHDR49, these are tested as part of HDRNTests
        for(int i = 50; i <= 61; i++) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDR" + i));
        }
        return suite;
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,109 @@
package net.sourceforge.opencamera.test;
import android.os.Build;
import junit.framework.Test;
import junit.framework.TestSuite;
import net.sourceforge.opencamera.TestUtils;
public class MainTests {
// Tests that don't fit into another of the Test suites
public static Test suite() {
/*return new TestSuiteBuilder(AllTests.class)
.includeAllPackagesUnderHere()
.build();*/
TestSuite suite = new TestSuite(MainTests.class.getName());
// put these tests first as they require various permissions be allowed, that can only be set by user action
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationSettings"));
// other tests:
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks1"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks2"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks3"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testScopedStorageChecks4"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPause"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmediatelyQuit"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testStartCameraPreviewCount"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCamera2PrefUpgrade"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveModes"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashVideoMode"));
//suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFlashTorchSwitchCamera"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashStartup"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFlashStartup2"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testHDRRestart"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewSize"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewSizeWYSIWYG"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testResolutionMaxMP"));
if( TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testResolutionBurst"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAutoFocus"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAutoFocusCorners"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopup"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopupLeftLayout"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testRightLayout"));
//suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPopupLayout")); // don't autotest for now, see comments for the test
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchResolution"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFaceDetection"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSwitchVideoSwitchCameras"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusRemainMacroSwitchCamera"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusRemainMacroSwitchPhoto"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSaveMacroSwitchPhoto"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusSwitchVideoResetContinuous"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocus"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureRepeatTouch"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureSwitchAuto"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousVideoFocusForPhoto"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testStartupAutoFocus"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveQuality"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoom"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoomIdle"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoomSwitchCamera"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraIdle"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraRepeat"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTouchFocusQuick"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGallery"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettings"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsSaveLoad"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserNew"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserInvalid"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistory"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistorySAF"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsPrivacyPolicy"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewRotation"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLayoutNoLimits"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLayoutNoLimitsStartup"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCameraModes"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFailOpenCamera"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testAudioControlIcon"));
}
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIconsAgainstCameras"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testOnError"));
if( !TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGPSString"));
}
if( TestUtils.test_camera2 ) {
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPreviewBitmap"));
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoFPSHighSpeed"));
}
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.O ) {
// intensive test, can crash when run as suite on older devices (Nexus 6, Nexus 7) with Camera2 at least
// also run this test last, just in case
suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchCameraRepeat2"));
}
return suite;
}
}

View file

@ -0,0 +1,18 @@
package net.sourceforge.opencamera.test;

import junit.framework.Test;
import junit.framework.TestSuite;

public class MultiCameraTests {
    // Tests to run specifically on devices where MainActivity.isMultiCamEnabled() returns true.
    public static Test suite() {
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // Test names listed in the order they should run.
        for(String testName : new String[]{
                "testIconsAgainstCameras",
                "testTakePhotoFrontCameraAll",
                "testTakePhotoFrontCamera",
                "testTakePhotoFrontCameraMulti"}) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, testName));
        }
        return suite;
    }
}

View file

@ -0,0 +1,36 @@
package net.sourceforge.opencamera.test;

import android.os.Build;

import junit.framework.Test;
import junit.framework.TestSuite;

import net.sourceforge.opencamera.TestUtils;

public class Nexus7Tests {
    // Tests to run specifically on Nexus 7
    public static Test suite() {
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // we run the following tests on the Nexus 7 as a device that supports SAF, but doesn't have Android 7+ (where we use alternative methods for read/writing Exif tags without needing File)
        // update: we now (as of 1.48.2) use the same codepaths for exif tags for before and after Android 7, but might as well keep these tests here anyway
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStampSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOnSAF"));
        // tests useful for device with no flash, and only 1 camera
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability"));
        // tests for testing Camera2 API with LEGACY Camera2 functionality
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto"));
        // fixed: original had an empty if-branch with the logic in the else; use a single negated condition instead
        // video doesn't work on Android 5 or 6 emulator!
        if( !(TestUtils.isEmulator() && (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP || Build.VERSION.SDK_INT == Build.VERSION_CODES.M)) ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo"));
        }
        return suite;
    }
}

View file

@ -0,0 +1,48 @@
package net.sourceforge.opencamera.test;

import android.os.Build;

import junit.framework.Test;
import junit.framework.TestSuite;

import net.sourceforge.opencamera.TestUtils;

public class OldDeviceTests {
    // Small set of tests to run on very old devices.
    public static Test suite() {
        TestSuite suite = new TestSuite(MainTests.class.getName());
        // put these tests first as they require various permissions be allowed, that can only be set by user action
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSwitchVideo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPause"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveModes"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFocusFlashAvailability"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testGallery"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettings"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsSaveLoad"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserNew"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testFolderChooserInvalid"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSaveFolderHistory"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testSettingsPrivacyPolicy"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOn"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevel"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelLowMemory"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAngles"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAnglesLowMemory"));
        // fixed: original had an empty if-branch with the logic in the else; use a single negated condition instead
        // video doesn't work on Android 5 or 6 emulator!
        if( !(TestUtils.isEmulator() && (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP || Build.VERSION.SDK_INT == Build.VERSION_CODES.M)) ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitles"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideo"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideoDurationLimit"));
        }
        return suite;
    }
}

View file

@ -0,0 +1,61 @@
package net.sourceforge.opencamera.test;
import junit.framework.Test;
import junit.framework.TestSuite;
/** Tests for the Panorama algorithm - only need to run on a single device.
 *  Should manually look over the images dumped onto DCIM/
 *  To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 *  folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 *  time to transfer to the device every time we run the tests.
 *  On Android 10+, scoped storage permission needs to be given to Open Camera for the DCIM/testOpenCamera/ folder.
 *  UPDATE: now deprecated, replaced with PanoramaInstrumentedTests.
 */
public class PanoramaTests {
    /** Builds the JUnit 3 suite of panorama tests.
     * @return the populated test suite.
     */
    public static Test suite() {
        // Name the suite after this class (was MainTests, a copy-paste slip) so test
        // reports correctly identify which suite ran.
        TestSuite suite = new TestSuite(PanoramaTests.class.getName());
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanoramaWhite"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama1"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama2"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama3"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama3_picsperscreen2"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama4"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama5"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama6"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama7"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama8"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama9"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama10"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama11"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama12"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama13"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama14"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama15"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama16"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama17"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama18"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama19"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama20"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama21"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama22"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama23"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama24"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama25"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama26"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama27"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama28"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama28_galaxys10e"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama29"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama30"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama30_galaxys10e"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama31"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama32"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama33"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama34"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama35"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama36"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama37"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPanorama38"));
        return suite;
    }
}

View file

@ -0,0 +1,42 @@
package net.sourceforge.opencamera.test;
import junit.framework.Test;
import junit.framework.TestSuite;
/** Tests related to taking photos that require the Camera2 API - only need to run
 *  this suite with Camera2 enabled.
 */
public class PhotoCamera2Tests {
    /** Builds the JUnit 3 suite of Camera2-only photo tests.
     * @return the populated test suite.
     */
    public static Test suite() {
        // Name the suite after this class (was MainTests, a copy-paste slip) so test
        // reports correctly identify which suite ran.
        TestSuite suite = new TestSuite(PhotoCamera2Tests.class.getName());
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoFocusReleaseDuringPhoto"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualISOExposure"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoManualWB"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRaw"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawWaitCaptureResult"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawMulti"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnly"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawExpo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawExpoWaitCaptureResult"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnlyExpo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashRaw"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashRaw2"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExpo5"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSlowBurst"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpoRaw"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpoRawOnly"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketing"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketingHeavy"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFocusBracketingCancel"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawFocusBracketing"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawOnlyFocusBracketing"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFastBurst"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousBurst"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousBurstSlow"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNR"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashAutoFakeMode"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashOnFakeMode"));
        // do testTakePhotoRawRepeat last: it is an intensive test, and if it fails for any reason it seems to cause the following test to crash, terminating the run (at least on Nexus 6)!
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRawRepeat"));
        return suite;
    }
}

View file

@ -0,0 +1,109 @@
package net.sourceforge.opencamera.test;
import junit.framework.Test;
import junit.framework.TestSuite;
import net.sourceforge.opencamera.TestUtils;
/** Tests related to taking photos; note that tests to do with photo mode that don't
 *  take photos are still part of MainTests.
 */
public class PhotoTests {
    /** Builds the JUnit 3 suite of photo-taking tests. Some tests are conditional on
     *  TestUtils.test_camera2 (whether the run targets the Camera2 API).
     * @return the populated test suite.
     */
    public static Test suite() {
        // Name the suite after this class (was MainTests, a copy-paste slip) so test
        // reports correctly identify which suite ran.
        TestSuite suite = new TestSuite(PhotoTests.class.getName());
        // put these tests first as they require various permissions be allowed, that can only be set by user action
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOn"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationDirectionOn"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOff"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLocationOnSAF"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOn"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testDirectionOnSAF"));
        }
        // then do memory intensive tests:
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevel"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelLowMemory"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAngles"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoLevelAnglesLowMemory"));
        // other tests:
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhoto"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuous"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoContinuousNoTouch"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashAuto"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashOn"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashTorch"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAudioButton"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNoAutofocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoNoThumbnail"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFlashBug"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraAll"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCamera"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraMulti"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoFrontCameraScreenFlash"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAutoFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExposureCompensation"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedLandscape"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoLockedPortrait"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPaused"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedAudioButton"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrash"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrashSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedTrash2"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoQuickFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeatFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeatFocusLocked"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAfterFocus"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoSingleTap"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDoubleTap"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoAlt"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerBackground"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerSettings"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTimerPopup"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoRepeat"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPicture1"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPicture2"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocusRepeat"));
        if( TestUtils.test_camera2 ) {
            // test_wait_capture_result only relevant for Camera2 API
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testContinuousPictureFocusRepeatWaitCaptureResult"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testKeyboardControls"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStamp"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoStampSAF"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDRO"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoDROPhotoStamp"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDR"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testPhotoBackgroundHDR"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRSaveExpo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRFrontCamera"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRAutoStabilise"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoHDRPhotoStamp"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoExpo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanorama"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaMax"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaCancel"));
            //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPanoramaCancelBySettings"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder1"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder2"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder3"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolder4"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolderUnicode"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testCreateSaveFolderEmpty"));
        }
        // testTakePhotoPreviewPausedShare should be last, as sharing the image may sometimes cause later tests to hang
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakePhotoPreviewPausedShare"));
        return suite;
    }
}

View file

@ -0,0 +1,15 @@
package net.sourceforge.opencamera.test;
import junit.framework.Test;
import junit.framework.TestSuite;
/** Dummy test suite for running an arbitrary subset of tests. Uncomment/add
 *  createTest lines as needed for ad-hoc runs; normally the suite is empty.
 */
public class TempTests {
    /** Builds the (normally empty) JUnit 3 test suite.
     * @return the test suite.
     */
    public static Test suite() {
        // Name the suite after this class (was MainTests, a copy-paste slip) so test
        // reports correctly identify which suite ran.
        TestSuite suite = new TestSuite(TempTests.class.getName());
        //suite.addTest(TestSuite.createTest(MainActivityTest.class, "testZoom"));
        return suite;
    }
}

View file

@ -0,0 +1,95 @@
package net.sourceforge.opencamera.test;
import junit.framework.Test;
import junit.framework.TestSuite;
import net.sourceforge.opencamera.TestUtils;
/** Tests related to video recording; note that tests to do with video mode that
 *  don't record are still part of MainTests.
 */
public class VideoTests {
    /** Builds the JUnit 3 suite of video tests. Some tests are conditional on
     *  TestUtils.test_camera2 (whether the run targets the Camera2 API).
     * @return the populated test suite.
     */
    public static Test suite() {
        // Name the suite after this class (was MainTests, a copy-paste slip) so test
        // reports correctly identify which suite ran.
        TestSuite suite = new TestSuite(VideoTests.class.getName());
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo"));
        // put these tests first as they require various permissions be allowed, that can only be set by user action:
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAudioControl"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSAF"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitles"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitlesSAF"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSubtitlesGPSSAF"));
        }
        if( TestUtils.test_camera2 ) {
            // tests for video log profile (but these don't actually record video)
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile1"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile2"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile3"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile1_extra_strong"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile2_extra_strong"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testLogProfile3_extra_strong"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideo"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testIntentVideoDurationLimit"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmersiveMode"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testImmersiveModeEverything"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoStabilization"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoExposureLock"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFocusArea"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoQuick"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoQuickSAF"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDuration"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDurationRestart"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxDurationRestartInterrupt"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSettings"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMacro"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoPause"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoPauseStop"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshot"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotTimer"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotPausePreview"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSnapshotMax"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFlashVideo"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoTimerInterrupt"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoPopup"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoTimerPopup"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAvailableMemory"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoAvailableMemory2"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize1"));
        if( !TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize2"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize3"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize4"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoMaxFileSize4SAF"));
        }
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoTimeLapse"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceFailure"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceFailureSAF"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceIOException"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoForceCameraControllerException"));
        if( TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoLogProfile"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoJTLogProfile"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoGammaProfile"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testVideoEdgeModeNoiseReductionMode"));
        }
        // put tests which change bitrate, fps or test 4K at end
        if( TestUtils.test_camera2 ) {
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFPSHighSpeedManual"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoSlowMotion"));
            suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoFPS"));
        }
        // update: now deprecating these tests, as setting these settings can be dodgy on some devices
        /*suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideoBitrate"));
        suite.addTest(TestSuite.createTest(MainActivityTest.class, "testTakeVideo4K"));*/
        return suite;
    }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OpenCamera.testTest</string>
</resources>

View file

@ -0,0 +1,159 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="93"
android:versionName="1.55"
android:installLocation="auto"
tools:ignore="GoogleAppIndexingWarning">
<!-- ignore GoogleAppIndexingWarning as we don't want to implement that -->
<supports-screens android:xlargeScreens="true" android:largeScreens="true" android:normalScreens="true" android:smallScreens="true" android:anyDensity="true"/>
<uses-permission android:name="android.permission.BLUETOOTH"
android:maxSdkVersion="30"
/>
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN"
android:maxSdkVersion="30"
/>
<uses-permission android:name="android.permission.BLUETOOTH_SCAN"
android:usesPermissionFlags="neverForLocation"
tools:targetApi="s" />
<uses-permission android:name="android.permission.BLUETOOTH_CONNECT" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"
android:maxSdkVersion="28"
tools:ignore="ScopedStorage" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.microphone" />
<uses-feature android:name="android.hardware.bluetooth_le" android:required="false"/>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:name=".OpenCameraApplication"
android:theme="@style/AppTheme"
android:largeHeap="true"
>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<activity
android:name="net.sourceforge.opencamera.MainActivity"
android:configChanges="orientation|screenSize|keyboardHidden"
android:clearTaskOnLaunch="true"
android:exported="true"
>
<!-- clearTaskOnLaunch set to true, so if user goes to gallery then returns to home, we return to the camera rather than remaining in gallery if user relaunches Open Camera -->
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.IMAGE_CAPTURE" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.IMAGE_CAPTURE_SECURE" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.STILL_IMAGE_CAMERA_SECURE" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.VIDEO_CAMERA" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<intent-filter>
<action android:name="android.media.action.VIDEO_CAPTURE" />
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
<meta-data android:name="android.app.shortcuts" android:resource="@xml/shortcuts" />
</activity>
<activity
android:name="net.sourceforge.opencamera.remotecontrol.DeviceScanner"
android:label="@string/scan_ble"
android:exported="false"
>
</activity>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<activity
android:name="TakePhoto"
android:label="@string/take_photo"
android:icon="@drawable/ic_launcher_take_photo"
android:configChanges="orientation|screenSize|keyboardHidden"
android:taskAffinity=""
android:excludeFromRecents="true"
android:exported="false"
>
</activity>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<receiver
android:icon="@drawable/ic_launcher_take_photo"
android:label="@string/take_photo"
android:name="MyWidgetProviderTakePhoto"
android:exported="true">
<intent-filter >
<action android:name="android.appwidget.action.APPWIDGET_UPDATE" />
</intent-filter>
<meta-data
android:name="android.appwidget.provider"
android:resource="@xml/widget_info_take_photo" />
</receiver>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<service
android:name="net.sourceforge.opencamera.MyTileService"
android:icon="@drawable/ic_photo_camera_white_48dp"
android:label="@string/camera"
android:permission="android.permission.BIND_QUICK_SETTINGS_TILE"
android:exported="true"
tools:targetApi="n">
<intent-filter>
<action android:name="android.service.quicksettings.action.QS_TILE" />
</intent-filter>
</service>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<service
android:name="net.sourceforge.opencamera.MyTileServiceVideo"
android:icon="@drawable/ic_videocam_white_48dp"
android:label="@string/record_video"
android:permission="android.permission.BIND_QUICK_SETTINGS_TILE"
android:exported="true"
tools:targetApi="n">
<intent-filter>
<action android:name="android.service.quicksettings.action.QS_TILE" />
</intent-filter>
</service>
<!-- should not change the android:name, including moving to a subpackage - see http://android-developers.blogspot.co.uk/2011/06/things-that-cannot-change.html -->
<service
android:name="net.sourceforge.opencamera.MyTileServiceFrontCamera"
android:icon="@drawable/ic_face_white_48dp"
android:label="@string/selfie"
android:permission="android.permission.BIND_QUICK_SETTINGS_TILE"
android:exported="true"
tools:targetApi="n">
<intent-filter>
<action android:name="android.service.quicksettings.action.QS_TILE" />
</intent-filter>
</service>
<service android:name="net.sourceforge.opencamera.remotecontrol.BluetoothLeService"
android:enabled="true"
android:exported="false"
/>
</application>
<!-- needed for targeting Android 11 - see https://developer.android.com/about/versions/11/behavior-changes-11 -->
<queries>
<intent>
<action android:name="android.intent.action.TTS_SERVICE" />
</intent>
</queries>
</manifest>

View file

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

View file

@ -0,0 +1,185 @@
package net.sourceforge.opencamera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;
import androidx.annotation.RequiresPermission;
/** Sets up a listener to listen for noise level.
 *  Thread model: the constructor creates (but does not start) a background thread that
 *  polls an AudioRecord; start() launches it, release() asks it to stop. The AudioRecord
 *  is released on that background thread, with wait/notify on AudioListener.this used to
 *  let release(true) block until the resource is freed.
*/
class AudioListener {
    private static final String TAG = "AudioListener";

    // Loop flag for the background thread; cleared by release() to request shutdown.
    private volatile boolean is_running = true; // should be volatile, as used to communicate between threads
    // Minimum AudioRecord buffer size in shorts; stays -1 if getMinBufferSize() failed.
    private int buffer_size = -1;
    private AudioRecord ar; // modification to ar should always be synchronized (on AudioListener.this), as the ar can be released in the AudioListener's own thread
    // Background polling thread; created in the constructor, started via start().
    private Thread thread;

    public interface AudioListenerCallback {
        // Called from the background thread with the average absolute sample value of the latest buffer.
        void onAudio(int level);
    }

    /** Create a new AudioListener. The caller should call the start() method to start listening.
    */
    @RequiresPermission(android.Manifest.permission.RECORD_AUDIO)
    AudioListener(final AudioListenerCallback cb) {
        if( MyDebug.LOG )
            Log.d(TAG, "new AudioListener");
        // 8kHz mono 16-bit PCM: low-cost configuration, sufficient for noise-level detection
        final int sample_rate = 8000;
        int channel_config = AudioFormat.CHANNEL_IN_MONO;
        int audio_format = AudioFormat.ENCODING_PCM_16BIT;
        try {
            buffer_size = AudioRecord.getMinBufferSize(sample_rate, channel_config, audio_format);
            //buffer_size = -1; // test
            if( MyDebug.LOG )
                Log.d(TAG, "buffer_size: " + buffer_size);
            if( buffer_size <= 0 ) {
                // getMinBufferSize() returns a negative error code on failure; leave ar null so status() reports failure
                if( MyDebug.LOG ) {
                    if( buffer_size == AudioRecord.ERROR )
                        Log.e(TAG, "getMinBufferSize returned ERROR");
                    else if( buffer_size == AudioRecord.ERROR_BAD_VALUE )
                        Log.e(TAG, "getMinBufferSize returned ERROR_BAD_VALUE");
                }
                return;
            }
            synchronized(AudioListener.this) {
                ar = new AudioRecord(MediaRecorder.AudioSource.MIC, sample_rate, channel_config, audio_format, buffer_size);
                AudioListener.this.notifyAll(); // probably not needed currently as no thread should be waiting for creation, but just for consistency
            }
        }
        catch(Exception e) {
            // e.g. SecurityException when RECORD_AUDIO permission is missing; leave ar null so status() reports failure
            MyDebug.logStackTrace(TAG, "failed to create audiorecord", e);
            return;
        }
        // check initialised
        synchronized(AudioListener.this) {
            if( ar.getState() == AudioRecord.STATE_INITIALIZED ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "audiorecord is initialised");
            }
            else {
                // construction succeeded but recorder is unusable; release it so status() reports failure
                Log.e(TAG, "audiorecord failed to initialise");
                ar.release();
                ar = null;
                AudioListener.this.notifyAll(); // again probably not needed, but just in case
                return;
            }
        }
        final short[] buffer = new short[buffer_size];
        ar.startRecording();
        this.thread = new Thread() {
            @Override
            public void run() {
                /*int sample_delay = (1000 * buffer_size) / sample_rate;
                if( MyDebug.LOG )
                    Log.e(TAG, "sample_delay: " + sample_delay);*/
                while( is_running ) {
                    /*try{
                        Thread.sleep(sample_delay);
                    }
                    catch(InterruptedException e) {
                        MyDebug.logStackTrace(TAG, "InterruptedException from sleep", e);
                    }*/
                    try {
                        // blocking read; returns number of shorts read, or a negative error code
                        int n_read = ar.read(buffer, 0, buffer_size);
                        if( n_read > 0 ) {
                            // report the mean absolute sample value as the noise level
                            int average_noise = 0;
                            int max_noise = 0;
                            for(int i=0;i<n_read;i++){
                                int value = Math.abs(buffer[i]);
                                average_noise += value;
                                max_noise = Math.max(max_noise, value);
                            }
                            average_noise /= n_read;
                            /*if( MyDebug.LOG ) {
                                Log.d(TAG, "n_read: " + n_read);
                                Log.d(TAG, "average noise: " + average_noise);
                                Log.d(TAG, "max noise: " + max_noise);
                            }*/
                            cb.onAudio(average_noise);
                        }
                        else {
                            if( MyDebug.LOG ) {
                                Log.d(TAG, "n_read: " + n_read);
                                if( n_read == AudioRecord.ERROR_INVALID_OPERATION )
                                    Log.e(TAG, "read returned ERROR_INVALID_OPERATION");
                                else if( n_read == AudioRecord.ERROR_BAD_VALUE )
                                    Log.e(TAG, "read returned ERROR_BAD_VALUE");
                            }
                        }
                    }
                    catch(Exception e) {
                        MyDebug.logStackTrace(TAG, "failed to read from audiorecord", e);
                    }
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "stopped running");
                // release on this thread, and wake anyone blocked in release(true)
                synchronized(AudioListener.this) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "release ar");
                    ar.release();
                    ar = null;
                    AudioListener.this.notifyAll(); // notify in case release() is waiting
                }
            }
        };
        // n.b., not good practice to start threads in constructors, so we require the caller to call start() instead
    }

    /**
    * @return Whether the audio recorder was created successfully.
    */
    boolean status() {
        boolean ok;
        synchronized(AudioListener.this) {
            ok = ar != null;
        }
        return ok;
    }

    /** Start listening.
    */
    void start() {
        if( MyDebug.LOG )
            Log.d(TAG, "start");
        if( thread != null ) {
            thread.start();
        }
    }

    /** Stop listening and release the resources.
    * @param wait_until_done If true, this method will block until the resource is freed.
    */
    void release(boolean wait_until_done) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "release");
            Log.d(TAG, "wait_until_done: " + wait_until_done);
        }
        // signal the background thread to exit its loop; it releases ar and notifies when done
        is_running = false;
        thread = null;
        if( wait_until_done ) {
            if( MyDebug.LOG )
                Log.d(TAG, "wait until audio listener is freed");
            // NOTE(review): if start() was never called, no thread ever releases ar, so this
            // wait could block indefinitely - confirm callers always start() before release(true)
            synchronized(AudioListener.this) {
                while( ar != null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "ar still not freed, so wait");
                    try {
                        AudioListener.this.wait();
                    }
                    catch(InterruptedException e) {
                        MyDebug.logStackTrace(TAG, "interrupted while waiting for audio recorder to be freed", e);
                    }
                }
            }
            if( MyDebug.LOG )
                Log.d(TAG, "audio listener is now freed");
        }
    }
}

View file

@ -0,0 +1,665 @@
package net.sourceforge.opencamera;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
/** Handles gyro sensor.
*/
public class GyroSensor implements SensorEventListener {
private static final String TAG = "GyroSensor";
final private SensorManager mSensorManager;
final private Sensor mSensor;
final private Sensor mSensorAccel;
private boolean is_recording;
private long timestamp;
private static final float NS2S = 1.0f / 1000000000.0f;
private final float [] deltaRotationVector = new float[4];
private boolean has_gyroVector;
private final float [] gyroVector = new float[3];
private final float [] currentRotationMatrix = new float[9];
private final float [] currentRotationMatrixGyroOnly = new float[9];
private final float [] deltaRotationMatrix = new float[9];
private final float [] tempMatrix = new float[9];
private final float [] temp2Matrix = new float[9];
private boolean has_init_accel = false;
private final float [] initAccelVector = new float[3];
private final float [] accelVector = new float[3];
private boolean has_original_rotation_matrix;
private final float [] originalRotationMatrix = new float[9];
private boolean has_rotationVector;
private final float [] rotationVector = new float[3];
// temporary vectors:
private final float [] tempVector = new float[3];
private final float [] inVector = new float[3];
/** Callback interface used to report progress towards an orientation target
 *  (see setTarget()/addTarget()).
 */
public interface TargetCallback {
    /** Called when the target has been achieved.
     * @param indx Index of the target that has been achieved.
     */
    void onAchieved(int indx);

    /** Called when the orientation is significantly far from the target.
     */
    void onTooFar();
}
private boolean hasTarget;
//private final float [] targetVector = new float[3];
private final List<float []> targetVectors = new ArrayList<>();
private float targetAngle; // target angle in radians
private float uprightAngleTol; // in radians
private boolean targetAchieved;
private float tooFarAngle; // in radians
private TargetCallback targetCallback;
private boolean has_lastTargetAngle;
private float lastTargetAngle;
private int is_upright; // if hasTarget==true, this stores whether the "upright" orientation of the device is close enough to the orientation when recording was started: 0 for yes, otherwise -1 for too anti-clockwise, +1 for too clockwise
/** Constructs the gyro helper, looking up the gyroscope and accelerometer from the
 *  system sensor service, and resetting the rotation state to the identity.
 */
GyroSensor(Context context) {
    mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
    mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
    mSensorAccel = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
    if( MyDebug.LOG ) {
        Log.d(TAG, "GyroSensor");
        if( mSensor == null ) {
            Log.d(TAG, "gyroscope not available");
        }
        else if( mSensorAccel == null ) {
            Log.d(TAG, "accelerometer not available");
        }
    }
    setToIdentity();
}
/** Returns whether both sensors needed for gyro tracking are present.
 *  Even though the gyro alone would work, best behaviour requires the accelerometer too.
 */
boolean hasSensors() {
    return !(mSensor == null || mSensorAccel == null);
}
/** Resets the rotation state: both rotation matrices become the identity, the initial
 *  accelerometer reference is zeroed, and the has_* flags are cleared.
 */
private void setToIdentity() {
    // indices 0, 4 and 8 are the diagonal of a 3x3 matrix
    for(int i=0;i<9;i++) {
        currentRotationMatrix[i] = (i % 4 == 0) ? 1.0f : 0.0f;
    }
    System.arraycopy(currentRotationMatrix, 0, currentRotationMatrixGyroOnly, 0, 9);
    // don't reset accelVector, rotationVector, gyroVector here, as we continually smooth
    // those values even when not recording
    initAccelVector[0] = initAccelVector[1] = initAccelVector[2] = 0.0f;
    has_init_accel = false;
    has_original_rotation_matrix = false;
}
/** Helper method to set a 3D vector.
 * @param vector Destination array of length at least 3; components are written in order.
 */
static void setVector(final float[] vector, float x, float y, float z) {
    vector[0] = x;
    vector[1] = y;
    vector[2] = z;
}
/** Helper method to access the (row, col)th component of a 3x3 row-major matrix. */
private static float getMatrixComponent(final float [] matrix, int row, int col) {
    return matrix[col + 3*row];
}
/** Helper method to set the (row, col)th component of a 3x3 row-major matrix. */
private static void setMatrixComponent(final float [] matrix, int row, int col, float value) {
    matrix[col + 3*row] = value;
}
/** Helper method to multiply 3x3 matrix with a 3D vector: result = matrix . vector. */
public static void transformVector(final float [] result, final float [] matrix, final float [] vector) {
    for(int i=0;i<3;i++) {
        // dot product of row i with the vector
        float sum = 0.0f;
        for(int j=0;j<3;j++) {
            sum += getMatrixComponent(matrix, i, j) * vector[j];
        }
        result[i] = sum;
    }
}
/** Helper method to multiply the transpose of a 3x3 matrix with a 3D vector:
 *  result = matrix^T . vector.
 *  For 3x3 rotation (orthonormal) matrices, the transpose is the inverse.
 */
private void transformTransposeVector(final float [] result, final float [] matrix, final float [] vector) {
    for(int i=0;i<3;i++) {
        // dot product of column i with the vector
        float sum = 0.0f;
        for(int j=0;j<3;j++) {
            sum += getMatrixComponent(matrix, j, i) * vector[j];
        }
        result[i] = sum;
    }
}
/** Registers for gyroscope and accelerometer updates.
 *  We should enable sensors before startRecording(), so that we can apply smoothing to
 *  the sensors to reduce noise. This should be limited to when we might want to use the
 *  gyro, to help battery life.
 */
void enableSensors() {
    if( MyDebug.LOG )
        Log.d(TAG, "enableSensors");
    has_rotationVector = false;
    has_gyroVector = false;
    // reset the smoothed sensor readings
    setVector(accelVector, 0.0f, 0.0f, 0.0f);
    setVector(rotationVector, 0.0f, 0.0f, 0.0f);
    setVector(gyroVector, 0.0f, 0.0f, 0.0f);
    if( mSensor != null )
        mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_UI);
    if( mSensorAccel != null )
        mSensorManager.registerListener(this, mSensorAccel, SensorManager.SENSOR_DELAY_UI);
}
/** Unregisters this listener from all sensors (counterpart to enableSensors()). */
void disableSensors() {
    if( MyDebug.LOG )
        Log.d(TAG, "disableSensors");
    mSensorManager.unregisterListener(this);
}
/** Starts integrating gyro data, from a freshly-reset identity orientation. */
void startRecording() {
    if( MyDebug.LOG )
        Log.d(TAG, "startRecording");
    timestamp = 0;
    is_recording = true;
    setToIdentity();
}
/** Stops integrating gyro data; no-op if not currently recording. */
void stopRecording() {
    if( !is_recording ) {
        return;
    }
    if( MyDebug.LOG )
        Log.d(TAG, "stopRecording");
    is_recording = false;
    timestamp = 0;
}
/** @return Whether gyro data is currently being integrated (see startRecording()). */
public boolean isRecording() {
    return is_recording;
}
/** Sets a single orientation target, replacing any previous targets.
 * @param targetAngle     Angle tolerance (radians) for considering the target achieved.
 * @param uprightAngleTol Tolerance (radians) for the "upright" check.
 * @param tooFarAngle     Angle (radians) beyond which onTooFar() is reported.
 * @param targetCallback  Receives onAchieved()/onTooFar() notifications.
 */
void setTarget(float target_x, float target_y, float target_z, float targetAngle, float uprightAngleTol, float tooFarAngle, TargetCallback targetCallback) {
    hasTarget = true;
    targetVectors.clear();
    addTarget(target_x, target_y, target_z);
    this.targetAngle = targetAngle;
    this.uprightAngleTol = uprightAngleTol;
    this.tooFarAngle = tooFarAngle;
    this.targetCallback = targetCallback;
    has_lastTargetAngle = false;
    lastTargetAngle = 0.0f;
}
/** Appends an additional target direction vector to the current target list. */
void addTarget(float target_x, float target_y, float target_z) {
    targetVectors.add(new float[]{target_x, target_y, target_z});
}
/** Clears all targets and the associated callback/state. */
void clearTarget() {
    hasTarget = false;
    targetVectors.clear();
    targetCallback = null;
    has_lastTargetAngle = false;
    lastTargetAngle = 0.0f;
}
/** Stops further target notifications without clearing the targets themselves. */
void disableTargetCallback() {
    targetCallback = null;
}
/** @return Whether a target has been set via setTarget(). */
boolean hasTarget() {
    return hasTarget;
}
/** @return Whether a target is set and is currently achieved. */
boolean isTargetAchieved() {
    return hasTarget && targetAchieved;
}
/** @return When a target is set: 0 if the device's "upright" orientation is close enough
 *          to the orientation when recording started, -1 if too anti-clockwise, +1 if too
 *          clockwise.
 */
public int isUpright() {
    return is_upright;
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
    // Intentionally empty: sensor accuracy changes are ignored by this listener.
}
/** Applies a small corrective rotation to currentRotationMatrix so that the gyro-integrated
* orientation drifts back towards agreement with the accelerometer's measured gravity
* direction (initAccelVector vs the current accelVector). Only a fraction of the angular
* error is corrected per call (the 0.02 factor below), so repeated calls act as a
* complementary filter rather than an instant snap.
* Requires that gyro integration has started (timestamp != 0) and that an initial
* accelerometer reference has been captured (has_init_accel); otherwise does nothing.
*/
private void adjustGyroForAccel() {
if( timestamp == 0 ) {
// don't have a gyro matrix yet
return;
}
else if( !has_init_accel ) {
// no accelerometer reference direction captured yet
return;
}
/*if( true )
return;*/ // don't use accelerometer for now
//transformVector(tempVector, currentRotationMatrix, initAccelVector);
// tempVector is now the initAccelVector transformed by the gyro matrix
//transformTransposeVector(tempVector, currentRotationMatrix, initAccelVector);
transformVector(tempVector, currentRotationMatrix, accelVector);
// tempVector is now the accelVector transformed by the gyro matrix
// both vectors are unit length (normalised in onSensorChanged), so the dot product is cos(angle)
double cos_angle = (tempVector[0] * initAccelVector[0] + tempVector[1] * initAccelVector[1] + tempVector[2] * initAccelVector[2]);
/*if( MyDebug.LOG ) {
Log.d(TAG, "adjustGyroForAccel:");
Log.d(TAG, "### currentRotationMatrix row 0: " + currentRotationMatrix[0] + " , " + currentRotationMatrix[1] + " , " + currentRotationMatrix[2]);
Log.d(TAG, "### currentRotationMatrix row 1: " + currentRotationMatrix[3] + " , " + currentRotationMatrix[4] + " , " + currentRotationMatrix[5]);
Log.d(TAG, "### currentRotationMatrix row 2: " + currentRotationMatrix[6] + " , " + currentRotationMatrix[7] + " , " + currentRotationMatrix[8]);
Log.d(TAG, "### initAccelVector: " + initAccelVector[0] + " , " + initAccelVector[1] + " , " + initAccelVector[2]);
Log.d(TAG, "### accelVector: " + accelVector[0] + " , " + accelVector[1] + " , " + accelVector[2]);
Log.d(TAG, "### tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]);
Log.d(TAG, "### cos_angle: " + cos_angle);
}*/
if( cos_angle >= 0.99999999995 ) {
// gyroscope already matches accelerometer
return;
}
double angle = Math.acos(cos_angle);
// only correct a small fraction of the error per call, to smooth out accelerometer noise
angle *= 0.02f; // filter
cos_angle = Math.cos(angle);
/*
// compute matrix to transform tempVector to accelVector
// compute (tempVector X accelVector) normalised
double a_x = tempVector[1] * accelVector[2] - tempVector[2] * accelVector[1];
double a_y = tempVector[2] * accelVector[0] - tempVector[0] * accelVector[2];
double a_z = tempVector[0] * accelVector[1] - tempVector[1] * accelVector[0];
*/
// compute matrix to transform tempVector to initAccelVector
// compute (tempVector X initAccelVector) normalised
// (the cross product gives the axis to rotate about)
double a_x = tempVector[1] * initAccelVector[2] - tempVector[2] * initAccelVector[1];
double a_y = tempVector[2] * initAccelVector[0] - tempVector[0] * initAccelVector[2];
double a_z = tempVector[0] * initAccelVector[1] - tempVector[1] * initAccelVector[0];
double a_mag = Math.sqrt(a_x*a_x + a_y*a_y + a_z*a_z);
if( a_mag < 1.0e-5 ) {
// parallel or anti-parallel case
return;
}
a_x /= a_mag;
a_y /= a_mag;
a_z /= a_mag;
double sin_angle = Math.sqrt(1.0-cos_angle*cos_angle);
// build the axis-angle rotation matrix for the (filtered) correction angle about axis a
// from http://immersivemath.com/forum/question/rotation-matrix-from-one-vector-to-another/
setMatrixComponent(tempMatrix, 0, 0, (float)(a_x*a_x*(1.0-cos_angle)+cos_angle));
setMatrixComponent(tempMatrix, 0, 1, (float)(a_x*a_y*(1.0-cos_angle)-sin_angle*a_z));
setMatrixComponent(tempMatrix, 0, 2, (float)(a_x*a_z*(1.0-cos_angle)+sin_angle*a_y));
setMatrixComponent(tempMatrix, 1, 0, (float)(a_x*a_y*(1.0-cos_angle)+sin_angle*a_z));
setMatrixComponent(tempMatrix, 1, 1, (float)(a_y*a_y*(1.0-cos_angle)+cos_angle));
setMatrixComponent(tempMatrix, 1, 2, (float)(a_y*a_z*(1.0-cos_angle)-sin_angle*a_x));
setMatrixComponent(tempMatrix, 2, 0, (float)(a_x*a_z*(1.0-cos_angle)-sin_angle*a_y));
setMatrixComponent(tempMatrix, 2, 1, (float)(a_y*a_z*(1.0-cos_angle)+sin_angle*a_x));
setMatrixComponent(tempMatrix, 2, 2, (float)(a_z*a_z*(1.0-cos_angle)+cos_angle));
/*if( MyDebug.LOG ) {
// test:
System.arraycopy(tempVector, 0, inVector, 0, 3);
transformVector(tempVector, tempMatrix, inVector);
Log.d(TAG, "### tempMatrix row 0: " + tempMatrix[0] + " , " + tempMatrix[1] + " , " + tempMatrix[2]);
Log.d(TAG, "### tempMatrix row 1: " + tempMatrix[3] + " , " + tempMatrix[4] + " , " + tempMatrix[5]);
Log.d(TAG, "### tempMatrix row 2: " + tempMatrix[6] + " , " + tempMatrix[7] + " , " + tempMatrix[8]);
Log.d(TAG, "### rotated tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]);
}*/
// replace currentRotationMatrix with tempMatrix.currentRotationMatrix
// since [tempMatrix.currentRotationMatrix].[initAccelVector] = tempMatrix.tempVector = accelVector
// since [tempMatrix.currentRotationMatrix].[accelVector] = tempMatrix.tempVector = initAccelVector
for(int i=0;i<3;i++) {
for(int j=0;j<3;j++) {
float value = 0.0f;
// temp2Matrix[ij] = tempMatrix[ik] * currentRotationMatrix[kj]
for(int k=0;k<3;k++) {
value += getMatrixComponent(tempMatrix, i, k) * getMatrixComponent(currentRotationMatrix, k, j);
}
setMatrixComponent(temp2Matrix, i, j, value);
}
}
System.arraycopy(temp2Matrix, 0, currentRotationMatrix, 0, 9);
/*if( MyDebug.LOG ) {
// test:
//transformVector(tempVector, temp2Matrix, initAccelVector);
//transformTransposeVector(tempVector, currentRotationMatrix, initAccelVector);
transformVector(tempVector, temp2Matrix, accelVector);
Log.d(TAG, "### new currentRotationMatrix row 0: " + temp2Matrix[0] + " , " + temp2Matrix[1] + " , " + temp2Matrix[2]);
Log.d(TAG, "### new currentRotationMatrix row 1: " + temp2Matrix[3] + " , " + temp2Matrix[4] + " , " + temp2Matrix[5]);
Log.d(TAG, "### new currentRotationMatrix row 2: " + temp2Matrix[6] + " , " + temp2Matrix[7] + " , " + temp2Matrix[8]);
Log.d(TAG, "### new tempVector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]);
}*/
}
/** Receives sensor events and fuses them into currentRotationMatrix:
* - TYPE_ACCELEROMETER: low-pass-filters and normalises the gravity vector, captures the
*   initial reference direction, then calls adjustGyroForAccel() to correct gyro drift.
* - TYPE_GYROSCOPE: integrates angular velocity over the timestep into a delta rotation
*   (quaternion -> matrix) and concatenates it onto both currentRotationMatrix and
*   currentRotationMatrixGyroOnly.
* - TYPE_ROTATION_VECTOR / TYPE_GAME_ROTATION_VECTOR: filters the rotation vector and
*   computes currentRotationMatrix relative to the first received orientation.
* Finally, if targets are set, computes the angle between the device's view direction and
* each target, updating is_upright/targetAchieved and firing the target callbacks.
*/
@Override
public void onSensorChanged(SensorEvent event) {
/*if( MyDebug.LOG )
Log.d(TAG, "onSensorChanged: " + event);*/
if( event.sensor.getType() == Sensor.TYPE_ACCELEROMETER ) {
// exponential moving average to smooth accelerometer noise
final float sensor_alpha = 0.8f; // for filter
for(int i=0;i<3;i++) {
//this.accelVector[i] = event.values[i];
this.accelVector[i] = sensor_alpha * this.accelVector[i] + (1.0f-sensor_alpha) * event.values[i];
}
// normalise so that accelVector is a unit direction (used as cos(angle) via dot products)
double mag = Math.sqrt(accelVector[0]*accelVector[0] + accelVector[1]*accelVector[1] + accelVector[2]*accelVector[2]);
if( mag > 1.0e-8 ) {
//noinspection lossy-conversions
accelVector[0] /= mag;
//noinspection lossy-conversions
accelVector[1] /= mag;
//noinspection lossy-conversions
accelVector[2] /= mag;
}
if( !has_init_accel ) {
// first reading becomes the reference gravity direction for drift correction
System.arraycopy(accelVector, 0, initAccelVector, 0, 3);
has_init_accel = true;
}
adjustGyroForAccel();
}
else if( event.sensor.getType() == Sensor.TYPE_GYROSCOPE ) {
if( has_gyroVector ) {
// smooth subsequent gyro readings with an exponential moving average
final float sensor_alpha = 0.5f; // for filter
for(int i=0;i<3;i++) {
//this.gyroVector[i] = event.values[i];
this.gyroVector[i] = sensor_alpha * this.gyroVector[i] + (1.0f-sensor_alpha) * event.values[i];
}
}
else {
// first gyro reading: take it as-is
System.arraycopy(event.values, 0, this.gyroVector, 0, 3);
has_gyroVector = true;
}
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if( timestamp != 0 ) {
final float dT = (event.timestamp - timestamp) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = gyroVector[0];
float axisY = gyroVector[1];
float axisZ = gyroVector[2];
// Calculate the angular speed of the sample
double omegaMagnitude = Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ);
// Normalize the rotation vector if it's big enough to get the axis
// (that is, EPSILON should represent your maximum allowable margin of error)
if( omegaMagnitude > 1.0e-5 ) {
//noinspection lossy-conversions
axisX /= omegaMagnitude;
//noinspection lossy-conversions
axisY /= omegaMagnitude;
//noinspection lossy-conversions
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
double thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = (float)Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float)Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
/*if( MyDebug.LOG ) {
Log.d(TAG, "### values: " + event.values[0] + " , " + event.values[1] + " , " + event.values[2]);
Log.d(TAG, "smoothed values: " + gyroVector[0] + " , " + gyroVector[1] + " , " + gyroVector[2]);
}*/
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector);
// User code should concatenate the delta rotation we computed with the current rotation
// in order to get the updated rotation.
// currentRotationMatrix = currentRotationMatrix * deltaRotationMatrix;
for(int i=0;i<3;i++) {
for(int j=0;j<3;j++) {
float value = 0.0f;
// tempMatrix[ij] = currentRotationMatrix[ik] * deltaRotationMatrix[kj]
for(int k=0;k<3;k++) {
value += getMatrixComponent(currentRotationMatrix, i, k) * getMatrixComponent(deltaRotationMatrix, k, j);
}
setMatrixComponent(tempMatrix, i, j, value);
}
}
System.arraycopy(tempMatrix, 0, currentRotationMatrix, 0, 9);
// also keep a gyro-only matrix (no accelerometer drift correction applied)
for(int i=0;i<3;i++) {
for(int j=0;j<3;j++) {
float value = 0.0f;
// tempMatrix[ij] = currentRotationMatrixGyroOnly[ik] * deltaRotationMatrix[kj]
for(int k=0;k<3;k++) {
value += getMatrixComponent(currentRotationMatrixGyroOnly, i, k) * getMatrixComponent(deltaRotationMatrix, k, j);
}
setMatrixComponent(tempMatrix, i, j, value);
}
}
System.arraycopy(tempMatrix, 0, currentRotationMatrixGyroOnly, 0, 9);
/*if( MyDebug.LOG ) {
setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen
transformVector(tempVector, currentRotationMatrix, inVector);
//transformTransposeVector(tempVector, currentRotationMatrix, inVector);
Log.d(TAG, "### gyro vector: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]);
}*/
adjustGyroForAccel();
}
timestamp = event.timestamp;
}
else if( event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR || event.sensor.getType() == Sensor.TYPE_GAME_ROTATION_VECTOR ) {
if( has_rotationVector ) {
// smooth subsequent rotation-vector readings
//final float sensor_alpha = 0.7f; // for filter
final float sensor_alpha = 0.8f; // for filter
for(int i=0;i<3;i++) {
//this.rotationVector[i] = event.values[i];
this.rotationVector[i] = sensor_alpha * this.rotationVector[i] + (1.0f-sensor_alpha) * event.values[i];
}
}
else {
System.arraycopy(event.values, 0, this.rotationVector, 0, 3);
has_rotationVector = true;
}
SensorManager.getRotationMatrixFromVector(tempMatrix, rotationVector);
if( !has_original_rotation_matrix ) {
// remember the first orientation so later ones can be made relative to it;
// NOTE(review): the flag is only latched once values[3] != 1.0 — presumably to skip
// an initial identity/placeholder reading; confirm against sensor documentation
System.arraycopy(tempMatrix, 0, originalRotationMatrix, 0, 9);
has_original_rotation_matrix = event.values[3] != 1.0;
}
// current = originalT.new
for(int i=0;i<3;i++) {
for(int j=0;j<3;j++) {
float value = 0.0f;
// currentRotationMatrix[ij] = originalRotationMatrix[ki] * tempMatrix[kj]
for(int k=0;k<3;k++) {
value += getMatrixComponent(originalRotationMatrix, k, i) * getMatrixComponent(tempMatrix, k, j);
}
setMatrixComponent(currentRotationMatrix, i, j, value);
}
}
if( MyDebug.LOG ) {
Log.d(TAG, "### values: " + event.values[0] + " , " + event.values[1] + " , " + event.values[2] + " , " + event.values[3]);
Log.d(TAG, "    " + currentRotationMatrix[0] + " , " + currentRotationMatrix[1] + " , " + currentRotationMatrix[2]);
Log.d(TAG, "    " + currentRotationMatrix[3] + " , " + currentRotationMatrix[4] + " , " + currentRotationMatrix[5]);
Log.d(TAG, "    " + currentRotationMatrix[6] + " , " + currentRotationMatrix[7] + " , " + currentRotationMatrix[8]);
}
}
if( hasTarget ) {
// check the device's current view direction against every registered target
int n_too_far = 0;
targetAchieved = false;
for(int indx=0;indx<targetVectors.size();indx++) {
float [] targetVector = targetVectors.get(indx);
// first check if we are still "upright"
setVector(inVector, 0.0f, 1.0f, 0.0f); // vector pointing in "up" direction
transformVector(tempVector, currentRotationMatrix, inVector);
/*if( MyDebug.LOG ) {
Log.d(TAG, "### transformed vector up: " + tempVector[0] + " , " + tempVector[1] + " , " + tempVector[2]);
}*/
/*float sin_angle_up = tempVector[0];
if( Math.abs(sin_angle_up) <= 0.017452406437f ) { // 1 degree
is_upright = 0;
}
else
is_upright = (sin_angle_up > 0) ? 1 : -1;*/
// store up vector
is_upright = 0;
float ux = tempVector[0];
float uy = tempVector[1];
float uz = tempVector[2];
// project up vector into plane perpendicular to targetVector
// v' = v - (v.n)n
float u_dot_n = ux * targetVector[0] + uy * targetVector[1] + uz * targetVector[2];
float p_ux = ux - u_dot_n * targetVector[0];
float p_uy = uy - u_dot_n * targetVector[1];
float p_uz = uz - u_dot_n * targetVector[2];
/*if( MyDebug.LOG ) {
Log.d(TAG, "    u: " + ux + " , " + uy + " , " + uz);
Log.d(TAG, "    p_u: " + p_ux + " , " + p_uy + " , " + p_uz);
}*/
double p_u_mag = Math.sqrt(p_ux*p_ux + p_uy*p_uy + p_uz*p_uz);
if( p_u_mag > 1.0e-5 ) {
/*if( MyDebug.LOG ) {
Log.d(TAG, "    p_u norm: " + p_ux/p_u_mag + " , " + p_uy/p_u_mag + " , " + p_uz/p_u_mag);
}*/
// normalise p_u
//noinspection lossy-conversions
p_ux /= p_u_mag;
//p_uy /= p_u_mag; // commented out as not needed
//noinspection lossy-conversions
p_uz /= p_u_mag;
// compute p_u X (0 1 0)
float cx = - p_uz;
float cy = 0.0f;
float cz = p_ux;
/*if( MyDebug.LOG ) {
Log.d(TAG, "    c: " + cx + " , " + cy + " , " + cz);
}*/
float sin_angle_up = (float)Math.sqrt(cx*cx + cy*cy + cz*cz);
float angle_up = (float)Math.asin(sin_angle_up);
setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen
transformVector(tempVector, currentRotationMatrix, inVector);
if( Math.abs(angle_up) > this.uprightAngleTol ) {
// outside tolerance: sign of the dot product gives the tilt direction
float dot = cx*tempVector[0] + cy*tempVector[1] + cz*tempVector[2];
is_upright = (dot < 0) ? 1 : -1;
}
}
// angle between the (transformed) view direction and this target
float cos_angle = tempVector[0] * targetVector[0] + tempVector[1] * targetVector[1] + tempVector[2] * targetVector[2];
float angle = (float)Math.acos(cos_angle);
if( is_upright == 0 ) {
/*if( MyDebug.LOG )
Log.d(TAG, "gyro vector angle with target: " + Math.toDegrees(angle) + " degrees");*/
if( angle <= targetAngle ) {
if( MyDebug.LOG )
Log.d(TAG, "    ### achieved target angle: " + Math.toDegrees(angle) + " degrees");
targetAchieved = true;
if( targetCallback != null ) {
//targetCallback.onAchieved(indx);
if( has_lastTargetAngle ) {
if( MyDebug.LOG )
Log.d(TAG, "    last target angle: " + Math.toDegrees(lastTargetAngle) + " degrees");
if( angle > lastTargetAngle ) {
// started to get worse, so call callback
targetCallback.onAchieved(indx);
}
// else, don't call callback yet, as we may get closer to the target
}
}
// only bother setting the lastTargetAngle if within the target angle - otherwise we'll have problems if there is more than one target set
has_lastTargetAngle = true;
lastTargetAngle = angle;
}
}
if( angle > tooFarAngle ) {
n_too_far++;
}
/*if( MyDebug.LOG )
Log.d(TAG, "targetAchieved? " + targetAchieved);*/
}
// only report "too far" when every target is too far away
if( n_too_far > 0 && n_too_far == targetVectors.size() ) {
if( targetCallback != null ) {
targetCallback.onTooFar();
}
}
}
}
/* This returns a 3D vector, that represents the current direction that the device is pointing (looking towards the screen),
* relative to when startRecording() was called.
* That is, the coordinate system is defined by the device's initial orientation when startRecording() was called:
* X: -ve to +ve is left to right
* Y: -ve to +ve is down to up
* Z: -ve to +ve is out of the screen to behind the screen
* So if the device hasn't changed orientation, this will return (0, 0, -1).
* (1, 0, 0) means the device has rotated 90 degrees so it's now pointing to the right.
* @param result An array of length 3 to store the returned vector.
*/
/*void getRelativeVector(float [] result) {
setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen
transformVector(result, currentRotationMatrix, inVector);
}*/
/*void getRelativeInverseVector(float [] result) {
setVector(inVector, 0.0f, 0.0f, -1.0f); // vector pointing behind the device's screen
transformTransposeVector(result, currentRotationMatrix, inVector);
}*/
/** Transforms 'in' by the transpose (i.e., inverse) of the current rotation matrix,
 *  writing the result into 'out'.
 */
public void getRelativeInverseVector(float [] out, float [] in) {
    transformTransposeVector(out, currentRotationMatrix, in);
}
/** Like getRelativeInverseVector(), but uses the gyro-only rotation matrix
 *  (without accelerometer drift correction).
 */
public void getRelativeInverseVectorGyroOnly(float [] out, float [] in) {
    transformTransposeVector(out, currentRotationMatrixGyroOnly, in);
}
/** Copies the current 3x3 rotation matrix (9 floats) into 'out'. */
public void getRotationMatrix(float [] out) {
    System.arraycopy(currentRotationMatrix, 0, out, 0, 9);
}
/** Test helper: fires the target-achieved callback for the given target index, if a
 *  callback is currently set.
 */
public void testForceTargetAchieved(int indx) {
    if( MyDebug.LOG )
        Log.d(TAG, "testForceTargetAchieved: " + indx);
    if( targetCallback == null )
        return;
    targetCallback.onAchieved(indx);
}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,19 @@
package net.sourceforge.opencamera;
/** Exception for HDRProcessor class.
*/
@SuppressWarnings("WeakerAccess")
public class HDRProcessorException extends Exception {
    /** The supplied number of images is not supported. */
    final static public int INVALID_N_IMAGES = 0;
    /** The supplied images are not all of the same resolution. */
    final static public int UNEQUAL_SIZES = 1;

    final private int code;

    HDRProcessorException(int code) {
        this.code = code;
    }

    /** @return One of the error-code constants declared on this class. */
    public int getCode() {
        return code;
    }
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,269 @@
package net.sourceforge.opencamera;
import android.graphics.Bitmap;
import android.util.Log;
/** Multi-threaded helper for applying a per-pixel function to a Bitmap, processing the
* image in horizontal strips ("chunks") so pixel data is read/written in bounded batches.
*/
public class JavaImageProcessing {
private static final String TAG = "JavaImageProcessing";
/** Callback implemented by callers to define the per-chunk pixel operation. */
public interface ApplyFunctionInterface {
// called once before processing starts, with the number of worker threads
void init(int n_threads);
void apply(CachedBitmap output, int thread_index, int off_x, int off_y, int this_width, int this_height); // version with no input
/**
* @param pixels An array of pixels for the subset being operated on. I.e., pixels[0] represents the input pixel at (off_x, off_y), and
*               the pixels array is of size this_width*this_height.
*/
void apply(CachedBitmap output, int thread_index, int [] pixels, int off_x, int off_y, int this_width, int this_height);
/**
* @param pixels An array of pixels for the subset being operated on. I.e., pixels[0] represents the input pixel at (off_x, off_y), and
*               the pixels array is of size 4*this_width*this_height.
*/
void apply(CachedBitmap output, int thread_index, byte [] pixels, int off_x, int off_y, int this_width, int this_height);
}
/** Encapsulates a Bitmap, but optimised for reading individual pixels.
* This differs to CachedBitmap in that FastAccessBitmap automatically decides which to cache,
* based on the requested pixels.
*/
static class FastAccessBitmap {
private final Bitmap bitmap;
private final int bitmap_width;
private final int cache_height; // number of full-width rows held in the cache
private final int [] cache_pixels_i;
private int cache_y = -1; // first bitmap row currently cached; -1 until cache() runs
FastAccessBitmap(Bitmap bitmap) {
this.bitmap = bitmap;
this.bitmap_width = bitmap.getWidth();
this.cache_height = Math.min(128, bitmap.getHeight());
this.cache_pixels_i = new int[bitmap_width*cache_height];
// better for performance to initialise the cache, rather than having to keep checking if it's initialised
cache(0);
}
// fills the cache with cache_height rows starting a few rows above y (clamped to the bitmap)
private void cache(int y) {
/*if( MyDebug.LOG )
Log.d(TAG, ">>> cache: " + y + " [ " + this + " ]");*/
y = Math.max(0, y-4);
this.cache_y = Math.min(y, bitmap.getHeight()-cache_height);
this.bitmap.getPixels(cache_pixels_i, 0, bitmap_width, 0, cache_y, bitmap_width, cache_height);
}
// returns the pixel at (x, y), refilling the cache if the row isn't cached
int getPixel(int x, int y) {
if( y < cache_y || y >= cache_y+cache_height ) {
// update cache
cache(y);
}
// read from cache
return cache_pixels_i[(y-cache_y)*bitmap_width+x];
}
// ensures rows [sy, ey] are cached; throws if the range exceeds the cache capacity
void ensureCache(int sy, int ey) {
if( ey - sy > cache_height ) {
throw new RuntimeException("can't cache this many rows: " + sy + " to " + ey + " vs cache_height: " + cache_height);
}
if( sy < cache_y || ey >= cache_y+cache_height ) {
cache(sy);
}
}
int getCacheY() {
return this.cache_y;
}
int [] getCachedPixelsI() {
return this.cache_pixels_i;
}
}
/** Encapsulates a Bitmap, together with caching of pixels.
* This differs to FastAccessBitmap in that CachedBitmap requires the caller to actually do the
* caching.
*/
public static class CachedBitmap {
private final Bitmap bitmap;
private final int [] cache_pixels_i;
private final byte [] cache_pixels_b; // NOTE(review): always null in this constructor - byte cache appears unused here
CachedBitmap(Bitmap bitmap, int cache_width, int cache_height) {
this.bitmap = bitmap;
this.cache_pixels_i = new int[cache_width*cache_height];
this.cache_pixels_b = null;
}
int [] getCachedPixelsI() {
return this.cache_pixels_i;
}
byte [] getCachedPixelsB() {
return this.cache_pixels_b;
}
}
/** Generic thread to apply a Java function to a bunch of pixels.
*/
private static class ApplyFunctionThread extends Thread {
private final int thread_index;
private final ApplyFunctionInterface function;
private final CachedBitmap input; // null when there is no input bitmap
private final int start_x, start_y, stop_x, stop_y; // inclusive-exclusive pixel region this thread processes
private int chunk_size; // number of lines to process at a time
private CachedBitmap output; // optional
private int output_start_x, output_start_y;
private static int getChunkSize(int start_y, int stop_y) {
int height = stop_y - start_y;
//return height;
//return (int)Math.ceil(height/4.0);
//return Math.min(512, height);
return Math.min(64, height);
//return Math.min(32, height);
}
ApplyFunctionThread(int thread_index, ApplyFunctionInterface function, Bitmap bitmap, int start_x, int start_y, int stop_x, int stop_y) {
super("ApplyFunctionThread");
/*if( MyDebug.LOG ) {
Log.d(TAG, "    thread_index: " + thread_index);
Log.d(TAG, "    start_x: " + start_x);
Log.d(TAG, "    start_y: " + start_y);
Log.d(TAG, "    stop_x: " + stop_x);
Log.d(TAG, "    stop_y: " + stop_y);
}*/
this.thread_index = thread_index;
this.function = function;
this.start_x = start_x;
this.start_y = start_y;
this.stop_x = stop_x;
this.stop_y = stop_y;
this.chunk_size = getChunkSize(start_y, stop_y);
/*if( MyDebug.LOG )
Log.d(TAG, "    chunk_size: " + chunk_size);*/
if( bitmap != null )
this.input = new CachedBitmap(bitmap, stop_x-start_x, chunk_size);
else
this.input = null;
}
// optionally supplies an output bitmap, with an offset relative to the input region
void setOutput(Bitmap bitmap, int output_start_x, int output_start_y) {
/*if( MyDebug.LOG ) {
Log.d(TAG, "    output_start_x: " + output_start_x);
Log.d(TAG, "    output_start_y: " + output_start_y);
}*/
this.output = new CachedBitmap(bitmap, stop_x-start_x, chunk_size);
this.output_start_x = output_start_x;
this.output_start_y = output_start_y;
}
public void run() {
/*if( MyDebug.LOG )
Log.d(TAG, "ApplyFunctionThread.run");*/
int width = stop_x-start_x;
int this_start_y = start_y;
int output_shift_y = output_start_y - start_y;
/*if( MyDebug.LOG ) {
Log.d(TAG, "start_y: " + start_y);
Log.d(TAG, "output_start_y: " + output_start_y);
Log.d(TAG, "output_shift_y: " + output_shift_y);
}*/
if( input == null && output == null ) {
// no pixel caches involved, so process the whole region in a single chunk
this.chunk_size = stop_y-start_y;
/*if( MyDebug.LOG )
Log.d(TAG, "reset chunk_size to: " + chunk_size);*/
}
final int chunk_size_f = chunk_size;
// process the region strip by strip: read input rows into the cache, apply the
// function, then write cached output rows back to the output bitmap
while(this_start_y < stop_y) {
int this_stop_y = Math.min(this_start_y+chunk_size_f, stop_y);
int this_height = this_stop_y-this_start_y;
//if( MyDebug.LOG )
//    Log.d(TAG, "chunks from " + this_start_y + " to " + this_stop_y);
//long time_s = System.currentTimeMillis();
if( input == null ) {
// nothing to copy to cache
function.apply(output, thread_index, start_x, this_start_y, width, this_height);
}
else if( input.bitmap != null ) {
input.bitmap.getPixels(input.cache_pixels_i, 0, width, start_x, this_start_y, width, this_height);
/*if( MyDebug.LOG )
Log.d(TAG, "### ApplyFunctionThread: time after reading pixels: " + (System.currentTimeMillis() - time_s));*/
function.apply(output, thread_index, input.cache_pixels_i, start_x, this_start_y, width, this_height);
}
/*if( MyDebug.LOG )
Log.d(TAG, "### ApplyFunctionThread: time after apply: " + (System.currentTimeMillis() - time_s));*/
if( output != null ) {
// write cached pixels back to output bitmap
if( output.bitmap != null ) {
/*if( MyDebug.LOG ) {
Log.d(TAG, "this_start_y: " + this_start_y);
Log.d(TAG, "output_shift_y: " + output_shift_y);
Log.d(TAG, "this_height: " + this_height);
Log.d(TAG, "height: " + output.bitmap.getHeight());
}*/
output.bitmap.setPixels(output.cache_pixels_i, 0, width, output_start_x, this_start_y+output_shift_y, width, this_height);
}
}
this_start_y = this_stop_y;
}
}
}
/** Applies a function to the specified pixels of the supplied bitmap.
*/
public static void applyFunction(ApplyFunctionInterface function, Bitmap bitmap, Bitmap output, int start_x, int start_y, int stop_x, int stop_y) {
applyFunction(function, bitmap, output, start_x, start_y, stop_x, stop_y, start_x, start_y);
}
/** Applies a function to the specified pixels of the supplied bitmap.
* The region is split into horizontal bands, one per worker thread; this call blocks
* until all threads have finished.
*/
static void applyFunction(ApplyFunctionInterface function, Bitmap bitmap, Bitmap output, int start_x, int start_y, int stop_x, int stop_y, int output_start_x, int output_start_y) {
if( MyDebug.LOG )
Log.d(TAG, "applyFunction [bitmap]");
long time_s = System.currentTimeMillis();
int height = stop_y-start_y;
if( MyDebug.LOG )
Log.d(TAG, "height: " + height);
//final int n_threads = 1;
// use a single thread for very short regions, where threading overhead isn't worth it
final int n_threads = height >= 16 ? 4 : 1;
//final int n_threads = height >= 16 ? 8 : 1;
function.init(n_threads);
ApplyFunctionThread [] threads = new ApplyFunctionThread[n_threads];
int st_indx = 0;
for(int i=0;i<n_threads;i++) {
int nd_indx = (((i+1)*height)/n_threads);
/*if( MyDebug.LOG )
Log.d(TAG, "thread " + i + " from " + st_indx + " to " + nd_indx);*/
threads[i] = new ApplyFunctionThread(i, function, bitmap, start_x, start_y+st_indx, stop_x, start_y+nd_indx);
if( output != null )
threads[i].setOutput(output, output_start_x, output_start_y+st_indx);
st_indx = nd_indx;
}
if( MyDebug.LOG )
Log.d(TAG, "start threads");
for(int i=0;i<n_threads;i++) {
threads[i].start();
}
if( MyDebug.LOG )
Log.d(TAG, "wait for threads to complete");
try {
for(int i=0;i<n_threads;i++) {
threads[i].join();
}
}
catch(InterruptedException e) {
Log.e(TAG, "applyFunction threads interrupted");
throw new RuntimeException(e);
}
//function.init(1);
//ApplyFunctionThread thread = new ApplyFunctionThread(0, function, bitmap, start_x, start_y, stop_x, stop_y);
//thread.run();
if( MyDebug.LOG )
Log.d(TAG, "applyFunction time: " + (System.currentTimeMillis() - time_s));
}
}

View file

@ -0,0 +1,34 @@
package net.sourceforge.opencamera;
import static android.content.Context.KEYGUARD_SERVICE;
import android.app.Activity;
import android.app.KeyguardManager;
import android.os.Build;
import android.util.Log;
/** Helper for running code that requires the device to be unlocked. */
public class KeyguardUtils {
private static final String TAG = "KeyguardUtils";
/** Runs the callback once the keyguard (lock screen) is not blocking the activity.
* On Android O+ with a locked keyguard, requests dismissal first and runs the callback
* only on successful dismissal; otherwise the callback runs immediately.
* NOTE(review): only onDismissSucceeded is overridden, so if the user cancels or
* dismissal fails, the callback is never invoked - confirm this is intended.
*/
public static void requireKeyguard(Activity activity, Runnable callback) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
KeyguardManager keyguardManager = (KeyguardManager) activity.getSystemService(KEYGUARD_SERVICE);
if (keyguardManager == null || !keyguardManager.isKeyguardLocked()) {
// nothing to dismiss - run straight away
callback.run();
return;
}
keyguardManager.requestDismissKeyguard(activity, new KeyguardManager.KeyguardDismissCallback() {
@Override
public void onDismissSucceeded() {
if( MyDebug.LOG )
Log.d(TAG, "onDismissSucceeded");
callback.run();
if( MyDebug.LOG )
Log.d(TAG, "onDismissSucceeded: after callback run");
}
});
} else {
// requestDismissKeyguard unavailable before API 26; run unconditionally
callback.run();
}
}
}

View file

@ -0,0 +1,357 @@
package net.sourceforge.opencamera;
import android.Manifest;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import android.util.Log;
/** Handles listening for GPS location (both coarse and fine).
*/
public class LocationSupplier {
private static final String TAG = "LocationSupplier";
private final Context context;
private final LocationManager locationManager;
private MyLocationListener [] locationListeners;
private volatile boolean test_force_no_location; // if true, always return null location; must be volatile for test project setting the state
private Location cached_location;
private long cached_location_ms;
/** @param context Context used to look up the system LocationManager service. */
LocationSupplier(Context context) {
this.context = context;
locationManager = (LocationManager)context.getSystemService(Context.LOCATION_SERVICE);
}
/** Returns the cached location if it was stored within the last 20 seconds, else null
 *  (expiring the stale cache entry as a side effect).
 */
private Location getCachedLocation() {
    if( cached_location == null )
        return null;
    if( System.currentTimeMillis() <= cached_location_ms + 20000 )
        return cached_location;
    // cache entry has expired
    cached_location = null;
    return null;
}
/** Cache the current best location. Note that we intentionally call getLocation() from this
* method rather than passing it a location from onLocationChanged(), as we don't want a
* coarse location overriding a better fine location.
*/
private void cacheLocation() {
if( MyDebug.LOG )
Log.d(TAG, "cacheLocation");
Location location = getLocation();
if( location == null ) {
// this isn't an error as it can happen that we receive a call to onLocationChanged() after
// having freed the location listener (possibly because LocationManager had already queued
// a call to onLocationChanged?
// we should not set cached_location to null in such cases
// wrap in MyDebug.LOG for consistency with all other debug logging in this class
if( MyDebug.LOG )
Log.d(TAG, "### asked to cache location when location not available");
}
else {
// defensive copy, so later mutations of the listener's Location don't affect the cache
cached_location = new Location(location);
cached_location_ms = System.currentTimeMillis();
}
}
/** Optional extra information returned alongside a location from getLocation(). */
public static class LocationInfo {
    private boolean location_was_cached;

    /** @return Whether the returned location came from the cache rather than a live listener. */
    public boolean LocationWasCached() {
        return this.location_was_cached;
    }
}
/** If adding extra calls to this, consider whether explicit user permission is required, and whether
 * privacy policy or data privacy section needs updating.
 * @return Returns null if location not available.
 */
public Location getLocation() {
    return this.getLocation(null);
}
/** If adding extra calls to this, consider whether explicit user permission is required, and whether
 * privacy policy or data privacy section needs updating.
 * @param locationInfo Optional class to return additional information about the location.
 * @return Returns null if location not available.
 */
public Location getLocation(LocationInfo locationInfo) {
    if( locationInfo != null )
        locationInfo.location_was_cached = false; // init
    if( locationListeners == null ) {
        // if we have disabled location listening, then don't return a cached location anyway -
        // in theory, callers should have already checked for user permission/setting before calling
        // getLocation(), but just in case we didn't, don't want to return a cached location
        return null;
    }
    if( test_force_no_location )
        return null;
    // location listeners should be stored in order best to worst
    for(MyLocationListener listener : locationListeners) {
        Location live = listener.getLocation();
        if( live != null )
            return live;
    }
    // no live fix available: fall back to the (time-limited) cache
    Location cached = getCachedLocation();
    if( cached != null && locationInfo != null )
        locationInfo.location_was_cached = true;
    return cached;
}
/** Listens to a single location provider, keeping the last valid fix. */
private class MyLocationListener implements LocationListener {
private Location location; // last valid fix from this provider, or null
volatile boolean test_has_received_location; // must be volatile for test project reading the state
Location getLocation() {
return location;
}
public void onLocationChanged(@NonNull Location location) {
if( MyDebug.LOG )
Log.d(TAG, "onLocationChanged");
this.test_has_received_location = true;
// Android camera source claims we need to check lat/long != 0.0d
// also check for not being null just in case - had a nullpointerexception on Google Play!
if( location != null && ( location.getLatitude() != 0.0d || location.getLongitude() != 0.0d ) ) {
if( MyDebug.LOG ) {
Log.d(TAG, "received location");
// don't log location, in case of privacy!
}
this.location = location;
cacheLocation();
}
}
// NOTE(review): onStatusChanged is deprecated since API 29 and no longer invoked there -
// kept for older devices; confirm against LocationListener documentation
public void onStatusChanged(String provider, int status, Bundle extras) {
switch( status ) {
case LocationProvider.OUT_OF_SERVICE:
case LocationProvider.TEMPORARILY_UNAVAILABLE:
{
if( MyDebug.LOG ) {
if( status == LocationProvider.OUT_OF_SERVICE )
Log.d(TAG, "location provider out of service");
else if( status == LocationProvider.TEMPORARILY_UNAVAILABLE )
Log.d(TAG, "location provider temporarily unavailable");
}
// provider unusable: drop both the live fix and the outer cache
this.location = null;
this.test_has_received_location = false;
cached_location = null;
break;
}
default:
break;
}
}
public void onProviderEnabled(@NonNull String provider) {
}
public void onProviderDisabled(@NonNull String provider) {
if( MyDebug.LOG )
Log.d(TAG, "onProviderDisabled");
// user disabled the provider: drop both the live fix and the outer cache
this.location = null;
this.test_has_received_location = false;
cached_location = null;
}
}
/** Sets up location listeners (network and GPS), if the user preference for storing location
 *  is enabled and they aren't already set up; frees them if the preference is disabled.
 *  Best to only call this from MainActivity.initLocation().
 * @return Returns false if location permission not available for either coarse or fine.
 *         Important to only return false if we actually want/need to ask the user for location
 *         permission!
 */
boolean setupLocationListener() {
    if( MyDebug.LOG )
        Log.d(TAG, "setupLocationListener");
    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
    // Define a listener that responds to location updates
    // we only set it up if store_location is true, important for privacy and unnecessary battery use
    boolean store_location = sharedPreferences.getBoolean(PreferenceKeys.LocationPreferenceKey, false);
    if( store_location && locationListeners == null ) {
        // Note, ContextCompat.checkSelfPermission is meant to handle being called on any Android version, i.e., pre
        // Android Marshmallow it should return true as permissions are set an installation, and can't be switched off by
        // the user. However on Galaxy Nexus Android 4.3 and Nexus 7 (2013) Android 5.1.1, ACCESS_COARSE_LOCATION returns
        // PERMISSION_DENIED! So we keep the checks to Android Marshmallow or later (where we need them), and avoid
        // checking behaviour for earlier devices.
        boolean has_coarse_location_permission;
        boolean has_fine_location_permission;
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) {
            if( MyDebug.LOG )
                Log.d(TAG, "check for location permissions");
            has_coarse_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED;
            has_fine_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED;
            if( MyDebug.LOG ) {
                Log.d(TAG, "has_coarse_location_permission? " + has_coarse_location_permission);
                Log.d(TAG, "has_fine_location_permission? " + has_fine_location_permission);
            }
            //has_coarse_location_permission = false; // test
            //has_fine_location_permission = false; // test
            // require at least one permission to be present
            // will be important for Android 12+ where user can grant only coarse permission - we still
            // want to support geotagging in such cases
            if( !has_coarse_location_permission && !has_fine_location_permission ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "location permission not available");
                // return false, which tells caller to request permission - we'll call this function again if permission is granted
                return false;
            }
        }
        else {
            // permissions always available pre-Android 6
            has_coarse_location_permission = true;
            has_fine_location_permission = true;
        }

        // location listeners should be stored in order best to worst,
        // so [0] is the fine (GPS) listener, [1] is the coarse (network) listener
        locationListeners = new MyLocationListener[2];
        locationListeners[0] = new MyLocationListener();
        locationListeners[1] = new MyLocationListener();

        // location listeners should be stored in order best to worst
        // also see https://sourceforge.net/p/opencamera/tickets/1/ - need to check provider is available
        // now also need to check for permissions - need to support devices that might have one but not both of fine and coarse permissions supplied
        if( has_coarse_location_permission && locationManager.getAllProviders().contains(LocationManager.NETWORK_PROVIDER) ) {
            locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000, 0, locationListeners[1]);
            if( MyDebug.LOG )
                Log.d(TAG, "created coarse (network) location listener");
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "don't have a NETWORK_PROVIDER");
        }
        if( has_fine_location_permission && locationManager.getAllProviders().contains(LocationManager.GPS_PROVIDER) ) {
            locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, locationListeners[0]);
            if( MyDebug.LOG )
                Log.d(TAG, "created fine (gps) location listener");
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "don't have a GPS_PROVIDER");
        }
    }
    else if( !store_location ) {
        // preference disabled: tear down any existing listeners (privacy/battery)
        freeLocationListeners();
    }
    // important to return true even if we didn't set up the location listeners - as
    // returning false indicates to ask user for location permission (which we don't want to
    // do if PreferenceKeys.LocationPreferenceKey preference isn't true)
    return true;
}
/** Removes and discards any active location listeners; no-op if none were set up.
 *  On Android 6+ we bail out without freeing if neither coarse nor fine location permission
 *  is currently granted, since Android Lint claims LocationManager.removeUpdates() needs
 *  location permission - also see
 *  http://stackoverflow.com/questions/32715189/location-manager-remove-updates-permission
 */
void freeLocationListeners() {
    if( MyDebug.LOG )
        Log.d(TAG, "freeLocationListeners");
    if( locationListeners == null ) {
        // nothing to free
        return;
    }
    if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.d(TAG, "check for location permissions");
        boolean has_coarse_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED;
        boolean has_fine_location_permission = ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED;
        if( MyDebug.LOG ) {
            Log.d(TAG, "has_coarse_location_permission? " + has_coarse_location_permission);
            Log.d(TAG, "has_fine_location_permission? " + has_fine_location_permission);
        }
        // require at least one permission to be present
        if( !has_coarse_location_permission && !has_fine_location_permission ) {
            if( MyDebug.LOG )
                Log.d(TAG, "location permission not available");
            return;
        }
    }
    for(MyLocationListener listener : locationListeners) {
        locationManager.removeUpdates(listener);
    }
    locationListeners = null;
    if( MyDebug.LOG )
        Log.d(TAG, "location listeners now freed");
}
// for testing:
/** For testing: whether any of the location listeners has received a location callback. */
public boolean testHasReceivedLocation() {
    if( locationListeners == null )
        return false;
    boolean received = false;
    for(MyLocationListener listener : locationListeners) {
        if( listener.test_has_received_location ) {
            received = true;
            break;
        }
    }
    return received;
}
/** For testing: when set to true, forces getLocation() to behave as if no location is available. */
public void setForceNoLocation(boolean test_force_no_location) {
    this.test_force_no_location = test_force_no_location;
}
/** Use this when we want to test (assert) that location listeners are turned on.
 *  If we want to assert that they are turned off, then use noLocationListeners.
 * @return Whether both listener slots (network and gps) have been created.
 */
public boolean hasLocationListeners() {
    MyLocationListener [] listeners = this.locationListeners;
    if( listeners == null || listeners.length != 2 )
        return false;
    for(MyLocationListener listener : listeners) {
        if( listener == null )
            return false;
    }
    return true;
}
/** Use this when we want to test (assert) that location listeners are turned off. Note that this
 *  is NOT an inverse of hasLocationListeners. For example this means that if
 *  locationListeners.length==1, hasLocationListeners would return false (so we'd flag up that
 *  we've not set them up correctly), but noLocationListeners would also return false (to flag
 *  up that we did set some location listeners up).
 * @return Whether no location listeners at all have been set up.
 */
public boolean noLocationListeners() {
    return this.locationListeners == null;
}
/** Converts a decimal coordinate (latitude or longitude) to a degrees/minutes/seconds string,
 *  e.g. 1.5 becomes 1\u00b030'0". Seconds are truncated to whole integers. The "-" sign is
 *  suppressed for negative coordinates smaller in magnitude than 1 arcsecond, so that the
 *  result doesn't display as negative zero.
 * @param coord The coordinate in decimal degrees.
 * @return The DMS-formatted string.
 */
public static String locationToDMS(double coord) {
    boolean negative = coord < 0.0;
    double value = Math.abs(coord);

    int deg = (int)value;
    double rem = (value - deg) * 60;
    int min = (int)rem;
    double sec_f = (rem - min) * 60;
    int sec = (int)sec_f;

    // suppress the sign when all displayed components are zero (magnitude under 1")
    boolean all_zero = (deg == 0 && min == 0 && sec == 0);
    String sign = (negative && !all_zero) ? "-" : "";

    // use unicode rather than degrees symbol, due to Android Studio warning - see https://sourceforge.net/p/opencamera/tickets/107/
    return sign + deg + "\u00b0" + min + "'" + sec + "\"";
}
}

View file

@ -0,0 +1,216 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.SharedPreferences;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.preference.PreferenceManager;
import android.util.Log;
/** Handles magnetic sensor: registration/unregistration of the listener (driven by user
 *  preferences), and warning the user via a dialog when sensor accuracy is poor.
 */
class MagneticSensor {
    private static final String TAG = "MagneticSensor";

    private final MainActivity main_activity;
    private Sensor mSensorMagnetic; // null if the device has no magnetic field sensor
    private int magnetic_accuracy = -1; // last accuracy reported by the sensor; -1 means not yet known
    private AlertDialog magnetic_accuracy_dialog; // poor-accuracy dialog, if one is currently open
    private boolean magneticListenerIsRegistered; // whether magneticListener is currently registered

    MagneticSensor(final MainActivity main_activity) {
        this.main_activity = main_activity;
    }

    /** Looks up and stores the device's default magnetic field sensor, if present. */
    void initSensor(final SensorManager mSensorManager) {
        if( MyDebug.LOG )
            Log.d(TAG, "initSensor");
        if( mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD) != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "found magnetic sensor");
            mSensorMagnetic = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "no support for magnetic sensor");
        }
    }

    /** Registers the magnetic sensor, only if it's required (by user preferences), and hasn't already
     *  been registered.
     *  If the magnetic sensor was previously registered, but is no longer required by user preferences,
     *  then it is unregistered.
     */
    void registerMagneticListener(final SensorManager mSensorManager) {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
        if( !magneticListenerIsRegistered ) {
            if( needsMagneticSensor(sharedPreferences) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "register magneticListener");
                mSensorManager.registerListener(magneticListener, mSensorMagnetic, SensorManager.SENSOR_DELAY_NORMAL);
                magneticListenerIsRegistered = true;
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "don't register magneticListener as not needed");
            }
        }
        else {
            if( needsMagneticSensor(sharedPreferences) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "magneticListener already registered");
            }
            else {
                // preference changed since registration: listener no longer needed
                if( MyDebug.LOG )
                    Log.d(TAG, "magneticListener already registered but no longer needed");
                mSensorManager.unregisterListener(magneticListener);
                magneticListenerIsRegistered = false;
            }
        }
    }

    /** Unregisters the magnetic sensor, if it was registered.
     */
    void unregisterMagneticListener(final SensorManager mSensorManager) {
        if( magneticListenerIsRegistered ) {
            if( MyDebug.LOG )
                Log.d(TAG, "unregister magneticListener");
            mSensorManager.unregisterListener(magneticListener);
            magneticListenerIsRegistered = false;
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "magneticListener wasn't registered");
        }
    }

    // Receives sensor callbacks: accuracy changes drive the poor-accuracy dialog; readings are
    // forwarded to the preview (used for geo-direction features).
    private final SensorEventListener magneticListener = new SensorEventListener() {
        @Override
        public void onAccuracyChanged(Sensor sensor, int accuracy) {
            if( MyDebug.LOG )
                Log.d(TAG, "magneticListener.onAccuracyChanged: " + accuracy);
            //accuracy = SensorManager.SENSOR_STATUS_ACCURACY_LOW; // test
            MagneticSensor.this.magnetic_accuracy = accuracy;
            setMagneticAccuracyDialogText(); // update if a dialog is already open for this
            checkMagneticAccuracy();

            // test accuracy changing after dialog opened:
            /*Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                public void run() {
                    MainActivity.this.magnetic_accuracy = SensorManager.SENSOR_STATUS_ACCURACY_HIGH;
                    setMagneticAccuracyDialogText();
                    checkMagneticAccuracy();
                }
            }, 5000);*/
        }

        @Override
        public void onSensorChanged(SensorEvent event) {
            main_activity.getPreview().onMagneticSensorChanged(event);
        }
    };

    /** Updates the message of the accuracy dialog (if open) to reflect the current accuracy. */
    private void setMagneticAccuracyDialogText() {
        if( MyDebug.LOG )
            Log.d(TAG, "setMagneticAccuracyDialogText()");
        if( magnetic_accuracy_dialog != null ) {
            String message = main_activity.getResources().getString(R.string.magnetic_accuracy_info) + " ";
            switch( magnetic_accuracy ) {
                case SensorManager.SENSOR_STATUS_UNRELIABLE:
                    message += main_activity.getResources().getString(R.string.accuracy_unreliable);
                    break;
                case SensorManager.SENSOR_STATUS_ACCURACY_LOW:
                    message += main_activity.getResources().getString(R.string.accuracy_low);
                    break;
                case SensorManager.SENSOR_STATUS_ACCURACY_MEDIUM:
                    message += main_activity.getResources().getString(R.string.accuracy_medium);
                    break;
                case SensorManager.SENSOR_STATUS_ACCURACY_HIGH:
                    message += main_activity.getResources().getString(R.string.accuracy_high);
                    break;
                default:
                    message += main_activity.getResources().getString(R.string.accuracy_unknown);
                    break;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "message: " + message);
            magnetic_accuracy_dialog.setMessage(message);
        }
    }

    private boolean shown_magnetic_accuracy_dialog = false; // whether the dialog for poor magnetic accuracy has been shown since application start

    /** Checks whether the user should be informed about poor magnetic sensor accuracy, and shows
     *  the dialog if so.
     */
    void checkMagneticAccuracy() {
        if( MyDebug.LOG )
            Log.d(TAG, "checkMagneticAccuracy(): " + magnetic_accuracy);
        if( magnetic_accuracy != SensorManager.SENSOR_STATUS_UNRELIABLE && magnetic_accuracy != SensorManager.SENSOR_STATUS_ACCURACY_LOW ) {
            if( MyDebug.LOG )
                Log.d(TAG, "accuracy is good enough (or accuracy not yet known)");
        }
        else if( shown_magnetic_accuracy_dialog ) {
            // if we've shown the dialog since application start, then don't show again even if the user didn't click to not show again
            if( MyDebug.LOG )
                Log.d(TAG, "already shown_magnetic_accuracy_dialog");
        }
        else if( main_activity.getPreview().isTakingPhotoOrOnTimer() || main_activity.getPreview().isVideoRecording() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "don't disturb whilst taking photo, on timer, or recording video");
        }
        else if( main_activity.isCameraInBackground() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "don't show magnetic accuracy dialog due to camera in background");
            // don't want to show dialog if another is open, or in settings, etc
        }
        else {
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
            if( !needsMagneticSensor(sharedPreferences) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "don't need magnetic sensor");
                // note, we shouldn't set shown_magnetic_accuracy_dialog to true here, otherwise we won't pick up if the user enables one of these options
            }
            else if( sharedPreferences.contains(PreferenceKeys.MagneticAccuracyPreferenceKey) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "user selected to no longer show the dialog");
                shown_magnetic_accuracy_dialog = true; // also set this flag, so future calls to checkMagneticAccuracy() will exit without needing to get/read the SharedPreferences
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "show dialog for magnetic accuracy");
                shown_magnetic_accuracy_dialog = true;
                magnetic_accuracy_dialog = main_activity.getMainUI().showInfoDialog(R.string.magnetic_accuracy_title, 0, PreferenceKeys.MagneticAccuracyPreferenceKey);
                setMagneticAccuracyDialogText();
            }
        }
    }

    /** Whether the user preferences indicate that we need the magnetic sensor to be enabled.
     */
    private boolean needsMagneticSensor(SharedPreferences sharedPreferences) {
        if( main_activity.getApplicationInterface().getGeodirectionPref() ||
                sharedPreferences.getBoolean(PreferenceKeys.AddYPRToComments, false) ||
                sharedPreferences.getBoolean(PreferenceKeys.ShowGeoDirectionLinesPreferenceKey, false) ||
                sharedPreferences.getBoolean(PreferenceKeys.ShowGeoDirectionPreferenceKey, false) ) {
            return true;
        }
        return false;
    }

    /** Returns the last reported sensor accuracy, or -1 if not yet known. */
    int getMagneticAccuracy() {
        return this.magnetic_accuracy;
    }

    /** Clears the reference to the accuracy dialog (call when the dialog is dismissed). */
    void clearDialog() {
        this.magnetic_accuracy_dialog = null;
    }
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,106 @@
package net.sourceforge.opencamera;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
/** Handles the audio "noise" trigger option: watches the audio level reported by AudioListener
 *  and triggers taking a photo when a short, sharp noise is detected (a jump from quiet to
 *  loud followed by a drop back to quiet within a short time window).
 */
public class MyAudioTriggerListenerCallback implements AudioListener.AudioListenerCallback {
    private static final String TAG = "MyAudioTriggerLstnrCb";

    private final MainActivity main_activity;
    private int last_level = -1; // previous audio level, or -1 if no sample received yet
    private long time_quiet_loud = -1; // time (ms) when the level last jumped from quiet to loud, or -1
    private long time_last_audio_trigger_photo = -1; // time (ms) when we last triggered a photo, or -1
    private int audio_noise_sensitivity = -1; // level-change threshold that counts as getting louder/quieter

    MyAudioTriggerListenerCallback(MainActivity main_activity) {
        this.main_activity = main_activity;
    }

    // Sets the threshold used to decide whether a level change counts as loud/quiet.
    void setAudioNoiseSensitivity(int audio_noise_sensitivity) {
        this.audio_noise_sensitivity = audio_noise_sensitivity;
    }

    /** Listens to audio noise and decides when there's been a "loud" noise to trigger taking a photo.
     */
    @Override
    public void onAudio(int level) {
        boolean audio_trigger = false;
        /*if( level > 150 ) {
            if( MyDebug.LOG )
                Log.d(TAG, "loud noise!: " + level);
            audio_trigger = true;
        }*/

        if( last_level == -1 ) {
            // first sample: just record the baseline level
            last_level = level;
            return;
        }
        int diff = level - last_level;
        if( MyDebug.LOG ) {
            Log.d(TAG, "noise_sensitivity: " + audio_noise_sensitivity);
            Log.d(TAG, "diff: " + diff);
        }
        if( diff > audio_noise_sensitivity ) {
            // level jumped up: remember when we went from quiet to loud
            if( MyDebug.LOG )
                Log.d(TAG, "got louder!: " + last_level + " to " + level + " , diff: " + diff);
            time_quiet_loud = System.currentTimeMillis();
            if( MyDebug.LOG )
                Log.d(TAG, " time: " + time_quiet_loud);
        }
        else if( diff < -audio_noise_sensitivity && time_quiet_loud != -1 ) {
            // level dropped back down: only trigger if the loud period was short (a sharp noise)
            if( MyDebug.LOG )
                Log.d(TAG, "got quieter!: " + last_level + " to " + level + " , diff: " + diff);
            long time_now = System.currentTimeMillis();
            long duration = time_now - time_quiet_loud;
            if( MyDebug.LOG ) {
                Log.d(TAG, "stopped being loud - was loud since: " + time_quiet_loud);
                Log.d(TAG, " time_now: " + time_now);
                Log.d(TAG, " duration: " + duration);
            }
            if( duration < 1500 ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "audio_trigger set");
                audio_trigger = true;
            }
            time_quiet_loud = -1;
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "audio level: " + last_level + " to " + level + " , diff: " + diff);
        }

        last_level = level;

        if( audio_trigger ) {
            if( MyDebug.LOG )
                Log.d(TAG, "audio trigger");
            // need to run on UI thread so that this function returns quickly (otherwise we'll have lag in processing the audio)
            // but also need to check we're not currently taking a photo or on timer, so we don't repeatedly queue up takePicture() calls, or cancel a timer
            long time_now = System.currentTimeMillis();
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
            boolean want_audio_listener = sharedPreferences.getString(PreferenceKeys.AudioControlPreferenceKey, "none").equals("noise");
            if( time_last_audio_trigger_photo != -1 && time_now - time_last_audio_trigger_photo < 5000 ) {
                // avoid risk of repeatedly being triggered - as well as problem of being triggered again by the camera's own "beep"!
                if( MyDebug.LOG )
                    Log.d(TAG, "ignore loud noise due to too soon since last audio triggered photo: " + (time_now - time_last_audio_trigger_photo));
            }
            else if( !want_audio_listener ) {
                // just in case this is a callback from an AudioListener before it's been freed (e.g., if there's a loud noise when exiting settings after turning the option off
                if( MyDebug.LOG )
                    Log.d(TAG, "ignore loud noise due to audio listener option turned off");
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "audio trigger from loud noise");
                time_last_audio_trigger_photo = time_now;
                main_activity.audioTrigger();
            }
        }
    }
}

View file

@ -0,0 +1,21 @@
package net.sourceforge.opencamera;
import android.util.Log;
/** Helper class for logging.
 */
public class MyDebug {
    /** Global constant to control logging, should always be set to false in
     * released versions.
     */
    public static final boolean LOG = false;

    // static utility class - not meant to be instantiated
    private MyDebug() {
    }

    /** Wrapper to print exceptions, should use instead of e.printStackTrace().
     * @param tag Log tag identifying the caller.
     * @param msg Message giving the context of the exception.
     * @param tr  The exception to log.
     */
    public static void logStackTrace(String tag, String msg, Throwable tr) {
        if( LOG ) {
            // don't log exceptions in releases
            Log.e(tag, msg, tr);
        }
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,63 @@
package net.sourceforge.opencamera;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Build;
import android.service.quicksettings.TileService;
import androidx.annotation.RequiresApi;
import android.util.Log;
/** Provides service for quick settings tile (rear camera / photo mode).
 */
@RequiresApi(api = Build.VERSION_CODES.N)
public class MyTileService extends TileService {
    private static final String TAG = "MyTileService";
    // intent action, so MainActivity can tell it was launched from this tile
    public static final String TILE_ID = "net.sourceforge.opencamera.TILE_CAMERA";

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onTileAdded() {
        super.onTileAdded();
    }

    @Override
    public void onTileRemoved() {
        super.onTileRemoved();
    }

    @Override
    public void onStartListening() {
        super.onStartListening();
    }

    @Override
    public void onStopListening() {
        super.onStopListening();
    }

    /** Launches MainActivity (collapsing the notification panel) when the tile is tapped. */
    @Override
    public void onClick() {
        if( MyDebug.LOG )
            Log.d(TAG, "onClick");
        super.onClick();
        Intent intent = new Intent(this, MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        intent.setAction(TILE_ID);
        // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) {
            // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+
            // FLAG_IMMUTABLE needed for PendingIntents on Android 12+
            PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE);
            startActivityAndCollapse(pending_intent);
        }
        else {
            // still get warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+
            // and only seems possible to disable the warning for the function, not this statement
            startActivityAndCollapse(intent);
        }
    }
}

View file

@ -0,0 +1,63 @@
package net.sourceforge.opencamera;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Build;
import android.service.quicksettings.TileService;
import androidx.annotation.RequiresApi;
import android.util.Log;
/** Provides service for quick settings tile (front camera).
 */
@RequiresApi(api = Build.VERSION_CODES.N)
public class MyTileServiceFrontCamera extends TileService {
    private static final String TAG = "MyTileServiceFrontCam";
    // intent action, so MainActivity can tell it was launched from this tile
    public static final String TILE_ID = "net.sourceforge.opencamera.TILE_FRONT_CAMERA";

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onTileAdded() {
        super.onTileAdded();
    }

    @Override
    public void onTileRemoved() {
        super.onTileRemoved();
    }

    @Override
    public void onStartListening() {
        super.onStartListening();
    }

    @Override
    public void onStopListening() {
        super.onStopListening();
    }

    /** Launches MainActivity (collapsing the notification panel) when the tile is tapped. */
    @Override
    public void onClick() {
        if( MyDebug.LOG )
            Log.d(TAG, "onClick");
        super.onClick();
        Intent intent = new Intent(this, MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        intent.setAction(TILE_ID);
        // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) {
            // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+
            // FLAG_IMMUTABLE needed for PendingIntents on Android 12+
            PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE);
            startActivityAndCollapse(pending_intent);
        }
        else {
            // still get warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+
            // and only seems possible to disable the warning for the function, not this statement
            startActivityAndCollapse(intent);
        }
    }
}

View file

@ -0,0 +1,63 @@
package net.sourceforge.opencamera;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Build;
import android.service.quicksettings.TileService;
import androidx.annotation.RequiresApi;
import android.util.Log;
/** Provides service for quick settings tile (video mode).
 */
@RequiresApi(api = Build.VERSION_CODES.N)
public class MyTileServiceVideo extends TileService {
    private static final String TAG = "MyTileServiceVideo";
    // intent action, so MainActivity can tell it was launched from this tile
    public static final String TILE_ID = "net.sourceforge.opencamera.TILE_VIDEO";

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onTileAdded() {
        super.onTileAdded();
    }

    @Override
    public void onTileRemoved() {
        super.onTileRemoved();
    }

    @Override
    public void onStartListening() {
        super.onStartListening();
    }

    @Override
    public void onStopListening() {
        super.onStopListening();
    }

    /** Launches MainActivity (collapsing the notification panel) when the tile is tapped. */
    @Override
    public void onClick() {
        if( MyDebug.LOG )
            Log.d(TAG, "onClick");
        super.onClick();
        Intent intent = new Intent(this, MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        intent.setAction(TILE_ID);
        // use startActivityAndCollapse() instead of startActivity() so that the notification panel doesn't remain pulled down
        if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE ) {
            // startActivityAndCollapse(Intent) throws UnsupportedOperationException on Android 14+
            // FLAG_IMMUTABLE needed for PendingIntents on Android 12+
            PendingIntent pending_intent = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_IMMUTABLE);
            startActivityAndCollapse(pending_intent);
        }
        else {
            // still get warning for startActivityAndCollapse being deprecated, but startActivityAndCollapse(PendingIntent) requires Android 14+
            // and only seems possible to disable the warning for the function, not this statement
            startActivityAndCollapse(intent);
        }
    }
}

View file

@ -0,0 +1,59 @@
package net.sourceforge.opencamera;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.util.Log;
import android.widget.RemoteViews;
/** Handles the Open Camera "take photo" widget. This widget launches Open
* Camera, and immediately takes a photo.
*/
public class MyWidgetProviderTakePhoto extends AppWidgetProvider {
private static final String TAG = "MyWidgetProviderTakePho";
// see http://developer.android.com/guide/topics/appwidgets/index.html
public void onUpdate(Context context, AppWidgetManager appWidgetManager, int [] appWidgetIds) {
if( MyDebug.LOG )
Log.d(TAG, "onUpdate");
if( MyDebug.LOG )
Log.d(TAG, "length = " + appWidgetIds.length);
for(int appWidgetId : appWidgetIds) {
if( MyDebug.LOG )
Log.d(TAG, "appWidgetId: " + appWidgetId);
Intent intent = new Intent(context, TakePhoto.class);
int flags = PendingIntent.FLAG_UPDATE_CURRENT;
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.M )
flags = flags | PendingIntent.FLAG_IMMUTABLE; // needed for targetting Android 12+, but fine to set it all versions from Android 6 onwards
PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, intent, flags);
RemoteViews remote_views = new RemoteViews(context.getPackageName(), R.layout.widget_layout_take_photo);
remote_views.setOnClickPendingIntent(R.id.widget_take_photo, pendingIntent);
appWidgetManager.updateAppWidget(appWidgetId, remote_views);
}
}
/*@Override
public void onReceive(Context context, Intent intent) {
if( MyDebug.LOG ) {
Log.d(TAG, "onReceive " + intent);
}
if (intent.getAction().equals("net.sourceforge.opencamera.LAUNCH_OPEN_CAMERA")) {
if( MyDebug.LOG )
Log.d(TAG, "Launching MainActivity");
final Intent activity = new Intent(context, MainActivity.class);
activity.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(activity);
if( MyDebug.LOG )
Log.d(TAG, "done");
}
super.onReceive(context, intent);
}*/
}

View file

@ -0,0 +1,30 @@
package net.sourceforge.opencamera;
import android.app.Application;
import android.os.Process;
import android.util.Log;
/** We override the Application class to implement the workaround at
 * https://issuetracker.google.com/issues/36972466#comment14 for Google bug crash. It seems ugly,
 * but Google consider this a low priority despite calling these "bad behaviours" in applications!
 */
public class OpenCameraApplication extends Application {
    private static final String TAG = "OpenCameraApplication";

    @Override
    public void onCreate() {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate();
        checkAppReplacingState();
    }

    /** Part of the workaround above: if getResources() is null, the app process is being
     *  replaced (e.g., during an upgrade), so kill the process rather than continue and crash
     *  in a broken state.
     */
    private void checkAppReplacingState() {
        if( MyDebug.LOG )
            Log.d(TAG, "checkAppReplacingState");
        if( getResources() == null ) {
            Log.e(TAG, "app is replacing, kill");
            Process.killProcess(Process.myPid());
        }
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,20 @@
package net.sourceforge.opencamera;
/** Exception for PanoramaProcessor class.
 */
@SuppressWarnings("WeakerAccess")
public class PanoramaProcessorException extends Exception {
    // Error codes must be distinct, so that callers can distinguish failure causes via getCode().
    // (FAILED_TO_CROP was previously 1, incorrectly duplicating UNEQUAL_SIZES.)
    final static public int INVALID_N_IMAGES = 0; // the supplied number of images is not supported
    final static public int UNEQUAL_SIZES = 1; // images not of the same resolution
    final static public int FAILED_TO_CROP = 2; // failed to crop

    final private int code; // one of the error codes above

    PanoramaProcessorException(int code) {
        this.code = code;
    }

    /** @return The error code describing why panorama processing failed. */
    public int getCode() {
        return code;
    }
}

View file

@ -0,0 +1,354 @@
package net.sourceforge.opencamera;
import android.Manifest;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Build;
import android.preference.PreferenceManager;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import android.util.Log;
/** Android 6+ permission handling:
*/
public class PermissionHandler {
private static final String TAG = "PermissionHandler";

private final MainActivity main_activity;

// request codes passed to ActivityCompat.requestPermissions(), identifying which permission
// was asked for when the result comes back
final private static int MY_PERMISSIONS_REQUEST_CAMERA = 0;
final private static int MY_PERMISSIONS_REQUEST_STORAGE = 1;
final private static int MY_PERMISSIONS_REQUEST_RECORD_AUDIO = 2;
final private static int MY_PERMISSIONS_REQUEST_LOCATION = 3;

private boolean camera_denied; // whether the user requested to deny the camera permission
private long camera_denied_time_ms; // if denied, the time when this occurred
private boolean storage_denied; // whether the user requested to deny the storage permission
private long storage_denied_time_ms; // if denied, the time when this occurred
private boolean audio_denied; // whether the user requested to deny the record-audio permission
private long audio_denied_time_ms; // if denied, the time when this occurred
private boolean location_denied; // whether the user requested to deny the location permission
private long location_denied_time_ms; // if denied, the time when this occurred

// In some cases there can be a problem if the user denies a permission, we then get an onResume()
// (since application goes into background when showing system UI to request permission) at which
// point we try to request permission again! This would happen for camera and storage permissions.
// Whilst that isn't necessarily wrong, there would also be a problem if the user says
// "Don't ask again", we get stuck in a loop repeatedly asking the OS for permission (and it
// repeatedly being automatically denied) causing the UI to become sluggish.
// So instead we only try asking again if not within deny_delay_ms of the user denying that
// permission.
// Time shouldn't be too long, as the user might restart and then not be asked again for camera
// or storage permission.
final private static long deny_delay_ms = 1000;

PermissionHandler(MainActivity main_activity) {
    this.main_activity = main_activity;
}
/** Show a "rationale" to the user for needing a particular permission, then request that permission again
 *  once they close the dialog.
 * @param permission_code One of the MY_PERMISSIONS_REQUEST_* codes identifying the permission.
 */
private void showRequestPermissionRationale(final int permission_code) {
    if( MyDebug.LOG )
        Log.d(TAG, "showRequestPermissionRationale: " + permission_code);
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }

    // map the request code to the permission(s) to ask for, and the rationale message to show
    boolean ok = true;
    String [] permissions = null;
    int message_id = 0;
    switch (permission_code) {
        case MY_PERMISSIONS_REQUEST_CAMERA:
            if( MyDebug.LOG )
                Log.d(TAG, "display rationale for camera permission");
            permissions = new String[]{Manifest.permission.CAMERA};
            message_id = R.string.permission_rationale_camera;
            break;
        case MY_PERMISSIONS_REQUEST_STORAGE:
            if( MyDebug.LOG )
                Log.d(TAG, "display rationale for storage permission");
            permissions = new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE};
            message_id = R.string.permission_rationale_storage;
            break;
        case MY_PERMISSIONS_REQUEST_RECORD_AUDIO:
            if( MyDebug.LOG )
                Log.d(TAG, "display rationale for record audio permission");
            permissions = new String[]{Manifest.permission.RECORD_AUDIO};
            message_id = R.string.permission_rationale_record_audio;
            break;
        case MY_PERMISSIONS_REQUEST_LOCATION:
            if( MyDebug.LOG )
                Log.d(TAG, "display rationale for location permission");
            permissions = new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION};
            message_id = R.string.permission_rationale_location;
            break;
        default:
            if( MyDebug.LOG )
                Log.e(TAG, "showRequestPermissionRationale unknown permission_code: " + permission_code);
            ok = false;
            break;
    }

    if( ok ) {
        final String [] permissions_f = permissions;
        new AlertDialog.Builder(main_activity)
            .setTitle(R.string.permission_rationale_title)
            .setMessage(message_id)
            .setIcon(android.R.drawable.ic_dialog_alert)
            .setPositiveButton(android.R.string.ok, null)
            .setOnDismissListener(new DialogInterface.OnDismissListener() {
                public void onDismiss(DialogInterface dialog) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "requesting permission...");
                    // only request the permission once the rationale dialog has been dismissed
                    ActivityCompat.requestPermissions(main_activity, permissions_f, permission_code);
                }
            }).show();
    }
}
/** Requests the camera permission from the user, showing a rationale dialog first if the
 *  OS recommends one. Does nothing on pre-Android M, or if called too soon after the user
 *  last denied this permission (see deny_delay_ms).
 */
void requestCameraPermission() {
    if( MyDebug.LOG )
        Log.d(TAG, "requestCameraPermission");
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    if( camera_denied && System.currentTimeMillis() < camera_denied_time_ms + deny_delay_ms ) {
        if( MyDebug.LOG )
            Log.d(TAG, "too soon since user last denied permission");
        return;
    }
    final boolean show_rationale = ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.CAMERA);
    if( !show_rationale ) {
        // no rationale needed - go ahead and request the permission directly
        if( MyDebug.LOG )
            Log.d(TAG, "requesting camera permission...");
        ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
        return;
    }
    // Show an explanation to the user *asynchronously* -- don't block this thread waiting
    // for the user's response! The permission is requested again once the dialog is dismissed.
    showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_CAMERA);
}
/** Requests the write-external-storage permission from the user, showing a rationale dialog
 *  first if the OS recommends one. Does nothing on pre-Android M, when scoped storage is in
 *  use (no permission needed), or too soon after the user last denied this permission.
 */
void requestStoragePermission() {
    if( MyDebug.LOG )
        Log.d(TAG, "requestStoragePermission");
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    if( MainActivity.useScopedStorage() ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for scoped storage!");
        return;
    }
    if( storage_denied && System.currentTimeMillis() < storage_denied_time_ms + deny_delay_ms ) {
        if( MyDebug.LOG )
            Log.d(TAG, "too soon since user last denied permission");
        return;
    }
    final boolean show_rationale = ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.WRITE_EXTERNAL_STORAGE);
    if( !show_rationale ) {
        // no rationale needed - go ahead and request the permission directly
        if( MyDebug.LOG )
            Log.d(TAG, "requesting storage permission...");
        ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, MY_PERMISSIONS_REQUEST_STORAGE);
        return;
    }
    // Show an explanation to the user *asynchronously* -- don't block this thread waiting
    // for the user's response! The permission is requested again once the dialog is dismissed.
    showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_STORAGE);
}
/** Requests the record-audio permission from the user, showing a rationale dialog first if
 *  the OS recommends one. Does nothing on pre-Android M, or too soon after the user last
 *  denied this permission.
 */
void requestRecordAudioPermission() {
    if( MyDebug.LOG )
        Log.d(TAG, "requestRecordAudioPermission");
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    if( audio_denied && System.currentTimeMillis() < audio_denied_time_ms + deny_delay_ms ) {
        if( MyDebug.LOG )
            Log.d(TAG, "too soon since user last denied permission");
        return;
    }
    final boolean show_rationale = ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.RECORD_AUDIO);
    if( !show_rationale ) {
        // no rationale needed - go ahead and request the permission directly
        if( MyDebug.LOG )
            Log.d(TAG, "requesting record audio permission...");
        ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.RECORD_AUDIO}, MY_PERMISSIONS_REQUEST_RECORD_AUDIO);
        return;
    }
    // Show an explanation to the user *asynchronously* -- don't block this thread waiting
    // for the user's response! The permission is requested again once the dialog is dismissed.
    showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_RECORD_AUDIO);
}
/** Requests the fine+coarse location permissions from the user, showing a rationale dialog
 *  first if the OS recommends one for either permission. Does nothing on pre-Android M, or
 *  too soon after the user last denied location permission.
 */
void requestLocationPermission() {
    if( MyDebug.LOG )
        Log.d(TAG, "requestLocationPermission");
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    if( location_denied && System.currentTimeMillis() < location_denied_time_ms + deny_delay_ms ) {
        if( MyDebug.LOG )
            Log.d(TAG, "too soon since user last denied permission");
        return;
    }
    final boolean show_rationale =
            ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.ACCESS_FINE_LOCATION) ||
            ActivityCompat.shouldShowRequestPermissionRationale(main_activity, Manifest.permission.ACCESS_COARSE_LOCATION);
    if( !show_rationale ) {
        // no rationale needed - go ahead and request the permissions directly
        if( MyDebug.LOG )
            Log.d(TAG, "requesting location permissions...");
        ActivityCompat.requestPermissions(main_activity, new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION}, MY_PERMISSIONS_REQUEST_LOCATION);
        return;
    }
    // Show an explanation to the user *asynchronously* -- don't block this thread waiting
    // for the user's response! The permission is requested again once the dialog is dismissed.
    showRequestPermissionRationale(MY_PERMISSIONS_REQUEST_LOCATION);
}
/** Callback with the results of a permission request made via ActivityCompat.requestPermissions().
 *  Dispatches to a per-permission handler which records denials (for rate-limiting re-requests)
 *  or performs the follow-up action on grant.
 * @param requestCode  One of the MY_PERMISSIONS_REQUEST_* codes that was passed to requestPermissions().
 * @param grantResults Per-permission grant results; empty if the request was cancelled.
 */
public void onRequestPermissionsResult(int requestCode, @NonNull int[] grantResults) {
    if( MyDebug.LOG )
        Log.d(TAG, "onRequestPermissionsResult: requestCode " + requestCode);
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    switch( requestCode ) {
        case MY_PERMISSIONS_REQUEST_CAMERA:
            onCameraPermissionResult(grantResults);
            return;
        case MY_PERMISSIONS_REQUEST_STORAGE:
            onStoragePermissionResult(grantResults);
            return;
        case MY_PERMISSIONS_REQUEST_RECORD_AUDIO:
            onRecordAudioPermissionResult(grantResults);
            return;
        case MY_PERMISSIONS_REQUEST_LOCATION:
            onLocationPermissionResult(grantResults);
            return;
        default:
            if( MyDebug.LOG )
                Log.e(TAG, "unknown requestCode " + requestCode);
    }
}

/** Returns whether the first requested permission was granted.
 *  If the request was cancelled, the result array is empty, which counts as denied. */
private static boolean firstResultGranted(int[] grantResults) {
    return grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED;
}

/** Handles the result of a camera permission request: retries opening the camera on grant. */
private void onCameraPermissionResult(int[] grantResults) {
    if( firstResultGranted(grantResults) ) {
        if( MyDebug.LOG )
            Log.d(TAG, "camera permission granted");
        main_activity.getPreview().retryOpenCamera();
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, "camera permission denied");
        camera_denied = true;
        camera_denied_time_ms = System.currentTimeMillis();
        // permission denied - nothing else to do: the camera will remain closed
    }
}

/** Handles the result of a storage permission request: retries opening the camera on grant. */
private void onStoragePermissionResult(int[] grantResults) {
    if( firstResultGranted(grantResults) ) {
        if( MyDebug.LOG )
            Log.d(TAG, "storage permission granted");
        main_activity.getPreview().retryOpenCamera();
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, "storage permission denied");
        storage_denied = true;
        storage_denied_time_ms = System.currentTimeMillis();
        // permission denied - nothing else to do: the camera will remain closed
    }
}

/** Handles the result of a record audio permission request. No action is needed on grant. */
private void onRecordAudioPermissionResult(int[] grantResults) {
    if( firstResultGranted(grantResults) ) {
        if( MyDebug.LOG )
            Log.d(TAG, "record audio permission granted");
        // no need to do anything
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, "record audio permission denied");
        audio_denied = true;
        audio_denied_time_ms = System.currentTimeMillis();
        // no need to do anything
        // note that we don't turn off record audio option, as user may then record video not realising audio won't be recorded - best to be explicit each time
    }
}

/** Handles the result of a location permission request: starts location on grant, or turns
 *  the location preference back off on denial. */
private void onLocationPermissionResult(int[] grantResults) {
    if( grantResults.length == 2 && (grantResults[0] == PackageManager.PERMISSION_GRANTED || grantResults[1] == PackageManager.PERMISSION_GRANTED) ) {
        // On Android 12 users can choose to only grant approximate location. This means
        // one of the permissions will be denied, but as long as one location permission
        // is granted, we can still go ahead and use location.
        // Otherwise we have a problem that if user selects approximate location, we end
        // up turning the location option back off.
        if( MyDebug.LOG )
            Log.d(TAG, "location permission granted [1]");
        main_activity.initLocation();
    }
    else if( firstResultGranted(grantResults) ) {
        // in theory this code path is now redundant, but keep here just in case
        if( MyDebug.LOG )
            Log.d(TAG, "location permission granted [2]");
        main_activity.initLocation();
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, "location permission denied");
        location_denied = true;
        location_denied_time_ms = System.currentTimeMillis();
        // for location, seems best to turn the option back off rather than leaving it enabled but non-functional
        if( MyDebug.LOG )
            Log.d(TAG, "location permission not available, so switch location off");
        main_activity.getPreview().showToast(null, R.string.permission_location_not_available);
        SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(main_activity);
        SharedPreferences.Editor editor = settings.edit();
        editor.putBoolean(PreferenceKeys.LocationPreferenceKey, false);
        editor.apply();
    }
}
}

View file

@ -0,0 +1,386 @@
package net.sourceforge.opencamera;
/** Stores all of the string keys used for SharedPreferences.
 */
public class PreferenceKeys {
// must be static, to safely call from other Activities
// --- "don't show again" flags for intro/info dialogs ---
/** If this preference is set, no longer show the intro dialog.
 */
public static final String FirstTimePreferenceKey = "done_first_time";
/** This preference stores the version number seen by the user - used to show "What's New" dialog.
 */
public static final String LatestVersionPreferenceKey = "latest_version";
/** This preference stores whether to allow showing the "What's New" dialog.
 */
public static final String ShowWhatsNewPreferenceKey = "preference_show_whats_new";
/** If this preference is set, no longer show the auto-stabilise info dialog.
 */
public static final String AutoStabiliseInfoPreferenceKey = "done_auto_stabilise_info";
/** If this preference is set, no longer show the HDR info dialog.
 */
public static final String HDRInfoPreferenceKey = "done_hdr_info";
/** If this preference is set, no longer show the Panorama info dialog.
 */
public static final String PanoramaInfoPreferenceKey = "done_panorama_info";
/** If this preference is set, no longer show the raw info dialog.
 */
public static final String RawInfoPreferenceKey = "done_raw_info";
/** If this preference is set, no longer show the dialog for poor magnetic accuracy
 */
public static final String MagneticAccuracyPreferenceKey = "done_magnetic_accuracy";
public static final String CameraAPIPreferenceDefault = "preference_camera_api_old";
public static final String CameraAPIPreferenceKey = "preference_camera_api";
/** Builds the per-camera key suffix: the camera ID alone, or "cameraId_physicalId" when a
 *  physical camera ID is supplied.
 */
private static String getCameraIDKey(int cameraId, String cameraIdSPhysical) {
if( cameraIdSPhysical != null )
return cameraId + "_" + cameraIdSPhysical;
else
return String.valueOf(cameraId);
}
// don't set to be specific for physical cameras, as too confusing to have lots of different flash preferences
// also in Preview, we don't save the flash back if not supported
/** Per-camera key storing the chosen flash value. */
public static String getFlashPreferenceKey(int cameraId) {
return "flash_value_" + cameraId;
}
/** Per-camera key storing the chosen focus value, separately for photo and video. */
public static String getFocusPreferenceKey(int cameraId, boolean is_video) {
return "focus_value_" + cameraId + "_" + is_video;
}
public static final String FocusAssistPreferenceKey = "preference_focus_assist";
/** Per-camera key storing the chosen photo resolution. */
public static String getResolutionPreferenceKey(int cameraId, String cameraIdSPhysical) {
return "camera_resolution_" + getCameraIDKey(cameraId, cameraIdSPhysical);
}
/** Per-camera key storing the chosen video quality, with a separate key for high-speed video. */
public static String getVideoQualityPreferenceKey(int cameraId, String cameraIdSPhysical, boolean high_speed) {
return "video_quality_" + getCameraIDKey(cameraId, cameraIdSPhysical) + (high_speed ? "_highspeed" : "");
}
// --- photo / capture settings ---
public static final String OptimiseFocusPreferenceKey = "preference_photo_optimise_focus";
public static final String ImageFormatPreferenceKey = "preference_image_format";
public static final String IsVideoPreferenceKey = "is_video";
public static final String ExposurePreferenceKey = "preference_exposure";
public static final String ColorEffectPreferenceKey = "preference_color_effect";
public static final String SceneModePreferenceKey = "preference_scene_mode";
public static final String WhiteBalancePreferenceKey = "preference_white_balance";
public static final String WhiteBalanceTemperaturePreferenceKey = "preference_white_balance_temperature";
public static final String AntiBandingPreferenceKey = "preference_antibanding";
public static final String EdgeModePreferenceKey = "preference_edge_mode";
public static final String CameraNoiseReductionModePreferenceKey = "preference_noise_reduction_mode"; // n.b., this is for the Camera driver noise reduction mode, not Open Camera's NR photo mode
public static final String ISOPreferenceKey = "preference_iso";
public static final String ExposureTimePreferenceKey = "preference_exposure_time";
public static final String RawPreferenceKey = "preference_raw";
public static final String AllowRawForExpoBracketingPreferenceKey = "preference_raw_expo_bracketing";
public static final String AllowRawForFocusBracketingPreferenceKey = "preference_raw_focus_bracketing";
public static final String PanoramaCropPreferenceKey = "preference_panorama_crop";
public static final String PanoramaSaveExpoPreferenceKey = "preference_panorama_save";
public static final String ExpoBracketingNImagesPreferenceKey = "preference_expo_bracketing_n_images";
public static final String ExpoBracketingStopsPreferenceKey = "preference_expo_bracketing_stops";
public static final String FocusDistancePreferenceKey = "preference_focus_distance";
public static final String FocusBracketingTargetDistancePreferenceKey = "preference_focus_bracketing_target_distance";
public static final String FocusBracketingAutoSourceDistancePreferenceKey = "preference_focus_bracketing_auto_source_distance";
public static final String FocusBracketingNImagesPreferenceKey = "preference_focus_bracketing_n_images";
public static final String FocusBracketingAddInfinityPreferenceKey = "preference_focus_bracketing_add_infinity";
public static final String VolumeKeysPreferenceKey = "preference_volume_keys";
public static final String AudioControlPreferenceKey = "preference_audio_control";
public static final String AudioNoiseControlSensitivityPreferenceKey = "preference_audio_noise_control_sensitivity";
public static final String QualityPreferenceKey = "preference_quality";
public static final String AutoStabilisePreferenceKey = "preference_auto_stabilise";
public static final String PhotoModePreferenceKey = "preference_photo_mode";
public static final String HDRSaveExpoPreferenceKey = "preference_hdr_save_expo";
public static final String HDRTonemappingPreferenceKey = "preference_hdr_tonemapping";
public static final String HDRContrastEnhancementPreferenceKey = "preference_hdr_contrast_enhancement";
public static final String NRSaveExpoPreferenceKey = "preference_nr_save";
public static final String FastBurstNImagesPreferenceKey = "preference_fast_burst_n_images";
// --- location / EXIF / stamp settings ---
public static final String LocationPreferenceKey = "preference_location";
public static final String RemoveDeviceExifPreferenceKey = "preference_remove_device_exif";
public static final String GPSDirectionPreferenceKey = "preference_gps_direction";
public static final String RequireLocationPreferenceKey = "preference_require_location";
public static final String ExifArtistPreferenceKey = "preference_exif_artist";
public static final String ExifCopyrightPreferenceKey = "preference_exif_copyright";
public static final String StampPreferenceKey = "preference_stamp";
public static final String StampDateFormatPreferenceKey = "preference_stamp_dateformat";
public static final String StampTimeFormatPreferenceKey = "preference_stamp_timeformat";
public static final String StampGPSFormatPreferenceKey = "preference_stamp_gpsformat";
//public static final String StampGeoAddressPreferenceKey = "preference_stamp_geo_address";
public static final String UnitsDistancePreferenceKey = "preference_units_distance";
public static final String TextStampPreferenceKey = "preference_textstamp";
public static final String StampFontSizePreferenceKey = "preference_stamp_fontsize";
public static final String StampFontColorPreferenceKey = "preference_stamp_font_color";
public static final String StampStyleKey = "preference_stamp_style";
public static final String VideoSubtitlePref = "preference_video_subtitle";
public static final String FrontCameraMirrorKey = "preference_front_camera_mirror";
// --- remote control settings ---
public static final String EnableRemote = "preference_enable_remote";
public static final String RemoteName = "preference_remote_device_name";
public static final String RemoteType = "preference_remote_type";
public static final String WaterType = "preference_water_type";
//public static final String BackgroundPhotoSavingPreferenceKey = "preference_background_photo_saving";
// --- Camera2 API options ---
public static final String Camera2FakeFlashPreferenceKey = "preference_camera2_fake_flash";
public static final String Camera2DummyCaptureHackPreferenceKey = "preference_camera2_dummy_capture_hack";
public static final String Camera2FastBurstPreferenceKey = "preference_camera2_fast_burst";
public static final String Camera2PhotoVideoRecordingPreferenceKey = "preference_camera2_photo_video_recording";
// --- GUI / on-screen display settings ---
public static final String UIPlacementPreferenceKey = "preference_ui_placement";
public static final String TouchCapturePreferenceKey = "preference_touch_capture";
public static final String PausePreviewPreferenceKey = "preference_pause_preview";
public static final String ShowToastsPreferenceKey = "preference_show_toasts";
public static final String ThumbnailAnimationPreferenceKey = "preference_thumbnail_animation";
public static final String TakePhotoBorderPreferenceKey = "preference_take_photo_border";
public static final String DimWhenDisconnectedPreferenceKey = "preference_remote_disconnect_screen_dim";
public static final String AllowHapticFeedbackPreferenceKey = "preference_allow_haptic_feedback";
public static final String ShowWhenLockedPreferenceKey = "preference_show_when_locked";
public static final String AllowLongPressPreferenceKey = "preference_allow_long_press";
public static final String StartupFocusPreferenceKey = "preference_startup_focus";
public static final String MultiCamButtonPreferenceKey = "preference_multi_cam_button";
public static final String KeepDisplayOnPreferenceKey = "preference_keep_display_on";
public static final String MaxBrightnessPreferenceKey = "preference_max_brightness";
// --- save location settings ---
public static final String UsingSAFPreferenceKey = "preference_using_saf";
public static final String SaveLocationPreferenceKey = "preference_save_location";
public static final String SaveLocationSAFPreferenceKey = "preference_save_location_saf";
public static final String SaveLocationHistoryBasePreferenceKey = "save_location_history";
public static final String SaveLocationHistorySAFBasePreferenceKey = "save_location_history_saf";
public static final String SavePhotoPrefixPreferenceKey = "preference_save_photo_prefix";
public static final String SaveVideoPrefixPreferenceKey = "preference_save_video_prefix";
public static final String SaveZuluTimePreferenceKey = "preference_save_zulu_time";
public static final String SaveIncludeMillisecondsPreferenceKey = "preference_save_include_milliseconds";
// --- on-screen GUI element toggles ---
public static final String ShowZoomSliderControlsPreferenceKey = "preference_show_zoom_slider_controls";
public static final String ShowTakePhotoPreferenceKey = "preference_show_take_photo";
public static final String ShowFaceDetectionPreferenceKey = "preference_show_face_detection";
public static final String ShowCycleFlashPreferenceKey = "preference_show_cycle_flash";
public static final String ShowFocusPeakingPreferenceKey = "preference_show_focus_peaking";
public static final String ShowAutoLevelPreferenceKey = "preference_show_auto_level";
public static final String ShowStampPreferenceKey = "preference_show_stamp";
public static final String ShowTextStampPreferenceKey = "preference_show_textstamp";
public static final String ShowStoreLocationPreferenceKey = "preference_show_store_location";
public static final String ShowCycleRawPreferenceKey = "preference_show_cycle_raw";
public static final String ShowWhiteBalanceLockPreferenceKey = "preference_show_white_balance_lock";
public static final String ShowExposureLockPreferenceKey = "preference_show_exposure_lock";
public static final String ShowZoomPreferenceKey = "preference_show_zoom";
public static final String ShowISOPreferenceKey = "preference_show_iso";
public static final String HistogramPreferenceKey = "preference_histogram";
public static final String ZebraStripesPreferenceKey = "preference_zebra_stripes";
public static final String ZebraStripesForegroundColorPreferenceKey = "preference_zebra_stripes_foreground_color";
public static final String ZebraStripesBackgroundColorPreferenceKey = "preference_zebra_stripes_background_color";
public static final String FocusPeakingPreferenceKey = "preference_focus_peaking";
public static final String FocusPeakingColorPreferenceKey = "preference_focus_peaking_color";
public static final String PreShotsPreferenceKey = "preference_save_preshots";
public static final String ShowVideoMaxAmpPreferenceKey = "preference_show_video_max_amp";
public static final String ShowAnglePreferenceKey = "preference_show_angle";
public static final String ShowAngleLinePreferenceKey = "preference_show_angle_line";
public static final String ShowPitchLinesPreferenceKey = "preference_show_pitch_lines";
public static final String ShowGeoDirectionLinesPreferenceKey = "preference_show_geo_direction_lines";
public static final String ShowAngleHighlightColorPreferenceKey = "preference_angle_highlight_color";
public static final String CalibratedLevelAnglePreferenceKey = "preference_calibrate_level_angle";
public static final String ShowGeoDirectionPreferenceKey = "preference_show_geo_direction";
public static final String ShowFreeMemoryPreferenceKey = "preference_free_memory";
public static final String ShowTimePreferenceKey = "preference_show_time";
public static final String ShowCameraIDPreferenceKey = "preference_show_camera_id";
public static final String ShowBatteryPreferenceKey = "preference_show_battery";
public static final String ShowGridPreferenceKey = "preference_grid";
public static final String ShowCropGuidePreferenceKey = "preference_crop_guide";
public static final String FaceDetectionPreferenceKey = "preference_face_detection";
public static final String GhostImagePreferenceKey = "preference_ghost_image";
public static final String GhostSelectedImageSAFPreferenceKey = "preference_ghost_selected_image_saf";
public static final String GhostImageAlphaPreferenceKey = "ghost_image_alpha";
// --- video settings ---
public static final String VideoStabilizationPreferenceKey = "preference_video_stabilization";
public static final String ForceVideo4KPreferenceKey = "preference_force_video_4k";
public static final String VideoFormatPreferenceKey = "preference_video_output_format";
public static final String VideoBitratePreferenceKey = "preference_video_bitrate";
/** Per-camera key storing the chosen video frame rate. */
public static String getVideoFPSPreferenceKey(int cameraId, String cameraIdSPhysical) {
// for cameraId==0 and cameraIdSPhysical==null, we return preference_video_fps instead of preference_video_fps_0, for
// backwards compatibility for people upgrading
return "preference_video_fps" + ((cameraId==0 && cameraIdSPhysical==null) ? "" : ("_"+getCameraIDKey(cameraId, cameraIdSPhysical)));
}
/** Per-camera key storing the chosen video capture rate. */
public static String getVideoCaptureRatePreferenceKey(int cameraId, String cameraIdSPhysical) {
return "preference_capture_rate_" + getCameraIDKey(cameraId, cameraIdSPhysical);
}
public static final String VideoLogPreferenceKey = "preference_video_log";
public static final String VideoProfileGammaPreferenceKey = "preference_video_profile_gamma";
public static final String VideoMaxDurationPreferenceKey = "preference_video_max_duration";
public static final String VideoRestartPreferenceKey = "preference_video_restart";
public static final String VideoMaxFileSizePreferenceKey = "preference_video_max_filesize";
public static final String VideoRestartMaxFileSizePreferenceKey = "preference_video_restart_max_filesize";
public static final String VideoFlashPreferenceKey = "preference_video_flash";
public static final String VideoLowPowerCheckPreferenceKey = "preference_video_low_power_check";
public static final String LockVideoPreferenceKey = "preference_lock_video";
public static final String RecordAudioPreferenceKey = "preference_record_audio";
public static final String RecordAudioChannelsPreferenceKey = "preference_record_audio_channels";
public static final String RecordAudioSourcePreferenceKey = "preference_record_audio_src";
public static final String PreviewSizePreferenceKey = "preference_preview_size";
public static final String RotatePreviewPreferenceKey = "preference_rotate_preview";
public static final String LockOrientationPreferenceKey = "preference_lock_orientation";
public static final String TimerPreferenceKey = "preference_timer";
public static final String TimerBeepPreferenceKey = "preference_timer_beep";
public static final String TimerSpeakPreferenceKey = "preference_timer_speak";
// note for historical reasons the preference refers to burst; the feature was renamed to
// "repeat" in v1.43, but we still need to use the old string to avoid changing user settings
// when people upgrade
public static final String RepeatModePreferenceKey = "preference_burst_mode";
// see note about "repeat" vs "burst" under RepeatModePreferenceKey
public static final String RepeatIntervalPreferenceKey = "preference_burst_interval";
public static final String ShutterSoundPreferenceKey = "preference_shutter_sound";
public static final String ImmersiveModePreferenceKey = "preference_immersive_mode";
public static final String AddYPRToComments="preference_comment_ypr";
}

View file

@ -0,0 +1,244 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.preference.PreferenceManager;
import android.util.Log;
import android.widget.Toast;
import net.sourceforge.opencamera.ui.FolderChooserDialog;
import java.io.File;
public class PreferenceSubCameraControlsMore extends PreferenceSubScreen {
private static final String TAG = "PfSubCameraControlsMore";
@Override
public void onCreate(Bundle savedInstanceState) {
if( MyDebug.LOG )
Log.d(TAG, "onCreate");
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.preferences_sub_camera_controls_more);
final Bundle bundle = getArguments();
/*final int cameraId = bundle.getInt("cameraId");
if( MyDebug.LOG )
Log.d(TAG, "cameraId: " + cameraId);
final int nCameras = bundle.getInt("nCameras");
if( MyDebug.LOG )
Log.d(TAG, "nCameras: " + nCameras);*/
final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());
final boolean can_disable_shutter_sound = bundle.getBoolean("can_disable_shutter_sound");
if( MyDebug.LOG )
Log.d(TAG, "can_disable_shutter_sound: " + can_disable_shutter_sound);
if( !can_disable_shutter_sound ) {
// Camera.enableShutterSound requires JELLY_BEAN_MR1 or greater
Preference pref = findPreference("preference_shutter_sound");
//PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_screen_camera_controls_more");
PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
pg.removePreference(pref);
}
/*{
EditTextPreference edit = (EditTextPreference)findPreference("preference_save_location");
InputFilter filter = new InputFilter() {
// whilst Android seems to allow any characters on internal memory, SD cards are typically formatted with FAT32
String disallowed = "|\\?*<\":>";
public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
for(int i=start;i<end;i++) {
if( disallowed.indexOf( source.charAt(i) ) != -1 ) {
return "";
}
}
return null;
}
};
edit.getEditText().setFilters(new InputFilter[]{filter});
}*/
{
Preference pref = findPreference("preference_save_location");
pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference arg0) {
if( MyDebug.LOG )
Log.d(TAG, "clicked save location");
MainActivity main_activity = (MainActivity)PreferenceSubCameraControlsMore.this.getActivity();
if( main_activity.getStorageUtils().isUsingSAF() ) {
main_activity.openFolderChooserDialogSAF(true);
return true;
}
else if( MainActivity.useScopedStorage() ) {
// we can't use an EditTextPreference (or MyEditTextPreference) due to having to support non-scoped-storage, or when SAF is enabled...
// anyhow, this means we can share code when called from gallery long-press anyway
AlertDialog.Builder alertDialog = main_activity.createSaveFolderDialog();
final AlertDialog alert = alertDialog.create();
// AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface arg0) {
if( MyDebug.LOG )
Log.d(TAG, "save folder dialog dismissed");
dialogs.remove(alert);
}
});
alert.show();
dialogs.add(alert);
return true;
}
else {
File start_folder = main_activity.getStorageUtils().getImageFolder();
FolderChooserDialog fragment = new MyPreferenceFragment.SaveFolderChooserDialog();
fragment.setStartFolder(start_folder);
fragment.show(getFragmentManager(), "FOLDER_FRAGMENT");
return true;
}
}
});
}
{
final Preference pref = findPreference("preference_using_saf");
pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference arg0) {
if( pref.getKey().equals("preference_using_saf") ) {
if( MyDebug.LOG )
Log.d(TAG, "user clicked saf");
if( sharedPreferences.getBoolean(PreferenceKeys.UsingSAFPreferenceKey, false) ) {
if( MyDebug.LOG )
Log.d(TAG, "saf is now enabled");
// seems better to alway re-show the dialog when the user selects, to make it clear where files will be saved (as the SAF location in general will be different to the non-SAF one)
//String uri = sharedPreferences.getString(PreferenceKeys.getSaveLocationSAFPreferenceKey(), "");
//if( uri.length() == 0 )
{
MainActivity main_activity = (MainActivity)PreferenceSubCameraControlsMore.this.getActivity();
Toast.makeText(main_activity, R.string.saf_select_save_location, Toast.LENGTH_SHORT).show();
main_activity.openFolderChooserDialogSAF(true);
}
}
else {
if( MyDebug.LOG )
Log.d(TAG, "saf is now disabled");
// need to update the summary, as switching back to non-SAF folder
MyPreferenceFragment.setSummary(findPreference("preference_save_location"));
}
}
return false;
}
});
}
{
final Preference pref = findPreference("preference_calibrate_level");
pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference arg0) {
if( pref.getKey().equals("preference_calibrate_level") ) {
if( MyDebug.LOG )
Log.d(TAG, "user clicked calibrate level option");
AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubCameraControlsMore.this.getActivity());
alertDialog.setTitle(getActivity().getResources().getString(R.string.preference_calibrate_level));
alertDialog.setMessage(R.string.preference_calibrate_level_dialog);
alertDialog.setPositiveButton(R.string.preference_calibrate_level_calibrate, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
if( MyDebug.LOG )
Log.d(TAG, "user clicked calibrate level");
MainActivity main_activity = (MainActivity)PreferenceSubCameraControlsMore.this.getActivity();
if( main_activity.getPreview().hasLevelAngleStable() ) {
double current_level_angle = main_activity.getPreview().getLevelAngleUncalibrated();
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putFloat(PreferenceKeys.CalibratedLevelAnglePreferenceKey, (float)current_level_angle);
editor.apply();
main_activity.getPreview().updateLevelAngles();
Toast.makeText(main_activity, R.string.preference_calibrate_level_calibrated, Toast.LENGTH_SHORT).show();
}
}
});
alertDialog.setNegativeButton(R.string.preference_calibrate_level_reset, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
if( MyDebug.LOG )
Log.d(TAG, "user clicked reset calibration level");
MainActivity main_activity = (MainActivity)PreferenceSubCameraControlsMore.this.getActivity();
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putFloat(PreferenceKeys.CalibratedLevelAnglePreferenceKey, 0.0f);
editor.apply();
main_activity.getPreview().updateLevelAngles();
Toast.makeText(main_activity, R.string.preference_calibrate_level_calibration_reset, Toast.LENGTH_SHORT).show();
}
});
final AlertDialog alert = alertDialog.create();
// AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface arg0) {
if( MyDebug.LOG )
Log.d(TAG, "calibration dialog dismissed");
dialogs.remove(alert);
}
});
alert.show();
dialogs.add(alert);
return false;
}
return false;
}
});
}
// preference_save_location done in onResume
MyPreferenceFragment.setSummary(findPreference("preference_save_photo_prefix"));
MyPreferenceFragment.setSummary(findPreference("preference_save_video_prefix"));
setupDependencies();
if( MyDebug.LOG )
Log.d(TAG, "onCreate done");
}
@Override
public void onResume() {
super.onResume();
// we need to call this onResume too, to handle updating the summary when changing location via SAF dialoga
MyPreferenceFragment.setSummary(findPreference("preference_save_location"));
}
/** Programmatically set up dependencies for preference types (e.g., ListPreference) that don't
* support this in xml (such as SwitchPreference and CheckBoxPreference), or where this depends
* on the device (e.g., Android version).
*/
private void setupDependencies() {
// set up dependency for preference_audio_noise_control_sensitivity on preference_audio_control
ListPreference pref = (ListPreference)findPreference("preference_audio_control");
if( pref != null ) { // may be null if preference not supported
pref.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference arg0, Object newValue) {
String value = newValue.toString();
setAudioNoiseControlSensitivityDependency(value);
return true;
}
});
setAudioNoiseControlSensitivityDependency(pref.getValue()); // ensure dependency is enabled/disabled as required for initial value
}
}
private void setAudioNoiseControlSensitivityDependency(String newValue) {
Preference dependent = findPreference("preference_audio_noise_control_sensitivity");
if( dependent != null ) { // just in case
boolean enable_dependent = "noise".equals(newValue);
if( MyDebug.LOG )
Log.d(TAG, "clicked audio control: " + newValue + " enable_dependent: " + enable_dependent);
dependent.setEnabled(enable_dependent);
}
}
}

View file

@ -0,0 +1,127 @@
package net.sourceforge.opencamera;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.preference.PreferenceManager;
import android.util.Log;
/** Fragment for the "On screen GUI" preference sub-screen. Options for on-screen
 *  buttons that the current camera or device doesn't support (as reported via the
 *  arguments bundle) are removed, so the user only sees usable options.
 */
public class PreferenceSubGUI extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubGUI";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_gui);

        final Bundle bundle = getArguments();
        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());

        // camera/device capabilities are passed in via the arguments bundle
        final boolean camera_open = bundle.getBoolean("camera_open");
        if( MyDebug.LOG )
            Log.d(TAG, "camera_open: " + camera_open);

        final boolean supports_face_detection = bundle.getBoolean("supports_face_detection");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_face_detection: " + supports_face_detection);

        final boolean supports_flash = bundle.getBoolean("supports_flash");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_flash: " + supports_flash);

        final boolean supports_preview_bitmaps = bundle.getBoolean("supports_preview_bitmaps");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_preview_bitmaps: " + supports_preview_bitmaps);

        final boolean supports_auto_stabilise = bundle.getBoolean("supports_auto_stabilise");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_auto_stabilise: " + supports_auto_stabilise);

        final boolean supports_raw = bundle.getBoolean("supports_raw");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_raw: " + supports_raw);

        final boolean supports_white_balance_lock = bundle.getBoolean("supports_white_balance_lock");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_white_balance_lock: " + supports_white_balance_lock);

        final boolean supports_exposure_lock = bundle.getBoolean("supports_exposure_lock");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_exposure_lock: " + supports_exposure_lock);

        final boolean is_multi_cam = bundle.getBoolean("is_multi_cam");
        if( MyDebug.LOG )
            Log.d(TAG, "is_multi_cam: " + is_multi_cam);

        final boolean has_physical_cameras = bundle.getBoolean("has_physical_cameras");
        if( MyDebug.LOG )
            Log.d(TAG, "has_physical_cameras: " + has_physical_cameras);

        if( !supports_face_detection && ( camera_open || !sharedPreferences.getBoolean(PreferenceKeys.FaceDetectionPreferenceKey, false) ) ) {
            // if camera not open, we'll think this setting isn't supported - but should only remove
            // this preference if it's set to the default (otherwise if user sets to a non-default
            // value that causes camera to not open, user won't be able to put it back to the
            // default!)
            removePref("preference_show_face_detection");
        }
        if( !supports_flash ) {
            removePref("preference_show_cycle_flash");
        }
        if( !supports_preview_bitmaps ) {
            removePref("preference_show_focus_peaking");
        }
        if( !supports_auto_stabilise ) {
            removePref("preference_show_auto_level");
        }
        if( !supports_raw ) {
            removePref("preference_show_cycle_raw");
        }
        if( !supports_white_balance_lock ) {
            removePref("preference_show_white_balance_lock");
        }
        if( !supports_exposure_lock ) {
            removePref("preference_show_exposure_lock");
        }
        if( !is_multi_cam && !has_physical_cameras ) {
            removePref("preference_multi_cam_button");
        }

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Removes the preference with the supplied key from the root preference group. */
    private void removePref(String pref_key) {
        Preference pref = findPreference(pref_key);
        PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
        pg.removePreference(pref);
    }
}

View file

@ -0,0 +1,122 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.preference.Preference;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.util.Scanner;
/** Fragment for the "Licences" preference sub-screen: shows licence texts (loaded
 *  from asset files) in dialogs, plus a link to the online licences page.
 */
public class PreferenceSubLicences extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubLicences";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_licences);

        {
            final Preference pref = findPreference("preference_licence_open_camera");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_licence_open_camera") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked open camera licence");
                        // display the GPL v3 text
                        displayTextDialog(R.string.preference_licence_open_camera, "gpl-3.0.txt");
                        return false;
                    }
                    return false;
                }
            });
        }

        {
            final Preference pref = findPreference("preference_licence_androidx");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_licence_androidx") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked androidx licence");
                        // display the Apache licence 2.0 text
                        displayTextDialog(R.string.preference_licence_androidx, "androidx_LICENSE-2.0.txt");
                        return false;
                    }
                    return false;
                }
            });
        }

        {
            final Preference pref = findPreference("preference_licence_google_icons");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_licence_google_icons") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked google material design icons licence");
                        // display the Apache licence 2.0 text
                        displayTextDialog(R.string.preference_licence_google_icons, "google_material_design_icons_LICENSE-2.0.txt");
                        return false;
                    }
                    return false;
                }
            });
        }

        {
            final Preference pref = findPreference("preference_licence_online");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_licence_online") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked online licences");
                        MainActivity main_activity = (MainActivity)PreferenceSubLicences.this.getActivity();
                        main_activity.launchOnlineLicences();
                        return false;
                    }
                    return false;
                }
            });
        }

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Displays a dialog with text loaded from a file in assets.
     *  @param title_id String resource id for the dialog title.
     *  @param file     Name of the asset file containing the text to display.
     */
    private void displayTextDialog(int title_id, String file) {
        // "\\A" delimiter makes the Scanner return the whole stream as a single
        // token; try-with-resources ensures the stream and Scanner are closed even
        // if an exception is thrown (previously they were leaked)
        try( InputStream inputStream = getActivity().getAssets().open(file);
             Scanner scanner = new Scanner(inputStream).useDelimiter("\\A") ) {
            AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubLicences.this.getActivity());
            alertDialog.setTitle(getActivity().getResources().getString(title_id));
            // guard against an empty asset file, where next() would throw
            alertDialog.setMessage(scanner.hasNext() ? scanner.next() : "");
            alertDialog.setPositiveButton(android.R.string.ok, null);
            final AlertDialog alert = alertDialog.create();
            // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
            alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
                @Override
                public void onDismiss(DialogInterface arg0) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "text dialog dismissed");
                    dialogs.remove(alert);
                }
            });
            alert.show();
            dialogs.add(alert);
        }
        catch(IOException e) {
            MyDebug.logStackTrace(TAG, "failed to load text for dialog", e);
        }
    }
}

View file

@ -0,0 +1,19 @@
package net.sourceforge.opencamera;
import android.os.Bundle;
import android.util.Log;
/** Fragment for the "Location" preference sub-screen. All options here are plain
 *  preferences defined in xml, so no programmatic setup is required beyond
 *  loading the preference resource.
 */
public class PreferenceSubLocation extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubLocation";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);

        addPreferencesFromResource(R.xml.preferences_sub_location);

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }
}

View file

@ -0,0 +1,299 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.preference.PreferenceManager;
import android.util.Log;
import net.sourceforge.opencamera.preview.Preview;
import net.sourceforge.opencamera.ui.ArraySeekBarPreference;
/** Fragment for the "Photo settings" preference sub-screen. Populates the
 *  resolution and quality lists from values passed in via the arguments bundle,
 *  and removes options not supported by the current camera/device.
 */
public class PreferenceSubPhoto extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubPhoto";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_photo);

        final Bundle bundle = getArguments();
        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());

        // camera/device capabilities are passed in via the arguments bundle
        final int cameraId = bundle.getInt("cameraId");
        if( MyDebug.LOG )
            Log.d(TAG, "cameraId: " + cameraId);
        final String cameraIdSPhysical = bundle.getString("cameraIdSPhysical");
        if( MyDebug.LOG )
            Log.d(TAG, "cameraIdSPhysical: " + cameraIdSPhysical);
        final boolean using_android_l = bundle.getBoolean("using_android_l");
        if( MyDebug.LOG )
            Log.d(TAG, "using_android_l: " + using_android_l);

        final int [] widths = bundle.getIntArray("resolution_widths");
        final int [] heights = bundle.getIntArray("resolution_heights");
        final boolean [] supports_burst = bundle.getBooleanArray("resolution_supports_burst");

        final boolean supports_jpeg_r = bundle.getBoolean("supports_jpeg_r");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_jpeg_r: " + supports_jpeg_r);
        final boolean supports_raw = bundle.getBoolean("supports_raw");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_raw: " + supports_raw);
        final boolean supports_burst_raw = bundle.getBoolean("supports_burst_raw");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_burst_raw: " + supports_burst_raw);
        final boolean supports_optimise_focus_latency = bundle.getBoolean("supports_optimise_focus_latency");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_optimise_focus_latency: " + supports_optimise_focus_latency);
        final boolean supports_preshots = bundle.getBoolean("supports_preshots");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_preshots: " + supports_preshots);
        final boolean supports_nr = bundle.getBoolean("supports_nr");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_nr: " + supports_nr);
        final boolean supports_hdr = bundle.getBoolean("supports_hdr");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_hdr: " + supports_hdr);
        final boolean supports_expo_bracketing = bundle.getBoolean("supports_expo_bracketing");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_expo_bracketing: " + supports_expo_bracketing);
        final int max_expo_bracketing_n_images = bundle.getInt("max_expo_bracketing_n_images");
        if( MyDebug.LOG )
            Log.d(TAG, "max_expo_bracketing_n_images: " + max_expo_bracketing_n_images);
        final boolean supports_panorama = bundle.getBoolean("supports_panorama");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_panorama: " + supports_panorama);
        final boolean supports_photo_video_recording = bundle.getBoolean("supports_photo_video_recording");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_photo_video_recording: " + supports_photo_video_recording);

        if( widths != null && heights != null && supports_burst != null ) {
            // build the resolution list from the camera's supported resolutions
            CharSequence [] entries = new CharSequence[widths.length];
            CharSequence [] values = new CharSequence[widths.length];
            for(int i=0;i<widths.length;i++) {
                entries[i] = widths[i] + " x " + heights[i] + " " + Preview.getAspectRatioMPString(getResources(), widths[i], heights[i], supports_burst[i]);
                values[i] = widths[i] + " " + heights[i];
            }
            ListPreference lp = (ListPreference)findPreference("preference_resolution");
            lp.setEntries(entries);
            lp.setEntryValues(values);
            String resolution_preference_key = PreferenceKeys.getResolutionPreferenceKey(cameraId, cameraIdSPhysical);
            String resolution_value = sharedPreferences.getString(resolution_preference_key, "");
            if( MyDebug.LOG )
                Log.d(TAG, "resolution_value: " + resolution_value);
            lp.setValue(resolution_value);
            // now set the key, so we save for the correct cameraId
            lp.setKey(resolution_preference_key);
        }
        else {
            removePref("preference_resolution");
        }

        {
            // JPEG quality entries: 1% to 100%
            final int n_quality = 100;
            CharSequence [] entries = new CharSequence[n_quality];
            CharSequence [] values = new CharSequence[n_quality];
            for(int i=0;i<n_quality;i++) {
                entries[i] = (i+1) + "%";
                values[i] = String.valueOf(i + 1);
            }
            ArraySeekBarPreference sp = (ArraySeekBarPreference)findPreference("preference_quality");
            sp.setEntries(entries);
            sp.setEntryValues(values);
        }

        if( !supports_jpeg_r ) {
            // fall back to the image format list without the JPEG_R option
            ListPreference pref = (ListPreference)findPreference("preference_image_format");
            pref.setEntries(R.array.preference_image_format_entries_nojpegr);
            pref.setEntryValues(R.array.preference_image_format_values_nojpegr);
        }

        if( !supports_raw ) {
            removePref("preference_raw");
        }
        else {
            ListPreference pref = (ListPreference)findPreference("preference_raw");
            if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) {
                // RAW only mode requires at least Android 7; earlier versions seem to have poorer support for DNG files
                pref.setEntries(R.array.preference_raw_entries_preandroid7);
                pref.setEntryValues(R.array.preference_raw_values_preandroid7);
            }
            pref.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
                @Override
                public boolean onPreferenceChange(Preference preference, Object newValue) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "clicked raw: " + newValue);
                    if( newValue.equals("preference_raw_yes") || newValue.equals("preference_raw_only") ) {
                        // we check done_raw_info every time, so that this works if the user selects RAW again without leaving and returning to Settings
                        boolean done_raw_info = sharedPreferences.contains(PreferenceKeys.RawInfoPreferenceKey);
                        if( !done_raw_info ) {
                            AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubPhoto.this.getActivity());
                            alertDialog.setTitle(R.string.preference_raw);
                            alertDialog.setMessage(R.string.raw_info);
                            alertDialog.setPositiveButton(android.R.string.ok, null);
                            alertDialog.setNegativeButton(R.string.dont_show_again, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    if( MyDebug.LOG )
                                        Log.d(TAG, "user clicked dont_show_again for raw info dialog");
                                    SharedPreferences.Editor editor = sharedPreferences.edit();
                                    editor.putBoolean(PreferenceKeys.RawInfoPreferenceKey, true);
                                    editor.apply();
                                }
                            });
                            final AlertDialog alert = alertDialog.create();
                            // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
                            alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
                                @Override
                                public void onDismiss(DialogInterface arg0) {
                                    if( MyDebug.LOG )
                                        Log.d(TAG, "raw dialog dismissed");
                                    dialogs.remove(alert);
                                }
                            });
                            alert.show();
                            dialogs.add(alert);
                        }
                    }
                    return true;
                }
            });
        }

        if( !( supports_raw && supports_burst_raw ) ) {
            removePref("preference_raw_expo_bracketing");
            removePref("preference_raw_focus_bracketing");
        }
        if( !supports_optimise_focus_latency ) {
            removePref("preference_photo_optimise_focus");
        }
        if( !supports_preshots ) {
            removePref("preference_save_preshots");
        }
        if( !supports_nr ) {
            removePref("preference_nr_save");
        }
        if( !supports_hdr ) {
            removePref("preference_hdr_save_expo");
            removePref("preference_hdr_tonemapping");
            removePref("preference_hdr_contrast_enhancement");
        }
        if( !supports_expo_bracketing || max_expo_bracketing_n_images <= 3 ) {
            removePref("preference_expo_bracketing_n_images");
        }
        if( !supports_expo_bracketing ) {
            removePref("preference_expo_bracketing_stops");
        }
        if( !supports_panorama ) {
            removePref("preference_panorama_crop");
            removePref("preference_panorama_save");
        }

        if( !using_android_l ) {
            // these debugging options only apply to the Camera2 API
            removePref("preference_category_photo_debugging", "preference_camera2_fake_flash");
            removePref("preference_category_photo_debugging", "preference_camera2_dummy_capture_hack");
            removePref("preference_category_photo_debugging", "preference_camera2_fast_burst");
            removePref("preference_category_photo_debugging", "preference_camera2_photo_video_recording");
        }
        else {
            if( !supports_photo_video_recording ) {
                removePref("preference_category_photo_debugging", "preference_camera2_photo_video_recording");
            }
        }

        {
            // remove preference_category_photo_debugging category if empty (which will be the case for old api)
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_category_photo_debugging");
            if( MyDebug.LOG )
                Log.d(TAG, "preference_category_photo_debugging children: " + pg.getPreferenceCount());
            if( pg.getPreferenceCount() == 0 ) {
                // pg.getParent() requires API level 26
                PreferenceGroup parent = (PreferenceGroup)this.findPreference("preferences_root");
                parent.removePreference(pg);
            }
        }

        MyPreferenceFragment.setSummary(findPreference("preference_exif_artist"));
        MyPreferenceFragment.setSummary(findPreference("preference_exif_copyright"));
        MyPreferenceFragment.setSummary(findPreference("preference_textstamp"));

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Removes the preference with the supplied key from the root preference group. */
    private void removePref(String pref_key) {
        removePref("preferences_root", pref_key);
    }

    /** Removes the preference with the supplied key from the supplied preference group. */
    private void removePref(String group_key, String pref_key) {
        Preference pref = findPreference(pref_key);
        PreferenceGroup pg = (PreferenceGroup)this.findPreference(group_key);
        pg.removePreference(pref);
    }
}

View file

@ -0,0 +1,115 @@
package net.sourceforge.opencamera;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.util.Log;
import net.sourceforge.opencamera.ui.ArraySeekBarPreference;
/** Fragment for the "Preview" preference sub-screen. Sets up the ghost image
 *  options, and removes preview options not supported by the current
 *  camera/device.
 */
public class PreferenceSubPreview extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubPreview";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_preview);

        final Bundle bundle = getArguments();

        // camera/device capabilities are passed in via the arguments bundle
        final boolean using_android_l = bundle.getBoolean("using_android_l");
        if( MyDebug.LOG )
            Log.d(TAG, "using_android_l: " + using_android_l);
        final boolean is_multi_cam = bundle.getBoolean("is_multi_cam");
        if( MyDebug.LOG )
            Log.d(TAG, "is_multi_cam: " + is_multi_cam);
        final boolean supports_preview_bitmaps = bundle.getBoolean("supports_preview_bitmaps");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_preview_bitmaps: " + supports_preview_bitmaps);

        {
            // selecting "last photo" ghost image needs the user to pick an image via SAF
            ListPreference pref = (ListPreference)findPreference("preference_ghost_image");
            pref.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
                @Override
                public boolean onPreferenceChange(Preference arg0, Object newValue) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "clicked ghost image: " + newValue);
                    if( newValue.equals("preference_ghost_image_selected") ) {
                        MainActivity main_activity = (MainActivity) PreferenceSubPreview.this.getActivity();
                        main_activity.openGhostImageChooserDialogSAF(true);
                    }
                    return true;
                }
            });
        }

        {
            final int max_ghost_image_alpha = 80; // limit max to 80% for privacy reasons, so it isn't possible to put in a state where camera is on, but no preview is shown
            final int ghost_image_alpha_step = 5; // should be exact divisor of max_ghost_image_alpha
            final int n_ghost_image_alpha = max_ghost_image_alpha/ghost_image_alpha_step;
            CharSequence [] entries = new CharSequence[n_ghost_image_alpha];
            CharSequence [] values = new CharSequence[n_ghost_image_alpha];
            for(int i=0;i<n_ghost_image_alpha;i++) {
                int alpha = ghost_image_alpha_step*(i+1);
                entries[i] = alpha + "%";
                values[i] = String.valueOf(alpha);
            }
            ArraySeekBarPreference sp = (ArraySeekBarPreference)findPreference("ghost_image_alpha");
            sp.setEntries(entries);
            sp.setEntryValues(values);
        }

        if( !using_android_l ) {
            // these options only apply to the Camera2 API
            removePref("preference_focus_assist");
            removePref("preference_show_iso");
        }
        if( !is_multi_cam ) {
            removePref("preference_show_camera_id");
        }
        if( !supports_preview_bitmaps ) {
            removePref("preference_histogram");
            removePref("preference_zebra_stripes");
            removePref("preference_zebra_stripes_foreground_color");
            removePref("preference_zebra_stripes_background_color");
            removePref("preference_focus_peaking");
            removePref("preference_focus_peaking_color");
        }

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Removes the preference with the supplied key from the root preference group. */
    private void removePref(String pref_key) {
        Preference pref = findPreference(pref_key);
        PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
        pg.removePreference(pref);
    }
}

View file

@ -0,0 +1,96 @@
package net.sourceforge.opencamera;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.preference.PreferenceManager;
import android.util.Log;
import net.sourceforge.opencamera.cameracontroller.CameraController;
/** Sub-screen for the "Processing" settings. The supported values for several Camera2
 *  processing options (antibanding, edge mode, noise reduction mode) are passed in via the
 *  fragment's arguments bundle, since the camera may not be open when this fragment is
 *  (re)created.
 */
public class PreferenceSubProcessing extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubProcessing";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_processing);

        final Bundle bundle = getArguments();
        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());

        final boolean camera_open = bundle.getBoolean("camera_open");
        if( MyDebug.LOG )
            Log.d(TAG, "camera_open: " + camera_open);

        // the three processing options all follow the same pattern, so the setup code is shared
        setupCameraListPreference(bundle, sharedPreferences, camera_open,
                "antibanding", "antibanding_entries", "has_antibanding",
                PreferenceKeys.AntiBandingPreferenceKey, CameraController.ANTIBANDING_DEFAULT);
        setupCameraListPreference(bundle, sharedPreferences, camera_open,
                "edge_modes", "edge_modes_entries", "has_edge_mode",
                PreferenceKeys.EdgeModePreferenceKey, CameraController.EDGE_MODE_DEFAULT);
        setupCameraListPreference(bundle, sharedPreferences, camera_open,
                "noise_reduction_modes", "noise_reduction_modes_entries", "has_noise_reduction_mode",
                PreferenceKeys.CameraNoiseReductionModePreferenceKey, CameraController.NOISE_REDUCTION_MODE_DEFAULT);

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Populates a ListPreference from supported values supplied via the arguments bundle, or
     *  removes the preference if no values are available (and removal is safe).
     * @param bundle            The fragment's arguments bundle.
     * @param sharedPreferences The default SharedPreferences.
     * @param camera_open       Whether the camera was open when the settings were launched.
     * @param values_key        Bundle key for the supported values array.
     * @param entries_key       Bundle key for the matching human-readable entries array.
     * @param debug_name        Name used for debug logging only (preserves historical log text).
     * @param preference_key    SharedPreferences key of the ListPreference.
     * @param default_value     Default value for the preference.
     */
    private void setupCameraListPreference(Bundle bundle, SharedPreferences sharedPreferences, boolean camera_open, String values_key, String entries_key, String debug_name, String preference_key, String default_value) {
        boolean has_values = false;
        String [] values = bundle.getStringArray(values_key);
        if( values != null && values.length > 0 ) {
            String [] entries = bundle.getStringArray(entries_key);
            if( entries != null && entries.length == values.length ) { // should always be true here, but just in case
                MyPreferenceFragment.readFromBundle(this, values, entries, preference_key, default_value, "preferences_root");
                has_values = true;
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, debug_name + "?: " + has_values);
        if( !has_values && ( camera_open || sharedPreferences.getString(preference_key, default_value).equals(default_value) ) ) {
            // if camera not open, we'll think this setting isn't supported - but should only remove
            // this preference if it's set to the default (otherwise if user sets to a non-default
            // value that causes camera to not open, user won't be able to put it back to the
            // default!)
            Preference pref = findPreference(preference_key);
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
            pg.removePreference(pref);
        }
    }
}

View file

@ -0,0 +1,18 @@
package net.sourceforge.opencamera;
import android.os.Bundle;
import android.util.Log;
/** Sub-screen for the remote control (Bluetooth LE) settings. All preferences for this
 *  screen are static and come straight from xml; no dynamic setup is required.
 */
public class PreferenceSubRemoteCtrl extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubRemoteCtrl";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onCreate");
        }
        super.onCreate(savedInstanceState);
        // inflate the static preference hierarchy for this sub-screen
        addPreferencesFromResource(R.xml.preferences_sub_remote_ctrl);
        if( MyDebug.LOG ) {
            Log.d(TAG, "onCreate done");
        }
    }
}

View file

@ -0,0 +1,80 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import java.util.HashSet;
/** Must be used as the parent class for all sub-screens.
*/
/** Must be used as the parent class for all sub-screens. Handles behaviour common to every
 *  settings sub-screen: reading the edge-to-edge flag from the arguments bundle, applying
 *  edge-to-edge insets to the created view, dismissing any tracked dialogs on destroy, and
 *  listening for SharedPreferences changes while resumed.
 */
public class PreferenceSubScreen extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener {
    private static final String TAG = "PreferenceSubScreen";

    // whether the host activity runs edge-to-edge (read from the arguments bundle)
    private boolean edge_to_edge_mode = false;

    // see note for dialogs in MyPreferenceFragment
    protected final HashSet<AlertDialog> dialogs = new HashSet<>();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        final Bundle bundle = getArguments();
        if( bundle != null ) {
            // guard against a null arguments bundle, in case a subclass is ever instantiated
            // without arguments (edge_to_edge_mode then keeps its default of false)
            this.edge_to_edge_mode = bundle.getBoolean("edge_to_edge_mode");
        }
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        if( edge_to_edge_mode ) {
            MyPreferenceFragment.handleEdgeToEdge(view);
        }
    }

    @Override
    public void onDestroy() {
        if( MyDebug.LOG )
            Log.d(TAG, "onDestroy");
        super.onDestroy();
        // dismiss any dialogs still showing, to avoid leaking window references
        MyPreferenceFragment.dismissDialogs(getFragmentManager(), dialogs);
    }

    @Override
    public void onResume() {
        super.onResume();
        MyPreferenceFragment.setBackground(this);
        // NOTE(review): there is no matching unregister in onPause(); SharedPreferences holds
        // its listeners weakly so this is not a hard leak, but confirm this is intentional
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());
        sharedPreferences.registerOnSharedPreferenceChangeListener(this);
    }

    /* See comment for MyPreferenceFragment.onSharedPreferenceChanged().
     */
    @Override
    public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
        if( MyDebug.LOG )
            Log.d(TAG, "onSharedPreferenceChanged: " + key);
        if( key == null ) {
            // On Android 11+, when targetting Android 11+, this method is called with key==null
            // if preferences are cleared. Unclear if this happens here in practice, but return
            // just in case.
            return;
        }
        Preference pref = findPreference(key);
        MyPreferenceFragment.handleOnSharedPreferenceChanged(prefs, key, pref);
    }
}

View file

@ -0,0 +1,246 @@
package net.sourceforge.opencamera;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.EditText;
import net.sourceforge.opencamera.ui.FolderChooserDialog;
import java.io.IOException;
import java.util.Date;
/** Sub-screen for the "Settings manager" preferences: saving the current settings to an XML
 *  file, restoring settings from a previously saved file, and resetting all settings to
 *  device defaults. The actual save/load work is delegated to MainActivity's SettingsManager.
 */
public class PreferenceSubSettingsManager extends PreferenceSubScreen {
    private static final String TAG = "PrefSubSettingsManager";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_settings_manager);

        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());

        // "save settings": prompt for a filename (pre-filled with a generated default), then
        // write the current preferences out as XML via SettingsManager
        {
            final Preference pref = findPreference("preference_save_settings");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_save_settings") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked save settings");

                        AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity());
                        alertDialog.setTitle(R.string.preference_save_settings_filename);

                        final View dialog_view = LayoutInflater.from(getActivity()).inflate(R.layout.alertdialog_edittext, null);
                        final EditText editText = dialog_view.findViewById(R.id.edit_text);
                        editText.setSingleLine();
                        // set hint instead of content description for EditText, see https://support.google.com/accessibility/android/answer/6378120
                        editText.setHint(getResources().getString(R.string.preference_save_settings_filename));
                        alertDialog.setView(dialog_view);
                        final MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity();
                        try {
                            // find a default name - although we're only interested in the name rather than full path, this still
                            // requires checking the folder, so that we don't reuse an existing filename
                            String mediaFilename = main_activity.getStorageUtils().createOutputMediaFile(
                                    main_activity.getStorageUtils().getSettingsFolder(),
                                    StorageUtils.MEDIA_TYPE_PREFS, "", "xml", new Date()
                            ).getName();
                            if( MyDebug.LOG )
                                Log.d(TAG, "mediaFilename: " + mediaFilename);
                            int index = mediaFilename.lastIndexOf('.');
                            if( index != -1 ) {
                                // remove extension
                                mediaFilename = mediaFilename.substring(0, index);
                            }
                            editText.setText(mediaFilename);
                            editText.setSelection(mediaFilename.length());
                        }
                        catch(IOException e) {
                            MyDebug.logStackTrace(TAG, "failed to obtain a filename", e);
                        }

                        alertDialog.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialogInterface, int i) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "save settings clicked okay");

                                // the ".xml" extension is re-appended here, since it was stripped from the default name above
                                String filename = editText.getText().toString() + ".xml";
                                main_activity.getSettingsManager().saveSettings(filename);
                            }
                        });
                        alertDialog.setNegativeButton(android.R.string.cancel, null);
                        final AlertDialog alert = alertDialog.create();
                        // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
                        alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
                            @Override
                            public void onDismiss(DialogInterface arg0) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "save settings dialog dismissed");
                                dialogs.remove(alert);
                            }
                        });
                        alert.show();
                        // track the dialog so PreferenceSubScreen.onDestroy() can dismiss it
                        dialogs.add(alert);
                        //MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity();
                        //main_activity.getSettingsManager().saveSettings();
                    }
                    return false;
                }
            });
        }

        // "restore settings": confirmation dialog then a file chooser (see loadSettings() below)
        {
            final Preference pref = findPreference("preference_restore_settings");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_restore_settings") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked restore settings");
                        loadSettings();
                    }
                    return false;
                }
            });
        }

        // "reset settings": after confirmation, clear all preferences (preserving first-time and
        // version-code bookkeeping so the intro/whats-new dialogs behave correctly) and restart
        {
            final Preference pref = findPreference("preference_reset");
            pref.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
                @Override
                public boolean onPreferenceClick(Preference arg0) {
                    if( pref.getKey().equals("preference_reset") ) {
                        if( MyDebug.LOG )
                            Log.d(TAG, "user clicked reset settings");
                        AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity());
                        alertDialog.setIcon(android.R.drawable.ic_dialog_alert);
                        alertDialog.setTitle(R.string.preference_reset);
                        alertDialog.setMessage(R.string.preference_reset_question);
                        alertDialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "user confirmed reset");
                                SharedPreferences.Editor editor = sharedPreferences.edit();
                                editor.clear();
                                // keep FirstTimePreferenceKey set, so resetting doesn't re-show the intro dialog
                                editor.putBoolean(PreferenceKeys.FirstTimePreferenceKey, true);
                                try {
                                    // record the current version code, so the "what's new" dialog isn't re-shown
                                    PackageInfo pInfo = PreferenceSubSettingsManager.this.getActivity().getPackageManager().getPackageInfo(PreferenceSubSettingsManager.this.getActivity().getPackageName(), 0);
                                    int version_code = pInfo.versionCode;
                                    editor.putInt(PreferenceKeys.LatestVersionPreferenceKey, version_code);
                                }
                                catch(PackageManager.NameNotFoundException e) {
                                    MyDebug.logStackTrace(TAG, "NameNotFoundException trying to get version number", e);
                                }
                                editor.apply();
                                MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity();
                                main_activity.setDeviceDefaults();
                                if( MyDebug.LOG )
                                    Log.d(TAG, "user clicked reset - need to restart");
                                main_activity.restartOpenCamera();
                            }
                        });
                        alertDialog.setNegativeButton(android.R.string.no, null);
                        final AlertDialog alert = alertDialog.create();
                        // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
                        alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
                            @Override
                            public void onDismiss(DialogInterface arg0) {
                                if( MyDebug.LOG )
                                    Log.d(TAG, "reset dialog dismissed");
                                dialogs.remove(alert);
                            }
                        });
                        alert.show();
                        // track the dialog so PreferenceSubScreen.onDestroy() can dismiss it
                        dialogs.add(alert);
                    }
                    return false;
                }
            });
        }

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Asks the user to confirm restoring settings, then shows a file chooser restricted to
     *  .xml files (and, under scoped storage, to the application's own folder, since files
     *  outside it could not be read with the File API).
     */
    private void loadSettings() {
        if( MyDebug.LOG )
            Log.d(TAG, "loadSettings");
        AlertDialog.Builder alertDialog = new AlertDialog.Builder(PreferenceSubSettingsManager.this.getActivity());
        alertDialog.setIcon(android.R.drawable.ic_dialog_alert);
        alertDialog.setTitle(R.string.preference_restore_settings);
        alertDialog.setMessage(R.string.preference_restore_settings_question);
        alertDialog.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                if( MyDebug.LOG )
                    Log.d(TAG, "user confirmed to restore settings");
                MainActivity main_activity = (MainActivity)PreferenceSubSettingsManager.this.getActivity();
                /*if( main_activity.getStorageUtils().isUsingSAF() ) {
                    main_activity.openLoadSettingsChooserDialogSAF(true);
                }
                else*/ {
                    FolderChooserDialog fragment = new PreferenceSubSettingsManager.LoadSettingsFileChooserDialog();
                    fragment.setShowDCIMShortcut(false);
                    fragment.setShowNewFolderButton(false);
                    fragment.setModeFolder(false);
                    fragment.setExtension(".xml");
                    fragment.setStartFolder(main_activity.getStorageUtils().getSettingsFolder());
                    if( MainActivity.useScopedStorage() ) {
                        // since we use File API to load, don't allow going outside of the application's folder, as we won't be able to read those files!
                        fragment.setMaxParent(main_activity.getExternalFilesDir(null));
                    }
                    fragment.show(getFragmentManager(), "FOLDER_FRAGMENT");
                }
            }
        });
        alertDialog.setNegativeButton(android.R.string.no, null);
        final AlertDialog alert = alertDialog.create();
        // AlertDialog.Builder.setOnDismissListener() requires API level 17, so do it this way instead
        alert.setOnDismissListener(new DialogInterface.OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                if( MyDebug.LOG )
                    Log.d(TAG, "reset dialog dismissed");
                dialogs.remove(alert);
            }
        });
        alert.show();
        // track the dialog so PreferenceSubScreen.onDestroy() can dismiss it
        dialogs.add(alert);
    }

    /** File chooser for picking a saved-settings XML file; loads the chosen file on dismiss. */
    public static class LoadSettingsFileChooserDialog extends FolderChooserDialog {
        @Override
        public void onDismiss(DialogInterface dialog) {
            if( MyDebug.LOG )
                Log.d(TAG, "FolderChooserDialog dismissed");
            // n.b., fragments have to be static (as they might be inserted into a new Activity - see http://stackoverflow.com/questions/15571010/fragment-inner-class-should-be-static),
            // so we access the MainActivity via the fragment's getActivity().
            MainActivity main_activity = (MainActivity)this.getActivity();
            if( main_activity != null ) { // main_activity may be null if this is being closed via MainActivity.onNewIntent()
                String settings_file = this.getChosenFile();
                if( MyDebug.LOG )
                    Log.d(TAG, "settings_file: " + settings_file);
                if( settings_file != null ) {
                    main_activity.getSettingsManager().loadSettings(settings_file);
                }
            }
            super.onDismiss(dialog);
        }
    }
}

View file

@ -0,0 +1,245 @@
package net.sourceforge.opencamera;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceGroup;
import android.preference.PreferenceManager;
import android.util.Log;
/** Sub-screen for the "Video" settings. Camera-dependent data (available resolutions, frame
 *  rates, capability flags) is passed in via the arguments bundle, since the camera may not be
 *  open when this fragment is (re)created.
 */
public class PreferenceSubVideo extends PreferenceSubScreen {
    private static final String TAG = "PreferenceSubVideo";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences_sub_video);

        final Bundle bundle = getArguments();
        final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());

        // read camera identity and capability info passed in by the caller
        final int cameraId = bundle.getInt("cameraId");
        if( MyDebug.LOG )
            Log.d(TAG, "cameraId: " + cameraId);
        final String cameraIdSPhysical = bundle.getString("cameraIdSPhysical");
        if( MyDebug.LOG )
            Log.d(TAG, "cameraIdSPhysical: " + cameraIdSPhysical);
        final boolean camera_open = bundle.getBoolean("camera_open");
        if( MyDebug.LOG )
            Log.d(TAG, "camera_open: " + camera_open);

        final String [] video_quality = bundle.getStringArray("video_quality");
        final String [] video_quality_string = bundle.getStringArray("video_quality_string");

        final int [] video_fps = bundle.getIntArray("video_fps");
        final boolean [] video_fps_high_speed = bundle.getBooleanArray("video_fps_high_speed");

        // the fps preference key is per-camera (and per-physical-camera)
        String fps_preference_key = PreferenceKeys.getVideoFPSPreferenceKey(cameraId, cameraIdSPhysical);
        if( MyDebug.LOG )
            Log.d(TAG, "fps_preference_key: " + fps_preference_key);
        String fps_value = sharedPreferences.getString(fps_preference_key, "default");
        if( MyDebug.LOG )
            Log.d(TAG, "fps_value: " + fps_value);

        final boolean supports_tonemap_curve = bundle.getBoolean("supports_tonemap_curve");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_tonemap_curve: " + supports_tonemap_curve);

        final boolean supports_video_stabilization = bundle.getBoolean("supports_video_stabilization");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_video_stabilization: " + supports_video_stabilization);

        final boolean supports_force_video_4k = bundle.getBoolean("supports_force_video_4k");
        if( MyDebug.LOG )
            Log.d(TAG, "supports_force_video_4k: " + supports_force_video_4k);

        /* Set up video resolutions.
           Note that this will be the resolutions for either standard or high speed frame rate (where
           the latter may also include being in slow motion mode), depending on the current setting when
           this settings fragment is launched. A limitation is that if the user changes the fps value
           within the settings, this list won't update until the user exits and re-enters the settings.
           This could be fixed by setting a setOnPreferenceChangeListener for the preference_video_fps
           ListPreference and updating, but we must not assume that the preview will be non-null (since
           if the application is being recreated, MyPreferenceFragment.onCreate() is called via
           MainActivity.onCreate()->super.onCreate() before the preview is created! So we still need to
           read the info via a bundle, and only update when fps changes if the preview is non-null.
         */
        if( video_quality != null && video_quality_string != null ) {
            CharSequence [] entries = new CharSequence[video_quality.length];
            CharSequence [] values = new CharSequence[video_quality.length];
            for(int i=0;i<video_quality.length;i++) {
                entries[i] = video_quality_string[i];
                values[i] = video_quality[i];
            }
            ListPreference lp = (ListPreference)findPreference("preference_video_quality");
            lp.setEntries(entries);
            lp.setEntryValues(values);
            String video_quality_preference_key = bundle.getString("video_quality_preference_key");
            if( MyDebug.LOG )
                Log.d(TAG, "video_quality_preference_key: " + video_quality_preference_key);
            String video_quality_value = sharedPreferences.getString(video_quality_preference_key, "");
            if( MyDebug.LOG )
                Log.d(TAG, "video_quality_value: " + video_quality_value);
            // set the key, so we save for the correct cameraId and high-speed setting
            // this must be done before setting the value (otherwise the video resolutions preference won't be
            // updated correctly when this is called from the callback when the user switches between
            // normal and high speed frame rates
            lp.setKey(video_quality_preference_key);
            lp.setValue(video_quality_value);

            // indicate high-speed mode in the preference title, so the user knows why the
            // resolution list may differ from the standard one
            boolean is_high_speed = bundle.getBoolean("video_is_high_speed");
            String title = is_high_speed ? getResources().getString(R.string.video_quality) + " [" + getResources().getString(R.string.high_speed) + "]" : getResources().getString(R.string.video_quality);
            lp.setTitle(title);
            lp.setDialogTitle(title);
        }
        else {
            // no resolutions available (e.g., camera not open) - remove the preference
            Preference pref = findPreference("preference_video_quality");
            //PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_screen_video_settings");
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
            pg.removePreference(pref);
        }

        if( video_fps != null ) {
            // build video fps settings
            // the first entry is always "default"; high-speed rates are marked in the entry text
            CharSequence [] entries = new CharSequence[video_fps.length+1];
            CharSequence [] values = new CharSequence[video_fps.length+1];
            int i=0;
            // default:
            entries[i] = getResources().getString(R.string.preference_video_fps_default);
            values[i] = "default";
            i++;
            final String high_speed_append = " [" + getResources().getString(R.string.high_speed) + "]";
            for(int k=0;k<video_fps.length;k++) {
                int fps = video_fps[k];
                if( video_fps_high_speed != null && video_fps_high_speed[k] ) {
                    entries[i] = fps + high_speed_append;
                }
                else {
                    entries[i] = String.valueOf(fps);
                }
                values[i] = String.valueOf(fps);
                i++;
            }

            ListPreference lp = (ListPreference)findPreference("preference_video_fps");
            lp.setEntries(entries);
            lp.setEntryValues(values);
            lp.setValue(fps_value);
            // now set the key, so we save for the correct cameraId
            lp.setKey(fps_preference_key);
        }

        if( !supports_tonemap_curve && ( camera_open || sharedPreferences.getString(PreferenceKeys.VideoLogPreferenceKey, "off").equals("off") ) ) {
            // if camera not open, we'll think this setting isn't supported - but should only remove
            // this preference if it's set to the default (otherwise if user sets to a non-default
            // value that causes camera to not open, user won't be able to put it back to the
            // default!)
            // (needed for Pixel 6 Pro where setting to sRGB causes camera to fail to open when in video mode)
            Preference pref = findPreference(PreferenceKeys.VideoLogPreferenceKey);
            //PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_screen_video_settings");
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
            pg.removePreference(pref);

            // gamma depends on the log profile, so remove it too
            pref = findPreference(PreferenceKeys.VideoProfileGammaPreferenceKey);
            //pg = (PreferenceGroup)this.findPreference("preference_screen_video_settings");
            pg = (PreferenceGroup)this.findPreference("preferences_root");
            pg.removePreference(pref);
        }

        if( !supports_video_stabilization ) {
            Preference pref = findPreference("preference_video_stabilization");
            //PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_screen_video_settings");
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preferences_root");
            pg.removePreference(pref);
        }

        if( !supports_force_video_4k || video_quality == null ) {
            Preference pref = findPreference("preference_force_video_4k");
            PreferenceGroup pg = (PreferenceGroup)this.findPreference("preference_category_video_debugging");
            pg.removePreference(pref);
        }

        if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) {
            // HEVC output format requires Android 7
            MyPreferenceFragment.filterArrayEntry((ListPreference)findPreference("preference_video_output_format"), "preference_video_output_format_mpeg4_hevc");
        }
        {
            ListPreference pref = (ListPreference)findPreference("preference_record_audio_src");
            if( Build.VERSION.SDK_INT < Build.VERSION_CODES.N ) {
                // some values require at least Android 7
                pref.setEntries(R.array.preference_record_audio_src_entries_preandroid7);
                pref.setEntryValues(R.array.preference_record_audio_src_values_preandroid7);
            }
        }

        setupDependencies();

        if( MyDebug.LOG )
            Log.d(TAG, "onCreate done");
    }

    /** Programmatically set up dependencies for preference types (e.g., ListPreference) that don't
     *  support this in xml (such as SwitchPreference and CheckBoxPreference), or where this depends
     *  on the device (e.g., Android version).
     */
    private void setupDependencies() {
        // set up dependency for preference_video_profile_gamma on preference_video_log
        ListPreference pref = (ListPreference)findPreference("preference_video_log");
        if( pref != null ) { // may be null if preference not supported
            pref.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
                @Override
                public boolean onPreferenceChange(Preference arg0, Object newValue) {
                    String value = newValue.toString();
                    setVideoProfileGammaDependency(value);
                    return true;
                }
            });
            setVideoProfileGammaDependency(pref.getValue()); // ensure dependency is enabled/disabled as required for initial value
        }

        if( !MyApplicationInterface.mediastoreSupportsVideoSubtitles() ) {
            // video subtitles only supported with SAF on Android 11+
            // since these preferences are entirely in separate sub-screens (and one isn't the parent of the other), we don't need
            // a dependency (and indeed can't use one, as the preference_using_saf won't exist here as a Preference)
            pref = (ListPreference)findPreference("preference_video_subtitle");
            if( pref != null ) {
                boolean using_saf = false;
                // n.b., not safe to call main_activity.getApplicationInterface().getStorageUtils().isUsingSAF() if fragment
                // is being recreated
                {
                    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getActivity());
                    if( sharedPreferences.getBoolean(PreferenceKeys.UsingSAFPreferenceKey, false) ) {
                        using_saf = true;
                    }
                }
                if( MyDebug.LOG )
                    Log.d(TAG, "using_saf: " + using_saf);
                //pref.setDependency("preference_using_saf");
                if( using_saf ) {
                    pref.setEnabled(true);
                }
                else {
                    pref.setEnabled(false);
                }
            }
        }
    }

    /** Enables or disables the video-profile-gamma preference, which only applies when the
     *  video log profile is set to "gamma".
     * @param newValue The new value of the preference_video_log preference.
     */
    private void setVideoProfileGammaDependency(String newValue) {
        Preference dependent = findPreference("preference_video_profile_gamma");
        if( dependent != null ) { // just in case
            boolean enable_dependent = "gamma".equals(newValue);
            if( MyDebug.LOG )
                Log.d(TAG, "clicked video log: " + newValue + " enable_dependent: " + enable_dependent);
            dependent.setEnabled(enable_dependent);
        }
    }
}

View file

@ -0,0 +1,157 @@
package net.sourceforge.opencamera;
import java.util.ArrayList;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
/** Handles a history of save locations.
*/
/** Handles a history of save locations.
 */
public class SaveLocationHistory {
    private static final String TAG = "SaveLocationHistory";

    // maximum number of entries kept in the history; the oldest entries are dropped first
    private static final int MAX_HISTORY_SIZE = 6;

    private final MainActivity main_activity;
    private final String pref_base;
    private final ArrayList<String> save_location_history = new ArrayList<>();

    /** Creates a new SaveLocationHistory class. This manages a history of save folder locations.
     * @param main_activity MainActivity.
     * @param pref_base String to use for shared preferences.
     * @param folder_name The current save folder.
     */
    SaveLocationHistory(MainActivity main_activity, String pref_base, String folder_name) {
        if( MyDebug.LOG )
            Log.d(TAG, "pref_base: " + pref_base);
        this.main_activity = main_activity;
        this.pref_base = pref_base;
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);

        // read save locations (stored as pref_base + "_size" plus one entry per index)
        save_location_history.clear();
        int save_location_history_size = sharedPreferences.getInt(pref_base + "_size", 0);
        if( MyDebug.LOG )
            Log.d(TAG, "save_location_history_size: " + save_location_history_size);
        for(int i=0;i<save_location_history_size;i++) {
            String string = sharedPreferences.getString(pref_base + "_" + i, null);
            if( string != null ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "save_location_history " + i + ": " + string);
                save_location_history.add(string);
            }
        }
        // also update, just in case a new folder has been set
        updateFolderHistory(folder_name, false); // update_icon can be false, as updateGalleryIcon() is called later in MainActivity.onResume()
        //updateFolderHistory("/sdcard/Pictures/OpenCameraTest");
    }

    /** Updates the save history with the current save location (should be called after changing the save location).
     * @param folder_name The folder name to add or update in the history.
     * @param update_icon Whether to update the gallery icon. If false, it's the caller's responsibility to call
     *                    MainActivity.updateGalleryIcon().
     */
    void updateFolderHistory(String folder_name, boolean update_icon) {
        updateFolderHistory(folder_name);
        if( update_icon ) {
            // If the folder has changed, need to update the gallery icon.
            // Note that if using option to strip all exif tags, we won't be able to find the most recent image - so seems
            // better to stick with the current gallery thumbnail. (Also beware that we call this method when changing
            // non-trivial settings, even if the save folder wasn't actually changed.)
            if( !main_activity.getStorageUtils().getLastMediaScannedHasNoExifDateTime() ) {
                main_activity.updateGalleryIcon();
            }
        }
    }

    /** Updates the save history with the supplied folder name: moves (or adds) the folder to the
     *  end of the list (most recent position), truncates to MAX_HISTORY_SIZE, and persists.
     * @param folder_name The folder name to add or update in the history.
     */
    private void updateFolderHistory(String folder_name) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "updateFolderHistory: " + folder_name);
            Log.d(TAG, "save_location_history size: " + save_location_history.size());
            for(int i=0;i<save_location_history.size();i++) {
                Log.d(TAG, save_location_history.get(i));
            }
        }
        // remove any existing occurrences, so the folder appears only once (at the end)
        while( save_location_history.remove(folder_name) ) {
        }
        save_location_history.add(folder_name);
        // drop oldest entries if over the cap
        while( save_location_history.size() > MAX_HISTORY_SIZE ) {
            save_location_history.remove(0);
        }
        writeSaveLocations();
        if( MyDebug.LOG ) {
            Log.d(TAG, "updateFolderHistory exit:");
            Log.d(TAG, "save_location_history size: " + save_location_history.size());
            for(int i=0;i<save_location_history.size();i++) {
                Log.d(TAG, save_location_history.get(i));
            }
        }
    }

    /** Clears the folder history, and reinitialise it with the current folder.
     * @param folder_name The current folder name.
     */
    void clearFolderHistory(String folder_name) {
        if( MyDebug.LOG )
            Log.d(TAG, "clearFolderHistory: " + folder_name);
        save_location_history.clear();
        updateFolderHistory(folder_name, true); // to re-add the current choice, and save
    }

    /** Writes the history to the SharedPreferences.
     */
    private void writeSaveLocations() {
        if( MyDebug.LOG )
            Log.d(TAG, "writeSaveLocations");
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putInt(pref_base + "_size", save_location_history.size());
        if( MyDebug.LOG )
            Log.d(TAG, "save_location_history_size = " + save_location_history.size());
        for(int i=0;i<save_location_history.size();i++) {
            String string = save_location_history.get(i);
            editor.putString(pref_base + "_" + i, string);
        }
        editor.apply();
    }

    /** Return the size of the history.
     * @return The size of the history.
     */
    public int size() {
        return save_location_history.size();
    }

    /** Returns a save location entry.
     * @param index The index to return.
     * @return The save location at this index.
     */
    public String get(int index) {
        return save_location_history.get(index);
    }

    /** Removes a save location entry.
     * @param index The index to remove.
     */
    public void remove(int index) {
        save_location_history.remove(index);
    }

    /** Sets a save location entry.
     * @param index The index to set.
     * @param element The new entry.
     */
    public void set(int index, String element) {
        save_location_history.set(index, element);
    }

    // for testing:
    /** Should be used for testing only.
     * @param value The value to search the location history for.
     * @return Whether the save location history contains the supplied value.
     */
    public boolean contains(String value) {
        return save_location_history.contains(value);
    }
}

View file

@ -0,0 +1,278 @@
package net.sourceforge.opencamera;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.preference.PreferenceManager;
import android.util.Log;
import android.util.Xml;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlSerializer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.Map;
/** Code for options for saving and restoring settings.
 *  Settings are serialised to/from a simple flat XML format: a root element
 *  containing one element per preference, where the element name records the
 *  preference's type and the key/value are stored as attributes.
 */
public class SettingsManager {
    private static final String TAG = "SettingsManager";

    private final MainActivity main_activity;

    SettingsManager(MainActivity main_activity) {
        this.main_activity = main_activity;
    }

    // XML element names used in the saved settings file; the per-type tags must
    // match the preference's runtime type so that restore can use the matching
    // SharedPreferences.Editor.putX() call.
    private final static String doc_tag = "open_camera_prefs";
    private final static String boolean_tag = "boolean";
    private final static String float_tag = "float";
    private final static String int_tag = "int";
    private final static String long_tag = "long";
    private final static String string_tag = "string";

    /** Loads all settings from the supplied file path. If successful, Open Camera will restart.
     * @param file Path of the XML settings file to load.
     * @return Whether the operation was successful.
     */
    public boolean loadSettings(String file) {
        if( MyDebug.LOG )
            Log.d(TAG, "loadSettings: " + file);
        InputStream inputStream;
        try {
            inputStream = new FileInputStream(file);
        }
        catch(FileNotFoundException e) {
            MyDebug.logStackTrace(TAG, "failed to load: " + file, e);
            main_activity.getPreview().showToast(null, R.string.restore_settings_failed);
            return false;
        }
        return loadSettings(inputStream);
    }

    /** Loads all settings from the supplied Uri. If successful, Open Camera will restart.
     * @param uri Uri of the XML settings file to load.
     * @return Whether the operation was successful.
     */
    public boolean loadSettings(Uri uri) {
        if( MyDebug.LOG )
            Log.d(TAG, "loadSettings: " + uri);
        InputStream inputStream;
        try {
            inputStream = main_activity.getContentResolver().openInputStream(uri);
        }
        catch(FileNotFoundException e) {
            MyDebug.logStackTrace(TAG, "failed to load: " + uri, e);
            main_activity.getPreview().showToast(null, R.string.restore_settings_failed);
            return false;
        }
        return loadSettings(inputStream);
    }

    /** Loads all settings from the supplied inputStream. If successful, Open Camera will restart.
     * The supplied inputStream will be closed.
     * @return Whether the operation was successful.
     */
    private boolean loadSettings(InputStream inputStream) {
        if( MyDebug.LOG )
            Log.d(TAG, "loadSettings: " + inputStream);
        try {
            XmlPullParser parser = Xml.newPullParser();
            parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, false);
            parser.setInput(inputStream, null);
            parser.nextTag();
            parser.require(XmlPullParser.START_TAG, null, doc_tag);

            // replace the existing preferences wholesale with the restored ones
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
            SharedPreferences.Editor editor = sharedPreferences.edit();
            editor.clear();
            while( parser.next() != XmlPullParser.END_TAG ) {
                if( parser.getEventType() != XmlPullParser.START_TAG ) {
                    continue;
                }
                String name = parser.getName();
                String key = parser.getAttributeValue(null, "key");
                if( MyDebug.LOG ) {
                    Log.d(TAG, "name: " + name);
                    Log.d(TAG, " key: " + key);
                    Log.d(TAG, " value: " + parser.getAttributeValue(null, "value"));
                }
                if( key == null ) {
                    // guard against malformed files - don't write a null key into SharedPreferences
                    if( MyDebug.LOG )
                        Log.d(TAG, "ignoring entry with null key");
                }
                else {
                    switch( name ) {
                        case boolean_tag:
                            editor.putBoolean(key, Boolean.parseBoolean(parser.getAttributeValue(null, "value")));
                            break;
                        case float_tag:
                            editor.putFloat(key, Float.parseFloat(parser.getAttributeValue(null, "value")));
                            break;
                        case int_tag:
                            editor.putInt(key, Integer.parseInt(parser.getAttributeValue(null, "value")));
                            break;
                        case long_tag:
                            editor.putLong(key, Long.parseLong(parser.getAttributeValue(null, "value")));
                            break;
                        case string_tag:
                            editor.putString(key, parser.getAttributeValue(null, "value"));
                            break;
                        default:
                            // unknown element type: skip it rather than failing the whole restore
                            break;
                    }
                }
                skipXml(parser);
            }

            // even though we're restoring from settings, we don't want the first time or what's new dialog showing up again!
            // important to do this after reading from xml, so that the keys aren't overwritten
            editor.putBoolean(PreferenceKeys.FirstTimePreferenceKey, true);
            try {
                PackageInfo pInfo = main_activity.getPackageManager().getPackageInfo(main_activity.getPackageName(), 0);
                int version_code = pInfo.versionCode;
                editor.putInt(PreferenceKeys.LatestVersionPreferenceKey, version_code);
            }
            catch(PackageManager.NameNotFoundException e) {
                MyDebug.logStackTrace(TAG, "NameNotFoundException exception trying to get version number", e);
            }
            editor.apply();

            if( !main_activity.is_test ) {
                // restarting seems to cause problems for test code (e.g., see testSettingsSaveLoad - even if that test is fine, it risks affecting subsequent tests)
                main_activity.restartOpenCamera();
            }
            return true;
        }
        catch(Exception e) {
            // parse errors, number format errors etc all mean the restore failed
            MyDebug.logStackTrace(TAG, "failed to restore settings", e);
            main_activity.getPreview().showToast(null, R.string.restore_settings_failed);
            return false;
        }
        finally {
            try {
                inputStream.close();
            }
            catch(IOException e) {
                MyDebug.logStackTrace(TAG, "failed to close inputStream", e);
            }
        }
    }

    /** Skips the remainder of the element the parser is currently positioned on,
     *  including any nested children.
     * @throws IllegalStateException If the parser is not positioned on a START_TAG.
     */
    private static void skipXml(XmlPullParser parser) throws XmlPullParserException, IOException {
        if( parser.getEventType() != XmlPullParser.START_TAG ) {
            throw new IllegalStateException();
        }
        int depth = 1;
        while( depth != 0 ) {
            switch( parser.next() ) {
                case XmlPullParser.END_TAG:
                    depth--;
                    break;
                case XmlPullParser.START_TAG:
                    depth++;
                    break;
            }
        }
    }

    /** Saves all settings to an XML file in the application's settings folder.
     *  Shows a toast indicating success or failure.
     * @param filename The filename to save to (not a full path); an existing file
     *                 with that name is overwritten.
     */
    public void saveSettings(String filename) {
        if( MyDebug.LOG )
            Log.d(TAG, "saveSettings: " + filename);
        OutputStream outputStream = null;
        try {
            StorageUtils storageUtils = main_activity.getStorageUtils();
            File settings_folder = storageUtils.getSettingsFolder();
            // in theory the folder should have been created when choosing a name, but just in case...
            storageUtils.createFolderIfRequired(settings_folder);
            File file = new File(settings_folder.getPath() + File.separator + filename);
            main_activity.test_save_settings_file = file.getAbsolutePath();
            outputStream = new FileOutputStream(file);

            // serialise every shared preference to XML, tagged by its runtime type
            XmlSerializer xmlSerializer = Xml.newSerializer();
            StringWriter writer = new StringWriter();
            xmlSerializer.setOutput(writer);
            xmlSerializer.startDocument("UTF-8", true);
            xmlSerializer.startTag(null, doc_tag);
            SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
            Map<String, ?> map = sharedPreferences.getAll();
            for( Map.Entry<String, ?> entry : map.entrySet() ) {
                String key = entry.getKey();
                Object value = entry.getValue();
                if( key != null ) {
                    String tag_type = null;
                    if( value instanceof Boolean ) {
                        tag_type = boolean_tag;
                    }
                    else if( value instanceof Float ) {
                        tag_type = float_tag;
                    }
                    else if( value instanceof Integer ) {
                        tag_type = int_tag;
                    }
                    else if( value instanceof Long ) {
                        tag_type = long_tag;
                    }
                    else if( value instanceof String ) {
                        tag_type = string_tag;
                    }
                    else {
                        // e.g. string sets are not expected here; skip rather than abort the save
                        Log.e(TAG, "unknown value type: " + value);
                    }
                    if( tag_type != null ) {
                        xmlSerializer.startTag(null, tag_type);
                        xmlSerializer.attribute(null, "key", key);
                        xmlSerializer.attribute(null, "value", value.toString());
                        xmlSerializer.endTag(null, tag_type);
                    }
                }
            }
            xmlSerializer.endTag(null, doc_tag);
            xmlSerializer.endDocument();
            xmlSerializer.flush();
            String dataWrite = writer.toString();
            // use StandardCharsets.UTF_8 rather than Charset.forName("UTF-8") - no lookup, can't throw
            outputStream.write(dataWrite.getBytes(StandardCharsets.UTF_8));
            main_activity.getPreview().showToast(null, R.string.saved_settings);
            storageUtils.broadcastFile(file, false, false, false, false, null);
        }
        catch(IOException e) {
            MyDebug.logStackTrace(TAG, "failed to save settings", e);
            main_activity.getPreview().showToast(null, R.string.save_settings_failed);
        }
        finally {
            if( outputStream != null ) {
                try {
                    outputStream.close();
                }
                catch(IOException e) {
                    MyDebug.logStackTrace(TAG, "failed to close outputStream", e);
                }
            }
        }
    }
}

View file

@ -0,0 +1,81 @@
package net.sourceforge.opencamera;
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioManager;
import android.media.SoundPool;
import android.util.Log;
import android.util.SparseIntArray;
/** Manages loading and playing sounds, via SoundPool.
 */
class SoundPoolManager {
    private static final String TAG = "SoundPoolManager";

    private final Context context;
    private SoundPool sound_pool; // created lazily in initSound(), freed in releaseSound()
    private SparseIntArray sound_ids; // maps resource id -> SoundPool sound id; lives and dies with sound_pool

    SoundPoolManager(Context context) {
        this.context = context;
    }

    /** Creates the SoundPool if it does not already exist; otherwise a no-op. */
    void initSound() {
        if( sound_pool != null ) {
            // already initialised
            return;
        }
        if( MyDebug.LOG )
            Log.d(TAG, "create new sound_pool");
        AudioAttributes audio_attributes = new AudioAttributes.Builder()
                .setLegacyStreamType(AudioManager.STREAM_SYSTEM)
                .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
                .build();
        sound_pool = new SoundPool.Builder()
                .setMaxStreams(1)
                .setAudioAttributes(audio_attributes)
                .build();
        sound_ids = new SparseIntArray();
    }

    /** Releases the SoundPool (if any). Safe to call initSound() again afterwards. */
    void releaseSound() {
        if( sound_pool == null )
            return;
        if( MyDebug.LOG )
            Log.d(TAG, "release sound_pool");
        sound_pool.release();
        sound_pool = null;
        sound_ids = null;
    }

    /* Must be called before playSound (allowing enough time to load the sound).
     * No-op if initSound() hasn't been called.
     */
    void loadSound(int resource_id) {
        if( sound_pool == null )
            return;
        if( MyDebug.LOG )
            Log.d(TAG, "loading sound resource: " + resource_id);
        int sound_id = sound_pool.load(context, resource_id, 1);
        if( MyDebug.LOG )
            Log.d(TAG, " loaded sound: " + sound_id);
        sound_ids.put(resource_id, sound_id);
    }

    /* Must call loadSound first (allowing enough time to load the sound).
     * No-op if the pool isn't initialised or the resource was never loaded.
     */
    void playSound(int resource_id) {
        if( sound_pool == null )
            return;
        if( sound_ids.indexOfKey(resource_id) < 0 ) {
            if( MyDebug.LOG )
                Log.d(TAG, "resource not loaded: " + resource_id);
            return;
        }
        int sound_id = sound_ids.get(resource_id);
        if( MyDebug.LOG )
            Log.d(TAG, "play sound: " + sound_id);
        sound_pool.play(sound_id, 1.0f, 1.0f, 0, 0, 1);
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,42 @@
package net.sourceforge.opencamera;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import androidx.appcompat.app.AppCompatActivity;
/** Entry Activity for the "take photo" widget (see MyWidgetProviderTakePhoto).
 *  This redirects to MainActivity, using the static TAKE_PHOTO flag to pass the
 *  "take photo" request, then immediately finishes itself.
 */
public class TakePhoto extends AppCompatActivity {
    private static final String TAG = "TakePhoto";

    // Usually passing data via intent is preferred to using statics - however here a static is better for security,
    // as we don't want other applications calling Open Camera's MainActivity with a take photo intent!
    //public static final String TAKE_PHOTO = "net.sourceforge.opencamera.TAKE_PHOTO";
    public static boolean TAKE_PHOTO;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        // forward to MainActivity, reusing an existing task/instance where possible
        Intent intent = new Intent(this, MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        //intent.putExtra(TAKE_PHOTO, true);
        TakePhoto.TAKE_PHOTO = true;
        this.startActivity(intent);
        if( MyDebug.LOG )
            Log.d(TAG, "finish");
        // this activity exists only to redirect, so finish straight away
        this.finish();
    }

    @Override // was missing @Override - the method silently shadowed Activity.onResume otherwise
    protected void onResume() {
        if( MyDebug.LOG )
            Log.d(TAG, "onResume");
        super.onResume();
    }
}

View file

@ -0,0 +1,123 @@
package net.sourceforge.opencamera;
import android.content.Context;
import android.location.Location;
import android.util.Log;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/** Handles various text formatting options, used for photo stamp and video subtitles.
 */
public class TextFormatter {
    private static final String TAG = "TextFormatter";

    private final Context context;
    private final DecimalFormat decimalFormat = new DecimalFormat("#0.0"); // one decimal place for distances

    TextFormatter(Context context) {
        this.context = context;
    }

    /** Formats the date according to the user preference preference_stamp_dateformat.
     * Returns "" if preference_stamp_dateformat is "preference_stamp_dateformat_none".
     */
    public static String getDateString(String preference_stamp_dateformat, Date date) {
        if( preference_stamp_dateformat.equals("preference_stamp_dateformat_none") ) {
            return "";
        }
        else if( preference_stamp_dateformat.equals("preference_stamp_dateformat_yyyymmdd") ) {
            // use dashes instead of slashes - this should follow https://en.wikipedia.org/wiki/ISO_8601
            return new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()).format(date);
        }
        else if( preference_stamp_dateformat.equals("preference_stamp_dateformat_ddmmyyyy") ) {
            return new SimpleDateFormat("dd/MM/yyyy", Locale.getDefault()).format(date);
        }
        else if( preference_stamp_dateformat.equals("preference_stamp_dateformat_mmddyyyy") ) {
            return new SimpleDateFormat("MM/dd/yyyy", Locale.getDefault()).format(date);
        }
        // any other value falls back to the locale's default date format
        return DateFormat.getDateInstance().format(date);
    }

    /** Formats the time according to the user preference preference_stamp_timeformat.
     * Returns "" if preference_stamp_timeformat is "preference_stamp_timeformat_none".
     */
    public static String getTimeString(String preference_stamp_timeformat, Date date) {
        if( preference_stamp_timeformat.equals("preference_stamp_timeformat_none") ) {
            return "";
        }
        else if( preference_stamp_timeformat.equals("preference_stamp_timeformat_12hour") ) {
            return new SimpleDateFormat("hh:mm:ss a", Locale.getDefault()).format(date);
        }
        else if( preference_stamp_timeformat.equals("preference_stamp_timeformat_24hour") ) {
            return new SimpleDateFormat("HH:mm:ss", Locale.getDefault()).format(date);
        }
        // any other value falls back to the locale's default time format
        return DateFormat.getTimeInstance().format(date);
    }

    /** Formats a distance (supplied in metres) according to the distance units preference,
     *  converting to feet when requested.
     */
    private String getDistanceString(double distance, String preference_units_distance) {
        final boolean use_feet = preference_units_distance.equals("preference_units_distance_ft");
        double converted_distance = use_feet ? 3.28084 * distance : distance;
        String units = context.getResources().getString(use_feet ? R.string.feet_abbreviation : R.string.metres_abbreviation);
        return decimalFormat.format(converted_distance) + units;
    }

    /** Formats the GPS information according to the user preference preference_stamp_gpsformat.
     * Returns "" if preference_stamp_gpsformat is "preference_stamp_gpsformat_none", or both store_location and
     * store_geo_direction are false.
     */
    public String getGPSString(String preference_stamp_gpsformat, String preference_units_distance, boolean store_location, Location location, boolean store_geo_direction, double geo_direction) {
        if( preference_stamp_gpsformat.equals("preference_stamp_gpsformat_none") ) {
            return "";
        }
        StringBuilder gps_stamp = new StringBuilder();
        if( store_location ) {
            if( MyDebug.LOG )
                Log.d(TAG, "location: " + location);
            if( preference_stamp_gpsformat.equals("preference_stamp_gpsformat_dms") ) {
                gps_stamp.append(LocationSupplier.locationToDMS(location.getLatitude()));
                gps_stamp.append(", ");
                gps_stamp.append(LocationSupplier.locationToDMS(location.getLongitude()));
            }
            else {
                gps_stamp.append(Location.convert(location.getLatitude(), Location.FORMAT_DEGREES));
                gps_stamp.append(", ");
                gps_stamp.append(Location.convert(location.getLongitude(), Location.FORMAT_DEGREES));
            }
            if( location.hasAltitude() ) {
                gps_stamp.append(", ");
                gps_stamp.append(getDistanceString(location.getAltitude(), preference_units_distance));
            }
        }
        if( store_geo_direction ) {
            // normalise the compass angle into [0, 360) degrees
            float geo_angle = (float)Math.toDegrees(geo_direction);
            if( geo_angle < 0.0f ) {
                geo_angle += 360.0f;
            }
            if( MyDebug.LOG )
                Log.d(TAG, "geo_angle: " + geo_angle);
            if( gps_stamp.length() > 0 ) {
                gps_stamp.append(", ");
            }
            gps_stamp.append(Math.round(geo_angle));
            gps_stamp.append((char)0x00B0); // degree symbol
        }
        // don't log gps_stamp, in case of privacy!
        return gps_stamp.toString();
    }

    /** Formats a duration in milliseconds as "HH:MM:SS,mmm" (SubRip subtitle timestamp style);
     *  hours are not capped at 24.
     */
    public static String formatTimeMS(long time_ms) {
        int hours = (int) ((time_ms / (1000*60*60)));
        int minutes = (int) ((time_ms / (1000*60)) % 60);
        int seconds = (int) (time_ms / 1000) % 60 ;
        int ms = (int) (time_ms) % 1000 ;
        return String.format(Locale.getDefault(), "%02d:%02d:%02d,%03d", hours, minutes, seconds, ms);
    }
}

View file

@ -0,0 +1,12 @@
package net.sourceforge.opencamera;
import android.widget.Toast;
/** Allows methods to update a Toast with a new Toast.
 *  A ToastBoxer is a shared mutable holder: callers pass the same instance around
 *  and replace the contained Toast, so a later message can supersede an earlier one.
 */
public class ToastBoxer {
    // the most recently assigned Toast; may be null if none has been shown yet
    public Toast toast;
    public ToastBoxer() {
    }
}

View file

@ -0,0 +1,815 @@
package net.sourceforge.opencamera.cameracontroller;
import net.sourceforge.opencamera.MyDebug;
import java.io.Serial;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import android.graphics.Rect;
import android.location.Location;
import android.media.MediaRecorder;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.TextureView;
import androidx.annotation.NonNull;
/** CameraController is an abstract class that wraps up the access/control to
* the Android camera, so that the rest of the application doesn't have to
* deal directly with the Android camera API. It also allows us to support
* more than one camera API through the same API (this is used to support both
* the original camera API, and Android 5's Camera2 API).
* The class is fairly low level wrapper about the APIs - there is some
* additional logical/workarounds where such things are API-specific, but
* otherwise the calling application still controls the behaviour of the
* camera.
*/
public abstract class CameraController {
private static final String TAG = "CameraController";
// the camera ID this controller was constructed for (see getCameraId())
private final int cameraId;

public static final String SCENE_MODE_DEFAULT = "auto"; // chosen to match Camera.Parameters.SCENE_MODE_AUTO, but we also use compatible values for Camera2 API
public static final String COLOR_EFFECT_DEFAULT = "none"; // chosen to match Camera.Parameters.EFFECT_NONE, but we also use compatible values for Camera2 API
public static final String WHITE_BALANCE_DEFAULT = "auto"; // chosen to match Camera.Parameters.WHITE_BALANCE_AUTO, but we also use compatible values for Camera2 API
public static final String ANTIBANDING_DEFAULT = "auto"; // chosen to match Camera.Parameters.ANTIBANDING_AUTO, but we also use compatible values for Camera2 API
public static final String EDGE_MODE_DEFAULT = "default";
public static final String NOISE_REDUCTION_MODE_DEFAULT = "default";
public static final String ISO_DEFAULT = "auto";
public static final long EXPOSURE_TIME_DEFAULT = 1000000000L/30; // note, responsibility of callers to check that this is within the valid min/max range
public static final int N_IMAGES_NR_DARK = 8;
public static final int N_IMAGES_NR_DARK_LOW_LIGHT = 15;

// for testing:
public volatile int count_camera_parameters_exception;
public volatile int count_precapture_timeout;
public volatile boolean test_wait_capture_result; // whether to test delayed capture result in Camera2 API
public volatile boolean test_release_during_photo; // for Camera2 API, will force takePictureAfterPrecapture() to call release() on UI thread
public volatile int test_capture_results; // for Camera2 API, how many capture requests completed with RequestTagType.CAPTURE
public volatile int test_fake_flash_focus; // for Camera2 API, records torch turning on for fake flash during autofocus
public volatile int test_fake_flash_precapture; // for Camera2 API, records torch turning on for fake flash during precapture
public volatile int test_fake_flash_photo; // for Camera2 API, records torch turning on for fake flash for photo capture
public volatile int test_af_state_null_focus; // for Camera2 API, records af_state being null even when we've requested autofocus
public volatile boolean test_used_tonemap_curve;
public volatile int test_texture_view_buffer_w; // for TextureView, keep track of buffer size
public volatile int test_texture_view_buffer_h;
public volatile boolean test_force_run_post_capture; // for Camera2 API, test using adjustPreview() / RequestTagType.RUN_POST_CAPTURE
public static volatile boolean test_force_slow_preview_start; // for Camera2 API, test waiting for 6s when starting preview
/** Class for caching a subset of CameraFeatures, that are slow to read.
 *  For now only used for vendor extensions which are slow to read.
 */
public static class CameraFeaturesCache {
    public List<Integer> supported_extensions;
    public List<Integer> supported_extensions_zoom;
    final Map<Integer, List<android.util.Size>> extension_picture_sizes_map; // key is extension
    final Map<Integer, List<android.util.Size>> extension_preview_sizes_map; // key is extension

    CameraFeaturesCache(CameraFeatures camera_features, Map<Integer, List<android.util.Size>> extension_picture_sizes_map, Map<Integer, List<android.util.Size>> extension_preview_sizes_map) {
        // copy the extension lists (when present) so the cache is independent of the source CameraFeatures
        this.supported_extensions = camera_features.supported_extensions == null ?
                null : new ArrayList<>(camera_features.supported_extensions);
        this.supported_extensions_zoom = camera_features.supported_extensions_zoom == null ?
                null : new ArrayList<>(camera_features.supported_extensions_zoom);
        this.extension_picture_sizes_map = extension_picture_sizes_map;
        this.extension_preview_sizes_map = extension_preview_sizes_map;
    }
}
public static class CameraFeatures {
    public Set<String> physical_camera_ids; // if non-null, this camera is part of a logical camera that exposes these physical camera IDs
    public boolean is_zoom_supported;
    public int max_zoom;
    public List<Integer> zoom_ratios; // list of supported zoom ratios; each value is the zoom multiplied by 100
    public boolean supports_face_detection;
    public List<CameraController.Size> picture_sizes;
    public List<CameraController.Size> video_sizes;
    public List<CameraController.Size> video_sizes_high_speed; // may be null if high speed not supported
    public List<CameraController.Size> preview_sizes;
    public List<Integer> supported_extensions; // if non-null, list of supported camera vendor extensions, see https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics
    public List<Integer> supported_extensions_zoom; // if non-null, list of camera vendor extensions that support zoom
    public List<String> supported_flash_values;
    public List<String> supported_focus_values;
    public float [] apertures; // may be null if not supported, else will have at least 2 values
    public int max_num_focus_areas;
    public float minimum_focus_distance;
    public boolean is_exposure_lock_supported;
    public boolean is_white_balance_lock_supported;
    public boolean is_optical_stabilization_supported;
    public boolean is_video_stabilization_supported;
    public boolean is_photo_video_recording_supported;
    public boolean supports_white_balance_temperature;
    public int min_temperature;
    public int max_temperature;
    public boolean supports_iso_range;
    public int min_iso;
    public int max_iso;
    public boolean supports_exposure_time;
    public long min_exposure_time;
    public long max_exposure_time;
    public int min_exposure;
    public int max_exposure;
    public float exposure_step;
    public boolean can_disable_shutter_sound;
    public int tonemap_max_curve_points;
    public boolean supports_tonemap_curve;
    public boolean supports_expo_bracketing; // whether setBurstType(BURSTTYPE_EXPO) can be used
    public int max_expo_bracketing_n_images;
    public boolean supports_focus_bracketing; // whether setBurstType(BURSTTYPE_FOCUS) can be used
    public boolean supports_burst; // whether setBurstType(BURSTTYPE_NORMAL) can be used
    public boolean supports_jpeg_r; // whether supports JPEG_R (Ultra HDR)
    public boolean supports_raw;
    public float view_angle_x; // horizontal angle of view in degrees (when unzoomed)
    public float view_angle_y; // vertical angle of view in degrees (when unzoomed)

    /** Returns whether any of the supplied sizes support the requested fps.
     * @param sizes The sizes to search; may be null, in which case false is returned.
     * @param fps   The desired frame rate.
     */
    public static boolean supportsFrameRate(List<Size> sizes, int fps) {
        if( MyDebug.LOG )
            Log.d(TAG, "supportsFrameRate: " + fps);
        if( sizes != null ) {
            for(Size candidate : sizes) {
                if( candidate.supportsFrameRate(fps) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "fps is supported");
                    return true;
                }
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, "fps is NOT supported");
        return false;
    }

    /** Finds an entry in sizes matching the supplied size's dimensions, preferring one that
     *  supports the requested fps (when fps > 0).
     * @param return_closest If true, return a match for the width/height, even if the fps doesn't
     *                       match.
     */
    public static Size findSize(List<Size> sizes, Size size, double fps, boolean return_closest) {
        Size dimension_match = null; // the last entry seen whose width/height match
        for(Size candidate : sizes) {
            if( !size.equals(candidate) ) {
                continue;
            }
            dimension_match = candidate;
            // with no fps requirement, the first dimension match wins;
            // otherwise only a candidate that supports the fps is an exact match
            if( fps <= 0 || candidate.supportsFrameRate(fps) ) {
                return candidate;
            }
        }
        return return_closest ? dimension_match : null;
    }
}
// Android docs and FindBugs recommend that Comparators also be Serializable
/** Sorts fps ranges into ascending order: by lower bound first, then by upper bound. */
static class RangeSorter implements Comparator<int[]>, Serializable {
    @Serial
    private static final long serialVersionUID = 5802214721073728212L;

    @Override
    public int compare(int[] o1, int[] o2) {
        // use Integer.compare rather than subtraction, which can overflow for extreme values
        if (o1[0] == o2[0]) return Integer.compare(o1[1], o2[1]);
        return Integer.compare(o1[0], o2[0]);
    }
}
/* Sorts resolutions from highest to lowest, by area.
 * Android docs and FindBugs recommend that Comparators also be Serializable
 */
static class SizeSorter implements Comparator<Size>, Serializable {
    @Serial
    private static final long serialVersionUID = 5802214721073718212L;

    @Override
    public int compare(final CameraController.Size a, final CameraController.Size b) {
        // compute areas in long and use Long.compare, to avoid both multiplication
        // and subtraction overflow for very large resolutions
        return Long.compare((long)b.width * b.height, (long)a.width * a.height);
    }
}
/** A width/height pair, together with (for video) the supported fps ranges and whether this is a
 *  high speed video size. Equality and hashing consider only width and height.
 */
public static class Size {
    public final int width;
    public final int height;
    public boolean supports_burst; // for photo
    public List<Integer> supported_extensions; // for photo and preview: if non-null, list of supported camera vendor extensions
    final List<int[]> fps_ranges; // for video
    public final boolean high_speed; // for video

    Size(int width, int height, List<int[]> fps_ranges, boolean high_speed) {
        this.width = width;
        this.height = height;
        this.high_speed = high_speed;
        this.supports_burst = true;
        this.fps_ranges = fps_ranges;
        // keep ranges ordered (ascending by lower then upper bound); note this sorts the supplied list in place
        Collections.sort(this.fps_ranges, new RangeSorter());
    }

    public Size(int width, int height) {
        this(width, height, new ArrayList<>(), false);
    }

    /** Whether this size supports the requested burst and/or extension.
     */
    public boolean supportsRequirements(boolean want_burst, boolean want_extension, int extension) {
        if( want_burst && !this.supports_burst )
            return false;
        return !want_extension || this.supportsExtension(extension);
    }

    /** Whether this size supports the given camera vendor extension. */
    public boolean supportsExtension(int extension) {
        if( supported_extensions == null )
            return false;
        return supported_extensions.contains(extension);
    }

    /** Whether any of this size's fps ranges contains the requested frame rate. */
    public boolean supportsFrameRate(double fps) {
        for(int[] range : this.fps_ranges) {
            if( range[0] <= fps && fps <= range[1] )
                return true;
        }
        return false;
    }

    /** Returns the requested fps if some range supports it; otherwise the supported fps bound
     *  closest to the request, or -1 if there are no fps ranges at all.
     */
    public int closestFrameRate(double fps) {
        int best_fps = -1;
        int best_dist = -1;
        for(int[] range : this.fps_ranges) {
            if( range[0] <= fps && fps <= range[1] ) {
                // fps lies inside this range, so it's supported exactly
                return (int)fps;
            }
            // the nearest supported value is whichever bound the request falls outside of
            int candidate = fps < range[0] ? range[0] : range[1];
            int dist = Math.abs(candidate - (int)fps);
            if( best_dist == -1 || dist < best_dist ) {
                best_fps = candidate;
                best_dist = dist;
            }
        }
        return best_fps;
    }

    @Override
    public boolean equals(Object o) {
        if( o instanceof Size ) {
            Size other = (Size)o;
            return this.width == other.width && this.height == other.height;
        }
        return false;
    }

    @Override
    public int hashCode() {
        // must override this, as we override equals(); simple two-int combination
        // (can't use Objects.hash, which requires API level 19)
        return width*41 + height;
    }

    @NonNull
    public String toString() {
        StringBuilder ranges = new StringBuilder();
        for(int[] range : this.fps_ranges) {
            ranges.append(" [").append(range[0]).append("-").append(range[1]).append("]");
        }
        return this.width + "x" + this.height + " " + ranges + (this.high_speed ? "-hs" : "");
    }
}
/** An area has values from [-1000,-1000] (for top-left) to [1000,1000] (for bottom-right) for whatever is
 * the current field of view (i.e., taking zoom into account).
 */
public static class Area {
    // region in the [-1000,1000] x [-1000,1000] camera coordinate space
    final Rect rect;
    // relative weight of this area (per the Android camera APIs' metering/focus area convention)
    final int weight;

    public Area(Rect rect, int weight) {
        this.rect = rect;
        this.weight = weight;
    }
}
/** Callback interface for receiving face detection results. */
public interface FaceDetectionListener {
    /** Called with the faces detected by the camera. */
    void onFaceDetection(Face[] faces);
}
/** Interface to define callbacks related to taking photos. These callbacks are all called on the UI thread.
 */
public interface PictureCallback {
    /** Called immediately before we start capturing the picture. */
    void onStarted();
    /** Called after all relevant on*PictureTaken() callbacks have been called and returned. */
    void onCompleted();
    /** Called with the captured image data. */
    void onPictureTaken(byte[] data);
    /** Only called if RAW is requested.
     * Caller should call raw_image.close() when done with the image.
     */
    void onRawPictureTaken(RawImage raw_image);
    /** Only called if burst is requested.
     */
    void onBurstPictureTaken(List<byte[]> images);
    /** Only called if burst is requested.
     */
    void onRawBurstPictureTaken(List<RawImage> raw_images);
    /** Reports percentage progress for vendor camera extensions. Note that not all devices support this being called.
     */
    void onExtensionProgress(int progress);
    /** This is called for when burst mode is BURSTTYPE_FOCUS or BURSTTYPE_CONTINUOUS, to ask whether it's safe to take
     * n_raw extra RAW images and n_jpegs extra JPEG images, or whether to wait.
     */
    boolean imageQueueWouldBlock(int n_raw, int n_jpegs);
    /** This is called for flash_frontscreen_auto or flash_frontscreen_on mode to indicate the caller should light up the screen
     * (for flash_frontscreen_auto it will only be called if the scene is considered dark enough to require the screen flash).
     * The screen flash can be removed when or after onCompleted() is called.
     */
    void onFrontScreenTurnOn();
}
/** Interface to define callback for autofocus completing. This callback may be called on the UI thread (CameraController1)
 * or a background thread (CameraController2).
 */
public interface AutoFocusCallback {
    /** Called when the autofocus operation completes.
     * @param success Whether the camera reported that focusing succeeded.
     */
    void onAutoFocus(boolean success);
}
/** Interface to define callback for continuous focus starting/stopping. This callback may be called on the
 * UI thread (CameraController1) or a background thread (CameraController2).
 */
public interface ContinuousFocusMoveCallback {
    /** Called when a continuous focus move starts (start==true) or stops (start==false). */
    void onContinuousFocusMove(boolean start);
}
/** Generic callback used to report an error to the caller. */
public interface ErrorCallback {
    void onError();
}
/** Represents a face reported by camera face detection. */
public static class Face {
    // detection score as reported by the camera API; higher means more confident
    public final int score;
    /* The rect has values from [-1000,-1000] (for top-left) to [1000,1000] (for bottom-right) for whatever is
     * the current field of view (i.e., taking zoom into account).
     */
    public final Rect rect;
    /** The temp rect is temporary storage that can be used by callers.
     */
    public final Rect temp = new Rect();

    Face(int score, Rect rect) {
        this.score = score;
        this.rect = rect;
    }
}
/** Result of setting a string-valued camera parameter: the list of values the camera
 *  supports for it, together with the value that was actually selected.
 */
public static class SupportedValues {
    // all values supported by the camera for this parameter
    public final List<String> values;
    // the value actually chosen (may differ from the requested value if that was unsupported)
    public final String selected_value;
    SupportedValues(List<String> values, String selected_value) {
        this.values = values;
        this.selected_value = selected_value;
    }
}
/** Releases the camera and associated resources. */
public abstract void release();
/** Triggers error mechanism - should only be called externally for testing purposes. */
public abstract void onError();

CameraController(int cameraId) {
    this.cameraId = cameraId;
}
/** Returns a name identifying which camera API this controller implements. */
public abstract String getAPI();
public abstract CameraFeatures getCameraFeatures() throws CameraControllerException;
/** Returns the camera ID that this controller was constructed with. */
public int getCameraId() {
    return cameraId;
}
/** For CameraController2 only. Applications should cover the preview textureview if since last resuming, camera_controller
 * has never been non-null or this method has never returned false.
 * Otherwise there is a risk when opening the camera that the textureview still shows an image from when
 * the camera was previously opened (e.g., from pausing and resuming the application). This returns false (for CameraController2)
 * when the camera has received its first frame.
 * Update: on more recent Android versions this didn't work very well, possibly due to a screenshot being used for "recent apps"
 * view; on Android 13+, the activity can make use of shouldCoverPreview(false) for this.
 * @return Whether the preview should currently be covered; this base implementation always returns false.
 */
public boolean shouldCoverPreview() {
    return false;
}
/** For CameraController2 only. After calling this, shouldCoverPreview() will return true, until a new
 * frame from the camera has been received.
 * This base implementation is a no-op.
 */
public void resetCoverPreview() {
}
public abstract SupportedValues setSceneMode(String value);
/**
 * @return The current scene mode. Will be null if scene mode not supported.
 */
public abstract String getSceneMode();
/**
 * @return Returns true iff changing the scene mode can affect the available camera functionality
 * (e.g., changing to Night scene mode might mean flash modes are no longer available).
 */
public abstract boolean sceneModeAffectsFunctionality();
public abstract SupportedValues setColorEffect(String value);
public abstract String getColorEffect();
public abstract SupportedValues setWhiteBalance(String value);
public abstract String getWhiteBalance();
public abstract boolean setWhiteBalanceTemperature(int temperature);
public abstract int getWhiteBalanceTemperature();
public abstract SupportedValues setAntiBanding(String value);
public abstract String getAntiBanding();
public abstract SupportedValues setEdgeMode(String value);
public abstract String getEdgeMode();
public abstract SupportedValues setNoiseReductionMode(String value);
public abstract String getNoiseReductionMode();
/** Set an ISO value. Only supported if supports_iso_range is false.
 */
public abstract SupportedValues setISO(String value);
/** Switch between auto and manual ISO mode. Only supported if supports_iso_range is true.
 * @param manual_iso Whether to switch to manual mode or back to auto.
 * @param iso If manual_iso is true, this specifies the desired ISO value. If this is outside
 *            the min_iso/max_iso, the value will be snapped so it does lie within that range.
 *            If manual_iso is false, this value is ignored.
 */
public abstract void setManualISO(boolean manual_iso, int iso);
/**
 * @return Whether in manual ISO mode (as opposed to auto).
 */
public abstract boolean isManualISO();
/** Specify a specific ISO value. Only supported if supports_iso_range is true. Callers should
 * first switch to manual ISO mode using setManualISO().
 */
public abstract boolean setISO(int iso);
public abstract String getISOKey();
/** Returns the manual ISO value. Only supported if supports_iso_range is true.
 */
public abstract int getISO();
public abstract long getExposureTime();
public abstract boolean setExposureTime(long exposure_time);
public abstract void setAperture(float aperture);
public abstract CameraController.Size getPictureSize();
public abstract void setPictureSize(int width, int height);
public abstract CameraController.Size getPreviewSize();
public abstract void setPreviewSize(int width, int height);
public abstract void setCameraExtension(boolean enabled, int extension);
public abstract boolean isCameraExtension();
public abstract int getCameraExtension();
// whether to take a burst of images, and if so, what type
public enum BurstType {
BURSTTYPE_NONE, // no burst
BURSTTYPE_EXPO, // enable expo bracketing mode
BURSTTYPE_FOCUS, // enable focus bracketing mode;
BURSTTYPE_NORMAL, // take a regular burst
BURSTTYPE_CONTINUOUS // as BURSTTYPE_NORMAL, but bursts will fire continually until stopContinuousBurst() is called.
}
    public abstract void setBurstType(BurstType new_burst_type);
    public abstract BurstType getBurstType();
    /** Only relevant if setBurstType() is also called with BURSTTYPE_NORMAL. Sets the number of
     * images to take in the burst.
     */
    public abstract void setBurstNImages(int burst_requested_n_images);
    /** Only relevant if setBurstType() is also called with BURSTTYPE_NORMAL. If this method is
     * called with burst_for_noise_reduction, then the number of burst images, and other settings,
     * will be set for noise reduction mode (and setBurstNImages() is ignored).
     */
    public abstract void setBurstForNoiseReduction(boolean burst_for_noise_reduction, boolean noise_reduction_low_light);
    public abstract boolean isContinuousBurstInProgress();
    public abstract void stopContinuousBurst();
    public abstract void stopFocusBracketingBurst();
    /** Only relevant if setBurstType() is also called with BURSTTYPE_EXPO. Sets the number of
     * images to take in the expo burst.
     * @param n_images Must be an odd number greater than 1.
     */
    public abstract void setExpoBracketingNImages(int n_images);
    /** Only relevant if setBurstType() is also called with BURSTTYPE_EXPO.
     */
    public abstract void setExpoBracketingStops(double stops);
    public abstract void setUseExpoFastBurst(boolean use_expo_fast_burst);
    /** Whether to enable a workaround hack for some Galaxy devices - take an additional dummy photo
     * when taking an expo/HDR burst, to avoid problem where manual exposure is ignored for the
     * first image.
     */
    public abstract void setDummyCaptureHack(boolean dummy_capture_hack);
    /** Whether the current BurstType is one that requires the camera driver to capture the images
     * as a burst at a fast rate. If true, we should not use high resolutions that don't support a
     * capture burst (for Camera2 API, see StreamConfigurationMap.getHighResolutionOutputSizes()).
     */
    public abstract boolean isCaptureFastBurst();
    /** If true, then the camera controller is currently capturing a burst of images.
     */
    public abstract boolean isCapturingBurst();
    /** If isCapturingBurst() is true, then this returns the number of images in the current burst
     * captured so far.
     */
    public abstract int getNBurstTaken();
    /** If isCapturingBurst() is true, then this returns the total number of images in the current
     * burst if known. If not known (e.g., for continuous burst mode), returns 0.
     */
    public abstract int getBurstTotal();
    /**
     * @param want_jpeg_r Whether to enable taking photos in JPEG_R (Ultra HDR) format.
     */
    public abstract void setJpegR(boolean want_jpeg_r);
    /**
     * @param want_raw Whether to enable taking photos in RAW (DNG) format.
     * @param max_raw_images The maximum number of unclosed DNG images that may be held in memory at any one
     *                       time. Trying to take a photo, when the number of unclosed DNG images is already
     *                       equal to this number, will result in an exception (java.lang.IllegalStateException
     *                       - note, the exception will come from a CameraController2 callback, so can't be
     *                       caught by the caller).
     */
    public abstract void setRaw(boolean want_raw, int max_raw_images);
    /** Request a capture session compatible with high speed frame rates.
     * This should be called only when the preview is paused or not yet started.
     */
    public abstract void setVideoHighSpeed(boolean setVideoHighSpeed);
    /**
     * setUseCamera2FakeFlash() should be called after creating the CameraController, and before calling getCameraFeatures() or
     * starting the preview (as it changes the available flash modes).
     * "Fake flash" is an alternative mode for handling flash, for devices that have poor Camera2 support - typical symptoms
     * include precapture never starting, flash not firing, photos being over or under exposed.
     * Instead, we fake the precapture and flash simply by turning on the torch. After turning on torch, we wait for ae to stop
     * scanning (and af too, as it can start scanning in continuous mode) - this is effectively the equivalent of precapture -
     * before taking the photo.
     * In auto-focus mode, we make the decision ourselves based on the current ISO.
     * We also handle the flash firing for autofocus by turning the torch on and off too. Advantages are:
     * - The flash tends to be brighter, and the photo can end up overexposed as a result if capture follows the autofocus.
     * - Some devices also don't seem to fire flash for autofocus in Camera2 mode (e.g., Samsung S7)
     * - When capture follows autofocus, we need to make the same decision for firing flash for both the autofocus and the capture.
     */
    public void setUseCamera2FakeFlash(boolean use_fake_precapture) {
        // No-op in the base class; only CameraController2 implements fake flash.
    }
    public boolean getUseCamera2FakeFlash() {
        // Fake flash is never enabled for the base implementation.
        return false;
    }
    public abstract boolean getOpticalStabilization();
    /** Whether to enable digital video stabilization. Should only be set to true when intending to
     * capture video.
     */
    public abstract void setVideoStabilization(boolean enabled);
    public abstract boolean getVideoStabilization();
public enum TonemapProfile {
TONEMAPPROFILE_OFF,
TONEMAPPROFILE_REC709,
TONEMAPPROFILE_SRGB,
TONEMAPPROFILE_LOG,
TONEMAPPROFILE_GAMMA,
TONEMAPPROFILE_JTVIDEO,
TONEMAPPROFILE_JTLOG,
TONEMAPPROFILE_JTLOG2
}
    /** Sets a tonemap profile.
     * @param tonemap_profile The type of the tonemap profile.
     * @param log_profile_strength Only relevant if tonemap_profile set to TONEMAPPROFILE_LOG.
     * @param gamma Only relevant if tonemap_profile set to TONEMAPPROFILE_GAMMA
     */
    public abstract void setTonemapProfile(TonemapProfile tonemap_profile, float log_profile_strength, float gamma);
    public abstract TonemapProfile getTonemapProfile();
    public abstract int getJpegQuality();
    public abstract void setJpegQuality(int quality);
    /** Returns the current zoom. The returned value is an index into the CameraFeatures.zoom_ratios
     * array.
     */
    public abstract int getZoom();
    /** Set the zoom.
     * @param value The index into the CameraFeatures.zoom_ratios array.
     */
    public abstract void setZoom(int value);
    /** Set the zoom. Unlike setZoom(value), this allows specifying any zoom level within the
     * supported range.
     * @param value The index into the CameraFeatures.zoom_ratios array.
     * @param smooth_zoom The desired zoom. With CameraController1 (old Camera API), this is ignored.
     *                    With CameraController2 (Camera2 API), this is used instead of the zoom_ratios
     *                    value. Note that getZoom() will return the value passed to this method, so
     *                    passing an appropriate value (e.g., whatever zoom_ratio is closest to the
     *                    smooth_zoom) is still useful if you want to make use of getZoom().
     *                    smooth_zoom must still be within the supported range of zoom values.
     */
    public abstract void setZoom(int value, float smooth_zoom);
    public abstract void resetZoom(); // resets to zoom 1x
    public abstract int getExposureCompensation();
    public abstract boolean setExposureCompensation(int new_exposure);
    public abstract void setPreviewFpsRange(int min, int max);
    public abstract void clearPreviewFpsRange();
    public abstract List<int []> getSupportedPreviewFpsRange(); // result depends on setting of setVideoHighSpeed()
    public abstract void setFocusValue(String focus_value);
    public abstract String getFocusValue();
    public abstract float getFocusDistance();
    public abstract boolean setFocusDistance(float focus_distance);
    /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the number of
     * images to take in the focus burst.
     */
    public abstract void setFocusBracketingNImages(int n_images);
    /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. If set to true, an
     * additional image will be included at infinite distance.
     */
    public abstract void setFocusBracketingAddInfinity(boolean focus_bracketing_add_infinity);
    /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the source focus
     * distance for focus bracketing.
     */
    public abstract void setFocusBracketingSourceDistance(float focus_bracketing_source_distance);
    public abstract float getFocusBracketingSourceDistance();
    /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the source focus
     * distance to match the camera's current focus distance (typically useful if running in a
     * non-manual focus mode).
     */
    public abstract void setFocusBracketingSourceDistanceFromCurrent();
    /** Only relevant if setBurstType() is also called with BURSTTYPE_FOCUS. Sets the target focus
     * distance for focus bracketing.
     */
    public abstract void setFocusBracketingTargetDistance(float focus_bracketing_target_distance);
    public abstract float getFocusBracketingTargetDistance();
    public abstract void setFlashValue(String flash_value);
    public abstract String getFlashValue();
    public abstract void setRecordingHint(boolean hint);
    // Auto-exposure / auto-white-balance locks.
    public abstract void setAutoExposureLock(boolean enabled);
    public abstract boolean getAutoExposureLock();
    public abstract void setAutoWhiteBalanceLock(boolean enabled);
    public abstract boolean getAutoWhiteBalanceLock();
    public abstract void setRotation(int rotation);
    // Geotagging: location to embed in captured photos, if any.
    public abstract void setLocationInfo(Location location);
    public abstract void removeLocationInfo();
    public abstract void enableShutterSound(boolean enabled);
    // Focus/metering regions; returns whether the areas were applied.
    public abstract boolean setFocusAndMeteringArea(List<CameraController.Area> areas);
    public abstract void clearFocusAndMetering();
    public abstract List<CameraController.Area> getFocusAreas();
    public abstract List<CameraController.Area> getMeteringAreas();
    public abstract boolean supportsAutoFocus();
    public abstract boolean supportsMetering();
    public abstract boolean focusIsContinuous();
    public abstract boolean focusIsVideo();
    public abstract void reconnect() throws CameraControllerException;
    public abstract void setPreviewDisplay(SurfaceHolder holder) throws CameraControllerException;
    public abstract void setPreviewTexture(TextureView texture) throws CameraControllerException;
    /** This should be called when using a TextureView, and the texture view has reported a change
     * in size via onSurfaceTextureSizeChanged.
     */
    public void updatePreviewTexture() {
        // dummy implementation
    }
    /** Starts the camera preview.
     * @param wait_until_started Whether to wait until the preview is started. Only relevant for
     *                           CameraController2; CameraController1 will always wait.
     * @param runnable           If non-null, a runnable to be called once preview is started. If
     *                           wait_until_started==true, or using CameraController1, this will be
     *                           called on the current thread, before this method exits. Otherwise,
     *                           this will be called on the UI thread, after this method exits (once
     *                           the preview has started).
     * @param on_failed          If non-null, a runnable to be called if the preview fails to start.
     *                           Only relevant for wait_until_started==false and when using
     *                           CameraController2. In such cases, failing to start the camera preview
     *                           may result in either CameraControllerException being thrown, or
     *                           on_failed being called on the UI thread after this method exits
     *                           (depending on when the failure occurs). If either of these happens,
     *                           the "runnable" runnable will not be called.
     * @throws CameraControllerException Failed to start preview. In this case, the runnable will not
     *                                   be called.
     */
    public abstract void startPreview(boolean wait_until_started, Runnable runnable, Runnable on_failed) throws CameraControllerException;
    /** Only relevant for CameraController2: stops the repeating burst for the preview (so effectively
     * stops the preview), but does not close the capture session for the preview (for that, use
     * stopPreview() instead of stopRepeating()).
     */
    public abstract void stopRepeating();
    public abstract void stopPreview();
    // NOTE(review): boolean presumably indicates whether face detection was started - confirm.
    public abstract boolean startFaceDetection();
    public abstract void setFaceDetectionListener(final CameraController.FaceDetectionListener listener);
    /**
     * @param cb Callback to be called when autofocus completes.
     * @param capture_follows_autofocus_hint Set to true if you intend to take a photo immediately after autofocus. If the
     *                                       decision changes after autofocus has started (e.g., user initiates autofocus,
     *                                       then takes photo before autofocus has completed), use setCaptureFollowAutofocusHint().
     */
    public abstract void autoFocus(final CameraController.AutoFocusCallback cb, boolean capture_follows_autofocus_hint);
    /** See autoFocus() for details - used to update the capture_follows_autofocus_hint setting.
     */
    public abstract void setCaptureFollowAutofocusHint(boolean capture_follows_autofocus_hint);
    public abstract void cancelAutoFocus();
    public abstract void setContinuousFocusMoveCallback(ContinuousFocusMoveCallback cb);
    public abstract void takePicture(final CameraController.PictureCallback picture, final ErrorCallback error);
    public abstract void setDisplayOrientation(int degrees);
    public abstract int getDisplayOrientation();
    public abstract int getCameraOrientation();
    public enum Facing {
        FACING_BACK,
        FACING_FRONT,
        FACING_EXTERNAL,
        FACING_UNKNOWN // returned if the Camera API returned an error or an unknown type
    }
    /** Returns whether the camera is front, back or external.
     */
    public abstract Facing getFacing();
    public abstract void unlock();
    /** Call to initialise video recording, should call before MediaRecorder.prepare().
     * @param video_recorder The media recorder object.
     */
    public abstract void initVideoRecorderPrePrepare(MediaRecorder video_recorder);
    /** Call to initialise video recording, should call after MediaRecorder.prepare(), but before MediaRecorder.start().
     * @param video_recorder The media recorder object.
     * @param want_photo_video_recording Whether support for taking photos whilst video recording is required. If this feature isn't supported, the option has no effect.
     */
    public abstract void initVideoRecorderPostPrepare(MediaRecorder video_recorder, boolean want_photo_video_recording) throws CameraControllerException;
    public abstract String getParametersString();
    // The following capture-result queries have default "not known" implementations.
    // NOTE(review): presumably overridden by CameraController2, where the Camera2 API
    // supplies per-frame capture-result data - confirm.
    public boolean captureResultIsAEScanning() {
        return false;
    }
    /**
     * @return whether flash will fire; returns false if not known
     */
    public boolean needsFlash() {
        return false;
    }
    /**
     * @return whether front screen "flash" will fire; returns false if not known
     */
    public boolean needsFrontScreenFlash() {
        return false;
    }
    public boolean captureResultHasWhiteBalanceTemperature() {
        return false;
    }
    public int captureResultWhiteBalanceTemperature() {
        return 0;
    }
    public boolean captureResultHasIso() {
        return false;
    }
    public int captureResultIso() {
        return 0;
    }
    public boolean captureResultHasExposureTime() {
        return false;
    }
    public long captureResultExposureTime() {
        return 0;
    }
    public boolean captureResultHasFrameDuration() {
        return false;
    }
    public long captureResultFrameDuration() {
        return 0;
    }
    public boolean captureResultHasFocusDistance() {
        return false;
    }
    public float captureResultFocusDistance() {
        return 0.0f;
    }
    public boolean captureResultHasAperture() {
        return false;
    }
    public float captureResultAperture() {
        return 0.0f;
    }
    /*public boolean captureResultHasFocusDistance() {
        return false;
    }*/
    /*public float captureResultFocusDistanceMin() {
        return 0.0f;
    }*/
    /*public float captureResultFocusDistanceMax() {
        return 0.0f;
    }*/
// gets the available values of a generic mode, e.g., scene, color etc, and makes sure the requested mode is available
SupportedValues checkModeIsSupported(List<String> values, String value, String default_value) {
if( values != null && values.size() > 1 ) { // n.b., if there is only 1 supported value, we also return null, as no point offering the choice to the user (there are some devices, e.g., Samsung, that only have a scene mode of "auto")
if( MyDebug.LOG ) {
for(int i=0;i<values.size();i++) {
Log.d(TAG, "supported value: " + values.get(i));
}
}
// make sure result is valid
if( !values.contains(value) ) {
if( MyDebug.LOG )
Log.d(TAG, "value not valid!");
if( values.contains(default_value) )
value = default_value;
else
value = values.get(0);
if( MyDebug.LOG )
Log.d(TAG, "value is now: " + value);
}
return new SupportedValues(values, value);
}
return null;
}
}

View file

@ -0,0 +1,11 @@
package net.sourceforge.opencamera.cameracontroller;
import java.io.Serial;
/** Exception for CameraController classes: thrown when camera operations fail, e.g.,
 * reconnect(), setPreviewDisplay()/setPreviewTexture(), startPreview() or
 * initVideoRecorderPostPrepare().
 */
public class CameraControllerException extends Exception {
    @Serial
    // Fixed ID so any serialized instances stay compatible across releases.
    private static final long serialVersionUID = 7904697847749213106L;
}

View file

@ -0,0 +1,28 @@
package net.sourceforge.opencamera.cameracontroller;
import android.content.Context;
import android.util.SizeF;
/** Provides additional support related to the Android camera APIs. This is to
 * support functionality that doesn't require a camera to have been opened.
 */
public abstract class CameraControllerManager {
    public abstract int getNumberOfCameras();
    /** Returns whether the supplied cameraId is front, back or external.
     */
    public abstract CameraController.Facing getFacing(int cameraId);
    /** Tries to return a textual description for the camera, such as front/back, along with extra
     * details if possible such as "ultra-wide". Will be null if no description can be determined.
     */
    public abstract String getDescription(Context context, int cameraId);
    // Simple holder for extra per-camera data filled in by getDescription().
    public static class CameraInfo {
        public SizeF view_angle; // x/y view angles in degrees (width = horizontal, height = vertical)
    }
    /** Version of getDescription() that supports Camera2 camera ID strings (used for physical cameras), also returns the
     * view angles in info, if info is non-null.
     */
    public abstract String getDescription(CameraInfo info, Context context, String cameraIdS, boolean include_type, boolean include_angles);
}

View file

@ -0,0 +1,58 @@
package net.sourceforge.opencamera.cameracontroller;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.R;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
/** Provides support using Android's original camera API
 * android.hardware.Camera.
 * Deprecation warnings are suppressed, as we intentionally
 * offer both old and Camera2 APIs to users.
 * @noinspection deprecation
 */
public class CameraControllerManager1 extends CameraControllerManager {
    private static final String TAG = "CControllerManager1";

    @Override // fix: was missing @Override, so the compiler couldn't verify it overrides the base method
    public int getNumberOfCameras() {
        return Camera.getNumberOfCameras();
    }

    @Override
    public CameraController.Facing getFacing(int cameraId) {
        try {
            Camera.CameraInfo camera_info = new Camera.CameraInfo();
            Camera.getCameraInfo(cameraId, camera_info);
            switch( camera_info.facing ) {
                case Camera.CameraInfo.CAMERA_FACING_FRONT:
                    return CameraController.Facing.FACING_FRONT;
                case Camera.CameraInfo.CAMERA_FACING_BACK:
                    return CameraController.Facing.FACING_BACK;
            }
            Log.e(TAG, "unknown camera_facing: " + camera_info.facing);
        }
        catch(RuntimeException e) {
            // Had a report of this crashing on Galaxy Nexus - may be device specific issue, see http://stackoverflow.com/questions/22383708/java-lang-runtimeexception-fail-to-get-camera-info
            // but good to catch it anyway
            MyDebug.logStackTrace(TAG, "failed to get facing", e);
        }
        return CameraController.Facing.FACING_UNKNOWN;
    }

    /** Returns a localised front/back description, or null if the facing is unknown or external
     * (the old Camera API only reports front and back).
     */
    @Override
    public String getDescription(Context context, int cameraId) {
        switch( getFacing(cameraId) ) {
            case FACING_FRONT:
                return context.getResources().getString(R.string.front_camera);
            case FACING_BACK:
                return context.getResources().getString(R.string.back_camera);
        }
        return null;
    }

    /** Not supported for the old Camera API - camera ID strings (physical cameras) only exist in Camera2. */
    @Override
    public String getDescription(CameraInfo info, Context context, String cameraIdS, boolean include_type, boolean include_angles) {
        throw new RuntimeException("getDescription() not supported for old Camera API");
    }
}

View file

@ -0,0 +1,246 @@
package net.sourceforge.opencamera.cameracontroller;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.R;
import android.content.Context;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.util.Log;
import android.util.SizeF;
/** Provides support using Android 5's Camera 2 API
 * android.hardware.camera2.*.
 */
public class CameraControllerManager2 extends CameraControllerManager {
    private static final String TAG = "CControllerManager2";
    private final Context context;
    public CameraControllerManager2(Context context) {
        this.context = context;
    }
    @Override
    public int getNumberOfCameras() {
        CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE);
        try {
            return manager.getCameraIdList().length;
        }
        catch(Throwable e) {
            // in theory we should only get CameraAccessException, but Google Play shows we can get a variety of exceptions
            // from some devices, e.g., AssertionError, IllegalArgumentException, RuntimeException, so just catch everything!
            // We don't want users to experience a crash just because of buggy camera2 drivers - instead the user can switch
            // back to old camera API.
            MyDebug.logStackTrace(TAG, "exception trying to get camera ids", e);
        }
        return 0;
    }
    @Override
    public CameraController.Facing getFacing(int cameraId) {
        CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraIdS = manager.getCameraIdList()[cameraId];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIdS);
            switch( characteristics.get(CameraCharacteristics.LENS_FACING) ) {
                case CameraMetadata.LENS_FACING_FRONT:
                    return CameraController.Facing.FACING_FRONT;
                case CameraMetadata.LENS_FACING_BACK:
                    return CameraController.Facing.FACING_BACK;
                case CameraMetadata.LENS_FACING_EXTERNAL:
                    return CameraController.Facing.FACING_EXTERNAL;
            }
            Log.e(TAG, "unknown camera_facing: " + characteristics.get(CameraCharacteristics.LENS_FACING));
        }
        catch(Throwable e) {
            // in theory we should only get CameraAccessException, but Google Play shows we can get a variety of exceptions
            // from some devices, e.g., AssertionError, IllegalArgumentException, RuntimeException, so just catch everything!
            // We don't want users to experience a crash just because of buggy camera2 drivers - instead the user can switch
            // back to old camera API.
            MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e);
        }
        return CameraController.Facing.FACING_UNKNOWN;
    }
    @Override
    public String getDescription(Context context, int cameraId) {
        CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE);
        String description = null;
        try {
            String cameraIdS = manager.getCameraIdList()[cameraId];
            description = getDescription(null, context, cameraIdS, true, false);
        }
        catch(Throwable e) {
            // see note under getNumberOfCameras() why we catch anything, not just CameraAccessException
            MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e);
        }
        return description;
    }
    @Override
    public String getDescription(CameraInfo info, Context context, String cameraIdS, boolean include_type, boolean include_angles) {
        long debug_time = 0;
        if( MyDebug.LOG ) {
            debug_time = System.currentTimeMillis();
        }
        CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE);
        String description = "";
        try {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIdS);
            if( MyDebug.LOG )
                Log.d(TAG, "getDescription: time after getCameraCharacteristics: " + (System.currentTimeMillis() - debug_time));
            if( include_type ) {
                switch( characteristics.get(CameraCharacteristics.LENS_FACING) ) {
                    case CameraMetadata.LENS_FACING_FRONT:
                        description = context.getResources().getString(R.string.front_camera);
                        break;
                    case CameraMetadata.LENS_FACING_BACK:
                        description = context.getResources().getString(R.string.back_camera);
                        break;
                    case CameraMetadata.LENS_FACING_EXTERNAL:
                        description = context.getResources().getString(R.string.external_camera);
                        break;
                    default:
                        Log.e(TAG, "unknown camera type");
                        return null;
                }
            }
            SizeF view_angle = CameraControllerManager2.computeViewAngles(characteristics);
            if( info != null )
                info.view_angle = view_angle;
            if( MyDebug.LOG )
                Log.d(TAG, "getDescription: time after computeViewAngles: " + (System.currentTimeMillis() - debug_time));
            if( view_angle.getWidth() > 90.5f ) {
                // count as ultra-wide
                if( !description.isEmpty() )
                    description += ", ";
                description += context.getResources().getString(R.string.ultrawide);
            }
            else if( view_angle.getWidth() < 29.5f ) {
                // count as telephoto
                // Galaxy S24+ telephoto is 29x22 degrees
                if( !description.isEmpty() )
                    description += ", ";
                description += context.getResources().getString(R.string.telephoto);
            }
            if( include_angles ) {
                if( !description.isEmpty() )
                    description += ", ";
                // 0x00B0 is the degree sign; +0.5f rounds to nearest integer degree
                description += ((int)(view_angle.getWidth()+0.5f)) + String.valueOf((char)0x00B0) + " x " + ((int)(view_angle.getHeight()+0.5f)) + (char) 0x00B0;
            }
        }
        catch(Throwable e) {
            // see note under getNumberOfCameras() why we catch anything, not just CameraAccessException
            MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e);
        }
        return description;
    }
    /** Helper class to compute view angles from the CameraCharacteristics.
     * @return The width and height of the returned size represent the x and y view angles in
     *         degrees.
     */
    static SizeF computeViewAngles(CameraCharacteristics characteristics) {
        // Note this is an approximation (see http://stackoverflow.com/questions/39965408/what-is-the-android-camera2-api-equivalent-of-camera-parameters-gethorizontalvie ).
        // This does not take into account the aspect ratio of the preview or camera, it's up to the caller to do this (e.g., see Preview.getViewAngleX(), getViewAngleY()).
        Rect active_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        SizeF physical_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
        android.util.Size pixel_size = characteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
        float [] focal_lengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
        if( active_size == null || physical_size == null || pixel_size == null || focal_lengths == null || focal_lengths.length == 0 ) {
            // in theory this should never happen according to the documentation, but I've had a report of physical_size (SENSOR_INFO_PHYSICAL_SIZE)
            // being null on an EXTERNAL Camera2 device, see https://sourceforge.net/p/opencamera/tickets/754/
            if( MyDebug.LOG ) {
                Log.e(TAG, "can't get camera view angles");
            }
            // fall back to a default
            return new SizeF(55.0f, 43.0f);
        }
        //camera_features.view_angle_x = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getWidth(), (2.0 * focal_lengths[0])));
        //camera_features.view_angle_y = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getHeight(), (2.0 * focal_lengths[0])));
        // Scale the physical sensor size by the fraction of the pixel array that is active, then
        // apply the standard pinhole view-angle formula using the first reported focal length.
        float frac_x = ((float)active_size.width())/(float)pixel_size.getWidth();
        float frac_y = ((float)active_size.height())/(float)pixel_size.getHeight();
        float view_angle_x = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getWidth() * frac_x, (2.0 * focal_lengths[0])));
        float view_angle_y = (float)Math.toDegrees(2.0 * Math.atan2(physical_size.getHeight() * frac_y, (2.0 * focal_lengths[0])));
        if( MyDebug.LOG ) {
            Log.d(TAG, "frac_x: " + frac_x);
            Log.d(TAG, "frac_y: " + frac_y);
            Log.d(TAG, "view_angle_x: " + view_angle_x);
            Log.d(TAG, "view_angle_y: " + view_angle_y);
        }
        return new SizeF(view_angle_x, view_angle_y);
    }
    /* Returns true if the device supports the required hardware level, or better.
     * See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL .
     * From Android N, higher levels than "FULL" are possible, that will have higher integer values.
     * Also see https://sourceforge.net/p/opencamera/tickets/141/ .
     */
    static boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
        int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
        if( MyDebug.LOG ) {
            switch (deviceLevel) {
                case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY:
                    Log.d(TAG, "Camera has LEGACY Camera2 support");
                    break;
                case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL:
                    Log.d(TAG, "Camera has EXTERNAL Camera2 support");
                    break;
                case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED:
                    Log.d(TAG, "Camera has LIMITED Camera2 support");
                    break;
                case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
                    Log.d(TAG, "Camera has FULL Camera2 support");
                    break;
                case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3:
                    Log.d(TAG, "Camera has Level 3 Camera2 support");
                    break;
                default:
                    Log.d(TAG, "Camera has unknown Camera2 support: " + deviceLevel);
                    break;
            }
        }
        // need to treat legacy and external as special cases; otherwise can then use numerical comparison
        if( deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY ) {
            return requiredLevel == deviceLevel;
        }
        if( deviceLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL ) {
            deviceLevel = CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
        }
        if( requiredLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL ) {
            requiredLevel = CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
        }
        return requiredLevel <= deviceLevel;
    }
    /* Rather than allowing Camera2 API on all Android 5+ devices, we restrict it to certain cases.
     * This returns whether the specified camera has at least LIMITED support.
     */
    public boolean allowCamera2Support(int cameraId) {
        CameraManager manager = (CameraManager)context.getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraIdS = manager.getCameraIdList()[cameraId];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIdS);
            //return isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
            return isHardwareLevelSupported(characteristics, CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
        }
        catch(Throwable e) {
            // in theory we should only get CameraAccessException, but Google Play shows we can get a variety of exceptions
            // from some devices, e.g., AssertionError, IllegalArgumentException, RuntimeException, so just catch everything!
            // We don't want users to experience a crash just because of buggy camera2 drivers - instead the user can switch
            // back to old camera API.
            MyDebug.logStackTrace(TAG, "exception trying to get camera characteristics", e);
        }
        return false;
    }
}

View file

@ -0,0 +1,56 @@
package net.sourceforge.opencamera.cameracontroller;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import android.util.Log;
import net.sourceforge.opencamera.MyDebug;
import java.io.IOException;
import java.io.OutputStream;
/** Wrapper class to store DngCreator and Image, so a RAW (DNG) capture can be passed around and
 *  written out as a single unit.
 */
public class RawImage {
    private static final String TAG = "RawImage";

    private final DngCreator dngCreator;
    private final Image image;

    public RawImage(DngCreator dngCreator, Image image) {
        this.dngCreator = dngCreator;
        this.image = image;
    }

    /** Writes the dng file to the supplied output.
     * @param dngOutput Stream to write the DNG data to; the caller is responsible for closing it.
     * @throws IOException If writing fails - including buggy-driver failures (AssertionError,
     *                     IllegalStateException) which are rethrown as IOException, with the
     *                     original exception preserved as the cause, so callers handle them the
     *                     same way as genuine I/O errors.
     */
    public void writeImage(OutputStream dngOutput) throws IOException {
        if( MyDebug.LOG )
            Log.d(TAG, "writeImage");
        try {
            dngCreator.writeImage(dngOutput, image);
        }
        catch(AssertionError | IllegalStateException e) {
            // have had AssertionError from OnePlus 5, and IllegalStateException from Galaxy Note 8,
            // on Google Play; rethrow as an IOException so it's handled in the same way
            MyDebug.logStackTrace(TAG, "failed to write DNG image", e);
            throw new IOException("failed to write DNG image", e);
        }
    }

    /** Closes the image. Must be called to free up resources when no longer needed. After calling
     * this method, this object should not be used.
     */
    public void close() {
        if( MyDebug.LOG )
            Log.d(TAG, "close");
        image.close();
        dngCreator.close();
    }
}

View file

@ -0,0 +1,277 @@
package net.sourceforge.opencamera.preview;
import java.io.File;
import java.io.IOException;
import java.io.Serial;
import java.util.Date;
import java.util.List;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Point;
import android.location.Location;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import android.util.Pair;
import android.view.MotionEvent;
import androidx.annotation.RequiresApi;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import net.sourceforge.opencamera.cameracontroller.RawImage;
/** Provides communication between the Preview and the rest of the application
* - so in theory one can drop the Preview/ (and CameraController/) classes
* into a new application, by providing an appropriate implementation of this
* ApplicationInterface.
*/
public interface ApplicationInterface {
class NoFreeStorageException extends Exception {
@Serial
private static final long serialVersionUID = -2021932609486148748L;
}
class VideoMaxFileSize {
public long max_filesize; // maximum file size in bytes for video (return 0 for device default - typically this is ~2GB)
public boolean auto_restart; // whether to automatically restart on hitting max filesize (this setting is still relevant for max_filesize==0, as typically there will still be a device max filesize)
}
enum VideoMethod {
FILE, // video will be saved to a file
SAF, // video will be saved using Android 5's Storage Access Framework
MEDIASTORE, // video will be saved to the supplied MediaStore Uri
URI // video will be written to the supplied Uri
}
// methods that request information
Context getContext(); // get the application context
boolean useCamera2(); // should Android 5's Camera 2 API be used?
Location getLocation(); // get current location - null if not available (or you don't care about geotagging)
VideoMethod createOutputVideoMethod(); // return a VideoMethod value to specify how to create a video file
File createOutputVideoFile(String extension) throws IOException; // will be called if createOutputVideoMethod() returns VideoMethod.FILE; extension is the recommended filename extension for the chosen video type
Uri createOutputVideoSAF(String extension) throws IOException; // will be called if createOutputVideoMethod() returns VideoMethod.SAF; extension is the recommended filename extension for the chosen video type
Uri createOutputVideoMediaStore(String extension) throws IOException; // will be called if createOutputVideoMethod() returns VideoMethod.MEDIASTORE; extension is the recommended filename extension for the chosen video type
Uri createOutputVideoUri(); // will be called if createOutputVideoMethod() returns VideoMethod.URI
// for all of the get*Pref() methods, you can use Preview methods to get the supported values (e.g., getSupportedSceneModes())
// if you just want a default or don't really care, see the comments for each method for a default or possible options
// if Preview doesn't support the requested setting, it will check this, and choose its own
int getCameraIdPref(); // camera to use, from 0 to getCameraControllerManager().getNumberOfCameras()
String getCameraIdSPhysicalPref(); // if non-null, the Camera2 physical camera ID (must be one of Preview.getPhysicalCameras())
String getFlashPref(); // flash_off, flash_auto, flash_on, flash_torch, flash_red_eye
String getFocusPref(boolean is_video); // focus_mode_auto, focus_mode_infinity, focus_mode_macro, focus_mode_locked, focus_mode_fixed, focus_mode_manual2, focus_mode_edof, focus_mode_continuous_picture, focus_mode_continuous_video
boolean isVideoPref(); // start up in video mode?
String getSceneModePref(); // "auto" for default (strings correspond to Android's scene mode constants in android.hardware.Camera.Parameters)
String getColorEffectPref(); // "none" for default (strings correspond to Android's color effect constants in android.hardware.Camera.Parameters)
String getWhiteBalancePref(); // "auto" for default (strings correspond to Android's white balance constants in android.hardware.Camera.Parameters)
int getWhiteBalanceTemperaturePref(); // manual white balance temperature, used when getWhiteBalancePref() requests a manual value - presumably in Kelvin; TODO confirm against CameraController
String getAntiBandingPref(); // "auto" for default (strings correspond to Android's antibanding constants in android.hardware.Camera.Parameters)
String getEdgeModePref(); // CameraController.EDGE_MODE_DEFAULT for device default, or "off", "fast", "high_quality"
String getCameraNoiseReductionModePref(); // CameraController.NOISE_REDUCTION_MODE_DEFAULT for device default, or "off", "minimal", "fast", "high_quality"
String getISOPref(); // "auto" for auto-ISO, otherwise a numerical value; see documentation for Preview.supportsISORange().
int getExposureCompensationPref(); // 0 for default
class CameraResolutionConstraints {
private static final String TAG = "CameraResConstraints";
public boolean has_max_mp; // whether max_mp should be applied
public int max_mp; // maximum pixel count (width*height) allowed by satisfies()
boolean hasConstraints() {
return has_max_mp;
}
// returns whether the supplied resolution satisfies these constraints
boolean satisfies(CameraController.Size size) {
if( this.has_max_mp && size.width * size.height > this.max_mp ) {
if( MyDebug.LOG )
Log.d(TAG, "size index larger than max_mp: " + this.max_mp);
return false;
}
return true;
}
}
/** The resolution to use for photo mode.
* If the returned resolution is not supported by the device, or this method returns null, then
* the preview will choose a size, and then call setCameraResolutionPref() with the chosen
* size.
* If the returned resolution is supported by the device, setCameraResolutionPref() will be
* called with the returned resolution.
* Note that even if the device supports the resolution in general, the Preview may choose a
* different resolution in some circumstances:
* * A burst mode has been requested, but the resolution does not support burst.
* * A constraint has been set via constraints.
* In such cases, the resolution actually in use should be found by calling
* Preview.getCurrentPictureSize() rather than relying on the setCameraResolutionPref(). (The
* logic behind this is that if a resolution is not supported by the device at all, it's good
* practice to correct the preference stored in user settings; but this shouldn't be done if
* the resolution is changed for something more temporary such as enabling burst mode.)
* @param constraints Optional constraints that may be set. If the returned resolution does not
* satisfy these constraints, then the preview will choose the closest
* resolution that does.
*/
Pair<Integer, Integer> getCameraResolutionPref(CameraResolutionConstraints constraints); // return null to let Preview choose size
int getImageQualityPref(); // jpeg quality for taking photos; "90" is a recommended default
boolean getFaceDetectionPref(); // whether to use face detection mode
String getVideoQualityPref(); // should be one of Preview.getSupportedVideoQuality() (use Preview.getCamcorderProfile() or Preview.getCamcorderProfileDescription() for details); or return "" to let Preview choose quality
boolean getVideoStabilizationPref(); // whether to use video stabilization for video
boolean getForce4KPref(); // whether to force 4K mode - experimental, only really available for some devices that allow 4K recording but don't return it as an available resolution - not recommended for most uses
String getRecordVideoOutputFormatPref(); // preference_video_output_format_default, preference_video_output_format_mpeg4_h264, preference_video_output_format_mpeg4_hevc, preference_video_output_format_3gpp, preference_video_output_format_webm
String getVideoBitratePref(); // return "default" to let Preview choose
String getVideoFPSPref(); // return "default" to let Preview choose; if getVideoCaptureRateFactor() returns a value other than 1.0, this is the capture fps; the resultant video's fps will be getVideoFPSPref()*getVideoCaptureRateFactor()
float getVideoCaptureRateFactor(); // return 1.0f for standard operation, less than 1.0 for slow motion, more than 1.0 for timelapse; consider using a higher fps for slow motion, see getVideoFPSPref()
CameraController.TonemapProfile getVideoTonemapProfile(); // tonemap profile to use for video mode
float getVideoLogProfileStrength(); // strength of the log profile for video mode, if getVideoTonemapProfile() returns TONEMAPPROFILE_LOG
float getVideoProfileGamma(); // gamma for video mode, if getVideoTonemapProfile() returns TONEMAPPROFILE_GAMMA
long getVideoMaxDurationPref(); // time in ms after which to automatically stop video recording (return 0 for off)
int getVideoRestartTimesPref(); // number of times to restart video recording after hitting max duration (return 0 for never auto-restarting)
VideoMaxFileSize getVideoMaxFileSizePref() throws NoFreeStorageException; // see VideoMaxFileSize class for details
boolean getVideoFlashPref(); // option to switch flash on/off while recording video (should be false in most cases!)
boolean getVideoLowPowerCheckPref(); // whether to stop video automatically on critically low battery
String getPreviewSizePref(); // "preference_preview_size_wysiwyg" is recommended (preview matches aspect ratio of photo resolution as close as possible), but can also be "preference_preview_size_display" to maximise the preview size
String getLockOrientationPref(); // return "none" for default; use "portrait" or "landscape" to lock photos/videos to that orientation
boolean getTouchCapturePref(); // whether to enable touch to capture
boolean getDoubleTapCapturePref(); // whether to enable double-tap to capture
boolean getPausePreviewPref(); // whether to pause the preview after taking a photo
boolean getShowToastsPref(); // whether to show informational on-screen toasts
boolean getShutterSoundPref(); // whether to play sound when taking photo
boolean getStartupFocusPref(); // whether to do autofocus on startup
long getTimerPref(); // time in ms for timer (so 0 for off)
String getRepeatPref(); // return number of times to repeat photo in a row (as a string), so "1" for default; return "unlimited" for unlimited
long getRepeatIntervalPref(); // time in ms between repeat
boolean getGeotaggingPref(); // whether to geotag photos
boolean getRequireLocationPref(); // if getGeotaggingPref() returns true, and this method returns true, then photo/video will only be taken if location data is available
boolean getRecordAudioPref(); // whether to record audio when recording video
String getRecordAudioChannelsPref(); // either "audio_default", "audio_mono" or "audio_stereo"
String getRecordAudioSourcePref(); // "audio_src_camcorder" is recommended, but other options are: "audio_src_mic", "audio_src_default", "audio_src_voice_communication", "audio_src_unprocessed" (unprocessed required Android 7+); see corresponding values in android.media.MediaRecorder.AudioSource
int getZoomPref(); // index into Preview.getSupportedZoomRatios() array (each entry is the zoom factor, scaled by 100; array is sorted from min to max zoom); return -1 for default 1x zoom
double getCalibratedLevelAngle(); // set to non-zero to calibrate the accelerometer used for the level angles
boolean canTakeNewPhoto(); // whether taking new photos is allowed (e.g., can return false if queue for processing images would become full)
boolean imageQueueWouldBlock(int n_raw, int n_jpegs); // called during some burst operations, whether we can allow taking the supplied number of extra photos
/** Same behaviour as Activity.getWindowManager().getDefaultDisplay().getRotation() (including
* returning a member of Surface.ROTATION_*), but allows application to modify e.g. for
* upside-down preview.
* @param prefer_later When the device orientation changes, there can be some ambiguity if this
* is called during this rotation, since getRotation() may be updated shortly
* before the UI appears to rotate. If prefer_later==false, then prefer the
* previous rotation in such cases. This can be implemented by caching the
* value. prefer_later should be set to false when this is being called
* frequently e.g. as part of a UI that should smoothly rotate as the device
* rotates. prefer_later should be set to true for "one-off" calls.
*/
int getDisplayRotation(boolean prefer_later);
// Camera2 only modes:
long getExposureTimePref(); // only called if getISOPref() is not "default"
float getFocusDistancePref(boolean is_target_distance); // if isFocusBracketingPref()==true, returns the source or target focus distance
boolean isFocusBracketingSourceAutoPref(); // if isFocusBracketingPref()==true, returns whether the source focus distance should be set by calling CameraController.setFocusBracketingSourceDistanceFromCurrent()
boolean isExpoBracketingPref(); // whether to enable burst photos with expo bracketing
int getExpoBracketingNImagesPref(); // how many images to take for exposure bracketing
double getExpoBracketingStopsPref(); // stops per image for exposure bracketing
int getFocusBracketingNImagesPref(); // how many images to take for focus bracketing
boolean getFocusBracketingAddInfinityPref(); // whether to include an additional image at infinite focus distance, for focus bracketing
boolean isFocusBracketingPref(); // whether to enable burst photos with focus bracketing
boolean isCameraBurstPref(); // whether to shoot the camera in burst mode (n.b., not the same as the "auto-repeat" mode)
int getBurstNImages(); // only relevant if isCameraBurstPref() returns true; see CameraController doc for setBurstNImages().
boolean getBurstForNoiseReduction(); // only relevant if isCameraBurstPref() returns true; see CameraController doc for setBurstForNoiseReduction().
enum NRModePref {
NRMODE_NORMAL,
NRMODE_LOW_LIGHT
}
NRModePref getNRModePref(); // only relevant if getBurstForNoiseReduction() returns true; if this changes without reopening the preview's camera, call Preview.setupBurstMode()
boolean isCameraExtensionPref(); // whether to use camera vendor extension (see https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics )
@RequiresApi(api = Build.VERSION_CODES.S)
int getCameraExtensionPref(); // if isCameraExtensionPref() returns true, the camera extension mode to use
float getAperturePref(); // get desired aperture (called if Preview.getSupportedApertures() returns non-null); return -1.0f for no preference
boolean getJpegRPref(); // whether to request JPEG_R (Ultra HDR) photos
enum RawPref {
RAWPREF_JPEG_ONLY, // JPEG only
RAWPREF_JPEG_DNG // JPEG and RAW (DNG)
}
RawPref getRawPref(); // whether to enable RAW photos
int getMaxRawImages(); // see documentation of CameraController.setRaw(), corresponds to max_raw_images
boolean useCamera2DummyCaptureHack(); // whether to enable CameraController.setDummyCaptureHack() for Camera2 API
boolean useCamera2FakeFlash(); // whether to enable CameraController.setUseCamera2FakeFlash() for Camera2 API
boolean useCamera2FastBurst(); // whether to enable Camera2's captureBurst() for faster taking of expo-bracketing photos (generally should be true, but some devices have problems with captureBurst())
boolean usePhotoVideoRecording(); // whether to enable support for taking photos when recording video (if not supported, this won't be called)
boolean isPreviewInBackground(); // if true, then Preview can disable real-time effects (e.g., computing histogram); also it won't try to open the camera when in the background
boolean allowZoom(); // if false, don't allow zoom functionality even if the device supports it - Preview.supportsZoom() will also return false; if true, allow zoom if the device supports it
boolean optimiseFocusForLatency(); // behaviour for taking photos with continuous focus mode: if true, optimise focus for latency (take photo asap); if false, optimise for quality (don't take photo until scene is focused)
/** Return size of default display, e.g., Activity.getWindowManager().getDefaultDisplay().getSize().
* @param display_size The returned display size.
* @param exclude_insets If the activity is running in edge-to-edge mode, then whether to exclude
* insets. If the activity is not running in edge-to-edge mode, then this should
* be ignored, and insets should always be excluded.
*/
void getDisplaySize(Point display_size, boolean exclude_insets);
// for testing purposes:
boolean isTestAlwaysFocus(); // if true, pretend autofocus always successful
// methods that transmit information/events (up to the Application whether to do anything or not)
void cameraSetup(); // called when the camera is (re-)set up - should update UI elements/parameters that depend on camera settings
void touchEvent(MotionEvent event); // called when the preview receives a touch event
void startingVideo(); // called just before video recording starts
void startedVideo(); // called just after video recording starts
void stoppingVideo(); // called just before video recording stops; note that if startingVideo() is called but then video recording fails to start, this method will still be called, but startedVideo() and stoppedVideo() won't be called
void stoppedVideo(final VideoMethod video_method, final Uri uri, final String filename); // called after video recording stopped (uri/filename will be null if video is corrupt or not created); will be called iff startedVideo() was called
void restartedVideo(final VideoMethod video_method, final Uri uri, final String filename); // called after a seamless restart (supported on Android 8+) has occurred - in this case stoppedVideo() is only called for the final video file; this method is instead called for all earlier video file segments
void deleteUnusedVideo(final VideoMethod video_method, final Uri uri, final String filename); // application should delete the requested video (which will correspond to a video file previously returned via the createOutputVideo*() methods), either because it is corrupt or unused
void onFailedStartPreview(); // called if failed to start camera preview
void onCameraError(); // called if the camera closes due to serious error.
void onPhotoError(); // callback for failing to take a photo
void onVideoInfo(int what, int extra); // callback for info when recording video (see MediaRecorder.OnInfoListener)
void onVideoError(int what, int extra); // callback for errors when recording video (see MediaRecorder.OnErrorListener)
void onVideoRecordStartError(VideoProfile profile); // callback for video recording failing to start
void onVideoRecordStopError(VideoProfile profile); // callback for video recording being corrupted
void onFailedReconnectError(); // failed to reconnect camera after stopping video recording
void onFailedCreateVideoFileError(); // callback if unable to create file for recording video
void hasPausedPreview(boolean paused); // called when the preview is paused or unpaused (due to getPausePreviewPref())
void cameraInOperation(boolean in_operation, boolean is_video); // called when the camera starts/stops being in operation (taking photos or recording video, including if preview is paused after taking a photo), use to disable GUI elements during camera operation
void turnFrontScreenFlashOn(); // called when front-screen "flash" required (for modes flash_frontscreen_auto, flash_frontscreen_on); the application should light up the screen, until cameraInOperation(false) is called
void cameraClosed(); // called when the camera has been closed
void timerBeep(long remaining_time); // n.b., called once per second on timer countdown - so application can beep, or do whatever it likes
// methods that request actions
void multitouchZoom(int new_zoom); // indicates that the zoom has changed due to multitouch gesture on preview
void requestTakePhoto(); // requesting taking a photo (due to single/double tap, if either getTouchCapturePref(), getDoubleTapCapturePref() options are enabled)
// the set/clear*Pref() methods are called if Preview decides to override the requested pref (because Camera device doesn't support requested pref) (clear*Pref() is called if the feature isn't supported at all)
// the application can use this information to update its preferences
void setCameraIdPref(int cameraId, String cameraIdSPhysical);
void setFlashPref(String flash_value);
void setFocusPref(String focus_value, boolean is_video);
void setVideoPref(boolean is_video);
void setSceneModePref(String scene_mode);
void clearSceneModePref();
void setColorEffectPref(String color_effect);
void clearColorEffectPref();
void setWhiteBalancePref(String white_balance);
void clearWhiteBalancePref();
void setWhiteBalanceTemperaturePref(int white_balance_temperature);
void setISOPref(String iso);
void clearISOPref();
void setExposureCompensationPref(int exposure);
void clearExposureCompensationPref();
void setCameraResolutionPref(int width, int height);
void setVideoQualityPref(String video_quality);
void setZoomPref(int zoom);
void requestCameraPermission(); // for Android 6+: called when trying to open camera, but CAMERA permission not available
@SuppressWarnings("SameReturnValue")
boolean needsStoragePermission(); // return true if the preview should call requestStoragePermission() if WRITE_EXTERNAL_STORAGE not available (i.e., if the application needs storage permission, e.g., to save photos)
void requestStoragePermission(); // for Android 6+: called when trying to open camera, but WRITE_EXTERNAL_STORAGE permission not available
void requestRecordAudioPermission(); // for Android 6+: called when switching to (or starting up in) video mode, but RECORD_AUDIO permission not available
// Camera2 only modes:
void setExposureTimePref(long exposure_time);
void clearExposureTimePref();
void setFocusDistancePref(float focus_distance, boolean is_target_distance);
// callbacks
void onDrawPreview(Canvas canvas); // called when the preview redraws, so the application can draw overlays
boolean onPictureTaken(byte [] data, Date current_date); // called with the JPEG data of a captured photo
boolean onBurstPictureTaken(List<byte []> images, Date current_date); // called with the JPEG data of a captured burst
boolean onRawPictureTaken(RawImage raw_image, Date current_date); // called with a captured RAW (DNG) photo
boolean onRawBurstPictureTaken(List<RawImage> raw_images, Date current_date); // called with a captured RAW (DNG) burst
void onCaptureStarted(); // called immediately before we start capturing the picture
void onPictureCompleted(); // called after all picture callbacks have been called and returned
void onExtensionProgress(int progress); // Reports percentage progress for vendor camera extensions. Note that not all devices support this being called.
void onContinuousFocusMove(boolean start); // called when focusing starts/stops in continuous picture mode (in photo mode only)
}

View file

@ -0,0 +1,692 @@
package net.sourceforge.opencamera.preview;
import java.util.Date;
import java.util.List;
import android.app.Activity;
import android.graphics.Canvas;
import android.location.Location;
import android.net.Uri;
import android.os.Build;
import android.util.Pair;
import android.view.MotionEvent;
import androidx.annotation.RequiresApi;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import net.sourceforge.opencamera.cameracontroller.RawImage;
/** A partial implementation of ApplicationInterface that provides "default" implementations. So
* sub-classing this is easier than implementing ApplicationInterface directly - you only have to
* provide the unimplemented methods to get started, and can later override
* BasicApplicationInterface's methods as required.
* Note there is no need for your subclass of BasicApplicationInterface to call "super" methods -
* these are just default implementations that should be overridden as required.
*/
public abstract class BasicApplicationInterface implements ApplicationInterface {
@Override
public Location getLocation() {
return null;
}
@Override
public int getCameraIdPref() {
return 0;
}
@Override
public String getCameraIdSPhysicalPref() {
return null;
}
@Override
public String getFlashPref() {
return "flash_off";
}
@Override
public String getFocusPref(boolean is_video) {
return "focus_mode_continuous_picture";
}
@Override
public boolean isVideoPref() {
return false;
}
@Override
public String getSceneModePref() {
return CameraController.SCENE_MODE_DEFAULT;
}
@Override
public String getColorEffectPref() {
return CameraController.COLOR_EFFECT_DEFAULT;
}
@Override
public String getWhiteBalancePref() {
return CameraController.WHITE_BALANCE_DEFAULT;
}
@Override
public int getWhiteBalanceTemperaturePref() {
return 0;
}
@Override
public String getAntiBandingPref() {
return CameraController.ANTIBANDING_DEFAULT;
}
@Override
public String getEdgeModePref() {
return CameraController.EDGE_MODE_DEFAULT;
}
@Override
public String getCameraNoiseReductionModePref() {
return CameraController.NOISE_REDUCTION_MODE_DEFAULT;
}
@Override
public String getISOPref() {
return CameraController.ISO_DEFAULT;
}
@Override
public int getExposureCompensationPref() {
return 0;
}
@Override
public Pair<Integer, Integer> getCameraResolutionPref(CameraResolutionConstraints constraints) {
return null;
}
@Override
public int getImageQualityPref() {
return 90;
}
@Override
public boolean getFaceDetectionPref() {
return false;
}
@Override
public String getVideoQualityPref() {
return "";
}
@Override
public boolean getVideoStabilizationPref() {
return false;
}
@Override
public boolean getForce4KPref() {
return false;
}
@Override
public String getRecordVideoOutputFormatPref() {
return "preference_video_output_format_default";
}
@Override
public String getVideoBitratePref() {
return "default";
}
@Override
public String getVideoFPSPref() {
return "default";
}
@Override
public float getVideoCaptureRateFactor() {
return 1.0f;
}
@Override
public CameraController.TonemapProfile getVideoTonemapProfile() {
return CameraController.TonemapProfile.TONEMAPPROFILE_OFF;
}
@Override
public float getVideoLogProfileStrength() {
return 0;
}
@Override
public float getVideoProfileGamma() {
return 0;
}
@Override
public long getVideoMaxDurationPref() {
return 0;
}
@Override
public int getVideoRestartTimesPref() {
return 0;
}
@Override
public VideoMaxFileSize getVideoMaxFileSizePref() throws NoFreeStorageException {
VideoMaxFileSize video_max_filesize = new VideoMaxFileSize();
video_max_filesize.max_filesize = 0;
video_max_filesize.auto_restart = true;
return video_max_filesize;
}
@Override
public boolean getVideoFlashPref() {
return false;
}
@Override
public boolean getVideoLowPowerCheckPref() {
return true;
}
@Override
public String getPreviewSizePref() {
return "preference_preview_size_wysiwyg";
}
@Override
public String getLockOrientationPref() {
return "none";
}
@Override
public boolean getTouchCapturePref() {
return false;
}
@Override
public boolean getDoubleTapCapturePref() {
return false;
}
@Override
public boolean getPausePreviewPref() {
return false;
}
@Override
public boolean getShowToastsPref() {
return true;
}
@Override
public boolean getShutterSoundPref() {
return true;
}
@Override
public boolean getStartupFocusPref() {
return true;
}
@Override
public long getTimerPref() {
return 0;
}
@Override
public String getRepeatPref() {
return "1";
}
@Override
public long getRepeatIntervalPref() {
return 0;
}
@Override
public boolean getGeotaggingPref() {
return false;
}
@Override
public boolean getRequireLocationPref() {
return false;
}
@Override
public boolean getRecordAudioPref() {
return true;
}
@Override
public String getRecordAudioChannelsPref() {
return "audio_default";
}
@Override
public String getRecordAudioSourcePref() {
return "audio_src_camcorder";
}
@Override
public int getZoomPref() {
return -1;
}
@Override
public double getCalibratedLevelAngle() {
return 0;
}
@Override
public boolean canTakeNewPhoto() {
return true;
}
@Override
public boolean imageQueueWouldBlock(int n_raw, int n_jpegs) {
return false;
}
@Override
public int getDisplayRotation(boolean prefer_later) {
Activity activity = (Activity)this.getContext();
return activity.getWindowManager().getDefaultDisplay().getRotation();
}
@Override
public long getExposureTimePref() {
return CameraController.EXPOSURE_TIME_DEFAULT;
}
@Override
public float getFocusDistancePref(boolean is_target_distance) {
return 0;
}
@Override
public boolean isExpoBracketingPref() {
return false;
}
@Override
public int getExpoBracketingNImagesPref() {
return 3;
}
@Override
public double getExpoBracketingStopsPref() {
return 2.0;
}
@Override
public int getFocusBracketingNImagesPref() {
return 3;
}
@Override
public boolean getFocusBracketingAddInfinityPref() {
return false;
}
@Override
public boolean isFocusBracketingPref() {
return false;
}
@Override
public boolean isCameraBurstPref() {
return false;
}
@Override
public int getBurstNImages() {
return 5;
}
@Override
public boolean getBurstForNoiseReduction() {
return false;
}
@Override
public NRModePref getNRModePref() {
return NRModePref.NRMODE_NORMAL;
}
@Override
public boolean isCameraExtensionPref() {
return false;
}
@Override
@RequiresApi(api = Build.VERSION_CODES.S)
public int getCameraExtensionPref() {
return 0;
}
@Override
public float getAperturePref() {
return -1.0f;
}
@Override
public boolean getJpegRPref() {
return false;
}
@Override
public RawPref getRawPref() {
return RawPref.RAWPREF_JPEG_ONLY;
}
@Override
public int getMaxRawImages() {
return 2;
}
@Override
public boolean useCamera2DummyCaptureHack() {
return false;
}
@Override
public boolean useCamera2FakeFlash() {
return false;
}
@Override
public boolean useCamera2FastBurst() {
return true;
}
@Override
public boolean usePhotoVideoRecording() {
return true;
}
@Override
public boolean isPreviewInBackground() {
return false;
}
@Override
public boolean allowZoom() {
return true;
}
@Override
public boolean optimiseFocusForLatency() {
return true;
}
@Override
public boolean isTestAlwaysFocus() {
return false;
}
// No-op implementations of camera lifecycle / video / error / UI callbacks:
// this implementation deliberately ignores these notifications.
@Override
public void cameraSetup() {
}
@Override
public void touchEvent(MotionEvent event) {
}
@Override
public void startingVideo() {
}
@Override
public void startedVideo() {
}
@Override
public void stoppingVideo() {
}
@Override
public void stoppedVideo(VideoMethod video_method, Uri uri, String filename) {
}
@Override
public void restartedVideo(final VideoMethod video_method, final Uri uri, final String filename) {
}
@Override
public void deleteUnusedVideo(final VideoMethod video_method, final Uri uri, final String filename) {
}
@Override
public void onFailedStartPreview() {
}
@Override
public void onCameraError() {
}
@Override
public void onPhotoError() {
}
@Override
public void onVideoInfo(int what, int extra) {
}
@Override
public void onVideoError(int what, int extra) {
}
@Override
public void onVideoRecordStartError(VideoProfile profile) {
}
@Override
public void onVideoRecordStopError(VideoProfile profile) {
}
@Override
public void onFailedReconnectError() {
}
@Override
public void onFailedCreateVideoFileError() {
}
@Override
public void hasPausedPreview(boolean paused) {
}
@Override
public void cameraInOperation(boolean in_operation, boolean is_video) {
}
@Override
public void turnFrontScreenFlashOn() {
}
@Override
public void cameraClosed() {
}
@Override
public void timerBeep(long remaining_time) {
}
@Override
public void multitouchZoom(int new_zoom) {
}
@Override
public void requestTakePhoto() {
}
// No-op implementations of the preference setters/clearers: this implementation
// does not persist any camera settings.
@Override
public void setCameraIdPref(int cameraId, String cameraIdSPhysical) {
}
@Override
public void setFlashPref(String flash_value) {
}
@Override
public void setFocusPref(String focus_value, boolean is_video) {
}
@Override
public void setVideoPref(boolean is_video) {
}
@Override
public void setSceneModePref(String scene_mode) {
}
@Override
public void clearSceneModePref() {
}
@Override
public void setColorEffectPref(String color_effect) {
}
@Override
public void clearColorEffectPref() {
}
@Override
public void setWhiteBalancePref(String white_balance) {
}
@Override
public void clearWhiteBalancePref() {
}
@Override
public void setWhiteBalanceTemperaturePref(int white_balance_temperature) {
}
@Override
public void setISOPref(String iso) {
}
@Override
public void clearISOPref() {
}
@Override
public void setExposureCompensationPref(int exposure) {
}
@Override
public void clearExposureCompensationPref() {
}
@Override
public void setCameraResolutionPref(int width, int height) {
}
@Override
public void setVideoQualityPref(String video_quality) {
}
@Override
public void setZoomPref(int zoom) {
}
@Override
public void setExposureTimePref(long exposure_time) {
}
@Override
public void clearExposureTimePref() {
}
@Override
public void setFocusDistancePref(float focus_distance, boolean is_target_distance) {
}
// Drawing and picture-taken callbacks. The boolean-returning handlers return
// false to indicate the images were not consumed here.
@Override
public void onDrawPreview(Canvas canvas) {
}
@Override
public boolean onBurstPictureTaken(List<byte[]> images, Date current_date) {
    return false;
}
@Override
public boolean onRawPictureTaken(RawImage raw_image, Date current_date) {
    return false;
}
@Override
public boolean onRawBurstPictureTaken(List<RawImage> raw_images, Date current_date) {
    return false;
}
@Override
public void onCaptureStarted() {
}
@Override
public void onPictureCompleted() {
}
@Override
public void onExtensionProgress(int progress) {
}
@Override
public void onContinuousFocusMove(boolean start) {
}
}

View file

@ -0,0 +1,72 @@
package net.sourceforge.opencamera.preview;
import net.sourceforge.opencamera.MyDebug;
import android.content.Context;
import android.graphics.Canvas;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import androidx.annotation.NonNull;
/** View for on top of the Preview - this just redirects to Preview.onDraw to do the
* work. Only used if using a MyTextureView (if using MySurfaceView, then that
* class can handle the onDraw()). TextureViews can't be used for both a
* camera preview, and used for drawing on.
*/
/** View placed on top of the Preview; all drawing is delegated to Preview.draw().
 *  Only needed when the preview itself is a MyTextureView (a MySurfaceView can
 *  handle onDraw() itself) - a TextureView cannot be used both for the camera
 *  preview and for drawing on.
 */
public class CanvasView extends View {
    private static final String TAG = "CanvasView";

    private final Preview preview; // receives draw/measure callbacks
    private final int [] measure_result = new int[2]; // scratch output of Preview.getMeasureSpec()
    private final Handler ui_handler = new Handler();
    private final Runnable ticker;

    CanvasView(Context context, final Preview preview) {
        super(context);
        this.preview = preview;
        if( MyDebug.LOG ) {
            Log.d(TAG, "new CanvasView");
        }
        // periodic redraw: repost at the preview's frame rate until onPause()
        ticker = new Runnable() {
            @Override
            public void run() {
                preview.test_ticker_called = true;
                invalidate();
                ui_handler.postDelayed(this, preview.getFrameRate());
            }
        };
    }

    @Override
    public void onDraw(@NonNull Canvas canvas) {
        preview.draw(canvas);
    }

    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec);
        }
        preview.getMeasureSpec(measure_result, widthSpec, heightSpec);
        super.onMeasure(measure_result[0], measure_result[1]);
    }

    void onPause() {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onPause()");
        }
        // stop the redraw loop
        ui_handler.removeCallbacks(ticker);
    }

    void onResume() {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onResume()");
        }
        // kick off the redraw loop (reposts itself)
        ticker.run();
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,114 @@
package net.sourceforge.opencamera.preview;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.util.Log;
import androidx.annotation.NonNull;
import net.sourceforge.opencamera.MyDebug;
/** This is essentially similar to CamcorderProfile in that it encapsulates a set of video settings
* to be passed to MediaRecorder, but allows us to store additional fields.
*/
/** This is essentially similar to CamcorderProfile in that it encapsulates a set of video settings
 * to be passed to MediaRecorder, but allows us to store additional fields.
 */
public class VideoProfile {
    private static final String TAG = "VideoProfile";

    public boolean record_audio;
    public boolean no_audio_permission; // set to true if record_audio==false, but where the user had requested audio and we don't have microphone permission
    public int audioSource; // a MediaRecorder.AudioSource value
    public int audioCodec;
    public int audioChannels;
    @SuppressWarnings("WeakerAccess")
    public int audioBitRate;
    @SuppressWarnings("WeakerAccess")
    public int audioSampleRate;
    public int fileFormat; // a MediaRecorder.OutputFormat value
    public String fileExtension = "mp4";
    public int videoSource; // a MediaRecorder.VideoSource value
    public int videoCodec;
    public int videoFrameRate;
    public double videoCaptureRate; // capture rate; callers may set this to differ from videoFrameRate (see copyToMediaRecorder())
    public int videoBitRate;
    public int videoFrameHeight;
    public int videoFrameWidth;

    /** Returns a dummy video profile, used if video isn't supported.
     */
    VideoProfile() {
    }

    /** Initialises this profile from a CamcorderProfile, enabling audio and using
     *  the CAMCORDER/CAMERA audio and video sources.
     */
    VideoProfile(CamcorderProfile camcorderProfile) {
        this.record_audio = true;
        this.no_audio_permission = false;
        this.audioSource = MediaRecorder.AudioSource.CAMCORDER;
        this.audioCodec = camcorderProfile.audioCodec;
        this.audioChannels = camcorderProfile.audioChannels;
        this.audioBitRate = camcorderProfile.audioBitRate;
        this.audioSampleRate = camcorderProfile.audioSampleRate;
        this.fileFormat = camcorderProfile.fileFormat;
        this.videoSource = MediaRecorder.VideoSource.CAMERA;
        this.videoCodec = camcorderProfile.videoCodec;
        this.videoFrameRate = camcorderProfile.videoFrameRate;
        this.videoCaptureRate = camcorderProfile.videoFrameRate; // capture rate defaults to the frame rate
        this.videoBitRate = camcorderProfile.videoBitRate;
        this.videoFrameHeight = camcorderProfile.videoFrameHeight;
        this.videoFrameWidth = camcorderProfile.videoFrameWidth;
    }

    /** Returns a multi-line description of all fields, for debug logging. */
    @Override // fixed: @Override was missing
    @NonNull
    public String toString() {
        return ("\nAudioSource: " + this.audioSource +
                "\nVideoSource: " + this.videoSource +
                "\nFileFormat: " + this.fileFormat +
                "\nFileExtension: " + this.fileExtension +
                "\nAudioCodec: " + this.audioCodec +
                "\nAudioChannels: " + this.audioChannels +
                "\nAudioBitrate: " + this.audioBitRate +
                "\nAudioSampleRate: " + this.audioSampleRate +
                "\nVideoCodec: " + this.videoCodec +
                "\nVideoFrameRate: " + this.videoFrameRate +
                "\nVideoCaptureRate: " + this.videoCaptureRate +
                "\nVideoBitRate: " + this.videoBitRate +
                "\nVideoWidth: " + this.videoFrameWidth +
                "\nVideoHeight: " + this.videoFrameHeight
        );
    }

    /**
     * Copies the fields of this profile to a MediaRecorder instance.
     */
    public void copyToMediaRecorder(MediaRecorder media_recorder) {
        if( MyDebug.LOG )
            Log.d(TAG, "copyToMediaRecorder: " + media_recorder);
        if( record_audio ) {
            if( MyDebug.LOG )
                Log.d(TAG, "record audio");
            media_recorder.setAudioSource(this.audioSource);
        }
        media_recorder.setVideoSource(this.videoSource);
        // n.b., order may be important - output format should be first, at least
        // also match order of MediaRecorder.setProfile() just to be safe, see https://stackoverflow.com/questions/5524672/is-it-possible-to-use-camcorderprofile-without-audio-source
        media_recorder.setOutputFormat(this.fileFormat);
        if( MyDebug.LOG )
            Log.d(TAG, "set frame rate: " + this.videoFrameRate);
        media_recorder.setVideoFrameRate(this.videoFrameRate);
        // it's probably safe to always call setCaptureRate, but to be safe (and keep compatibility with old Open Camera versions), we only do so when needed
        if( this.videoCaptureRate != (double)this.videoFrameRate ) {
            if( MyDebug.LOG )
                Log.d(TAG, "set capture rate: " + this.videoCaptureRate);
            media_recorder.setCaptureRate(this.videoCaptureRate);
        }
        media_recorder.setVideoSize(this.videoFrameWidth, this.videoFrameHeight);
        media_recorder.setVideoEncodingBitRate(this.videoBitRate);
        media_recorder.setVideoEncoder(this.videoCodec);
        if( record_audio ) {
            media_recorder.setAudioEncodingBitRate(this.audioBitRate);
            media_recorder.setAudioChannels(this.audioChannels);
            media_recorder.setAudioSamplingRate(this.audioSampleRate);
            media_recorder.setAudioEncoder(this.audioCodec);
        }
        if( MyDebug.LOG )
            Log.d(TAG, "done: " + media_recorder);
    }
}

View file

@ -0,0 +1,226 @@
package net.sourceforge.opencamera.preview;
import android.media.CamcorderProfile;
import android.util.Log;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import net.sourceforge.opencamera.MyDebug;
import java.io.Serial;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/** Handles video quality options.
* Note that this class should avoid calls to the Android API, so we can perform local unit testing
* on it.
*/
/** Handles video quality options.
 * Note that this class should avoid calls to the Android API, so we can perform local unit testing
 * on it.
 */
public class VideoQualityHandler {
    private static final String TAG = "VideoQualityHandler";

    /** Simple immutable width/height pair. */
    public static class Dimension2D {
        final int width;
        final int height;
        public Dimension2D(int width, int height) {
            this.width = width;
            this.height = height;
        }
    }

    // video_quality can either be:
    // - an int, in which case it refers to a CamcorderProfile
    // - of the form [CamcorderProfile]_r[width]x[height] - we use the CamcorderProfile as a base, and override the video resolution - this is needed to support resolutions which don't have corresponding camcorder profiles
    private List<String> video_quality;
    private int current_video_quality = -1; // this is an index into the video_quality array, or -1 if not found (though this shouldn't happen?)
    private List<CameraController.Size> video_sizes;
    private List<CameraController.Size> video_sizes_high_speed; // may be null if high speed not supported

    /** Clears the computed quality list and the current selection. */
    void resetCurrentQuality() {
        video_quality = null;
        current_video_quality = -1;
    }

    /** Initialises the class with the available video profiles and resolutions. The user should first
     * set the video sizes via setVideoSizes().
     * @param profiles A list of qualities (see CamcorderProfile.QUALITY_*). Should be supplied in
     *                 order from highest to lowest quality.
     * @param dimensions A corresponding list of the width/height for that quality (as given by
     *                   videoFrameWidth, videoFrameHeight in the profile returned by CamcorderProfile.get()).
     * @throws RuntimeException if profiles and dimensions have different lengths (programming error).
     */
    public void initialiseVideoQualityFromProfiles(List<Integer> profiles, List<Dimension2D> dimensions) {
        if( MyDebug.LOG )
            Log.d(TAG, "initialiseVideoQualityFromProfiles()");
        // validate before touching any state (fixed: previously checked after allocation)
        if( profiles.size() != dimensions.size() ) {
            Log.e(TAG, "profiles and dimensions have unequal sizes");
            // this is a programming error (fixed: exception now carries a message)
            throw new RuntimeException("profiles and dimensions have unequal sizes");
        }
        video_quality = new ArrayList<>();
        boolean[] done_video_size = null;
        if( video_sizes != null ) {
            // tracks which entries of video_sizes have already been assigned a quality
            // (boolean array elements default to false, so no explicit init loop needed)
            done_video_size = new boolean[video_sizes.size()];
        }
        for(int i=0;i<profiles.size();i++) {
            Dimension2D dim = dimensions.get(i);
            addVideoResolutions(done_video_size, profiles.get(i), dim.width, dim.height);
        }
        if( MyDebug.LOG ) {
            for(int i=0;i<video_quality.size();i++) {
                Log.d(TAG, "supported video quality: " + video_quality.get(i));
            }
        }
    }

    // Android docs and FindBugs recommend that Comparators also be Serializable
    /** Orders sizes by descending area (largest first). */
    private static class SortVideoSizesComparator implements Comparator<CameraController.Size>, Serializable {
        @Serial
        private static final long serialVersionUID = 5802214721033718212L;
        @Override
        public int compare(final CameraController.Size a, final CameraController.Size b) {
            return b.width * b.height - a.width * a.height;
        }
    }

    /** Sorts the stored video sizes largest-first. */
    public void sortVideoSizes() {
        if( MyDebug.LOG )
            Log.d(TAG, "sortVideoSizes()");
        Collections.sort(this.video_sizes, new SortVideoSizesComparator());
        if( MyDebug.LOG ) {
            for(CameraController.Size size : video_sizes) {
                Log.d(TAG, "    supported video size: " + size.width + ", " + size.height);
            }
        }
    }

    /** Appends quality strings to video_quality for every not-yet-handled video size that either
     *  exactly matches the profile's resolution (plain profile string) or is at least as large
     *  (profile string with a "_r[w]x[h]" resolution override).
     */
    private void addVideoResolutions(boolean[] done_video_size, int base_profile, int min_resolution_w, int min_resolution_h) {
        if( video_sizes == null ) {
            return;
        }
        if( MyDebug.LOG )
            Log.d(TAG, "profile " + base_profile + " is resolution " + min_resolution_w + " x " + min_resolution_h);
        for(int i=0;i<video_sizes.size();i++) {
            if( done_video_size[i] )
                continue;
            CameraController.Size size = video_sizes.get(i);
            if( size.width == min_resolution_w && size.height == min_resolution_h ) {
                String str = String.valueOf(base_profile);
                video_quality.add(str);
                done_video_size[i] = true;
                if( MyDebug.LOG )
                    Log.d(TAG, "added: " + i + ":"+ str + " " + size.width + "x" + size.height);
            }
            else if( base_profile == CamcorderProfile.QUALITY_LOW || size.width * size.height >= min_resolution_w*min_resolution_h ) {
                // QUALITY_LOW is the last profile processed, so it picks up all remaining sizes
                String str = base_profile + "_r" + size.width + "x" + size.height;
                video_quality.add(str);
                done_video_size[i] = true;
                if( MyDebug.LOG )
                    Log.d(TAG, "added: " + i + ":" + str);
            }
        }
    }

    public List<String> getSupportedVideoQuality() {
        if( MyDebug.LOG )
            Log.d(TAG, "getSupportedVideoQuality");
        return this.video_quality;
    }

    int getCurrentVideoQualityIndex() {
        if( MyDebug.LOG )
            Log.d(TAG, "getCurrentVideoQualityIndex");
        return this.current_video_quality;
    }

    void setCurrentVideoQualityIndex(int current_video_quality) {
        if( MyDebug.LOG )
            Log.d(TAG, "setCurrentVideoQualityIndex: " + current_video_quality);
        this.current_video_quality = current_video_quality;
    }

    /** Returns the currently selected quality string, or null if none selected. */
    public String getCurrentVideoQuality() {
        if( current_video_quality == -1 )
            return null;
        return video_quality.get(current_video_quality);
    }

    public List<CameraController.Size> getSupportedVideoSizes() {
        if( MyDebug.LOG )
            Log.d(TAG, "getSupportedVideoSizes");
        return this.video_sizes;
    }

    public List<CameraController.Size> getSupportedVideoSizesHighSpeed() {
        if( MyDebug.LOG )
            Log.d(TAG, "getSupportedVideoSizesHighSpeed");
        return this.video_sizes_high_speed;
    }

    /** Whether the requested fps is supported, without relying on high-speed mode.
     * Typically caller should first check videoSupportsFrameRateHighSpeed().
     */
    public boolean videoSupportsFrameRate(int fps) {
        return CameraController.CameraFeatures.supportsFrameRate(this.video_sizes, fps);
    }

    /** Whether the requested fps is supported as a high-speed mode.
     */
    public boolean videoSupportsFrameRateHighSpeed(int fps) {
        return CameraController.CameraFeatures.supportsFrameRate(this.video_sizes_high_speed, fps);
    }

    /** Finds a video size matching the requested dimensions and fps, falling back to the
     *  high-speed sizes if no regular size matches.
     * @param return_closest if true, return the closest match rather than requiring an exact one
     * @return the best size found, or null if none
     */
    CameraController.Size findVideoSizeForFrameRate(int width, int height, double fps, boolean return_closest) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "findVideoSizeForFrameRate");
            Log.d(TAG, "width: " + width);
            Log.d(TAG, "height: " + height);
            Log.d(TAG, "fps: " + fps);
        }
        CameraController.Size requested_size = new CameraController.Size(width, height);
        CameraController.Size best_video_size = CameraController.CameraFeatures.findSize(this.getSupportedVideoSizes(), requested_size, fps, return_closest);
        if( best_video_size == null && this.getSupportedVideoSizesHighSpeed() != null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "need to check high speed sizes");
            // check high speed
            best_video_size = CameraController.CameraFeatures.findSize(this.getSupportedVideoSizesHighSpeed(), requested_size, fps, return_closest);
        }
        return best_video_size;
    }

    /** Returns the largest size (by area) in the list; (-1, -1) if the list is empty. */
    private static CameraController.Size getMaxVideoSize(List<CameraController.Size> sizes) {
        int max_width = -1, max_height = -1;
        for(CameraController.Size size : sizes) {
            if( max_width == -1 || size.width*size.height > max_width*max_height ) {
                max_width = size.width;
                max_height = size.height;
            }
        }
        return new CameraController.Size(max_width, max_height);
    }

    /** Returns the maximum supported (non-high-speed) video size.
     */
    CameraController.Size getMaxSupportedVideoSize() {
        return getMaxVideoSize(video_sizes);
    }

    /** Returns the maximum supported high speed video size.
     * NOTE(review): throws NullPointerException if high-speed sizes were never set - callers
     * presumably only call this when high speed is supported; confirm.
     */
    CameraController.Size getMaxSupportedVideoSizeHighSpeed() {
        return getMaxVideoSize(video_sizes_high_speed);
    }

    /** Stores the supported video sizes and sorts them largest-first. */
    public void setVideoSizes(List<CameraController.Size> video_sizes) {
        this.video_sizes = video_sizes;
        this.sortVideoSizes();
    }

    public void setVideoSizesHighSpeed(List<CameraController.Size> video_sizes_high_speed) {
        this.video_sizes_high_speed = video_sizes_high_speed;
    }
}

View file

@ -0,0 +1,19 @@
package net.sourceforge.opencamera.preview.camerasurface;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import android.graphics.Matrix;
import android.media.MediaRecorder;
import android.view.View;
/** Provides support for the surface used for the preview - this can either be
* a SurfaceView or a TextureView.
*/
/** Provides support for the surface used for the preview - this can either be
 * a SurfaceView or a TextureView.
 */
public interface CameraSurface {
    /** Returns the underlying Android View. */
    View getView();
    /** Attaches this surface to the camera controller as its preview target. */
    void setPreviewDisplay(CameraController camera_controller); // n.b., uses double-dispatch similar to Visitor pattern - behaviour depends on type of CameraSurface and CameraController
    /** Attaches this surface to the video recorder, if required by the surface type. */
    void setVideoRecorder(MediaRecorder video_recorder);
    /** Applies a view transform; only the TextureView implementation supports this. */
    void setTransform(Matrix matrix);
    /** Called when the host pauses; implementations stop any redraw loop. */
    void onPause();
    /** Called when the host resumes; implementations restart any redraw loop. */
    void onResume();
}

View file

@ -0,0 +1,117 @@
package net.sourceforge.opencamera.preview.camerasurface;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import net.sourceforge.opencamera.cameracontroller.CameraControllerException;
import net.sourceforge.opencamera.preview.Preview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.media.MediaRecorder;
import android.os.Handler;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import androidx.annotation.NonNull;
/** Provides support for the surface used for the preview, using a SurfaceView.
*/
/** Provides support for the surface used for the preview, using a SurfaceView
 *  (as opposed to MyTextureView).
 */
public class MySurfaceView extends SurfaceView implements CameraSurface {
    private static final String TAG = "MySurfaceView";

    private final Preview preview; // drawing, measuring and touch handling are delegated here
    private final int [] measure_result = new int[2]; // scratch output of Preview.getMeasureSpec()
    private final Handler ui_handler = new Handler();
    private final Runnable ticker;

    public MySurfaceView(Context context, final Preview preview) {
        super(context);
        this.preview = preview;
        if( MyDebug.LOG ) {
            Log.d(TAG, "new MySurfaceView");
        }
        // the Preview is notified when the underlying surface is created/destroyed
        getHolder().addCallback(preview);
        // periodic redraw: repost at the preview's frame rate until onPause()
        ticker = new Runnable() {
            @Override
            public void run() {
                preview.test_ticker_called = true;
                invalidate();
                ui_handler.postDelayed(this, preview.getFrameRate());
            }
        };
    }

    @Override
    public View getView() {
        return this;
    }

    @Override
    public void setPreviewDisplay(CameraController camera_controller) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setPreviewDisplay");
        }
        try {
            camera_controller.setPreviewDisplay(this.getHolder());
        }
        catch(CameraControllerException e) {
            MyDebug.logStackTrace(TAG, "failed to set preview display", e);
        }
    }

    @Override
    public void setVideoRecorder(MediaRecorder video_recorder) {
        video_recorder.setPreviewDisplay(this.getHolder().getSurface());
    }

    @SuppressLint("ClickableViewAccessibility")
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        return preview.touchEvent(event);
    }

    @Override
    public void onDraw(@NonNull Canvas canvas) {
        preview.draw(canvas);
    }

    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec);
        }
        preview.getMeasureSpec(measure_result, widthSpec, heightSpec);
        super.onMeasure(measure_result[0], measure_result[1]);
    }

    @Override
    public void setTransform(Matrix matrix) {
        // SurfaceViews cannot be transformed - only MyTextureView supports this
        if( MyDebug.LOG ) {
            Log.d(TAG, "setting transforms not supported for MySurfaceView");
        }
        throw new RuntimeException();
    }

    @Override
    public void onPause() {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onPause()");
        }
        ui_handler.removeCallbacks(ticker);
    }

    @Override
    public void onResume() {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onResume()");
        }
        ticker.run();
    }
}

View file

@ -0,0 +1,90 @@
package net.sourceforge.opencamera.preview.camerasurface;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.cameracontroller.CameraController;
import net.sourceforge.opencamera.cameracontroller.CameraControllerException;
import net.sourceforge.opencamera.preview.Preview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Matrix;
import android.media.MediaRecorder;
import android.util.Log;
import android.view.MotionEvent;
import android.view.TextureView;
import android.view.View;
/** Provides support for the surface used for the preview, using a TextureView.
*/
/** Provides support for the surface used for the preview, using a TextureView
 *  (which, unlike MySurfaceView, supports transforms).
 */
public class MyTextureView extends TextureView implements CameraSurface {
    private static final String TAG = "MyTextureView";

    private final Preview preview; // measuring and touch handling are delegated here
    private final int [] measure_result = new int[2]; // scratch output of Preview.getMeasureSpec()

    public MyTextureView(Context context, Preview preview) {
        super(context);
        this.preview = preview;
        if( MyDebug.LOG ) {
            Log.d(TAG, "new MyTextureView");
        }
        // the Preview is notified when the underlying surface texture is created/destroyed
        this.setSurfaceTextureListener(preview);
    }

    @Override
    public View getView() {
        return this;
    }

    @Override
    public void setPreviewDisplay(CameraController camera_controller) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "setPreviewDisplay");
        }
        try {
            camera_controller.setPreviewTexture(this);
        }
        catch(CameraControllerException e) {
            MyDebug.logStackTrace(TAG, "failed to set preview display", e);
        }
    }

    @Override
    public void setVideoRecorder(MediaRecorder video_recorder) {
        // should be no need to do anything (see documentation for MediaRecorder.setPreviewDisplay())
    }

    @SuppressLint("ClickableViewAccessibility")
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        return preview.touchEvent(event);
    }

    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
        if( MyDebug.LOG ) {
            Log.d(TAG, "onMeasure: " + widthSpec + " x " + heightSpec);
        }
        preview.getMeasureSpec(measure_result, widthSpec, heightSpec);
        super.onMeasure(measure_result[0], measure_result[1]);
    }

    @Override
    public void setTransform(Matrix matrix) {
        super.setTransform(matrix);
    }

    @Override
    public void onPause() {
        // no redraw ticker to stop, unlike MySurfaceView
    }

    @Override
    public void onResume() {
    }
}

View file

@ -0,0 +1,532 @@
package net.sourceforge.opencamera.remotecontrol;
import net.sourceforge.opencamera.MyDebug;
import android.Manifest;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import androidx.core.content.ContextCompat;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
public class BluetoothLeService extends Service {
private final static String TAG = "BluetoothLeService";
private boolean is_bound; // whether service is bound
private BluetoothManager bluetoothManager;
private BluetoothAdapter bluetoothAdapter;
private String device_address;
private BluetoothGatt bluetoothGatt;
private String remote_device_type;
private final Handler bluetoothHandler = new Handler();
private final HashMap<String, BluetoothGattCharacteristic> subscribed_characteristics = new HashMap<>();
private final List<BluetoothGattCharacteristic> charsToSubscribe = new ArrayList<>();
private double currentTemp = -1;
private double currentDepth = -1;
/*private static final int STATE_DISCONNECTED = 0;
private static final int STATE_CONNECTING = 1;
private static final int STATE_CONNECTED = 2;*/
public final static String ACTION_GATT_CONNECTED =
"net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_CONNECTED";
public final static String ACTION_GATT_DISCONNECTED =
"net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_DISCONNECTED";
public final static String ACTION_GATT_SERVICES_DISCOVERED =
"net.sourceforge.opencamera.Remotecontrol.ACTION_GATT_SERVICES_DISCOVERED";
public final static String ACTION_DATA_AVAILABLE =
"net.sourceforge.opencamera.Remotecontrol.ACTION_DATA_AVAILABLE";
public final static String ACTION_REMOTE_COMMAND =
"net.sourceforge.opencamera.Remotecontrol.COMMAND";
public final static String ACTION_SENSOR_VALUE =
"net.sourceforge.opencamera.Remotecontrol.SENSOR";
public final static String SENSOR_TEMPERATURE =
"net.sourceforge.opencamera.Remotecontrol.TEMPERATURE";
public final static String SENSOR_DEPTH =
"net.sourceforge.opencamera.Remotecontrol.DEPTH";
public final static String EXTRA_DATA =
"net.sourceforge.opencamera.Remotecontrol.EXTRA_DATA";
public final static int COMMAND_SHUTTER = 32;
public final static int COMMAND_MODE = 16;
public final static int COMMAND_MENU = 48;
public final static int COMMAND_AFMF = 97;
public final static int COMMAND_UP = 64;
public final static int COMMAND_DOWN = 80;
/* This forces a gratuitous BLE scan to help the device
 * connect to the remote faster. This is due to limitations of the
 * Android BLE stack and API (just knowing the MAC is not enough on
 * many phones).
 * The scan is stopped after 10 seconds.
 * NOTE(review): startLeScan()/stopLeScan() are deprecated in favour of
 * BluetoothLeScanner, and stopLeScan(null) relies on a null callback being
 * accepted - presumably intentional for a "gratuitous" scan; confirm. */
private void triggerScan() {
    if( MyDebug.LOG )
        Log.d(TAG, "triggerScan");
    if( !is_bound ) {
        // Don't allow calls to startLeScan() (which requires location permission) when service
        // not bound, as application may be in background!
        // In theory this shouldn't be needed here, as we also check is_bound in connect(), but
        // have it here too just to be safe.
        Log.e(TAG, "triggerScan shouldn't be called when service not bound");
        return;
    }
    // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
    if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) {
            Log.e(TAG, "bluetooth scan permission not granted!");
            return;
        }
    }
    // Stops scanning after a pre-defined scan period.
    bluetoothHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
            if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
                if( ContextCompat.checkSelfPermission(BluetoothLeService.this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) {
                    Log.e(TAG, "bluetooth scan permission not granted!");
                    return;
                }
            }
            bluetoothAdapter.stopLeScan(null);
        }
    }, 10000);
    bluetoothAdapter.startLeScan(null);
}
/** Sets the remote device model identifier (e.g. "preference_remote_type_kraken"),
 *  used by subscribeToServices() to pick which GATT characteristics to subscribe to.
 */
public void setRemoteDeviceType(String remote_device_type) {
    if( MyDebug.LOG )
        Log.d(TAG, "Setting remote type: " + remote_device_type);
    this.remote_device_type = remote_device_type;
}
/** GATT callback: rebroadcasts connection-state, service-discovery and
 *  characteristic events as local intents, and drives the reconnect loop on
 *  disconnect.
 */
private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
    @Override
    public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
        String intentAction;
        if( newState == BluetoothProfile.STATE_CONNECTED ) {
            intentAction = ACTION_GATT_CONNECTED;
            broadcastUpdate(intentAction);
            if( MyDebug.LOG ) {
                Log.d(TAG, "Connected to GATT server, call discoverServices()");
            }
            // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
            boolean has_bluetooth_permission = true;
            if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
                if( ContextCompat.checkSelfPermission(BluetoothLeService.this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                    Log.e(TAG, "bluetooth scan permission not granted!");
                    has_bluetooth_permission = false;
                }
            }
            if( has_bluetooth_permission ) {
                bluetoothGatt.discoverServices();
            }
            // invalidate cached sensor readings, so the next sensor notification is always broadcast
            currentDepth = -1;
            currentTemp = -1;
        }
        else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
            intentAction = ACTION_GATT_DISCONNECTED;
            if( MyDebug.LOG )
                Log.d(TAG, "Disconnected from GATT server, reattempting every 5 seconds.");
            broadcastUpdate(intentAction);
            attemptReconnect();
        }
    }
    /** Schedules a single reconnect attempt 5 seconds from now. */
    void attemptReconnect() {
        if( !is_bound ) {
            // We check is_bound in connect() itself, but seems pointless to even try if we
            // know the service is unbound (and if it's later bound again, we'll try connecting
            // again anyway without needing this).
            // NOTE(review): this only logs and does not return, so the timer below is still
            // scheduled; connect() re-checks is_bound, but confirm whether a return was
            // intended here.
            Log.e(TAG, "don't attempt to reconnect when service not bound");
        }
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "Attempting to reconnect to remote.");
                connect(device_address);
            }
        }, 5000);
    }
    @Override
    public void onServicesDiscovered(BluetoothGatt gatt, int status) {
        if( status == BluetoothGatt.GATT_SUCCESS ) {
            broadcastUpdate(ACTION_GATT_SERVICES_DISCOVERED);
            subscribeToServices();
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "onServicesDiscovered received: " + status);
        }
    }
    @Override
    public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) {
        if( status == BluetoothGatt.GATT_SUCCESS ) {
            broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
        }
    }
    @Override
    public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
        if( MyDebug.LOG )
            Log.d(TAG,"Got notification");
        broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
    }
    @Override
    public void onDescriptorWrite(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int status) {
        // We need to wait for this callback before enabling the next notification in case we
        // have several in our list
        if( !charsToSubscribe.isEmpty() ) {
            setCharacteristicNotification(charsToSubscribe.remove(0), true);
        }
    }
};
/**
 * Subscribe to the services/characteristics we need depending
 * on the remote device model (as set via setRemoteDeviceType()).
 * Subscriptions are enabled one at a time: the first is enabled here, and each
 * subsequent one from onDescriptorWrite().
 */
private void subscribeToServices() {
    List<BluetoothGattService> gattServices = getSupportedGattServices();
    if (gattServices == null) return;
    List<UUID> mCharacteristicsWanted;
    //noinspection SwitchStatementWithTooFewBranches
    switch( remote_device_type ) {
        case "preference_remote_type_kraken":
            mCharacteristicsWanted = KrakenGattAttributes.getDesiredCharacteristics();
            break;
        default:
            // unknown device type: match nothing
            // (fixed: the previous placeholder UUID.fromString("0000") was not a valid
            // UUID string and would have thrown IllegalArgumentException at runtime)
            mCharacteristicsWanted = Collections.singletonList(UUID.fromString("00000000-0000-0000-0000-000000000000"));
            break;
    }
    for(BluetoothGattService gattService : gattServices) {
        List<BluetoothGattCharacteristic> gattCharacteristics =
                gattService.getCharacteristics();
        for(BluetoothGattCharacteristic gattCharacteristic : gattCharacteristics) {
            UUID uuid = gattCharacteristic.getUuid();
            if( mCharacteristicsWanted.contains(uuid) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "Found characteristic to subscribe to: " + uuid);
                charsToSubscribe.add(gattCharacteristic);
            }
        }
    }
    // fixed: guard against no matching characteristics - remove(0) on an empty list
    // would throw IndexOutOfBoundsException
    if( !charsToSubscribe.isEmpty() ) {
        setCharacteristicNotification(charsToSubscribe.remove(0), true);
    }
}
/** Broadcasts a simple local intent with the given action and no payload. */
private void broadcastUpdate(final String action) {
    final Intent intent = new Intent(action);
    sendBroadcast(intent);
}
/** Decodes a characteristic update from the remote and rebroadcasts it locally:
 *  Kraken button presses become ACTION_REMOTE_COMMAND intents, and Kraken sensor
 *  readings become ACTION_SENSOR_VALUE intents.
 */
private void broadcastUpdate(String ignoredAction, final BluetoothGattCharacteristic characteristic) {
    UUID uuid = characteristic.getUuid();
    final int format_uint8 = BluetoothGattCharacteristic.FORMAT_UINT8;
    final int format_uint16 = BluetoothGattCharacteristic.FORMAT_UINT16;
    int remoteCommand = -1;
    if( KrakenGattAttributes.KRAKEN_BUTTONS_CHARACTERISTIC.equals(uuid) ) {
        if( MyDebug.LOG )
            Log.d(TAG,"Got Kraken button press");
        final int buttonCode= characteristic.getIntValue(format_uint8, 0);
        if( MyDebug.LOG )
            Log.d(TAG, String.format("Received Button press: %d", buttonCode));
        // Note: we stay at a fairly generic level here and will manage variants
        // on the various button actions in MainActivity, because those will change depending
        // on the current state of the app, and we don't want to know anything about that state
        // from the Bluetooth LE service.
        // The raw codes happen to match the COMMAND_* values, but mapping them
        // explicitly keeps this readable if other drivers are implemented later.
        switch( buttonCode ) {
            case 32: // shutter press
                remoteCommand = COMMAND_SHUTTER;
                break;
            case 16: // "Mode" button: either "back" action or "Photo/Camera" switch
                remoteCommand = COMMAND_MODE;
                break;
            case 48: // "Menu" button
                remoteCommand = COMMAND_MENU;
                break;
            case 97: // AF/MF button
                remoteCommand = COMMAND_AFMF;
                break;
            case 96:
                // Long press on MF/AF button - deliberately ignored.
                // Note: the camera issues button code 97 first, then
                // 96 after one second of continuous press.
                break;
            case 64: // Up button
                remoteCommand = COMMAND_UP;
                break;
            case 80: // Down button
                remoteCommand = COMMAND_DOWN;
                break;
        }
        // Only forward if we have something to say
        if( remoteCommand > -1 ) {
            final Intent intent = new Intent(ACTION_REMOTE_COMMAND);
            intent.putExtra(EXTRA_DATA, remoteCommand);
            sendBroadcast(intent);
        }
    }
    else if( KrakenGattAttributes.KRAKEN_SENSORS_CHARACTERISTIC.equals(uuid) ) {
        // The housing returns four bytes.
        // Byte 0-1: depth = (Byte 0 + Byte 1 << 8) / 10 / density
        // Byte 2-3: temperature = (Byte 2 + Byte 3 << 8) / 10
        //
        // Depth is valid for fresh water by default (makes you wonder whether the sensor
        // is really designed for saltwater at all), and the value has to be divided by the
        // density of saltwater. A commonly accepted value is 1030 kg/m3 (1.03 density).
        double temperature = characteristic.getIntValue(format_uint16, 2) / 10.0;
        double depth = characteristic.getIntValue(format_uint16, 0) / 10.0;
        // skip the broadcast if nothing changed since the last reading
        if( temperature == currentTemp && depth == currentDepth )
            return;
        currentDepth = depth;
        currentTemp = temperature;
        if( MyDebug.LOG )
            Log.d(TAG, "Got new Kraken sensor reading. Temperature: " + temperature + " Depth:" + depth);
        final Intent intent = new Intent(ACTION_SENSOR_VALUE);
        intent.putExtra(SENSOR_TEMPERATURE, temperature);
        intent.putExtra(SENSOR_DEPTH, depth);
        sendBroadcast(intent);
    }
}
/** Binder handed to in-process clients via onBind(); exposes this service instance directly
 *  so a bound activity can call its public methods (service always runs in the app process). */
public class LocalBinder extends Binder {
public BluetoothLeService getService() {
return BluetoothLeService.this;
}
}
// single reusable binder instance returned from onBind()
private final IBinder mBinder = new LocalBinder();
/** Returns the local binder so in-process clients can access this service directly. */
@Override
public IBinder onBind(Intent intent) {
if( MyDebug.LOG )
Log.d(TAG, "onBind");
return mBinder;
}
/** Called when all clients have unbound: clears is_bound (which blocks any delayed
 *  reconnect attempts - see connect()) and releases the GATT connection via close(). */
@Override
public boolean onUnbind(Intent intent) {
if( MyDebug.LOG )
Log.d(TAG, "onUnbind");
this.is_bound = false;
close();
return super.onUnbind(intent);
}
/** Sets up the bluetooth manager/adapter and marks the service as bound.
 *  Only call this after the service is bound (from ServiceConnection.onServiceConnected())!
 *  @return true if a usable BluetoothAdapter was obtained
 */
public boolean initialize() {
    if( MyDebug.LOG )
        Log.d(TAG, "initialize");
    // in theory we'd set this in onBind(), to be more symmetric with onUnbind() where we
    // set it to false - but it's unclear whether onBind() is always called before
    // ServiceConnection.onServiceConnected(), so we do it here instead
    this.is_bound = true;
    if( bluetoothManager == null ) {
        bluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
    }
    if( bluetoothManager == null ) {
        Log.e(TAG, "Unable to initialize BluetoothManager.");
        return false;
    }
    bluetoothAdapter = bluetoothManager.getAdapter();
    if( bluetoothAdapter != null )
        return true;
    Log.e(TAG, "Unable to obtain a BluetoothAdapter.");
    return false;
}
/** Attempts to connect to the BLE device with the given MAC address.
 *  Fails fast (returning false) if there is no adapter, no address, the service isn't bound,
 *  or the runtime bluetooth permission is missing. If the device can't be resolved, a retry
 *  is scheduled after 5 seconds.
 *  @param address MAC address of the remote device
 *  @return true if a GATT connection attempt was initiated
 */
public boolean connect(final String address) {
    if( MyDebug.LOG )
        Log.d(TAG, "connect: " + address);
    if( bluetoothAdapter == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "bluetoothAdapter is null");
        return false;
    }
    else if( address == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "address is null");
        return false;
    }
    else if( !is_bound ) {
        // Don't allow calls to startLeScan() via triggerScan() (which requires location
        // permission) when service not bound, as application may be in background!
        // And it doesn't seem sensible to even allow connecting if service not bound.
        // Under normal operation this isn't needed, but there are calls to connect() that can
        // happen from postDelayed() or TimerTask in this class, so a risk that they call
        // connect() after the service is unbound!
        Log.e(TAG, "connect shouldn't be called when service not bound");
        return false;
    }
    // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
    if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
            // fix: this checks BLUETOOTH_CONNECT, so log the correct permission name
            Log.e(TAG, "bluetooth connect permission not granted!");
            return false;
        }
    }
    // test code for infinite looping, seeing if this runs in background:
    /*if( address.equals("undefined") ) {
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "trying connect again from postdelayed");
                connect(address);
            }
        }, 1000);
    }
    if( address.equals("undefined") ) {
        // test - only needed if we've hacked BluetoothRemoteControl.remoteEnabled() to not check for being undefined
        if( MyDebug.LOG )
            Log.d(TAG, "address is undefined");
        return false;
    }*/
    if( address.equals(device_address) && bluetoothGatt != null ) {
        // reconnecting to the same device: drop the stale GATT connection first
        bluetoothGatt.disconnect();
        bluetoothGatt.close();
        bluetoothGatt = null;
    }
    final BluetoothDevice device = bluetoothAdapter.getRemoteDevice(address);
    if( device == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "device not found");
        // device may not be reachable yet - retry in 5 seconds
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "attempt to connect to remote");
                connect(address);
            }
        }, 5000);
        return false;
    }
    // It looks like Android won't connect to BLE devices properly without scanning
    // for them first, even when connecting by explicit MAC address. Since we're using
    // BLE for underwater housings and we want rock solid connectivity, we trigger
    // a scan for 10 seconds
    triggerScan();
    bluetoothGatt = device.connectGatt(this, true, mGattCallback);
    device_address = address;
    return true;
}
/** Releases the GATT client connection, if any. Safe to call repeatedly. */
private void close() {
    if( bluetoothGatt == null ) {
        return;
    }
    // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
    if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
            // fix: this checks BLUETOOTH_CONNECT, so log the correct permission name
            Log.e(TAG, "bluetooth connect permission not granted!");
            return;
        }
    }
    bluetoothGatt.close();
    bluetoothGatt = null;
}
/** Enables or disables notifications for the given characteristic, both locally (via
 *  BluetoothGatt) and on the remote device (by writing the Client Characteristic
 *  Configuration descriptor). Also maintains the subscribed_characteristics map.
 *  @param characteristic the characteristic to (un)subscribe
 *  @param enabled true to subscribe, false to unsubscribe
 */
private void setCharacteristicNotification(BluetoothGattCharacteristic characteristic, boolean enabled) {
    if( bluetoothAdapter == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "bluetoothAdapter is null");
        return;
    }
    else if( bluetoothGatt == null ) {
        if( MyDebug.LOG )
            Log.d(TAG, "bluetoothGatt is null");
        return;
    }
    // Check for Android 12 Bluetooth permission just in case (and for Android lint error)
    if( DeviceScanner.useAndroid12BluetoothPermissions() ) {
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
            // fix: this checks BLUETOOTH_CONNECT, so log the correct permission name
            Log.e(TAG, "bluetooth connect permission not granted!");
            return;
        }
    }
    String uuid = characteristic.getUuid().toString();
    bluetoothGatt.setCharacteristicNotification(characteristic, enabled);
    if( enabled ) {
        subscribed_characteristics.put(uuid, characteristic);
    }
    else {
        subscribed_characteristics.remove(uuid);
    }
    BluetoothGattDescriptor descriptor = characteristic.getDescriptor(KrakenGattAttributes.CLIENT_CHARACTERISTIC_CONFIG);
    if( descriptor == null ) {
        // fix: getDescriptor() returns null if the characteristic has no CCC descriptor;
        // previously this would have thrown a NullPointerException
        Log.e(TAG, "characteristic has no client characteristic configuration descriptor: " + uuid);
        return;
    }
    // fix: previously ENABLE_NOTIFICATION_VALUE was written to the remote device even when
    // disabling notifications
    descriptor.setValue(enabled ?
            BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE :
            BluetoothGattDescriptor.DISABLE_NOTIFICATION_VALUE);
    bluetoothGatt.writeDescriptor(descriptor);
}
/** Returns the GATT services discovered on the connected device, or null if not connected. */
private List<BluetoothGattService> getSupportedGattServices() {
    return bluetoothGatt == null ? null : bluetoothGatt.getServices();
}
}

View file

@ -0,0 +1,309 @@
package net.sourceforge.opencamera.remotecontrol;
import static android.content.Context.RECEIVER_NOT_EXPORTED;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.util.Log;
import net.sourceforge.opencamera.MainActivity;
import net.sourceforge.opencamera.MyApplicationInterface;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.PreferenceKeys;
import net.sourceforge.opencamera.ui.MainUI;
/** Class for handling the Bluetooth LE remote control functionality.
 *  Binds to BluetoothLeService, translates its broadcasts (connection state changes, button
 *  presses, sensor readings) into UI/application actions on MainActivity, and owns the
 *  start/stop lifecycle of the remote control layer.
 */
public class BluetoothRemoteControl {
    private final static String TAG = "BluetoothRemoteControl";

    private final MainActivity main_activity;
    private BluetoothLeService bluetoothLeService; // set once bound to the service
    private String remoteDeviceAddress; // cached from preferences by remoteEnabled()
    private String remoteDeviceType;    // cached from preferences by remoteEnabled()
    private boolean is_connected;       // true once GATT services have been discovered

    public BluetoothRemoteControl(MainActivity main_activity) {
        this.main_activity = main_activity;
    }

    // class to manage the Service lifecycle for remote control.
    private final ServiceConnection mServiceConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName componentName, IBinder service) {
            if( MyDebug.LOG )
                Log.d(TAG, "onServiceConnected");
            if( main_activity.isAppPaused() ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "but app is now paused");
                // Unclear if this could happen - possibly if app pauses immediately after starting
                // the service, but before we connect? In theory we should then unbind the service,
                // but seems safer not to try to call initialize or connect.
                // This will mean the BluetoothLeService still thinks it's unbound (is_bound will
                // be left false), but fine, that just means we'll enforce not trying to connect at
                // a later stage).
                return;
            }
            bluetoothLeService = ((BluetoothLeService.LocalBinder) service).getService();
            if( !bluetoothLeService.initialize() ) {
                Log.e(TAG, "Unable to initialize Bluetooth");
                stopRemoteControl();
                // fix: must not fall through and call connect() on a service that failed to
                // initialize (and that stopRemoteControl() has just torn down)
                return;
            }
            // connect to the device
            bluetoothLeService.connect(remoteDeviceAddress);
        }

        /** Called when a connection to the Service has been lost. This typically happens when the
         *  process hosting the service has crashed or been killed.
         *  So in particular, note this isn't the inverse to onServiceConnected() - whilst
         *  onServiceConnected is always called (after the service receives onBind()), upon normal
         *  disconnection (after we call unbindService()), the service receives onUnbind(), but
         *  onServiceDisconnected is not called under normal operation.
         */
        @Override
        public void onServiceDisconnected(ComponentName componentName) {
            if( MyDebug.LOG )
                Log.d(TAG, "onServiceDisconnected");
            // attempt to reconnect after a delay
            Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                public void run() {
                    bluetoothLeService.connect(remoteDeviceAddress);
                }
            }, 5000);
        }
    };

    /**
     * Receives event from the remote command handler through intents
     * Handles various events fired by the Service.
     */
    private final BroadcastReceiver remoteControlCommandReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final String action = intent.getAction();
            MyApplicationInterface applicationInterface = main_activity.getApplicationInterface();
            MainUI mainUI = main_activity.getMainUI();
            if( BluetoothLeService.ACTION_GATT_CONNECTED.equals(action) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "Remote connected");
                // Tell the Bluetooth service what type of remote we want to use
                bluetoothLeService.setRemoteDeviceType(remoteDeviceType);
                main_activity.setBrightnessForCamera(false);
            }
            else if( BluetoothLeService.ACTION_GATT_DISCONNECTED.equals(action) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "Remote disconnected");
                is_connected = false;
                // blank out the on-screen sensor display
                applicationInterface.getDrawPreview().onExtraOSDValuesChanged("-- \u00B0C", "-- m");
                mainUI.updateRemoteConnectionIcon();
                main_activity.setBrightnessToMinimumIfWanted();
                if( mainUI.isExposureUIOpen() )
                    mainUI.toggleExposureUI();
            }
            else if( BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED.equals(action) ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "Remote services discovered");
                // We let the BluetoothLEService subscribe to what is relevant, so we
                // do nothing here, but we wait until this is done to update the UI
                // icon
                is_connected = true;
                mainUI.updateRemoteConnectionIcon();
            }
            else if( BluetoothLeService.ACTION_SENSOR_VALUE.equals(action) ) {
                double temp = intent.getDoubleExtra(BluetoothLeService.SENSOR_TEMPERATURE, -1);
                // depth is reported for fresh water; divide by the configured water density
                double depth = intent.getDoubleExtra(BluetoothLeService.SENSOR_DEPTH, -1) / main_activity.getWaterDensity();
                depth = (Math.round(depth* 10)) / 10.0; // Round to 1 decimal
                if( MyDebug.LOG )
                    Log.d(TAG, "Sensor values: depth: " + depth + " - temp: " + temp);
                // Create two OSD lines
                String line1 = temp + " \u00B0C";
                String line2 = depth + " m";
                applicationInterface.getDrawPreview().onExtraOSDValuesChanged(line1, line2);
            }
            else if( BluetoothLeService.ACTION_REMOTE_COMMAND.equals(action) ) {
                int command = intent.getIntExtra(BluetoothLeService.EXTRA_DATA, -1);
                // TODO: we could abstract this into a method provided by each remote control model
                switch( command ) {
                    case BluetoothLeService.COMMAND_SHUTTER:
                        // Easiest - just take a picture (or start/stop camera)
                        main_activity.takePicture(false);
                        break;
                    case BluetoothLeService.COMMAND_MODE:
                        // "Mode" key :either toggles photo/video mode, or
                        // closes the settings screen that is currently open
                        if( mainUI.popupIsOpen() ) {
                            mainUI.togglePopupSettings();
                        }
                        else if( mainUI.isExposureUIOpen() ) {
                            mainUI.toggleExposureUI();
                        }
                        else {
                            main_activity.clickedSwitchVideo(null);
                        }
                        break;
                    case BluetoothLeService.COMMAND_MENU:
                        // Open the exposure UI (ISO/Exposure) or
                        // select the current line on an open UI or
                        // select the current option on a button on a selected line
                        if( !mainUI.popupIsOpen() ) {
                            if( !mainUI.isExposureUIOpen() ) {
                                mainUI.toggleExposureUI();
                            }
                            else {
                                mainUI.commandMenuExposure();
                            }
                        }
                        else {
                            mainUI.commandMenuPopup();
                        }
                        break;
                    case BluetoothLeService.COMMAND_UP:
                        if( !mainUI.processRemoteUpButton() ) {
                            // Default up behaviour:
                            // - if we are on manual focus, then adjust focus.
                            // - if we are on autofocus, then adjust zoom.
                            if( main_activity.getPreview().getCurrentFocusValue() != null && main_activity.getPreview().getCurrentFocusValue().equals("focus_mode_manual2") ) {
                                main_activity.changeFocusDistance(-25, false);
                            }
                            else {
                                // Adjust zoom
                                main_activity.zoomIn();
                            }
                        }
                        break;
                    case BluetoothLeService.COMMAND_DOWN:
                        if( !mainUI.processRemoteDownButton() ) {
                            if( main_activity.getPreview().getCurrentFocusValue() != null && main_activity.getPreview().getCurrentFocusValue().equals("focus_mode_manual2") ) {
                                main_activity.changeFocusDistance(25, false);
                            }
                            else {
                                // Adjust zoom
                                main_activity.zoomOut();
                            }
                        }
                        break;
                    case BluetoothLeService.COMMAND_AFMF:
                        // Open the camera settings popup menu (not the app settings)
                        // or selects the current line/icon in the popup menu, and finally
                        // clicks the icon
                        //if( !mainUI.popupIsOpen() ) {
                        mainUI.togglePopupSettings();
                        //}
                        break;
                    default:
                        break;
                }
            }
            else {
                if( MyDebug.LOG )
                    Log.d(TAG, "Other remote event");
            }
        }
    };

    /** Returns whether the remote device is currently connected (services discovered). */
    public boolean remoteConnected() {
        /*if( true )
            return true; // test*/
        return is_connected;
    }

    // TODO: refactor for a filter than receives generic remote control intents
    private static IntentFilter makeRemoteCommandIntentFilter() {
        final IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction(BluetoothLeService.ACTION_GATT_CONNECTED);
        intentFilter.addAction(BluetoothLeService.ACTION_GATT_DISCONNECTED);
        intentFilter.addAction(BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED);
        intentFilter.addAction(BluetoothLeService.ACTION_DATA_AVAILABLE);
        intentFilter.addAction(BluetoothLeService.ACTION_REMOTE_COMMAND);
        intentFilter.addAction(BluetoothLeService.ACTION_SENSOR_VALUE);
        return intentFilter;
    }

    /**
     * Starts or stops the remote control layer, depending on whether the preference is
     * enabled and the app is in the foreground.
     */
    public void startRemoteControl() {
        if( MyDebug.LOG )
            Log.d(TAG, "BLE Remote control service start check...");
        Intent gattServiceIntent = new Intent(main_activity, BluetoothLeService.class);
        // Check isAppPaused() just to be safe - in theory shouldn't be needed, but don't want to
        // start up the service if we're in background! (And we might as well then try to stop the
        // service instead.)
        if( !main_activity.isAppPaused() && remoteEnabled() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "Remote enabled, starting service");
            main_activity.bindService(gattServiceIntent, mServiceConnection, Context.BIND_AUTO_CREATE);
            // For Android 14 (UPSIDE_DOWN_CAKE) onwards, a flag of RECEIVER_EXPORTED or RECEIVER_NOT_EXPORTED must be specified when using
            // registerReceiver with non-system intents, otherwise a SecurityException will be thrown.
            // The if condition is for TIRAMISU as there seems no harm doing this for earlier versions too, but RECEIVER_NOT_EXPORTED
            // requires Android 13.
            if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ) {
                main_activity.registerReceiver(remoteControlCommandReceiver, makeRemoteCommandIntentFilter(), RECEIVER_NOT_EXPORTED);
            }
            else {
                // n.b., this gets an Android lint warning, even though this can only be fixed for TIRAMISU onwards (as
                // RECEIVER_NOT_EXPORTED not available on older versions)!
                main_activity.registerReceiver(remoteControlCommandReceiver, makeRemoteCommandIntentFilter());
            }
        }
        else {
            if( MyDebug.LOG )
                Log.d(TAG, "Remote disabled, stopping service");
            // Stop the service if necessary
            try {
                main_activity.unregisterReceiver(remoteControlCommandReceiver);
                main_activity.unbindService(mServiceConnection);
                is_connected = false; // Unbinding closes the connection, of course
                main_activity.getMainUI().updateRemoteConnectionIcon();
            }
            catch(IllegalArgumentException e){
                if( MyDebug.LOG )
                    Log.d(TAG, "Remote Service was not running, that's fine");
            }
        }
    }

    /** Shuts down the remote control layer: unregisters the receiver and unbinds the service. */
    public void stopRemoteControl() {
        if( MyDebug.LOG )
            Log.d(TAG, "BLE Remote control service shutdown...");
        if( remoteEnabled()) {
            // Stop the service if necessary
            try {
                main_activity.unregisterReceiver(remoteControlCommandReceiver);
                main_activity.unbindService(mServiceConnection);
                is_connected = false; // Unbinding closes the connection, of course
                main_activity.getMainUI().updateRemoteConnectionIcon();
            }
            catch(IllegalArgumentException e){
                MyDebug.logStackTrace(TAG, "Remote Service was not running, that's strange", e);
            }
        }
    }

    /**
     * Checks if remote control is enabled in the settings, and the remote control address
     * is also defined. As a side effect, caches the remote device type and address from
     * the preferences.
     * @return true if this is the case
     */
    public boolean remoteEnabled() {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(main_activity);
        boolean remote_enabled = sharedPreferences.getBoolean(PreferenceKeys.EnableRemote, false);
        remoteDeviceType = sharedPreferences.getString(PreferenceKeys.RemoteType, "undefined");
        remoteDeviceAddress = sharedPreferences.getString(PreferenceKeys.RemoteName, "undefined");
        //return remote_enabled; // test - if using this, also need to enable test code in BluetoothLeService.connect()
        return remote_enabled && !remoteDeviceAddress.equals("undefined");
    }
}

View file

@ -0,0 +1,542 @@
package net.sourceforge.opencamera.remotecontrol;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
//import android.app.ListActivity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.PreferenceKeys;
import net.sourceforge.opencamera.R;
import java.util.ArrayList;
//public class DeviceScanner extends ListActivity {
//public class DeviceScanner extends Activity {
/** Activity that scans for Bluetooth LE devices and lets the user pick one to use as the
 *  remote control; the chosen device's MAC address is stored in shared preferences. */
public class DeviceScanner extends AppCompatActivity {
private static final String TAG = "OC-BLEScanner";
private LeDeviceListAdapter leDeviceListAdapter; // backs the ListView of discovered devices
private BluetoothAdapter bluetoothAdapter;
private boolean is_scanning;
private Handler bluetoothHandler; // used to stop the scan after a timeout
private SharedPreferences mSharedPreferences;
// request codes for startActivityForResult / requestPermissions
private static final int REQUEST_ENABLE_BT = 1;
private static final int REQUEST_LOCATION_PERMISSIONS = 2;
private static final int REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS = 3;
/** Sets up the scanner UI, verifies BLE support, and shows the currently-selected remote. */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_device_select);
    bluetoothHandler = new Handler();
    if( !getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE) ) {
        Toast.makeText(this, R.string.ble_not_supported, Toast.LENGTH_SHORT).show();
        finish();
        // fix: finish() doesn't stop execution of the current method, so return explicitly
        // rather than continuing to initialise bluetooth on an unsupported device
        return;
    }
    final BluetoothManager bluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
    bluetoothAdapter = bluetoothManager.getAdapter();
    if( bluetoothAdapter == null ) {
        Toast.makeText(this, R.string.bluetooth_not_supported, Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    Button startScanningButton = findViewById(R.id.StartScanButton);
    startScanningButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            startScanning();
        }
    });
    mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getApplicationContext());
    String preference_remote_device_name = PreferenceKeys.RemoteName;
    String remote_name = mSharedPreferences.getString(preference_remote_device_name, "none");
    if( MyDebug.LOG )
        Log.d(TAG, "preference_remote_device_name: " + remote_name);
    TextView currentRemote = findViewById(R.id.currentRemote);
    currentRemote.setText(getResources().getString(R.string.bluetooth_current_remote) + " " + remote_name);
}
/** Hooks up the item-click listener once the content view (and its ListView) exists. */
@Override
public void onContentChanged() {
    if( MyDebug.LOG )
        Log.d(TAG, "onContentChanged");
    super.onContentChanged();
    ListView deviceList = findViewById(R.id.list);
    deviceList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            onListItemClick(position);
        }
    });
}
/** Returns whether we can use the new Android 12 (API 31, VERSION_CODES.S) permissions for
 * bluetooth (BLUETOOTH_SCAN, BLUETOOTH_CONNECT) - if so, we should use these and NOT location
 * permissions.
 * See https://developer.android.com/guide/topics/connectivity/bluetooth/permissions .
 */
static boolean useAndroid12BluetoothPermissions() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.S;
}
/** If bluetooth is currently off, fires the system dialog asking the user to enable it.
 *  Requires BLUETOOTH_CONNECT on Android 12+; bails out silently if it isn't granted. */
private void checkBluetoothEnabled() {
    if( MyDebug.LOG )
        Log.d(TAG, "checkBluetoothEnabled");
    // BLUETOOTH_CONNECT permission is needed for BluetoothAdapter.ACTION_REQUEST_ENABLE.
    // Callers should have already checked for bluetooth permission, but we have this check
    // just in case - and also to avoid the Android lint error that we'd get.
    if( useAndroid12BluetoothPermissions() ) {
        if( MyDebug.LOG )
            Log.d(TAG, "check for bluetooth connect permission");
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
            Log.e(TAG, "bluetooth connect permission not granted!");
            return;
        }
    }
    if( bluetoothAdapter.isEnabled() )
        return;
    // fire an intent to display a dialog asking the user to grant permission to enable Bluetooth
    // n.b., on Android 12 need BLUETOOTH_CONNECT permission for this
    if( MyDebug.LOG )
        Log.d(TAG, "request to enable bluetooth");
    startActivityForResult(new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE), REQUEST_ENABLE_BT);
}
/** Kicks off a BLE scan: verifies the relevant runtime permission (bluetooth scan/connect
 *  on Android 12+, location on older versions), installs a fresh list adapter, then either
 *  starts the scan or asks the user for the missing permission. */
private void startScanning() {
    if( MyDebug.LOG )
        Log.d(TAG, "Start scanning");
    // In real life most of bluetooth LE devices associated with location, so without this
    // permission the sample shows nothing in most cases
    // Also see https://stackoverflow.com/questions/33045581/location-needs-to-be-enabled-for-bluetooth-low-energy-scanning-on-android-6-0
    // Update: on Android 10+, ACCESS_FINE_LOCATION is needed: https://developer.android.com/about/versions/10/privacy/changes#location-telephony-bluetooth-wifi
    // Update: on Android 12+, we use the new bluetooth permissions instead of location permissions.
    boolean havePermission;
    if( useAndroid12BluetoothPermissions() ) {
        havePermission =
                ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) == PackageManager.PERMISSION_GRANTED
                && ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_CONNECT) == PackageManager.PERMISSION_GRANTED;
    }
    else {
        String neededPermission = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q ?
                Manifest.permission.ACCESS_FINE_LOCATION : Manifest.permission.ACCESS_COARSE_LOCATION;
        // pre-Marshmallow there are no runtime permissions to check
        havePermission = Build.VERSION.SDK_INT < Build.VERSION_CODES.M
                || ContextCompat.checkSelfPermission(this, neededPermission) == PackageManager.PERMISSION_GRANTED;
    }
    if( havePermission ) {
        checkBluetoothEnabled();
    }
    leDeviceListAdapter = new LeDeviceListAdapter();
    ListView deviceList = findViewById(R.id.list);
    deviceList.setAdapter(leDeviceListAdapter);
    if( havePermission ) {
        scanLeDevice(true);
    }
    else {
        askForDeviceScannerPermission();
    }
}
/** Request permissions needed for bluetooth scanning: BLUETOOTH_SCAN and BLUETOOTH_CONNECT
 *  on Android 12+, else the location permissions. Shows a rationale dialog first when the
 *  system indicates one should be displayed.
 */
private void askForDeviceScannerPermission() {
    if( MyDebug.LOG )
        Log.d(TAG, "askForDeviceScannerPermission");
    // n.b., we only need ACCESS_COARSE_LOCATION, but it's simpler to request both to be consistent with Open Camera's
    // location permission requests in PermissionHandler. If we only request ACCESS_COARSE_LOCATION here, and later the
    // user enables something that needs ACCESS_FINE_LOCATION, Android ends up showing the "rationale" dialog - and once
    // that's dismissed, the permission seems to be granted without showing the permission request dialog (so it works,
    // but is confusing for the user)
    // Also note that if we did want to only request ACCESS_COARSE_LOCATION here, we'd need to declare that permission
    // explicitly in the AndroidManifest.xml, otherwise the dialog to request permission is never shown (and the permission
    // is denied automatically).
    // Update: on Android 10+, ACCESS_FINE_LOCATION is needed anyway: https://developer.android.com/about/versions/10/privacy/changes#location-telephony-bluetooth-wifi
    // Update: on Android 12+, we use the new bluetooth permissions instead of location permissions.
    if( useAndroid12BluetoothPermissions() ) {
        boolean showRationale =
                ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.BLUETOOTH_SCAN)
                || ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.BLUETOOTH_CONNECT);
        if( showRationale ) {
            // Show an explanation to the user *asynchronously* -- don't block
            // this thread waiting for the user's response! After the user
            // sees the explanation, try again to request the permission.
            showRequestBluetoothScanConnectPermissionRationale();
        }
        else {
            // Can go ahead and request the permission
            if( MyDebug.LOG )
                Log.d(TAG, "requesting bluetooth scan/connect permissions...");
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.BLUETOOTH_SCAN, Manifest.permission.BLUETOOTH_CONNECT},
                    REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS);
        }
    }
    else {
        boolean showRationale =
                ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.ACCESS_FINE_LOCATION)
                || ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.ACCESS_COARSE_LOCATION);
        if( showRationale ) {
            // Show an explanation to the user *asynchronously* -- don't block
            // this thread waiting for the user's response! After the user
            // sees the explanation, try again to request the permission.
            showRequestLocationPermissionRationale();
        }
        else {
            // Can go ahead and request the permission
            if( MyDebug.LOG )
                Log.d(TAG, "requesting location permissions...");
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
                    REQUEST_LOCATION_PERMISSIONS);
        }
    }
}
/** Shows the rationale dialog for the Android 12+ bluetooth permissions; once it is
 *  dismissed, the actual permission request is issued. */
private void showRequestBluetoothScanConnectPermissionRationale() {
    if( MyDebug.LOG )
        Log.d(TAG, "showRequestBluetoothScanConnectPermissionRationale");
    if( !useAndroid12BluetoothPermissions() ) {
        // just in case!
        Log.e(TAG, "shouldn't be requesting bluetooth scan/connect permissions!");
        return;
    }
    final String [] permissions = new String[]{Manifest.permission.BLUETOOTH_SCAN, Manifest.permission.BLUETOOTH_CONNECT};
    new AlertDialog.Builder(this)
            .setTitle(R.string.permission_rationale_title)
            .setMessage(R.string.permission_rationale_bluetooth_scan_connect)
            .setIcon(android.R.drawable.ic_dialog_alert)
            .setPositiveButton(android.R.string.ok, null)
            .setOnDismissListener(new DialogInterface.OnDismissListener() {
                public void onDismiss(DialogInterface dialog) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "requesting permission...");
                    ActivityCompat.requestPermissions(DeviceScanner.this, permissions, REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS);
                }
            }).show();
}
/** Shows the rationale dialog for the (pre-Android 12) location permissions; once it is
 *  dismissed, the actual permission request is issued. */
private void showRequestLocationPermissionRationale() {
    if( MyDebug.LOG )
        Log.d(TAG, "showRequestLocationPermissionRationale");
    if( Build.VERSION.SDK_INT < Build.VERSION_CODES.M ) {
        if( MyDebug.LOG )
            Log.e(TAG, "shouldn't be requesting permissions for pre-Android M!");
        return;
    }
    final String [] permissions = new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION};
    new AlertDialog.Builder(this)
            .setTitle(R.string.permission_rationale_title)
            .setMessage(R.string.permission_rationale_location)
            .setIcon(android.R.drawable.ic_dialog_alert)
            .setPositiveButton(android.R.string.ok, null)
            .setOnDismissListener(new DialogInterface.OnDismissListener() {
                public void onDismiss(DialogInterface dialog) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "requesting permission...");
                    ActivityCompat.requestPermissions(DeviceScanner.this, permissions, REQUEST_LOCATION_PERMISSIONS);
                }
            }).show();
}
/** Handles the outcome of our permission requests; on grant, ensures bluetooth is on and
 *  starts scanning. Request codes other than ours are ignored. */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    if( MyDebug.LOG )
        Log.d(TAG, "onRequestPermissionsResult: requestCode " + requestCode);
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if( requestCode != REQUEST_LOCATION_PERMISSIONS && requestCode != REQUEST_BLUETOOTHSCANCONNECT_PERMISSIONS ) {
        return;
    }
    String which = requestCode == REQUEST_LOCATION_PERMISSIONS ? "location" : "bluetooth scan/connect";
    boolean granted = grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED;
    if( granted ) {
        if( MyDebug.LOG )
            Log.d(TAG, which + " permission granted");
        checkBluetoothEnabled();
        scanLeDevice(true);
    }
    else {
        if( MyDebug.LOG )
            Log.d(TAG, which + " permission denied");
    }
}
/** Handles the result of the "enable bluetooth" system dialog (REQUEST_ENABLE_BT);
 *  closes the scanner if the user declined to enable bluetooth. */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if( MyDebug.LOG )
Log.d(TAG, "onActivityResult");
// user decided to cancel the enabling of Bluetooth, so exit
if( requestCode == REQUEST_ENABLE_BT && resultCode == Activity.RESULT_CANCELED ) {
finish();
return;
}
super.onActivityResult(requestCode, resultCode, data);
}
/** Stops any in-progress scan and clears the device list; safe to call repeatedly
 *  (no-op when not scanning). Shared by onPause/onStop/onDestroy, which previously
 *  triplicated this logic. */
private void cancelScanIfNeeded() {
    if( is_scanning ) {
        scanLeDevice(false);
        leDeviceListAdapter.clear();
    }
}

@Override
protected void onPause() {
    if( MyDebug.LOG )
        Log.d(TAG, "onPause");
    super.onPause();
    cancelScanIfNeeded();
}

@Override
protected void onStop() {
    if( MyDebug.LOG )
        Log.d(TAG, "onStop");
    super.onStop();
    // we do this in onPause, but done here again just to be certain!
    cancelScanIfNeeded();
}

@Override
protected void onDestroy() {
    if( MyDebug.LOG )
        Log.d(TAG, "onDestroy");
    // we do this in onPause, but done here again just to be certain!
    cancelScanIfNeeded();
    super.onDestroy();
}
/** Persists the tapped device's MAC address as the remote control address, stops the scan,
 *  and closes the scanner activity. */
//@Override
protected void onListItemClick(int position) {
    final BluetoothDevice device = leDeviceListAdapter.getDevice(position);
    if( device == null )
        return;
    if( MyDebug.LOG ) {
        Log.d(TAG, "onListItemClick");
        Log.d(TAG, device.getAddress());
    }
    mSharedPreferences.edit()
            .putString(PreferenceKeys.RemoteName, device.getAddress())
            .apply();
    scanLeDevice(false);
    finish();
}
/** Starts (enable==true) or stops (enable==false) a Bluetooth LE scan.
 *  When starting, a delayed stop is also scheduled to run after 10s, as LE scans are
 *  battery intensive.
 *  NOTE(review): each enable posts a fresh delayed stop without cancelling earlier ones,
 *  so restarting a scan within 10s means an older callback can stop the new scan early -
 *  confirm whether this matters in practice.
 */
private void scanLeDevice(final boolean enable) {
    if( MyDebug.LOG )
        Log.d(TAG, "scanLeDevice: " + enable);
    // BLUETOOTH_SCAN permission is needed for bluetoothAdapter.startLeScan and
    // bluetoothAdapter.stopLeScan. Callers should have already checked for bluetooth
    // permission, but we have this check just in case - and also to avoid the Android lint
    // error that we'd get.
    if( useAndroid12BluetoothPermissions() ) {
        if( MyDebug.LOG )
            Log.d(TAG, "check for bluetooth scan permission");
        if( ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_SCAN) != PackageManager.PERMISSION_GRANTED ) {
            Log.e(TAG, "bluetooth scan permission not granted!");
            return;
        }
    }
    if( enable ) {
        // stop scanning after certain time
        bluetoothHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                if( MyDebug.LOG )
                    Log.d(TAG, "stop scanning after delay");
                /*is_scanning = false;
                bluetoothAdapter.stopLeScan(mLeScanCallback);
                invalidateOptionsMenu();*/
                // recurse with enable==false so the permission check above is reapplied
                scanLeDevice(false);
            }
        }, 10000);
        is_scanning = true;
        bluetoothAdapter.startLeScan(mLeScanCallback);
    }
    else {
        is_scanning = false;
        bluetoothAdapter.stopLeScan(mLeScanCallback);
    }
    // refresh the options menu, which reflects the scanning state
    invalidateOptionsMenu();
}
/** ListView adapter holding the Bluetooth LE devices found so far by the scan.
 *  Devices are shown in discovery order; duplicates (as reported repeatedly by the
 *  scan callback) are ignored.
 */
private class LeDeviceListAdapter extends BaseAdapter {
    private final ArrayList<BluetoothDevice> mLeDevices;
    private final LayoutInflater mInflator;

    LeDeviceListAdapter() {
        super();
        mLeDevices = new ArrayList<>();
        mInflator = DeviceScanner.this.getLayoutInflater();
    }

    /** Adds a device, ignoring it if already present (scan callbacks repeat devices). */
    void addDevice(BluetoothDevice device) {
        if( !mLeDevices.contains(device) ) {
            mLeDevices.add(device);
        }
    }

    BluetoothDevice getDevice(int position) {
        return mLeDevices.get(position);
    }

    void clear() {
        mLeDevices.clear();
    }

    @Override
    public int getCount() {
        return mLeDevices.size();
    }

    @Override
    public Object getItem(int i) {
        return mLeDevices.get(i);
    }

    @Override
    public long getItemId(int i) {
        return i;
    }

    @Override
    public View getView(int i, View view, ViewGroup viewGroup) {
        ViewHolder viewHolder;
        if( view == null ) {
            // no recycled row available: inflate a new one and cache its widgets in the tag
            view = mInflator.inflate(R.layout.listitem_device, null);
            viewHolder = new ViewHolder();
            viewHolder.deviceAddress = view.findViewById(R.id.device_address);
            viewHolder.deviceName = view.findViewById(R.id.device_name);
            view.setTag(viewHolder);
        }
        else {
            viewHolder = (ViewHolder) view.getTag();
        }
        // BLUETOOTH_CONNECT permission is needed for device.getName. In theory we shouldn't
        // have added to this list if bluetooth permission not available, but we have this
        // check just in case - and also to avoid the Android lint error that we'd get.
        // NOTE(review): despite its name, this flag tracks the BLUETOOTH_CONNECT permission
        // (needed for getName), not BLUETOOTH_SCAN.
        boolean has_bluetooth_scan_permission = true;
        if( useAndroid12BluetoothPermissions() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "check for bluetooth connect permission");
            if( ContextCompat.checkSelfPermission(DeviceScanner.this, Manifest.permission.BLUETOOTH_CONNECT) != PackageManager.PERMISSION_GRANTED ) {
                has_bluetooth_scan_permission = false;
            }
        }
        BluetoothDevice device = mLeDevices.get(i);
        if( !has_bluetooth_scan_permission ) {
            Log.e(TAG, "bluetooth connect permission not granted!");
            viewHolder.deviceName.setText(R.string.unknown_device_no_permission);
        }
        else {
            final String deviceName = device.getName();
            if( deviceName != null && !deviceName.isEmpty() )
                viewHolder.deviceName.setText(deviceName);
            else
                viewHolder.deviceName.setText(R.string.unknown_device);
        }
        viewHolder.deviceAddress.setText(device.getAddress());
        return view;
    }
}
/* Callback invoked by the framework for each LE advertisement seen during a scan. */
private final BluetoothAdapter.LeScanCallback mLeScanCallback = new BluetoothAdapter.LeScanCallback() {
    @Override
    public void onLeScan(final BluetoothDevice device, int rssi, byte[] scanRecord) {
        // Scan results can be delivered on an arbitrary thread, but the ListView's
        // adapter must only be touched on the UI thread.
        final Runnable update_adapter = new Runnable() {
            @Override
            public void run() {
                leDeviceListAdapter.addDevice(device);
                leDeviceListAdapter.notifyDataSetChanged();
            }
        };
        runOnUiThread(update_adapter);
    }
};
/** Holds the row widgets for a device list entry, cached via View.setTag() so that
 *  getView() doesn't need to call findViewById() on every bind.
 */
static class ViewHolder {
    TextView deviceName;
    TextView deviceAddress;
}
}

View file

@ -0,0 +1,26 @@
package net.sourceforge.opencamera.remotecontrol;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
/**
 * GATT attribute UUIDs for the Kraken Smart Housing, an underwater camera housing
 * that communicates its key presses with the phone over Bluetooth Low Energy.
 */
class KrakenGattAttributes {
    // Bluetooth-standard Client Characteristic Configuration descriptor (0x2902),
    // written to enable notifications on a characteristic.
    static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    // Kraken sensor readings (parent service UUID kept for reference, currently unused):
    //static final UUID KRAKEN_SENSORS_SERVICE = UUID.fromString("00001623-1212-efde-1523-785feabcd123");
    static final UUID KRAKEN_SENSORS_CHARACTERISTIC = UUID.fromString("00001625-1212-efde-1523-785feabcd123");

    // Kraken button presses (parent service UUID kept for reference, currently unused):
    //static final UUID KRAKEN_BUTTONS_SERVICE= UUID.fromString("00001523-1212-efde-1523-785feabcd123");
    static final UUID KRAKEN_BUTTONS_CHARACTERISTIC= UUID.fromString("00001524-1212-efde-1523-785feabcd123");

    // Standard battery service/level (currently unused):
    //static final UUID BATTERY_SERVICE = UUID.fromString("180f");
    //static final UUID BATTERY_LEVEL = UUID.fromString("2a19");

    /** Returns the characteristics we want to subscribe to, buttons first. */
    static List<UUID> getDesiredCharacteristics() {
        return Arrays.asList(KRAKEN_BUTTONS_CHARACTERISTIC, KRAKEN_SENSORS_CHARACTERISTIC);
    }
}

View file

@ -0,0 +1,242 @@
package net.sourceforge.opencamera.ui;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Parcel;
import android.os.Parcelable;
import android.preference.DialogPreference;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
import net.sourceforge.opencamera.MainActivity;
import net.sourceforge.opencamera.R;
/** This contains a custom preference to display a seekbar in place of a ListPreference.
 *  The preference persists a String (one of the entryValues), but the user picks it by
 *  dragging a seekbar whose positions correspond one-to-one with the entries array.
 */
public class ArraySeekBarPreference extends DialogPreference {
    //private static final String TAG = "ArraySeekBarPreference";

    private SeekBar seekbar; // seekbar shown in the dialog
    private TextView textView; // label showing the entry for the current seekbar position
    private CharSequence [] entries; // user readable strings
    private CharSequence [] values; // values corresponding to each string
    private final String default_value; // android:defaultValue from the XML (may be null)
    private String value; // current saved value of this preference (note that this is intentionally not updated when the seekbar changes, as we don't save until the user clicks ok)
    private boolean value_set; // whether value has been initialised

    public ArraySeekBarPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        // read android:defaultValue, android:entries and android:entryValues directly from
        // the XML attributes, similar to what ListPreference does
        String namespace = "http://schemas.android.com/apk/res/android";
        this.default_value = attrs.getAttributeValue(namespace, "defaultValue");
        int entries_id = attrs.getAttributeResourceValue(namespace, "entries", 0);
        if( entries_id > 0 )
            this.setEntries(entries_id);
        int values_id = attrs.getAttributeResourceValue(namespace, "entryValues", 0);
        if( values_id > 0 )
            this.setEntryValues(values_id);
        setDialogLayoutResource(R.layout.arrayseekbarpreference);
    }

    /** Sets up the seekbar and value label when the dialog is shown. */
    @Override
    protected void onBindDialogView(View view) {
        super.onBindDialogView(view);
        if( entries == null || values == null ) {
            throw new IllegalStateException("ArraySeekBarPreference requires entries and entryValues array");
        }
        else if( entries.length != values.length ) {
            throw new IllegalStateException("ArraySeekBarPreference requires entries and entryValues arrays of same length");
        }
        this.seekbar = view.findViewById(R.id.arrayseekbarpreference_seekbar);
        this.textView = view.findViewById(R.id.arrayseekbarpreference_value);
        seekbar.setMax(entries.length-1);
        {
            int index = getValueIndex();
            if( index == -1 ) {
                // If we're here, it means the stored value isn't in the values array.
                // ListPreference just shows a dialog with no selected entry, but that doesn't really work for
                // a seekbar that needs to show the current position! So instead, set the position to the default.
                if( default_value != null && values != null ) {
                    for(int i = values.length - 1; i >= 0; i--) {
                        if( values[i].equals(default_value) ) {
                            index = i;
                            break;
                        }
                    }
                }
            }
            if( index >= 0 )
                seekbar.setProgress(index);
        }
        seekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            private long last_haptic_time;
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                // keep the label in sync with the seekbar position
                String new_entry = entries[progress].toString();
                textView.setText(new_entry);
                if( fromUser ) {
                    last_haptic_time = MainActivity.performHapticFeedback(seekBar, last_haptic_time);
                }
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        // show the label for the initial position
        String new_entry = entries[seekbar.getProgress()].toString();
        textView.setText(new_entry);
    }

    /** Persists the selected value when the user confirms the dialog with OK. */
    @Override
    protected void onDialogClosed(boolean positiveResult) {
        super.onDialogClosed(positiveResult);
        if( positiveResult && values != null ) {
            int progress = seekbar.getProgress();
            String new_value = values[progress].toString();
            if( callChangeListener(new_value) ) {
                setValue(new_value);
            }
        }
    }

    /** Sets the user readable strings, one per entry value. */
    public void setEntries(CharSequence[] entries) {
        this.entries = entries;
    }

    private void setEntries(int entries) {
        setEntries(getContext().getResources().getTextArray(entries));
    }

    /** Sets the values to persist, one per entry. */
    public void setEntryValues(CharSequence[] values) {
        this.values = values;
    }

    private void setEntryValues(int values) {
        setEntryValues(getContext().getResources().getTextArray(values));
    }

    /** As with ListPreference, supports formatting the current entry into the summary via %s. */
    @Override
    public CharSequence getSummary() {
        CharSequence summary = super.getSummary();
        if( summary != null ) {
            CharSequence entry = getEntry();
            return String.format(summary.toString(), entry == null ? "" : entry);
        }
        else
            return null;
    }

    /** Returns the index of the current value in the values array, or -1 if not found.
     */
    private int getValueIndex() {
        if( value != null && values != null ) {
            // go backwards for compatibility with ListPreference in cases with duplicate values
            for(int i = values.length - 1; i >= 0; i--) {
                if( values[i].equals(value) ) {
                    return i;
                }
            }
        }
        return -1;
    }

    /** Returns the human readable string of the current value.
     */
    private CharSequence getEntry() {
        int index = getValueIndex();
        return index >= 0 && entries != null ? entries[index] : null;
    }

    /** Saves the new value, persisting it and notifying listeners if it changed. */
    private void setValue(String value) {
        final boolean changed = !TextUtils.equals(this.value, value);
        if( changed || !value_set ) {
            this.value = value;
            value_set = true;
            persistString(value);
            if( changed ) {
                notifyChanged();
            }
        }
    }

    @Override
    protected Object onGetDefaultValue(TypedArray a, int index) {
        return a.getString(index);
    }

    @Override
    protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
        setValue(restoreValue ? getPersistedString(value) : (String) defaultValue);
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        final Parcelable superState = super.onSaveInstanceState();
        if( isPersistent() ) {
            // value is persisted, so no need to save it in instance state
            return superState;
        }
        final SavedState state = new SavedState(superState);
        state.value = value;
        return state;
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if( state == null || !state.getClass().equals(SavedState.class) ) {
            // we didn't save state in onSaveInstanceState (or this isn't our state)
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState myState = (SavedState)state;
        super.onRestoreInstanceState(myState.getSuperState());
        setValue(myState.value);
    }

    /** Saved state holding the (non-persisted) current value across configuration changes. */
    private static class SavedState extends BaseSavedState {
        String value;

        SavedState(Parcel source) {
            super(source);
            value = source.readString();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeString(value);
        }

        SavedState(Parcelable superState) {
            super(superState);
        }

        public static final Parcelable.Creator<SavedState> CREATOR =
                new Parcelable.Creator<>() {
                    public SavedState createFromParcel(Parcel in) {
                        return new SavedState(in);
                    }
                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                };
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,445 @@
package net.sourceforge.opencamera.ui;
import net.sourceforge.opencamera.MyDebug;
import net.sourceforge.opencamera.R;
import net.sourceforge.opencamera.StorageUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Environment;
import androidx.annotation.NonNull;
import android.text.InputFilter;
import android.text.Spanned;
import android.util.Log;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Toast;
/** Dialog to pick a folder or file. Also allows creating new folders. Used when not
 *  using the Storage Access Framework.
 *  Configure via the setters before showing; read the result via getChosenFolder() /
 *  getChosenFile() after the dialog is dismissed.
 */
public class FolderChooserDialog extends DialogFragment {
    private static final String TAG = "FolderChooserFragment";

    // configuration, set by the caller before showing the dialog:
    private boolean show_new_folder_button = true; // whether to show a button for creating a new folder
    private boolean show_dcim_shortcut = true; // whether to show a shortcut to the DCIM/ folder
    private boolean mode_folder = true; // if true, the dialog is for selecting a folder; if false, the dialog is for selecting a file
    private String extension; // if non-null, and mode_folder==false, only show files matching this file extension
    private File start_folder = new File("");

    // dialog state:
    private File current_folder;
    private File max_parent; // if non-null, don't show the Parent option if viewing this folder (so the user can't go above that folder)
    private AlertDialog folder_dialog;
    private ListView list;

    // results, read by the caller after the dialog closes:
    private String chosen_folder;
    private String chosen_file; // only set if mode_folder==false

    /** Wraps a File for display in the list, with an optional display-name override
     *  (e.g. "Parent folder") and a sort-order group.
     */
    private static class FileWrapper implements Comparable<FileWrapper> {
        private final File file;
        private final String override_name; // if non-null, use this as the display name instead
        private final int sort_order; // items are sorted first by sort_order, then alphabetically

        FileWrapper(File file, String override_name, int sort_order) {
            this.file = file;
            this.override_name = override_name;
            this.sort_order = sort_order;
        }

        @NonNull
        @Override
        public String toString() {
            if( override_name != null )
                return override_name;
            if( file.isDirectory() )
                return file.getName() + File.separator;
            return file.getName();
        }

        @Override
        public int compareTo(@NonNull FileWrapper o) {
            if( this.sort_order < o.sort_order )
                return -1;
            else if( this.sort_order > o.sort_order )
                return 1;
            // within the same group, sort case-insensitively by name
            return this.file.getName().toLowerCase(Locale.US).compareTo(o.getFile().getName().toLowerCase(Locale.US));
        }

        @Override
        public boolean equals(Object o) {
            // important to override equals(), since we're overriding compareTo()
            if( !(o instanceof FileWrapper) )
                return false;
            FileWrapper that = (FileWrapper)o;
            if( this.sort_order != that.sort_order )
                return false;
            return this.file.getName().toLowerCase(Locale.US).equals(that.getFile().getName().toLowerCase(Locale.US));
        }

        @Override
        public int hashCode() {
            // must override this, as we override equals()
            return this.file.getName().toLowerCase(Locale.US).hashCode();
        }

        File getFile() {
            return file;
        }
    }

    /** Builds the dialog: a folder listing plus OK (folder mode), New folder and Cancel
     *  buttons. Falls back towards DCIM or "/" if the start folder isn't usable.
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        if( MyDebug.LOG )
            Log.d(TAG, "onCreateDialog");
        if( MyDebug.LOG )
            Log.d(TAG, "start in folder: " + start_folder);
        list = new ListView(getActivity());
        list.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                if( MyDebug.LOG )
                    Log.d(TAG, "onItemClick: " + position);
                FileWrapper file_wrapper = (FileWrapper) parent.getItemAtPosition(position);
                if( MyDebug.LOG )
                    Log.d(TAG, "clicked: " + file_wrapper.toString());
                File file = file_wrapper.getFile();
                if( MyDebug.LOG )
                    Log.d(TAG, "file: " + file.toString());
                if( file.isDirectory() ) {
                    // navigate into the clicked folder
                    refreshList(file);
                }
                else if( !mode_folder && file.isFile() ) {
                    // in file mode, clicking a file selects it and closes the dialog
                    chosen_file = file.getAbsolutePath();
                    folder_dialog.dismiss();
                }
            }
        });
        // good to use as short a text as possible for the icons, to reduce chance that the three buttons will have to appear on top of each other rather than in a row, in portrait mode
        AlertDialog.Builder folder_dialog_builder = new AlertDialog.Builder(getActivity())
                //.setIcon(R.drawable.alert_dialog_icon)
                .setView(list);
        if( mode_folder ) {
            folder_dialog_builder.setPositiveButton(android.R.string.ok, null); // we set the listener in onShowListener, so we can prevent the dialog from closing (if chosen folder isn't writable)
        }
        if( show_new_folder_button ) {
            folder_dialog_builder.setNeutralButton(R.string.new_folder, null); // we set the listener in onShowListener, so we can prevent the dialog from closing
        }
        folder_dialog_builder.setNegativeButton(android.R.string.cancel, null);
        folder_dialog = folder_dialog_builder.create();
        folder_dialog.setOnShowListener(new DialogInterface.OnShowListener() {
            @Override
            public void onShow(DialogInterface dialog_interface) {
                if( mode_folder ) {
                    Button b_positive = folder_dialog.getButton(AlertDialog.BUTTON_POSITIVE);
                    b_positive.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            if( MyDebug.LOG )
                                Log.d(TAG, "choose folder: " + current_folder.toString());
                            // only dismiss if the folder is actually usable
                            if( useFolder() ) {
                                folder_dialog.dismiss();
                            }
                        }
                    });
                }
                if( show_new_folder_button ) {
                    Button b_neutral = folder_dialog.getButton(AlertDialog.BUTTON_NEUTRAL);
                    b_neutral.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            if( MyDebug.LOG )
                                Log.d(TAG, "new folder in: " + current_folder.toString());
                            newFolder();
                        }
                    });
                }
            }
        });
        if( !start_folder.exists() ) {
            if( MyDebug.LOG )
                Log.d(TAG, "create new folder" + start_folder);
            if( !start_folder.mkdirs() ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "failed to create new folder");
                // don't do anything yet, this is handled below
            }
        }
        refreshList(start_folder);
        if( !canWrite() ) {
            // see testFolderChooserInvalid()
            if( MyDebug.LOG )
                Log.d(TAG, "failed to read folder");
            if( show_dcim_shortcut ) {
                if( MyDebug.LOG )
                    Log.d(TAG, "fall back to DCIM");
                // note that we reset to DCIM rather than DCIM/OpenCamera, just to increase likelihood of getting back to a valid state
                refreshList(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM));
                if( current_folder == null ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "can't even read DCIM?!");
                    refreshList(new File("/"));
                }
            }
        }
        return folder_dialog;
    }

    /** Sets the folder shown when the dialog first opens. */
    public void setStartFolder(File start_folder) {
        this.start_folder = start_folder;
    }

    /** Sets the topmost folder the user can navigate to (the Parent option is hidden there). */
    public void setMaxParent(File max_parent) {
        if( MyDebug.LOG )
            Log.d(TAG, "setMaxParent: " + max_parent);
        this.max_parent = max_parent;
    }

    public void setShowNewFolderButton(boolean show_new_folder_button) {
        this.show_new_folder_button = show_new_folder_button;
    }

    public void setShowDCIMShortcut(boolean show_dcim_shortcut) {
        this.show_dcim_shortcut = show_dcim_shortcut;
    }

    /** Switches between folder-selection (true) and file-selection (false) mode. */
    public void setModeFolder(boolean mode_folder) {
        this.mode_folder = mode_folder;
    }

    /** In file mode, only show files with this extension (including the dot, e.g. ".xml").
     *  NOTE(review): uses default-locale toLowerCase(), while FileWrapper comparisons use
     *  Locale.US - confirm whether a locale-independent lowercase is wanted here too.
     */
    public void setExtension(String extension) {
        this.extension = extension.toLowerCase();
    }

    /** Repopulates the list with the contents of new_folder and makes it the current folder.
     *  Entries are: optional Parent shortcut (group 0), optional DCIM shortcut (group 1),
     *  subfolders (group 2), then files (group 3), each group sorted alphabetically.
     */
    private void refreshList(File new_folder) {
        if( MyDebug.LOG )
            Log.d(TAG, "refreshList: " + new_folder);
        if( new_folder == null ) {
            if( MyDebug.LOG )
                Log.d(TAG, "refreshList: null folder");
            return;
        }
        File [] files = null;
        // try/catch just in case?
        try {
            files = new_folder.listFiles();
        }
        catch(Exception e) {
            MyDebug.logStackTrace(TAG, "exception reading folder", e);
        }
        // n.b., files may be null if no files could be found in the folder (or we can't read) - but should still allow the user
        // to view this folder (so the user can go to parent folders which might be readable again)
        List<FileWrapper> listed_files = new ArrayList<>();
        if( new_folder.getParentFile() != null ) {
            if( max_parent != null && max_parent.equals(new_folder) ) {
                // don't show parent option
            }
            else {
                listed_files.add(new FileWrapper(new_folder.getParentFile(), getResources().getString(R.string.parent_folder), 0));
            }
        }
        if( show_dcim_shortcut ) {
            // don't show the DCIM shortcut when it's the current folder or its direct parent
            File default_folder = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
            if( !default_folder.equals(new_folder) && !default_folder.equals(new_folder.getParentFile()) )
                listed_files.add(new FileWrapper(default_folder, null, 1));
        }
        if( files != null ) {
            for(File file : files) {
                boolean accept = false;
                if( file.isDirectory() )
                    accept = true;
                else if( !mode_folder && file.isFile() ) {
                    accept = true;
                    if( extension != null ) {
                        // filter by extension; files with no extension are still accepted
                        String name = file.getName();
                        int index = name.lastIndexOf('.');
                        if( index != -1 ) {
                            String ext = name.substring(index).toLowerCase();
                            if( !ext.equals(extension) ) {
                                accept = false;
                            }
                        }
                    }
                }
                if( accept ) {
                    int sort_order = file.isDirectory() ? 2 : 3;
                    listed_files.add(new FileWrapper(file, null, sort_order));
                }
            }
        }
        Collections.sort(listed_files);
        ArrayAdapter<FileWrapper> adapter = new ArrayAdapter<>(this.getActivity(), android.R.layout.simple_list_item_1, listed_files);
        list.setAdapter(adapter);
        this.current_folder = new_folder;
        //dialog.setTitle(current_folder.getName());
        folder_dialog.setTitle(current_folder.getAbsolutePath());
    }

    /** Returns whether the current folder is writable; exceptions count as not writable. */
    private boolean canWrite() {
        try {
            if( this.current_folder != null && this.current_folder.canWrite() )
                return true;
        }
        catch(Exception e) {
            if( MyDebug.LOG )
                Log.d(TAG, "exception in canWrite()");
        }
        return false;
    }

    /** Accepts the current folder as the chosen folder if writable, returning whether it was
     *  accepted. If the folder is directly under the base save folder, only its name (a
     *  relative path) is stored, matching how save locations are persisted.
     */
    private boolean useFolder() {
        if( MyDebug.LOG )
            Log.d(TAG, "useFolder");
        if( current_folder == null )
            return false;
        if( canWrite() ) {
            String new_save_location = current_folder.getAbsolutePath();
            if( this.show_dcim_shortcut ) {
                File base_folder = StorageUtils.getBaseFolder();
                if( current_folder.getParentFile() != null && current_folder.getParentFile().equals(base_folder) ) {
                    if( MyDebug.LOG )
                        Log.d(TAG, "parent folder is base folder");
                    new_save_location = current_folder.getName();
                }
            }
            if( MyDebug.LOG )
                Log.d(TAG, "new_save_location: " + new_save_location);
            chosen_folder = new_save_location;
            return true;
        }
        else {
            Toast.makeText(getActivity(), R.string.cant_write_folder, Toast.LENGTH_SHORT).show();
        }
        return false;
    }

    /** Returns the folder selected by the user (or the folder containing the selected folder if
     *  mode_folder==false). Returns null if the dialog was cancelled.
     */
    public String getChosenFolder() {
        return this.chosen_folder;
    }

    /** Returns the file selected by the user, if mode_folder==false. Returns null if the dialog was
     *  cancelled or mode_folder==true.
     */
    public String getChosenFile() {
        return this.chosen_file;
    }

    /** Input filter that rejects characters not allowed in FAT32 folder names. */
    private static class NewFolderInputFilter implements InputFilter {
        // whilst Android seems to allow any characters on internal memory, SD cards are typically formatted with FAT32
        private final static String disallowed = "|\\?*<\":>";

        @Override
        public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
            for(int i=start;i<end;i++) {
                if( disallowed.indexOf( source.charAt(i) ) != -1 ) {
                    // reject the whole insertion if it contains a disallowed character
                    return "";
                }
            }
            return null;
        }
    }

    /** Prompts for a name and creates a new subfolder of the current folder, refreshing the
     *  list on success and showing a toast on failure.
     */
    private void newFolder() {
        if( MyDebug.LOG )
            Log.d(TAG, "newFolder");
        if( current_folder == null )
            return;
        if( canWrite() ) {
            final View dialog_view = LayoutInflater.from(getActivity()).inflate(R.layout.alertdialog_edittext, null);
            final EditText edit_text = dialog_view.findViewById(R.id.edit_text);
            edit_text.setSingleLine();
            edit_text.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20.0f);
            // set hint instead of content description for EditText, see https://support.google.com/accessibility/android/answer/6378120
            //edit_text.setContentDescription(getResources().getString(R.string.enter_new_folder));
            edit_text.setHint(getResources().getString(R.string.enter_new_folder));
            InputFilter filter = new NewFolderInputFilter();
            edit_text.setFilters(new InputFilter[]{filter});
            Dialog dialog = new AlertDialog.Builder(getActivity())
                    //.setIcon(R.drawable.alert_dialog_icon)
                    .setTitle(R.string.enter_new_folder)
                    .setView(dialog_view)
                    .setPositiveButton(android.R.string.ok, new Dialog.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            // Android warning that we should replace with isEmpty(), but that requires Android 15 for CharSequence.isEmpty()
                            if( edit_text.getText().length() == 0 ) {
                                // do nothing
                            }
                            else {
                                try {
                                    String new_folder_name = current_folder.getAbsolutePath() + File.separator + edit_text.getText().toString();
                                    if( MyDebug.LOG )
                                        Log.d(TAG, "create new folder: " + new_folder_name);
                                    File new_folder = new File(new_folder_name);
                                    if( new_folder.exists() ) {
                                        if( MyDebug.LOG )
                                            Log.d(TAG, "folder already exists");
                                        Toast.makeText(getActivity(), R.string.folder_exists, Toast.LENGTH_SHORT).show();
                                    }
                                    else if( new_folder.mkdirs() ) {
                                        if( MyDebug.LOG )
                                            Log.d(TAG, "created new folder");
                                        refreshList(current_folder);
                                    }
                                    else {
                                        if( MyDebug.LOG )
                                            Log.d(TAG, "failed to create new folder");
                                        Toast.makeText(getActivity(), R.string.failed_create_folder, Toast.LENGTH_SHORT).show();
                                    }
                                }
                                catch(Exception e) {
                                    MyDebug.logStackTrace(TAG, "exception trying to create new folder", e);
                                    Toast.makeText(getActivity(), R.string.failed_create_folder, Toast.LENGTH_SHORT).show();
                                }
                            }
                        }
                    })
                    .setNegativeButton(android.R.string.cancel, null)
                    .create();
            dialog.show();
        }
        else {
            Toast.makeText(getActivity(), R.string.cant_write_folder, Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // refresh in case files have changed
        refreshList(current_folder);
    }

    // for testing:
    public File getCurrentFolder() {
        return current_folder;
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,256 @@
package net.sourceforge.opencamera.ui;
import android.util.Log;
import android.widget.SeekBar;
import net.sourceforge.opencamera.MyDebug;
import java.util.ArrayList;
import java.util.List;
/** This contains functionality related to the seekbars for manual controls.
 *  For white balance, ISO and shutter speed, a list of discrete values is built, and the
 *  seekbar progress is an index into that list.
 */
public class ManualSeekbars {
    private static final String TAG = "ManualSeekbars";

    private static final int manual_n = 1000; // the number of values on the seekbar used for manual focus distance

    /** Maps a linear fraction in [0,1] to a non-linear fraction in [0,1]. */
    public static double seekbarScaling(double frac) {
        // For various seekbars, we want to use a non-linear scaling, so user has more control over smaller values
        return (Math.pow(100.0, frac) - 1.0) / 99.0;
    }

    /** Inverse of seekbarScaling(). */
    private static double seekbarScalingInverse(double scaling) {
        return Math.log(99.0*scaling + 1.0) / Math.log(100.0);
    }

    /** Sets the seekbar progress (on a 0..manual_n scale) corresponding to value, which
     *  should lie between min_value and max_value, using the non-linear scaling above.
     */
    public static void setProgressSeekbarScaled(SeekBar seekBar, double min_value, double max_value, double value) {
        seekBar.setMax(manual_n);
        double scaling = (value - min_value)/(max_value - min_value);
        double frac = seekbarScalingInverse(scaling);
        int new_value = (int)(frac*manual_n + 0.5); // add 0.5 for rounding
        if( new_value < 0 )
            new_value = 0;
        else if( new_value > manual_n )
            new_value = manual_n;
        seekBar.setProgress(new_value);
    }

    /*public static long exponentialScaling(double frac, double min, double max) {
        // We use S(frac) = A * e^(s * frac)
        // We want S(0) = min, S(1) = max
        // So A = min
        // and Ae^s = max
        // => s = ln(max/min)
        double s = Math.log(max / min);
        return (long)(min * Math.exp(s * frac) + 0.5f); // add 0.5f so we round to nearest
    }

    private static double exponentialScalingInverse(double value, double min, double max) {
        double s = Math.log(max / min);
        return Math.log(value / min) / s;
    }

    public void setProgressSeekbarExponential(SeekBar seekBar, double min_value, double max_value, double value) {
        seekBar.setMax(manual_n);
        double frac = exponentialScalingInverse(value, min_value, max_value);
        int new_value = (int)(frac*manual_n + 0.5); // add 0.5 for rounding
        if( new_value < 0 )
            new_value = 0;
        else if( new_value > manual_n )
            new_value = manual_n;
        seekBar.setProgress(new_value);
    }*/

    // values for each seekbar position; the seekbar progress is an index into these lists
    private List<Long> seekbar_values_white_balance;
    private List<Long> seekbar_values_iso;
    private List<Long> seekbar_values_shutter_speed;

    /** Returns the white balance temperature corresponding to a seekbar position. */
    public int getWhiteBalanceTemperature(int progress) {
        return seekbar_values_white_balance.get(progress).intValue();
    }

    /** Returns the ISO corresponding to a seekbar position. */
    public int getISO(int progress) {
        return seekbar_values_iso.get(progress).intValue();
    }

    /** Returns the exposure time (in ns) corresponding to a seekbar position. */
    public long getExposureTime(int progress) {
        return seekbar_values_shutter_speed.get(progress);
    }

    /** Sets the seekbar to the position whose value is closest to current_value. */
    static private void setProgressBarToClosest(SeekBar seekBar, List<Long> seekbar_values, long current_value) {
        if( MyDebug.LOG )
            Log.d(TAG, "setProgressBarToClosest");
        int closest_indx = -1;
        long min_dist = 0;
        for(int i=0;i<seekbar_values.size();i++) {
            long dist = Math.abs(seekbar_values.get(i) - current_value);
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "seekbar_values[" + i + "]: " + seekbar_values.get(i));
                Log.d(TAG, "    dist: " + dist);
            }*/
            if( closest_indx == -1 || dist < min_dist ) {
                closest_indx = i;
                min_dist = dist;
            }
        }
        if( MyDebug.LOG )
            Log.d(TAG, "closest_indx: " + closest_indx);
        if( closest_indx != -1 )
            seekBar.setProgress(closest_indx);
    }

    void setISOProgressBarToClosest(SeekBar seekBar, long current_iso) {
        setProgressBarToClosest(seekBar, seekbar_values_iso, current_iso);
    }

    /** Builds the white balance values (min to max in steps of 100K) and positions the
     *  seekbar at the value closest to current_white_balance.
     */
    public void setProgressSeekbarWhiteBalance(SeekBar seekBar, long min_white_balance, long max_white_balance, long current_white_balance) {
        if( MyDebug.LOG )
            Log.d(TAG, "setProgressSeekbarWhiteBalance");
        seekbar_values_white_balance = new ArrayList<>();
        List<Long> seekbar_values = seekbar_values_white_balance;
        // min to max, per 100
        for(long i=min_white_balance;i<max_white_balance;i+=100) {
            seekbar_values.add(i);
        }
        seekbar_values.add(max_white_balance);
        seekBar.setMax(seekbar_values.size()-1);
        setProgressBarToClosest(seekBar, seekbar_values, current_white_balance);
    }

    /** Builds the ISO values (coarser steps at higher ISOs) clamped to [min_iso, max_iso]
     *  and positions the seekbar at the value closest to current_iso.
     */
    public void setProgressSeekbarISO(SeekBar seekBar, long min_iso, long max_iso, long current_iso) {
        if( MyDebug.LOG )
            Log.d(TAG, "setProgressSeekbarISO");
        seekbar_values_iso = new ArrayList<>();
        List<Long> seekbar_values = seekbar_values_iso;
        seekbar_values.add(min_iso);
        // 1 to 99, per 1
        for(long i=1;i<100;i++) {
            if( i > min_iso && i < max_iso )
                seekbar_values.add(i);
        }
        // 100 to 500, per 5
        for(long i=100;i<500;i+=5) {
            if( i > min_iso && i < max_iso )
                seekbar_values.add(i);
        }
        // 500 to 1000, per 10
        for(long i=500;i<1000;i+=10) {
            if( i > min_iso && i < max_iso )
                seekbar_values.add(i);
        }
        // 1000 to 5000, per 50
        for(long i=1000;i<5000;i+=50) {
            if( i > min_iso && i < max_iso )
                seekbar_values.add(i);
        }
        // 5000 to 10000, per 100
        for(long i=5000;i<10000;i+=100) {
            if( i > min_iso && i < max_iso )
                seekbar_values.add(i);
        }
        seekbar_values.add(max_iso);
        seekBar.setMax(seekbar_values.size()-1);
        setProgressBarToClosest(seekBar, seekbar_values, current_iso);
    }

    /** Builds the shutter speed values in ns (coarser steps for longer exposures) clamped
     *  to [min_exposure_time, max_exposure_time] and positions the seekbar at the value
     *  closest to current_exposure_time.
     */
    public void setProgressSeekbarShutterSpeed(SeekBar seekBar, long min_exposure_time, long max_exposure_time, long current_exposure_time) {
        if( MyDebug.LOG )
            Log.d(TAG, "setProgressSeekbarShutterSpeed");
        seekbar_values_shutter_speed = new ArrayList<>();
        List<Long> seekbar_values = seekbar_values_shutter_speed;
        seekbar_values.add(min_exposure_time);
        // 1/10,000 to 1/1,000
        for(int i=10;i>=1;i--) {
            long exposure = 1000000000L/(i* 1000L);
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 1/900 to 1/100
        for(int i=9;i>=1;i--) {
            long exposure = 1000000000L/(i* 100L);
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 1/90 to 1/60 (steps of 10)
        for(int i=9;i>=6;i--) {
            long exposure = 1000000000L/(i* 10L);
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 1/50 to 1/15 (steps of 5)
        for(int i=50;i>=15;i-=5) {
            long exposure = 1000000000L/i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 0.1 to 1.9, per 0.1s
        for(int i=1;i<20;i++) {
            long exposure = (1000000000L/10)*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 2 to 19, per 1s
        for(int i=2;i<20;i++) {
            long exposure = 1000000000L*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 20 to 60, per 5s
        for(int i=20;i<60;i+=5) {
            long exposure = 1000000000L*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // n.b., very long exposure times are not widely supported, but requested at https://sourceforge.net/p/opencamera/code/merge-requests/49/
        // 60 to 180, per 15s
        for(int i=60;i<180;i+=15) {
            long exposure = 1000000000L*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 180 to 600, per 60s
        for(int i=180;i<600;i+=60) {
            long exposure = 1000000000L*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        // 600 to 1200, per 120s
        for(int i=600;i<=1200;i+=120) {
            long exposure = 1000000000L*i;
            if( exposure > min_exposure_time && exposure < max_exposure_time )
                seekbar_values.add(exposure);
        }
        seekbar_values.add(max_exposure_time);
        seekBar.setMax(seekbar_values.size()-1);
        setProgressBarToClosest(seekBar, seekbar_values, current_exposure_time);
    }
}

Some files were not shown because too many files have changed in this diff Show more