2025-09-18 17:54:51 +02:00
|
|
|
/*
|
|
|
|
|
* Nextcloud - Android Client
|
|
|
|
|
*
|
|
|
|
|
* SPDX-FileCopyrightText: 2020 Chris Narkiewicz <hello@ezaquarii.com>
|
2025-09-18 18:43:03 +02:00
|
|
|
* SPDX-License-Identifier: AGPL-3.0-or-later OR GPL-2.0-only
|
2025-09-18 17:54:51 +02:00
|
|
|
*/
|
|
|
|
|
package com.nextcloud.client.jobs
|
|
|
|
|
|
|
|
|
|
import android.provider.MediaStore
|
|
|
|
|
import androidx.lifecycle.LiveData
|
|
|
|
|
import androidx.lifecycle.map
|
2025-09-18 18:43:03 +02:00
|
|
|
import androidx.work.BackoffPolicy
|
2025-09-18 17:54:51 +02:00
|
|
|
import androidx.work.Constraints
|
|
|
|
|
import androidx.work.Data
|
|
|
|
|
import androidx.work.ExistingPeriodicWorkPolicy
|
|
|
|
|
import androidx.work.ExistingWorkPolicy
|
|
|
|
|
import androidx.work.ListenableWorker
|
|
|
|
|
import androidx.work.NetworkType
|
|
|
|
|
import androidx.work.OneTimeWorkRequest
|
|
|
|
|
import androidx.work.Operation
|
|
|
|
|
import androidx.work.PeriodicWorkRequest
|
|
|
|
|
import androidx.work.WorkInfo
|
|
|
|
|
import androidx.work.WorkManager
|
|
|
|
|
import androidx.work.workDataOf
|
|
|
|
|
import com.nextcloud.client.account.User
|
|
|
|
|
import com.nextcloud.client.core.Clock
|
|
|
|
|
import com.nextcloud.client.di.Injectable
|
|
|
|
|
import com.nextcloud.client.documentscan.GeneratePdfFromImagesWork
|
2025-11-20 16:16:40 +01:00
|
|
|
import com.nextcloud.client.jobs.autoUpload.AutoUploadWorker
|
2025-09-18 17:54:51 +02:00
|
|
|
import com.nextcloud.client.jobs.download.FileDownloadWorker
|
2025-11-20 16:16:40 +01:00
|
|
|
import com.nextcloud.client.jobs.folderDownload.FolderDownloadWorker
|
2025-09-18 18:43:03 +02:00
|
|
|
import com.nextcloud.client.jobs.metadata.MetadataWorker
|
|
|
|
|
import com.nextcloud.client.jobs.offlineOperations.OfflineOperationsWorker
|
|
|
|
|
import com.nextcloud.client.jobs.upload.FileUploadHelper
|
2025-09-18 17:54:51 +02:00
|
|
|
import com.nextcloud.client.jobs.upload.FileUploadWorker
|
|
|
|
|
import com.nextcloud.client.preferences.AppPreferences
|
2025-09-18 18:43:03 +02:00
|
|
|
import com.nextcloud.utils.extensions.isWorkRunning
|
2025-09-18 17:54:51 +02:00
|
|
|
import com.nextcloud.utils.extensions.isWorkScheduled
|
|
|
|
|
import com.owncloud.android.datamodel.OCFile
|
2025-11-20 16:16:40 +01:00
|
|
|
import com.owncloud.android.datamodel.SyncedFolder
|
2025-09-18 17:54:51 +02:00
|
|
|
import com.owncloud.android.operations.DownloadType
|
2025-09-18 18:43:03 +02:00
|
|
|
import kotlinx.coroutines.CoroutineScope
|
|
|
|
|
import kotlinx.coroutines.Dispatchers
|
|
|
|
|
import kotlinx.coroutines.launch
|
2025-09-18 17:54:51 +02:00
|
|
|
import java.util.Date
|
|
|
|
|
import java.util.UUID
|
|
|
|
|
import java.util.concurrent.TimeUnit
|
|
|
|
|
import kotlin.reflect.KClass
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Note to maintainers
|
|
|
|
|
*
|
|
|
|
|
* Since [androidx.work.WorkManager] is missing API to easily attach worker metadata,
|
|
|
|
|
* we use tags API to attach our custom metadata.
|
|
|
|
|
*
|
|
|
|
|
* To create new job request, use [BackgroundJobManagerImpl.oneTimeRequestBuilder] and
|
|
|
|
|
* [BackgroundJobManagerImpl.periodicRequestBuilder] calls, instead of calling
|
|
|
|
|
* platform builders. Those methods will create builders pre-set with mandatory tags.
|
|
|
|
|
*
|
|
|
|
|
* Since Google is notoriously releasing new background job services, [androidx.work.WorkManager] API is
|
|
|
|
|
* considered private implementation detail and should not be leaked through the interface, to minimize
|
|
|
|
|
* potential migration cost in the future.
|
|
|
|
|
*/
|
|
|
|
|
@Suppress("TooManyFunctions") // we expect this implementation to have rich API
|
|
|
|
|
internal class BackgroundJobManagerImpl(
|
|
|
|
|
private val workManager: WorkManager,
|
|
|
|
|
private val clock: Clock,
|
|
|
|
|
private val preferences: AppPreferences
|
2025-09-18 18:43:03 +02:00
|
|
|
) : BackgroundJobManager,
|
|
|
|
|
Injectable {
|
2025-09-18 17:54:51 +02:00
|
|
|
|
|
|
|
|
companion object {
    // Tag attached to every request so all Nextcloud-scheduled work can be listed/cancelled at once.
    const val TAG_ALL = "*" // This tag allows us to retrieve list of all jobs run by Nextcloud client

    // Unique job names (also used as unique-work identifiers in WorkManager).
    const val JOB_CONTENT_OBSERVER = "content_observer"
    const val JOB_PERIODIC_CONTACTS_BACKUP = "periodic_contacts_backup"
    const val JOB_IMMEDIATE_CONTACTS_BACKUP = "immediate_contacts_backup"
    const val JOB_IMMEDIATE_CONTACTS_IMPORT = "immediate_contacts_import"
    const val JOB_PERIODIC_CALENDAR_BACKUP = "periodic_calendar_backup"
    const val JOB_IMMEDIATE_CALENDAR_IMPORT = "immediate_calendar_import"
    const val JOB_PERIODIC_FILES_SYNC = "periodic_files_sync"
    const val JOB_IMMEDIATE_FILES_SYNC = "immediate_files_sync"
    const val JOB_PERIODIC_OFFLINE_SYNC = "periodic_offline_sync"
    const val JOB_PERIODIC_MEDIA_FOLDER_DETECTION = "periodic_media_folder_detection"
    const val JOB_IMMEDIATE_MEDIA_FOLDER_DETECTION = "immediate_media_folder_detection"
    const val JOB_NOTIFICATION = "notification"
    const val JOB_ACCOUNT_REMOVAL = "account_removal"
    const val JOB_FILES_UPLOAD = "files_upload"
    const val JOB_FOLDER_DOWNLOAD = "folder_download"
    const val JOB_FILES_DOWNLOAD = "files_download"
    const val JOB_PDF_GENERATION = "pdf_generation"
    const val JOB_IMMEDIATE_CALENDAR_BACKUP = "immediate_calendar_backup"
    const val JOB_IMMEDIATE_FILES_EXPORT = "immediate_files_export"
    const val JOB_OFFLINE_OPERATIONS = "offline_operations"
    const val JOB_PERIODIC_OFFLINE_OPERATIONS = "periodic_offline_operations"
    const val JOB_PERIODIC_HEALTH_STATUS = "periodic_health_status"
    const val JOB_IMMEDIATE_HEALTH_STATUS = "immediate_health_status"
    const val JOB_DOWNLOAD_FOLDER = "download_folder"
    const val JOB_METADATA_SYNC = "metadata_sync"
    const val JOB_INTERNAL_TWO_WAY_SYNC = "internal_two_way_sync"

    const val JOB_TEST = "test_job"

    const val MAX_CONTENT_TRIGGER_DELAY_MS = 10000L

    // Metadata is stored in WorkManager tags as "prefix:value" pairs.
    const val TAG_PREFIX_NAME = "name"
    const val TAG_PREFIX_USER = "user"
    const val TAG_PREFIX_CLASS = "class"
    const val TAG_PREFIX_START_TIMESTAMP = "timestamp"
    val PREFIXES = setOf(TAG_PREFIX_NAME, TAG_PREFIX_USER, TAG_PREFIX_START_TIMESTAMP, TAG_PREFIX_CLASS)
    const val NOT_SET_VALUE = "not set"
    const val PERIODIC_BACKUP_INTERVAL_MINUTES = 24 * 60L
    const val DEFAULT_PERIODIC_JOB_INTERVAL_MINUTES = 15L
    const val OFFLINE_OPERATIONS_PERIODIC_JOB_INTERVAL_MINUTES = 5L
    const val DEFAULT_IMMEDIATE_JOB_DELAY_SEC = 3L
    const val DEFAULT_BACKOFF_CRITERIA_DELAY_SEC = 300L

    // Worker log entries older than this (3 days, in milliseconds) are pruned on every write.
    private const val KEEP_LOG_MILLIS = 1000 * 60 * 60 * 24 * 3L

    /** Builds the "name:..." tag, optionally scoped by the user's account name. */
    fun formatNameTag(name: String, user: User? = null): String = if (user == null) {
        "$TAG_PREFIX_NAME:$name"
    } else {
        "$TAG_PREFIX_NAME:$name ${user.accountName}"
    }

    fun formatUserTag(user: User): String = "$TAG_PREFIX_USER:${user.accountName}"
    fun formatClassTag(jobClass: KClass<out ListenableWorker>): String = "$TAG_PREFIX_CLASS:${jobClass.simpleName}"
    fun formatTimeTag(startTimestamp: Long): String = "$TAG_PREFIX_START_TIMESTAMP:$startTimestamp"

    /**
     * Splits a tag into a (prefix, value) pair, or returns null if the prefix
     * is not one of our known metadata [PREFIXES].
     */
    fun parseTag(tag: String): Pair<String, String>? {
        val key = tag.substringBefore(":", "")
        val value = tag.substringAfter(":", "")
        return if (key in PREFIXES) {
            key to value
        } else {
            null
        }
    }

    /** Parses a millisecond-epoch string; malformed input maps to epoch zero. */
    fun parseTimestamp(timestamp: String): Date = try {
        Date(timestamp.toLong())
    } catch (ex: NumberFormatException) {
        Date(0)
    }

    /**
     * Convert platform [androidx.work.WorkInfo] object into application-specific [JobInfo] model.
     * Conversion extracts work metadata from tags.
     */
    fun fromWorkInfo(info: WorkInfo?): JobInfo? = if (info != null) {
        val metadata = mutableMapOf<String, String>()
        // Named lambda params avoid shadowing `it` between forEach and let.
        info.tags.forEach { tag -> parseTag(tag)?.let { (key, value) -> metadata[key] = value } }
        val timestamp = parseTimestamp(metadata[TAG_PREFIX_START_TIMESTAMP] ?: "0")
        JobInfo(
            id = info.id,
            state = info.state.toString(),
            name = metadata[TAG_PREFIX_NAME] ?: NOT_SET_VALUE,
            user = metadata[TAG_PREFIX_USER] ?: NOT_SET_VALUE,
            started = timestamp,
            progress = info.progress.getInt("progress", -1),
            workerClass = metadata[TAG_PREFIX_CLASS] ?: NOT_SET_VALUE
        )
    } else {
        null
    }

    /**
     * Removes entries whose start or finish time predates the retention window
     * and returns the (mutated) list. The cutoff is computed once instead of
     * per-entry, avoiding two Date allocations per element.
     */
    fun deleteOldLogs(logEntries: MutableList<LogEntry>): MutableList<LogEntry> {
        val cutoff = Date(Date().time - KEEP_LOG_MILLIS)
        logEntries.removeIf {
            (it.started != null && cutoff.after(it.started)) ||
                (it.finished != null && cutoff.after(it.finished))
        }
        return logEntries
    }
}
|
|
|
|
|
|
2025-09-18 18:43:03 +02:00
|
|
|
// Scope for fire-and-forget background preparation work (e.g. batching upload requests off the caller thread).
private val defaultDispatcherScope = CoroutineScope(Dispatchers.Default)
|
|
|
|
|
|
2025-09-18 17:54:51 +02:00
|
|
|
/**
 * Appends a "worker started" entry to the persisted worker log,
 * pruning expired entries first. A null name is recorded as [NOT_SET_VALUE].
 */
override fun logStartOfWorker(workerName: String?) {
    val entries = deleteOldLogs(preferences.readLogEntry().toMutableList())
    entries.add(LogEntry(Date(), null, null, workerName ?: NOT_SET_VALUE))
    preferences.saveLogEntry(entries)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Appends a "worker finished" entry (with its [result]) to the persisted
 * worker log, pruning expired entries first.
 */
override fun logEndOfWorker(workerName: String?, result: ListenableWorker.Result) {
    val entries = deleteOldLogs(preferences.readLogEntry().toMutableList())
    entries.add(LogEntry(null, Date(), result.toString(), workerName ?: NOT_SET_VALUE))
    preferences.saveLogEntry(entries)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Create [OneTimeWorkRequest.Builder] pre-set with the mandatory metadata tags:
 * the catch-all tag, job name, start timestamp, worker class and (when given) the user.
 */
private fun oneTimeRequestBuilder(
    jobClass: KClass<out ListenableWorker>,
    jobName: String,
    user: User? = null,
    constraints: Constraints = Constraints.Builder().build()
): OneTimeWorkRequest.Builder = OneTimeWorkRequest.Builder(jobClass.java)
    .addTag(TAG_ALL)
    .addTag(formatNameTag(jobName, user))
    .addTag(formatTimeTag(clock.currentTime))
    .addTag(formatClassTag(jobClass))
    .setConstraints(constraints)
    .apply { user?.let { addTag(formatUserTag(it)) } }
|
|
|
|
|
|
|
|
|
|
/**
 * Create [PeriodicWorkRequest.Builder] pre-set with the mandatory metadata tags;
 * interval and flex interval are given in minutes.
 */
private fun periodicRequestBuilder(
    jobClass: KClass<out ListenableWorker>,
    jobName: String,
    intervalMins: Long = DEFAULT_PERIODIC_JOB_INTERVAL_MINUTES,
    flexIntervalMins: Long = DEFAULT_PERIODIC_JOB_INTERVAL_MINUTES,
    user: User? = null,
    constraints: Constraints = Constraints.Builder().build()
): PeriodicWorkRequest.Builder = PeriodicWorkRequest.Builder(
    jobClass.java,
    intervalMins,
    TimeUnit.MINUTES,
    flexIntervalMins,
    TimeUnit.MINUTES
)
    .addTag(TAG_ALL)
    .addTag(formatNameTag(jobName, user))
    .addTag(formatTimeTag(clock.currentTime))
    .addTag(formatClassTag(jobClass))
    .setConstraints(constraints)
    .apply { user?.let { addTag(formatUserTag(it)) } }
|
|
|
|
|
|
|
|
|
|
/** Maps the [WorkInfo] LiveData for [id] into the app-level [JobInfo] model. */
private fun WorkManager.getJobInfo(id: UUID): LiveData<JobInfo?> =
    getWorkInfoByIdLiveData(id).map { fromWorkInfo(it) }
|
|
|
|
|
|
|
|
|
|
/**
 * Cancel work using name tag with optional user scope.
 * All work instances carrying that tag are cancelled.
 */
private fun WorkManager.cancelJob(name: String, user: User? = null): Operation =
    cancelAllWorkByTag(formatNameTag(name, user))
|
|
|
|
|
|
|
|
|
|
/**
 * Live list of every Nextcloud-tagged job, newest start first.
 * Uses [TAG_ALL] instead of a duplicated "*" literal so the query stays
 * in sync with the tag attached by the request builders.
 */
override val jobs: LiveData<List<JobInfo>>
    get() = workManager.getWorkInfosByTagLiveData(TAG_ALL)
        .map { infos -> infos.map { fromWorkInfo(it) ?: JobInfo() }.sortedBy { it.started }.reversed() }
|
|
|
|
|
|
|
|
|
|
/**
 * (Re)schedules the one-shot worker that fires when the device's image or
 * video media stores change (debounced by [MAX_CONTENT_TRIGGER_DELAY_MS]).
 */
override fun scheduleContentObserverJob() {
    val triggerConstraints = Constraints.Builder()
        .addContentUriTrigger(MediaStore.Images.Media.INTERNAL_CONTENT_URI, true)
        .addContentUriTrigger(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, true)
        .addContentUriTrigger(MediaStore.Video.Media.INTERNAL_CONTENT_URI, true)
        .addContentUriTrigger(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, true)
        .setTriggerContentMaxDelay(MAX_CONTENT_TRIGGER_DELAY_MS, TimeUnit.MILLISECONDS)
        .build()

    val request = oneTimeRequestBuilder(ContentObserverWork::class, JOB_CONTENT_OBSERVER)
        .setConstraints(triggerConstraints)
        .build()

    workManager.enqueueUniqueWork(JOB_CONTENT_OBSERVER, ExistingWorkPolicy.REPLACE, request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the daily contacts backup for [user]; an already-scheduled
 * backup is kept as-is (KEEP policy). Requires network connectivity.
 */
override fun schedulePeriodicContactsBackup(user: User) {
    val inputData = Data.Builder()
        .putString(ContactsBackupWork.KEY_ACCOUNT, user.accountName)
        .putBoolean(ContactsBackupWork.KEY_FORCE, true)
        .build()

    val networkConstraint = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .build()

    val request = periodicRequestBuilder(
        jobClass = ContactsBackupWork::class,
        jobName = JOB_PERIODIC_CONTACTS_BACKUP,
        intervalMins = PERIODIC_BACKUP_INTERVAL_MINUTES,
        user = user
    ).setInputData(inputData)
        .setConstraints(networkConstraint)
        .build()

    workManager.enqueueUniquePeriodicWork(JOB_PERIODIC_CONTACTS_BACKUP, ExistingPeriodicWorkPolicy.KEEP, request)
}
|
|
|
|
|
|
|
|
|
|
/** Cancels the periodic contacts backup scheduled for [user]. */
override fun cancelPeriodicContactsBackup(user: User) = workManager
    .cancelJob(JOB_PERIODIC_CONTACTS_BACKUP, user)
    .let { }
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues an immediate contacts import from [vCardFilePath] into the given
 * contacts account, importing only the entries listed in
 * [selectedContactsFilePath]. Returns live [JobInfo] for the enqueued request.
 */
override fun startImmediateContactsImport(
    contactsAccountName: String?,
    contactsAccountType: String?,
    vCardFilePath: String,
    selectedContactsFilePath: String
): LiveData<JobInfo?> {
    val inputData = Data.Builder()
        .putString(ContactsImportWork.ACCOUNT_NAME, contactsAccountName)
        .putString(ContactsImportWork.ACCOUNT_TYPE, contactsAccountType)
        .putString(ContactsImportWork.VCARD_FILE_PATH, vCardFilePath)
        .putString(ContactsImportWork.SELECTED_CONTACTS_FILE_PATH, selectedContactsFilePath)
        .build()

    // Explicitly allow running while not charging.
    val chargingConstraint = Constraints.Builder()
        .setRequiresCharging(false)
        .build()

    val request = oneTimeRequestBuilder(ContactsImportWork::class, JOB_IMMEDIATE_CONTACTS_IMPORT)
        .setInputData(inputData)
        .setConstraints(chargingConstraint)
        .build()

    workManager.enqueueUniqueWork(JOB_IMMEDIATE_CONTACTS_IMPORT, ExistingWorkPolicy.KEEP, request)
    return workManager.getJobInfo(request.id)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues an immediate calendar import for the given path → id mapping.
 * Returns live [JobInfo] for the enqueued request.
 */
override fun startImmediateCalendarImport(calendarPaths: Map<String, Int>): LiveData<JobInfo?> {
    val inputData = Data.Builder()
        .putAll(calendarPaths)
        .build()

    // Explicitly allow running while not charging.
    val chargingConstraint = Constraints.Builder()
        .setRequiresCharging(false)
        .build()

    val request = oneTimeRequestBuilder(CalendarImportWork::class, JOB_IMMEDIATE_CALENDAR_IMPORT)
        .setInputData(inputData)
        .setConstraints(chargingConstraint)
        .build()

    workManager.enqueueUniqueWork(JOB_IMMEDIATE_CALENDAR_IMPORT, ExistingWorkPolicy.KEEP, request)
    return workManager.getJobInfo(request.id)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues an immediate export of [files] (by their file ids); additional
 * requests are appended to (or replace a failed) existing chain.
 * Returns live [JobInfo] for the enqueued request.
 */
override fun startImmediateFilesExportJob(files: Collection<OCFile>): LiveData<JobInfo?> {
    val fileIds = LongArray(files.size)
    files.forEachIndexed { index, file -> fileIds[index] = file.fileId }

    val inputData = Data.Builder()
        .putLongArray(FilesExportWork.FILES_TO_DOWNLOAD, fileIds)
        .build()

    val request = oneTimeRequestBuilder(FilesExportWork::class, JOB_IMMEDIATE_FILES_EXPORT)
        .setInputData(inputData)
        .build()

    workManager.enqueueUniqueWork(JOB_IMMEDIATE_FILES_EXPORT, ExistingWorkPolicy.APPEND_OR_REPLACE, request)
    return workManager.getJobInfo(request.id)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues an immediate (forced) contacts backup for [user];
 * returns live [JobInfo] for the enqueued request.
 */
override fun startImmediateContactsBackup(user: User): LiveData<JobInfo?> {
    val inputData = Data.Builder()
        .putString(ContactsBackupWork.KEY_ACCOUNT, user.accountName)
        .putBoolean(ContactsBackupWork.KEY_FORCE, true)
        .build()

    val request = oneTimeRequestBuilder(ContactsBackupWork::class, JOB_IMMEDIATE_CONTACTS_BACKUP, user)
        .setInputData(inputData)
        .build()

    workManager.enqueueUniqueWork(JOB_IMMEDIATE_CONTACTS_BACKUP, ExistingWorkPolicy.KEEP, request)
    return workManager.getJobInfo(request.id)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues an immediate (forced) calendar backup for [user];
 * returns live [JobInfo] for the enqueued request.
 */
override fun startImmediateCalendarBackup(user: User): LiveData<JobInfo?> {
    val inputData = Data.Builder()
        .putString(CalendarBackupWork.ACCOUNT, user.accountName)
        .putBoolean(CalendarBackupWork.FORCE, true)
        .build()

    val request = oneTimeRequestBuilder(CalendarBackupWork::class, JOB_IMMEDIATE_CALENDAR_BACKUP, user)
        .setInputData(inputData)
        .build()

    workManager.enqueueUniqueWork(JOB_IMMEDIATE_CALENDAR_BACKUP, ExistingWorkPolicy.KEEP, request)
    return workManager.getJobInfo(request.id)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the daily calendar backup for [user]; an already-scheduled
 * backup is kept as-is (KEEP policy).
 */
override fun schedulePeriodicCalendarBackup(user: User) {
    val inputData = Data.Builder()
        .putString(CalendarBackupWork.ACCOUNT, user.accountName)
        .putBoolean(CalendarBackupWork.FORCE, true)
        .build()

    val request = periodicRequestBuilder(
        jobClass = CalendarBackupWork::class,
        jobName = JOB_PERIODIC_CALENDAR_BACKUP,
        intervalMins = PERIODIC_BACKUP_INTERVAL_MINUTES,
        user = user
    )
        .setInputData(inputData)
        .build()

    workManager.enqueueUniquePeriodicWork(JOB_PERIODIC_CALENDAR_BACKUP, ExistingPeriodicWorkPolicy.KEEP, request)
}
|
|
|
|
|
|
|
|
|
|
/** Cancels the periodic calendar backup scheduled for [user]. */
override fun cancelPeriodicCalendarBackup(user: User) = workManager
    .cancelJob(JOB_PERIODIC_CALENDAR_BACKUP, user)
    .let { }
|
|
|
|
|
|
2025-09-18 18:43:03 +02:00
|
|
|
/**
 * True only when BOTH the periodic and the immediate files-sync worker
 * for the given synced folder are currently running.
 */
override fun bothFilesSyncJobsRunning(syncedFolderID: Long): Boolean {
    val periodicRunning = workManager.isWorkRunning("${JOB_PERIODIC_FILES_SYNC}_$syncedFolderID")
    val immediateRunning = workManager.isWorkRunning("${JOB_IMMEDIATE_FILES_SYNC}_$syncedFolderID")
    return periodicRunning && immediateRunning
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the periodic offline-operations worker (every
 * [OFFLINE_OPERATIONS_PERIODIC_JOB_INTERVAL_MINUTES] minutes); an existing
 * schedule is updated in place (UPDATE policy).
 */
override fun startPeriodicallyOfflineOperation() {
    val arguments = Data.Builder()
        .putString(OfflineOperationsWorker.JOB_NAME, JOB_PERIODIC_OFFLINE_OPERATIONS)
        .build()

    val request = periodicRequestBuilder(
        jobClass = OfflineOperationsWorker::class,
        jobName = JOB_PERIODIC_OFFLINE_OPERATIONS,
        intervalMins = OFFLINE_OPERATIONS_PERIODIC_JOB_INTERVAL_MINUTES
    ).setInputData(arguments).build()

    workManager.enqueueUniquePeriodicWork(
        JOB_PERIODIC_OFFLINE_OPERATIONS,
        ExistingPeriodicWorkPolicy.UPDATE,
        request
    )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues a one-shot, network-gated run of the offline-operations worker.
 * An already-enqueued run is kept (KEEP policy).
 */
override fun startOfflineOperations() {
    val inputData = Data.Builder()
        .putString(OfflineOperationsWorker.JOB_NAME, JOB_OFFLINE_OPERATIONS)
        .build()

    val constraints = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .build()

    // Backoff criteria define how the system should retry the task if it fails.
    // LINEAR means each retry will be delayed linearly (e.g., 10s, 20s, 30s...)
    // DEFAULT_BACKOFF_CRITERIA_DELAY_SEC (300s) is used as the initial delay duration.
    val backoffCriteriaPolicy = BackoffPolicy.LINEAR
    val backoffCriteriaDelay = DEFAULT_BACKOFF_CRITERIA_DELAY_SEC

    val request =
        oneTimeRequestBuilder(OfflineOperationsWorker::class, JOB_OFFLINE_OPERATIONS, constraints = constraints)
            .setBackoffCriteria(
                backoffCriteriaPolicy,
                backoffCriteriaDelay,
                TimeUnit.SECONDS
            )
            .setInputData(inputData)
            .build()

    workManager.enqueueUniqueWork(
        JOB_OFFLINE_OPERATIONS,
        ExistingWorkPolicy.KEEP,
        request
    )
}
|
|
|
|
|
|
2025-11-20 16:16:40 +01:00
|
|
|
/**
 * Schedules the periodic auto-upload worker for one synced folder.
 * Requires connectivity, honors the folder's charging-only flag, and keeps
 * an existing schedule unchanged (KEEP policy).
 */
override fun schedulePeriodicFilesSyncJob(syncedFolder: SyncedFolder) {
    val folderId = syncedFolder.id
    // One unique periodic work per synced folder.
    val workName = "${JOB_PERIODIC_FILES_SYNC}_$folderId"

    val inputData = Data.Builder()
        .putLong(AutoUploadWorker.SYNCED_FOLDER_ID, folderId)
        .build()

    val syncConstraints = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .setRequiresCharging(syncedFolder.isChargingOnly)
        .build()

    val request = periodicRequestBuilder(
        jobClass = AutoUploadWorker::class,
        jobName = workName,
        intervalMins = DEFAULT_PERIODIC_JOB_INTERVAL_MINUTES,
        constraints = syncConstraints
    )
        .setBackoffCriteria(
            BackoffPolicy.LINEAR,
            DEFAULT_BACKOFF_CRITERIA_DELAY_SEC,
            TimeUnit.SECONDS
        )
        .setInputData(inputData)
        .build()

    workManager.enqueueUniquePeriodicWork(
        workName,
        ExistingPeriodicWorkPolicy.KEEP,
        request
    )
}
|
|
|
|
|
|
2025-11-20 16:16:40 +01:00
|
|
|
/**
 * Enqueues an immediate auto-upload run for one synced folder.
 * New requests are appended to the folder's existing chain (APPEND policy).
 *
 * @param overridePowerSaving pass-through flag for the worker's power-saving check
 * @param contentUris media URIs that triggered this run
 */
override fun startAutoUploadImmediately(
    syncedFolder: SyncedFolder,
    overridePowerSaving: Boolean,
    contentUris: Array<String?>
) {
    val folderId = syncedFolder.id
    // One unique immediate work chain per synced folder.
    val workName = "${JOB_IMMEDIATE_FILES_SYNC}_$folderId"

    val inputData = Data.Builder()
        .putBoolean(AutoUploadWorker.OVERRIDE_POWER_SAVING, overridePowerSaving)
        .putStringArray(AutoUploadWorker.CONTENT_URIS, contentUris)
        .putLong(AutoUploadWorker.SYNCED_FOLDER_ID, folderId)
        .build()

    val syncConstraints = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .setRequiresCharging(syncedFolder.isChargingOnly)
        .build()

    val request = oneTimeRequestBuilder(
        jobClass = AutoUploadWorker::class,
        jobName = workName
    )
        .setInputData(inputData)
        .setConstraints(syncConstraints)
        .setBackoffCriteria(
            BackoffPolicy.LINEAR,
            DEFAULT_BACKOFF_CRITERIA_DELAY_SEC,
            TimeUnit.SECONDS
        )
        .build()

    workManager.enqueueUniqueWork(
        workName,
        ExistingWorkPolicy.APPEND,
        request
    )
}
|
|
|
|
|
|
|
|
|
|
/** Cancels the internal two-way sync job (all users). */
override fun cancelTwoWaySyncJob() = workManager
    .cancelJob(JOB_INTERNAL_TWO_WAY_SYNC)
    .let { }

/** Cancels every scheduled or running [FileDownloadWorker] instance. */
override fun cancelAllFilesDownloadJobs() = workManager
    .cancelAllWorkByTag(formatClassTag(FileDownloadWorker::class))
    .let { }
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues a metadata sync for [currentDirPath]; a pending sync is replaced
 * (REPLACE policy) so only the most recent directory is refreshed.
 * Requires connectivity and non-low battery.
 */
override fun startMetadataSyncJob(currentDirPath: String) {
    val inputData = Data.Builder()
        .putString(MetadataWorker.FILE_PATH, currentDirPath)
        .build()

    val syncConstraints = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .setRequiresBatteryNotLow(true)
        .build()

    val request = oneTimeRequestBuilder(MetadataWorker::class, JOB_METADATA_SYNC)
        .setConstraints(syncConstraints)
        .setInputData(inputData)
        .build()

    workManager.enqueueUniqueWork(
        JOB_METADATA_SYNC,
        ExistingWorkPolicy.REPLACE,
        request
    )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the periodic offline sync, restricted to unmetered networks;
 * an existing schedule is kept as-is (KEEP policy).
 */
override fun scheduleOfflineSync() {
    val unmeteredConstraint = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.UNMETERED)
        .build()

    val request = periodicRequestBuilder(OfflineSyncWork::class, JOB_PERIODIC_OFFLINE_SYNC)
        .setConstraints(unmeteredConstraint)
        .build()

    workManager.enqueueUniquePeriodicWork(JOB_PERIODIC_OFFLINE_SYNC, ExistingPeriodicWorkPolicy.KEEP, request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules periodic media-folder detection; an existing schedule is kept
 * as-is (KEEP policy).
 */
override fun scheduleMediaFoldersDetectionJob() {
    val request = periodicRequestBuilder(
        MediaFoldersDetectionWork::class,
        JOB_PERIODIC_MEDIA_FOLDER_DETECTION
    ).build()

    workManager.enqueueUniquePeriodicWork(
        JOB_PERIODIC_MEDIA_FOLDER_DETECTION,
        ExistingPeriodicWorkPolicy.KEEP,
        request
    )
}

/**
 * Enqueues an immediate media-folder detection run; a pending run is kept
 * (KEEP policy) instead of being duplicated.
 */
override fun startMediaFoldersDetectionJob() {
    val request = oneTimeRequestBuilder(
        MediaFoldersDetectionWork::class,
        JOB_IMMEDIATE_MEDIA_FOLDER_DETECTION
    ).build()

    workManager.enqueueUniqueWork(
        JOB_IMMEDIATE_MEDIA_FOLDER_DETECTION,
        ExistingWorkPolicy.KEEP,
        request
    )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues a worker to process one push notification payload.
 * Not unique work: every notification gets its own request.
 */
override fun startNotificationJob(subject: String, signature: String) {
    val inputData = Data.Builder()
        .putString(NotificationWork.KEY_NOTIFICATION_SUBJECT, subject)
        .putString(NotificationWork.KEY_NOTIFICATION_SIGNATURE, signature)
        .build()

    val request = oneTimeRequestBuilder(NotificationWork::class, JOB_NOTIFICATION)
        .setInputData(inputData)
        .build()

    workManager.enqueue(request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues removal of the given account, optionally triggered by a remote wipe.
 * Not unique work: each removal request is processed independently.
 */
override fun startAccountRemovalJob(accountName: String, remoteWipe: Boolean) {
    val inputData = Data.Builder()
        .putString(AccountRemovalWork.ACCOUNT, accountName)
        .putBoolean(AccountRemovalWork.REMOTE_WIPE, remoteWipe)
        .build()

    val request = oneTimeRequestBuilder(AccountRemovalWork::class, JOB_ACCOUNT_REMOVAL)
        .setInputData(inputData)
        .build()

    workManager.enqueue(request)
}
|
|
|
|
|
|
2025-11-20 16:16:40 +01:00
|
|
|
/** Unique-work name for the per-account upload chain. */
private fun startFileUploadJobTag(accountName: String): String = "$JOB_FILES_UPLOAD$accountName"

/** True when an upload chain for [accountName] is scheduled (enqueued or running). */
override fun isStartFileUploadJobScheduled(accountName: String): Boolean {
    val tag = startFileUploadJobTag(accountName)
    return workManager.isWorkScheduled(tag)
}
|
2025-09-18 17:54:51 +02:00
|
|
|
|
2025-09-18 18:43:03 +02:00
|
|
|
/**
 * This method supports initiating uploads for various scenarios, including:
 * - New upload batches
 * - Failed uploads
 * - FilesSyncWork
 * - ...
 *
 * Uploads are split into batches of [FileUploadHelper.MAX_FILE_COUNT] ids; the
 * batch requests are chained sequentially under one unique-work name per
 * account and appended to (or replacing a failed) existing chain.
 *
 * @param user The user for whom the upload job is being created.
 * @param uploadIds Array of upload IDs to be processed. These IDs originate from multiple sources
 * and cannot be determined directly from the account name or a single function
 * within the worker.
 * @param showSameFileAlreadyExistsNotification forwarded to the worker's input data.
 */
override fun startFilesUploadJob(user: User, uploadIds: LongArray, showSameFileAlreadyExistsNotification: Boolean) {
    // Batching/chaining is prepared off the caller thread.
    defaultDispatcherScope.launch {
        val batchSize = FileUploadHelper.MAX_FILE_COUNT
        val batches = uploadIds.toList().chunked(batchSize)
        val tag = startFileUploadJobTag(user.accountName)

        val constraints = Constraints.Builder()
            .setRequiredNetworkType(NetworkType.CONNECTED)
            .build()

        // Shared builder carrying the per-job constants; per-batch keys are
        // overwritten below before each build().
        val dataBuilder = Data.Builder()
            .putBoolean(
                FileUploadWorker.SHOW_SAME_FILE_ALREADY_EXISTS_NOTIFICATION,
                showSameFileAlreadyExistsNotification
            )
            .putString(FileUploadWorker.ACCOUNT, user.accountName)
            .putInt(FileUploadWorker.TOTAL_UPLOAD_SIZE, uploadIds.size)

        val workRequests = batches.mapIndexed { index, batch ->
            // Overwrites UPLOAD_IDS / CURRENT_BATCH_INDEX in the shared builder;
            // dataBuilder.build() below snapshots the current values, so each
            // request still gets its own batch. Order of these two statements matters.
            dataBuilder
                .putLongArray(FileUploadWorker.UPLOAD_IDS, batch.toLongArray())
                .putInt(FileUploadWorker.CURRENT_BATCH_INDEX, index)

            oneTimeRequestBuilder(FileUploadWorker::class, JOB_FILES_UPLOAD, user)
                .addTag(tag)
                .setInputData(dataBuilder.build())
                .setConstraints(constraints)
                .build()
        }

        // Chain the work requests sequentially
        if (workRequests.isNotEmpty()) {
            var workChain = workManager.beginUniqueWork(
                tag,
                ExistingWorkPolicy.APPEND_OR_REPLACE,
                workRequests.first()
            )

            workRequests.drop(1).forEach { request ->
                workChain = workChain.then(request)
            }

            workChain.enqueue()
        }
    }
}
|
|
|
|
|
|
2025-09-18 18:43:03 +02:00
|
|
|
/** Unique WorkManager tag for the download job of one file of one account. */
private fun startFileDownloadJobTag(user: User, fileId: Long): String =
    "$JOB_FOLDER_DOWNLOAD${user.accountName}$fileId"
|
2025-09-18 17:54:51 +02:00
|
|
|
|
|
|
|
|
/**
 * Enqueues a one-off [FileDownloadWorker] for a single [file] of [user].
 *
 * The unique-work name encodes account and file ID, and KEEP is used so a
 * download already queued for the same file is not restarted.
 */
override fun startFileDownloadJob(
    user: User,
    file: OCFile,
    behaviour: String,
    downloadType: DownloadType?,
    activityName: String,
    packageName: String,
    conflictUploadId: Long?
) {
    val tag = startFileDownloadJobTag(user, file.fileId)

    // downloadType may be null; toString() then yields the literal "null",
    // which the worker is presumably prepared to parse — confirm on change.
    val downloadTypeName = downloadType.toString()

    val data = workDataOf(
        FileDownloadWorker.ACCOUNT_NAME to user.accountName,
        FileDownloadWorker.FILE_REMOTE_PATH to file.remotePath,
        FileDownloadWorker.BEHAVIOUR to behaviour,
        FileDownloadWorker.DOWNLOAD_TYPE to downloadTypeName,
        FileDownloadWorker.ACTIVITY_NAME to activityName,
        FileDownloadWorker.PACKAGE_NAME to packageName,
        FileDownloadWorker.CONFLICT_UPLOAD_ID to conflictUploadId
    )

    val request = oneTimeRequestBuilder(FileDownloadWorker::class, JOB_FILES_DOWNLOAD, user)
        .setInputData(data)
        .addTag(tag)
        .build()

    // Since a new FileDownloadWorker is scheduled per file,
    // ExistingWorkPolicy.KEEP is the right policy here.
    workManager.enqueueUniqueWork(tag, ExistingWorkPolicy.KEEP, request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Live view of all upload jobs for [user], mapped to [JobInfo]; entries that
 * cannot be converted fall back to an empty [JobInfo].
 */
override fun getFileUploads(user: User): LiveData<List<JobInfo>> {
    val workInfoLiveData = workManager.getWorkInfosByTagLiveData(formatNameTag(JOB_FILES_UPLOAD, user))
    // Named lambda parameters: the original `{ it -> it.map { it } }` shadowed
    // `it` across nested lambdas.
    return workInfoLiveData.map { infos ->
        infos.map { info -> fromWorkInfo(info) ?: JobInfo() }
    }
}
|
|
|
|
|
|
|
|
|
|
/**
 * Cancels every upload job previously enqueued for [user]'s account.
 */
override fun cancelFilesUploadJob(user: User) {
    workManager.cancelJob(JOB_FILES_UPLOAD, user)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Cancels the download job scheduled for the file [fileId] of [user]'s
 * account, identified by the same tag used when it was enqueued.
 */
override fun cancelFilesDownloadJob(user: User, fileId: Long) {
    val downloadTag = startFileDownloadJobTag(user, fileId)
    workManager.cancelAllWorkByTag(downloadTag)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Enqueues a one-off job that renders [imagePaths] into a single PDF at
 * [pdfPath] and uploads the result to [uploadFolder] on [user]'s account.
 */
override fun startPdfGenerateAndUploadWork(
    user: User,
    uploadFolder: String,
    imagePaths: List<String>,
    pdfPath: String
) {
    val inputData = Data.Builder()
        .putStringArray(GeneratePdfFromImagesWork.INPUT_IMAGE_FILE_PATHS, imagePaths.toTypedArray())
        .putString(GeneratePdfFromImagesWork.INPUT_OUTPUT_FILE_PATH, pdfPath)
        .putString(GeneratePdfFromImagesWork.INPUT_UPLOAD_ACCOUNT, user.accountName)
        .putString(GeneratePdfFromImagesWork.INPUT_UPLOAD_FOLDER, uploadFolder)
        .build()

    val request = oneTimeRequestBuilder(GeneratePdfFromImagesWork::class, JOB_PDF_GENERATION)
        .setInputData(inputData)
        .build()

    workManager.enqueue(request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the periodic diagnostic [TestJob] after a short initial delay,
 * replacing any previously scheduled instance.
 */
override fun scheduleTestJob() {
    val request = periodicRequestBuilder(TestJob::class, JOB_TEST)
        .setInitialDelay(DEFAULT_IMMEDIATE_JOB_DELAY_SEC, TimeUnit.SECONDS)
        .build()
    // ExistingPeriodicWorkPolicy.REPLACE is deprecated; CANCEL_AND_REENQUEUE
    // is its documented equivalent (cancel the old worker, enqueue the new).
    workManager.enqueueUniquePeriodicWork(JOB_TEST, ExistingPeriodicWorkPolicy.CANCEL_AND_REENQUEUE, request)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Runs the diagnostic [TestJob] once, immediately, replacing any pending
 * one-time instance with the same unique name.
 */
override fun startImmediateTestJob() {
    val request = oneTimeRequestBuilder(TestJob::class, JOB_TEST).build()
    workManager.enqueueUniqueWork(JOB_TEST, ExistingWorkPolicy.REPLACE, request)
}
|
|
|
|
|
|
|
|
|
|
/** Cancels any scheduled or running diagnostic [TestJob]. */
override fun cancelTestJob() {
    val testTag = formatNameTag(JOB_TEST)
    workManager.cancelAllWorkByTag(testTag)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Removes finished (succeeded, failed, or cancelled) work records from
 * WorkManager's internal database so it does not grow without bound.
 */
override fun pruneJobs() {
    workManager.pruneWork()
}
|
|
|
|
|
|
|
|
|
|
/**
 * Cancels all work carrying the shared TAG_ALL tag — presumably every request
 * created through this manager's request builders; confirm the builders
 * always add TAG_ALL before relying on this as a global kill switch.
 */
override fun cancelAllJobs() {
    workManager.cancelAllWorkByTag(TAG_ALL)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Schedules the recurring health-status report. KEEP leaves an already
 * scheduled instance untouched instead of resetting its period.
 */
override fun schedulePeriodicHealthStatus() {
    val healthRequest = periodicRequestBuilder(
        jobClass = HealthStatusWork::class,
        jobName = JOB_PERIODIC_HEALTH_STATUS,
        intervalMins = PERIODIC_BACKUP_INTERVAL_MINUTES
    ).build()

    workManager.enqueueUniquePeriodicWork(
        JOB_PERIODIC_HEALTH_STATUS,
        ExistingPeriodicWorkPolicy.KEEP,
        healthRequest
    )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Triggers a one-off health-status report. KEEP skips enqueueing when an
 * immediate report is already pending.
 */
override fun startHealthStatus() {
    val request = oneTimeRequestBuilder(HealthStatusWork::class, JOB_IMMEDIATE_HEALTH_STATUS).build()
    workManager.enqueueUniqueWork(JOB_IMMEDIATE_HEALTH_STATUS, ExistingWorkPolicy.KEEP, request)
}
|
2025-09-18 18:43:03 +02:00
|
|
|
|
|
|
|
|
/**
 * (Re)schedules internal two-way sync to run every [intervalMinutes] minutes,
 * waiting one full interval before the first run. UPDATE applies the new
 * interval without cancelling in-flight work.
 */
override fun scheduleInternal2WaySync(intervalMinutes: Long) {
    val syncRequest = periodicRequestBuilder(
        jobClass = InternalTwoWaySyncWork::class,
        jobName = JOB_INTERNAL_TWO_WAY_SYNC,
        intervalMins = intervalMinutes
    ).setInitialDelay(intervalMinutes, TimeUnit.MINUTES).build()

    workManager.enqueueUniquePeriodicWork(
        JOB_INTERNAL_TWO_WAY_SYNC,
        ExistingPeriodicWorkPolicy.UPDATE,
        syncRequest
    )
}
|
2025-11-20 16:16:40 +01:00
|
|
|
|
|
|
|
|
/**
 * Enqueues a [FolderDownloadWorker] for every file inside [folder] on the
 * account [accountName]. Runs only with connectivity and non-low storage;
 * successive calls are appended to the same unique chain (or replace it if
 * the previous chain failed).
 */
override fun downloadFolder(folder: OCFile, accountName: String) {
    val constraints = Constraints.Builder()
        .setRequiredNetworkType(NetworkType.CONNECTED)
        .setRequiresStorageNotLow(true)
        .build()

    val inputData = workDataOf(
        FolderDownloadWorker.FOLDER_ID to folder.fileId,
        FolderDownloadWorker.ACCOUNT_NAME to accountName
    )

    val request = oneTimeRequestBuilder(FolderDownloadWorker::class, JOB_DOWNLOAD_FOLDER)
        .setInputData(inputData)
        .setConstraints(constraints)
        .addTag(JOB_DOWNLOAD_FOLDER)
        .build()

    workManager.enqueueUniqueWork(JOB_DOWNLOAD_FOLDER, ExistingWorkPolicy.APPEND_OR_REPLACE, request)
}
|
|
|
|
|
|
|
|
|
|
/** Cancels any pending or running folder-download work. */
override fun cancelFolderDownload() {
    workManager.cancelAllWorkByTag(JOB_DOWNLOAD_FOLDER)
}
|
2025-09-18 17:54:51 +02:00
|
|
|
}
|