Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,20 +1,28 @@
package leakcanary

import androidx.lifecycle.Observer
import androidx.work.Data
import androidx.work.OneTimeWorkRequest
import androidx.work.WorkInfo
import androidx.work.WorkManager
import androidx.work.multiprocess.RemoteListenableWorker.ARGUMENT_CLASS_NAME
import androidx.work.multiprocess.RemoteListenableWorker.ARGUMENT_PACKAGE_NAME
import android.os.Handler
import android.os.Looper
import java.io.File
import leakcanary.EventListener.Event
import leakcanary.EventListener.Event.HeapAnalysisDone
import leakcanary.EventListener.Event.HeapDump
import leakcanary.internal.HeapAnalyzerWorker.Companion.asWorkerInputData
import leakcanary.internal.InternalLeakCanary
import leakcanary.internal.RemoteHeapAnalyzerWorker
import leakcanary.internal.Serializables
import shark.SharkLog

/**
 * When receiving a [HeapDump] event, starts a WorkManager worker that performs heap analysis in
 * a dedicated :leakcanary process. When the analysis completes, the result is sent back to the
 * main process and dispatched to all configured event listeners.
 */
object RemoteWorkManagerHeapAnalyzer : EventListener {

Expand Down Expand Up @@ -45,6 +53,48 @@ object RemoteWorkManagerHeapAnalyzer : EventListener {
SharkLog.d { "Enqueuing heap analysis for ${event.file} on WorkManager remote worker" }
val workManager = WorkManager.getInstance(application)
workManager.enqueue(heapAnalysisRequest)

// Observe the remote worker's completion from the main process so we can
// re-dispatch the HeapAnalysisDone event to listeners running here.
val workInfoLiveData = workManager.getWorkInfoByIdLiveData(heapAnalysisRequest.id)
Handler(Looper.getMainLooper()).post {
// TODO(review): use mainHandler from Handlers.kt instead of creating a new Handler here.
workInfoLiveData.observeForever(object : Observer<WorkInfo> {
override fun onChanged(workInfo: WorkInfo) {
if (workInfo.state.isFinished) {
workInfoLiveData.removeObserver(this)
if (workInfo.state == WorkInfo.State.SUCCEEDED) {
dispatchEventFromRemoteWorker(workInfo.outputData)
}
}
}
})
}
}
}

/**
 * Reads the serialized [HeapAnalysisDone] event that the remote worker wrote to disk,
 * dispatches it to the event listeners configured in this process, then deletes the file.
 *
 * The remote worker hands the event over through a file rather than directly in [Data]
 * because WorkManager output data is size-limited; only the file path travels in
 * [outputData] under [RemoteHeapAnalyzerWorker.EVENT_FILE_KEY].
 *
 * Missing path, missing file, or a deserialization failure are logged and swallowed:
 * this is a best-effort re-dispatch, not a correctness-critical path.
 */
private fun dispatchEventFromRemoteWorker(outputData: Data) {
  val eventFilePath = outputData.getString(RemoteHeapAnalyzerWorker.EVENT_FILE_KEY)
  if (eventFilePath == null) {
    SharkLog.d { "Remote worker completed but no event file path in output data" }
    return
  }
  val eventFile = File(eventFilePath)
  if (!eventFile.exists()) {
    SharkLog.d { "Remote worker event file does not exist: $eventFilePath" }
    return
  }
  try {
    val doneEvent = Serializables.fromByteArray<HeapAnalysisDone<*>>(eventFile.readBytes())
    if (doneEvent != null) {
      // No success log here: dispatching the event is itself the observable signal.
      InternalLeakCanary.sendEvent(doneEvent)
    } else {
      SharkLog.d { "Failed to deserialize remote worker event" }
    }
  } catch (e: Throwable) {
    SharkLog.d(e) { "Error reading remote worker event file" }
  } finally {
    // TODO(review): if the main process dies after enqueueing the worker, this observer never
    // runs and the event file leaks on disk. Consider sweeping stale
    // EVENT_FILE_PREFIX-named files (or re-observing pending work) on startup.
    eventFile.delete()
  }
}
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
package leakcanary.internal

import android.content.Context
import androidx.work.Data
import androidx.work.ForegroundInfo
import androidx.work.WorkerParameters
import androidx.work.impl.utils.futures.SettableFuture
import androidx.work.multiprocess.RemoteListenableWorker
import com.google.common.util.concurrent.ListenableFuture
import java.io.File
import leakcanary.BackgroundThreadHeapAnalyzer.heapAnalyzerThreadHandler
import leakcanary.EventListener.Event.HeapDump
import leakcanary.internal.HeapAnalyzerWorker.Companion.asEvent
Expand All @@ -32,8 +34,22 @@ internal class RemoteHeapAnalyzerWorker(
if (result.isCancelled) {
SharkLog.d { "Remote heap analysis for ${heapDump.file} was canceled" }
} else {
// Dispatch to any listeners configured in the background process.
InternalLeakCanary.sendEvent(doneEvent)
result.set(Result.success())
// Serialize the done event to a file so the main process can re-dispatch it
// to its own listeners. WorkManager output data has a 10 KB limit, so we write
// to a shared file instead and pass the path back.
val outputData = try {
val eventFile = File(applicationContext.filesDir, EVENT_FILE_PREFIX + heapDump.uniqueId)
eventFile.writeBytes(doneEvent.toByteArray())
Data.Builder()
.putString(EVENT_FILE_KEY, eventFile.absolutePath)
.build()
} catch (e: Throwable) {
SharkLog.d(e) { "Failed to serialize done event for main process" }
Data.EMPTY
}
result.set(Result.success(outputData))
}
}
return result
Expand All @@ -44,4 +60,9 @@ internal class RemoteHeapAnalyzerWorker(
applicationContext.heapAnalysisForegroundInfo()
}
}

companion object {
// File name prefix (within the app's filesDir) for the serialized analysis-done event that the
// remote :leakcanary process writes for the main process; the heap dump's uniqueId is appended.
const val EVENT_FILE_PREFIX = "leakcanary_remote_event_"
// Key in the worker's output Data whose value is the absolute path of that event file.
const val EVENT_FILE_KEY = "leakcanary.remote.event_file"
}
}
Loading