mirror of
https://github.com/YTVanced/VancedMicroG
synced 2024-12-04 00:37:27 +00:00
EN: Catch errors in zip file processing
parent 8e7544bd14
commit f12536e6ce
1 changed file with 42 additions and 37 deletions
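The change itself is small: each key file's zip handling now runs on the IO dispatcher and sits in its own try/catch, so a single corrupt or unreadable archive is logged as "Failed parsing file" and skipped instead of aborting the whole batch. A minimal sketch of that pattern, with placeholder names (TAG, processOne and files are illustrative, not from the repository):

import android.util.Log
import java.io.File
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext

private const val TAG = "ZipProcessing"          // placeholder tag

// Placeholder for the per-file work (copy, hash, parse) done in the real code.
private fun processOne(file: File) { /* ... */ }

// Each file is processed on Dispatchers.IO inside its own try/catch, so one
// bad archive is logged and skipped and the loop continues with the next file.
suspend fun processAll(files: List<File>) {
    for (file in files) {
        withContext(Dispatchers.IO) {
            try {
                processOne(file)
            } catch (e: Exception) {
                Log.w(TAG, "Failed parsing file", e)
            }
        }
    }
}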
@@ -19,10 +19,7 @@ import com.google.android.gms.common.api.Status
 import com.google.android.gms.nearby.exposurenotification.*
 import com.google.android.gms.nearby.exposurenotification.ExposureNotificationStatusCodes.*
 import com.google.android.gms.nearby.exposurenotification.internal.*
-import kotlinx.coroutines.CompletableDeferred
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.withTimeout
+import kotlinx.coroutines.*
 import org.json.JSONArray
 import org.json.JSONObject
 import org.microg.gms.common.Constants
@@ -296,19 +293,21 @@ class ExposureNotificationServiceImpl(private val context: Context, private val
                 Log.d(TAG, "Using key file supplier")
                 try {
                     while (keyFileSupplier.isAvailable && keyFileSupplier.hasNext()) {
-                        try {
-                            val cacheFile = File(context.cacheDir, "en-keyfile-${System.currentTimeMillis()}-${Random.nextInt()}.zip")
-                            ParcelFileDescriptor.AutoCloseInputStream(keyFileSupplier.next()).use { it.copyToFile(cacheFile) }
-                            val hash = MessageDigest.getInstance("SHA-256").digest(cacheFile)
-                            val storedKeys = database.storeDiagnosisFileUsed(tid, hash)
-                            if (storedKeys != null) {
-                                keys += storedKeys.toInt()
-                                cacheFile.delete()
-                            } else {
-                                todoKeyFiles.add(cacheFile to hash)
+                        withContext(Dispatchers.IO) {
+                            try {
+                                val cacheFile = File(context.cacheDir, "en-keyfile-${System.currentTimeMillis()}-${Random.nextLong()}.zip")
+                                ParcelFileDescriptor.AutoCloseInputStream(keyFileSupplier.next()).use { it.copyToFile(cacheFile) }
+                                val hash = MessageDigest.getInstance("SHA-256").digest(cacheFile)
+                                val storedKeys = database.storeDiagnosisFileUsed(tid, hash)
+                                if (storedKeys != null) {
+                                    keys += storedKeys.toInt()
+                                    cacheFile.delete()
+                                } else {
+                                    todoKeyFiles.add(cacheFile to hash)
+                                }
+                            } catch (e: Exception) {
+                                Log.w(TAG, "Failed parsing file", e)
                             }
-                        } catch (e: Exception) {
-                            Log.w(TAG, "Failed parsing file", e)
                         }
                     }
                 } catch (e: Exception) {
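One note on the hashing line kept in this hunk: MessageDigest.digest() in the JDK takes a byte array, so digest(cacheFile) presumably goes through a File extension defined elsewhere in the project. A rough, self-contained equivalent of what such a helper would compute (the name sha256Of is illustrative):

import java.io.File
import java.security.MessageDigest

// Streams the file through SHA-256 in 8 KiB chunks and returns the 32-byte digest.
fun sha256Of(file: File): ByteArray {
    val digest = MessageDigest.getInstance("SHA-256")
    file.inputStream().use { input ->
        val buffer = ByteArray(8192)
        while (true) {
            val read = input.read(buffer)
            if (read == -1) break
            digest.update(buffer, 0, read)
        }
    }
    return digest.digest()
}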
@@ -331,32 +330,38 @@ class ExposureNotificationServiceImpl(private val context: Context, private val
 
             var newKeys = if (params.keys != null) database.finishSingleMatching(tid) else 0
             for ((cacheFile, hash) in todoKeyFiles) {
-                ZipFile(cacheFile).use { zip ->
-                    for (entry in zip.entries()) {
-                        if (entry.name == "export.bin") {
-                            val stream = zip.getInputStream(entry)
-                            val prefix = ByteArray(16)
-                            var totalBytesRead = 0
-                            var bytesRead = 0
-                            while (bytesRead != -1 && totalBytesRead < prefix.size) {
-                                bytesRead = stream.read(prefix, totalBytesRead, prefix.size - totalBytesRead)
-                                if (bytesRead > 0) {
-                                    totalBytesRead += bytesRead
+                withContext(Dispatchers.IO) {
+                    try {
+                        ZipFile(cacheFile).use { zip ->
+                            for (entry in zip.entries()) {
+                                if (entry.name == "export.bin") {
+                                    val stream = zip.getInputStream(entry)
+                                    val prefix = ByteArray(16)
+                                    var totalBytesRead = 0
+                                    var bytesRead = 0
+                                    while (bytesRead != -1 && totalBytesRead < prefix.size) {
+                                        bytesRead = stream.read(prefix, totalBytesRead, prefix.size - totalBytesRead)
+                                        if (bytesRead > 0) {
+                                            totalBytesRead += bytesRead
+                                        }
+                                    }
+                                    if (totalBytesRead == prefix.size && String(prefix).trim() == "EK Export v1") {
+                                        val export = TemporaryExposureKeyExport.ADAPTER.decode(stream)
+                                        database.finishFileMatching(tid, hash, export.end_timestamp?.let { it * 1000 }
+                                                ?: System.currentTimeMillis(), export.keys.map { it.toKey() }, export.revised_keys.map { it.toKey() })
+                                        keys += export.keys.size + export.revised_keys.size
+                                        newKeys += export.keys.size
+                                    } else {
+                                        Log.d(TAG, "export.bin had invalid prefix")
+                                    }
                                 }
                             }
-                            if (totalBytesRead == prefix.size && String(prefix).trim() == "EK Export v1") {
-                                val export = TemporaryExposureKeyExport.ADAPTER.decode(stream)
-                                database.finishFileMatching(tid, hash, export.end_timestamp?.let { it * 1000 }
-                                        ?: System.currentTimeMillis(), export.keys.map { it.toKey() }, export.revised_keys.map { it.toKey() })
-                                keys += export.keys.size + export.revised_keys.size
-                                newKeys += export.keys.size
-                            } else {
-                                Log.d(TAG, "export.bin had invalid prefix")
-                            }
                         }
+                        cacheFile.delete()
+                    } catch (e: Exception) {
+                        Log.w(TAG, "Failed parsing file", e)
                     }
                 }
-                cacheFile.delete()
             }
 
             val time = (System.currentTimeMillis() - start).coerceAtLeast(1).toDouble() / 1000.0
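The prefix check above is what decides whether an export.bin entry is parsed at all: the first 16 bytes must trim to "EK Export v1" before the TemporaryExposureKeyExport payload is decoded. A standalone sketch of that check on an arbitrary zip file (the helper name is illustrative, not part of the repository):

import java.io.File
import java.util.zip.ZipFile

// Returns true when the zip contains an export.bin entry whose first 16 bytes
// trim to the "EK Export v1" header, mirroring the check in the diff above.
fun hasValidExportPrefix(file: File): Boolean = ZipFile(file).use { zip ->
    val entry = zip.getEntry("export.bin") ?: return false
    zip.getInputStream(entry).use { stream ->
        val prefix = ByteArray(16)
        var total = 0
        while (total < prefix.size) {
            val read = stream.read(prefix, total, prefix.size - total)
            if (read == -1) break                // entry ended before 16 bytes
            total += read
        }
        total == prefix.size && String(prefix).trim() == "EK Export v1"
    }
}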