|
1 | 1 | package com.github.jengelman.gradle.plugins.shadow.transformers |
2 | 2 |
|
3 | 3 | import java.io.File |
4 | | -import java.nio.ByteBuffer |
5 | 4 | import java.security.MessageDigest |
6 | 5 | import javax.inject.Inject |
| 6 | +import org.apache.commons.io.input.MessageDigestInputStream |
7 | 7 | import org.apache.tools.zip.ZipOutputStream |
8 | 8 | import org.gradle.api.GradleException |
9 | 9 | import org.gradle.api.file.FileTreeElement |
10 | 10 | import org.gradle.api.model.ObjectFactory |
11 | 11 | import org.gradle.api.tasks.Internal |
12 | 12 | import org.gradle.api.tasks.util.PatternSet |
| 13 | +import org.gradle.internal.impldep.org.apache.commons.codec.binary.Hex |
13 | 14 |
|
14 | 15 | /** |
15 | 16 | * Transformer to include files with identical content only once in the shadow JAR. |
@@ -61,11 +62,11 @@ public open class DeduplicatingResourceTransformer( |
61 | 62 | public constructor(objectFactory: ObjectFactory) : this(objectFactory, PatternSet()) |
62 | 63 |
|
63 | 64 | internal data class PathInfos(val failOnDuplicateContent: Boolean) { |
64 | | - val filesPerHash: MutableMap<Long, MutableList<File>> = mutableMapOf() |
| 65 | + val filesPerHash: MutableMap<String, MutableList<File>> = mutableMapOf() |
65 | 66 |
|
    /** Returns the number of distinct file contents (unique hashes) recorded so far. */
    fun uniqueContentCount() = filesPerHash.size
67 | 68 |
|
68 | | - fun addFile(hash: Long, file: File): Boolean { |
| 69 | + fun addFile(hash: String, file: File): Boolean { |
69 | 70 | var filesForHash: MutableList<File>? = filesPerHash[hash] |
70 | 71 | val new = filesForHash == null |
71 | 72 | if (new) { |
@@ -113,23 +114,23 @@ public open class DeduplicatingResourceTransformer( |
  // Cached digest instance, created lazily by hashForFile() on first use.
  // @Transient: not carried across serialization; it is simply re-created
  // (as null) the next time hashForFile() runs.
  @Transient
  private var digest: MessageDigest? = null
115 | 116 |
|
/**
 * Computes the SHA-256 hash of [file]'s contents, returned as a lowercase
 * hex string (via [Hex.encodeHexString] with `toLowerCase = true`).
 *
 * The [MessageDigest] is created lazily and cached in [digest]; since
 * `MessageDigest.digest()` resets the instance, it is safe to reuse it for
 * subsequent files.
 *
 * @param file the file whose contents are hashed; read in 8 KiB chunks.
 * @return lowercase hex-encoded SHA-256 digest of the file contents.
 * @throws RuntimeException wrapping any I/O or digest failure for [file].
 */
internal fun hashForFile(file: File): String {
  // Lazy init via elvis + also instead of a null check followed by `!!`.
  val sha256 = digest ?: MessageDigest.getInstance("SHA-256").also { digest = it }
  try {
    // We could replace this block with `o.a.c.codec.digest.DigestUtils.digest(MessageDigest, File)`,
    // but adding a whole new dependency seemed a bit overkill.
    file.inputStream().use { input ->
      val buffer = ByteArray(8192)
      var readBytes = input.read(buffer)
      while (readBytes != -1) {
        sha256.update(buffer, 0, readBytes)
        readBytes = input.read(buffer)
      }
    }
    // digest() both finalizes the hash and resets the instance for reuse.
    return Hex.encodeHexString(sha256.digest(), true)
  } catch (e: Exception) {
    throw RuntimeException("Failed to read data or calculate hash for $file", e)
  }
|
0 commit comments