Skip to content

Commit 273fde7

Browse files
authored
Merge pull request #41 from EchoNineLabs/fix/caching
fix script caching / checksum calculation
2 parents f5240b5 + 181667d commit 273fde7

File tree

2 files changed

+56
-25
lines changed

2 files changed

+56
-25
lines changed
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
package dev.echonine.kite.scripting.cache
2+
3+
import com.google.gson.GsonBuilder
4+
import com.google.gson.reflect.TypeToken
5+
import dev.echonine.kite.Kite
6+
import kotlinx.coroutines.sync.Mutex
7+
import kotlinx.coroutines.sync.withLock
8+
import kotlin.io.path.Path
9+
10+
/** Maps a script's display name to the file paths of the scripts it imports. */
typealias DependencyTree = MutableMap<String, List<String>>

/**
 * Persists the script import graph to `cache/.imports` as pretty-printed JSON.
 *
 * The cache is used to include imported scripts in a script's compilation
 * checksum, so editing an imported script invalidates the importer's cache.
 * All writes are serialized through a [Mutex], making [write] safe to call
 * from concurrent coroutines.
 */
class ImportsCache {

    // Serializes read-modify-write cycles on the cache file.
    private val mutex = Mutex()
    private val gson = GsonBuilder().setPrettyPrinting().create()
    // Falls back to the working directory when the plugin instance is not available (e.g. tests).
    private val file = Path(Kite.instance?.dataFolder?.path ?: System.getProperty("user.dir", "."), "cache", ".imports").toFile()

    // Type token carrying the generic MutableMap<String, List<String>> type for Gson.
    private val typeToken = object : TypeToken<DependencyTree>() { /* TYPE MARKER */ }

    /** In-memory view of the dependency tree; refreshed from disk on every [write]. */
    var cache: DependencyTree = mutableMapOf()
        private set

    /**
     * Records [dependencies] as the list of scripts imported by [name] and
     * flushes the updated tree to disk.
     *
     * The file is re-read before each update so concurrent external changes are
     * not clobbered; an empty or corrupt cache file is treated as an empty tree
     * rather than failing the write.
     */
    suspend fun write(name: String, dependencies: List<String>) = mutex.withLock {
        // Creating parent directories and file in case it does not exist.
        file.parentFile.mkdirs()
        file.createNewFile()
        // Reading file contents. runCatching is safe here: the lambda contains no
        // suspension points, so no CancellationException can be swallowed. A corrupt
        // or empty file yields an empty map instead of a JsonSyntaxException crash.
        // Using typeToken.type (not the TypeToken overload) keeps compatibility with
        // Gson versions older than 2.10 and matches the serialization call below.
        cache = runCatching {
            file.bufferedReader().use { gson.fromJson<DependencyTree>(it, typeToken.type) }
        }.getOrNull() ?: mutableMapOf()
        // Putting list of dependencies / imports to the map.
        cache[name] = dependencies
        // Saving contents to the file.
        file.bufferedWriter().use { gson.toJson(cache, typeToken.type, it) }
    }

}

src/main/kotlin/dev/echonine/kite/scripting/configuration/KiteCompilationConfiguration.kt

Lines changed: 22 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,10 @@ import dev.echonine.kite.api.annotations.Repository
88
import dev.echonine.kite.scripting.configuration.compat.DynamicServerJarCompat
99
import dev.echonine.kite.scripting.Script
1010
import dev.echonine.kite.scripting.ScriptContext
11+
import dev.echonine.kite.scripting.cache.ImportsCache
12+
import kotlinx.coroutines.CoroutineScope
13+
import kotlinx.coroutines.Dispatchers
14+
import kotlinx.coroutines.launch
1115
import org.bukkit.Server
1216
import org.bukkit.plugin.java.JavaPlugin
1317
import revxrsal.zapper.DependencyManager
@@ -54,6 +58,10 @@ val cacheDirectory by lazy {
5458
File(Kite.instance?.dataFolder?.path ?: System.getProperty("user.dir", "."), "cache")
5559
}
5660

61+
val importsCache by lazy {
62+
ImportsCache()
63+
}
64+
5765
@Suppress("JavaIoSerializableObjectMustHaveReadResolve")
5866
object KiteCompilationConfiguration : ScriptCompilationConfiguration({
5967
// Adding Bukkit APIs and Kite to default imports.
@@ -132,6 +140,9 @@ object KiteCompilationConfiguration : ScriptCompilationConfiguration({
132140
dependencies.append(JvmDependency(scriptDependencies.map { File(libsDirectory, it) }.filter { it.exists() }))
133141
// Appending imported sources to the script.
134142
importedSources.takeUnless { it.isEmpty() }?.let { importScripts.append(it) }
143+
CoroutineScope(Dispatchers.IO).launch {
144+
importsCache.write(context.compilationConfiguration[displayName]!!, importedSources.map { it.file.path }.toList())
145+
}
135146
}.asSuccess()
136147
})
137148
}
@@ -146,40 +157,26 @@ object KiteCompilationConfiguration : ScriptCompilationConfiguration({
146157
if (cacheDirectory.isDirectory || cacheDirectory.mkdirs()) {
147158
// Configuring compilation cache.
148159
compilationCache(CompiledScriptJarsCache { script, compilationConfiguration ->
160+
val name = compilationConfiguration[displayName]
161+
val checksum = MessageDigest.getInstance("MD5")
149162
// Getting the MD5 checksum and including it in the file name.
150163
// MD5 checksum acts as a file identifier here.
151-
val mainScriptHash = script.text.toByteArray().let {
152-
val md = MessageDigest.getInstance("MD5")
153-
md.update(it)
154-
md.digest().joinToString("") { byte -> "%02x".format(byte) }
164+
checksum.update(script.text.toByteArray())
165+
// Updating digest with all imported scripts.
166+
importsCache.cache[name]?.forEach {
167+
checksum.update(File(it).readBytes())
155168
}
156-
// Also hash the imported scripts
157-
val importsHash = (compilationConfiguration[importScripts]
158-
?: emptyList<FileScriptSource>()).joinToString("") { importedScript ->
159-
importedScript.text.toByteArray().let {
160-
val md = MessageDigest.getInstance("MD5")
161-
md.update(it)
162-
md.digest().joinToString("") { byte -> "%02x".format(byte) }
163-
}
164-
}
165-
166-
// Creating the final hash by combining the main script hash and imports hash
167-
val hash = MessageDigest.getInstance("MD5").apply {
168-
update(mainScriptHash.toByteArray())
169-
update(importsHash.toByteArray())
170-
}.digest().joinToString("") { byte -> "%02x".format(byte) }
171-
172-
val cacheFileName = "${compilationConfiguration[displayName]}.${hash}.cache.jar"
173-
169+
// Converting checksum to a human-readable format so it can be included in the cache file name.
170+
val hash = checksum.digest().joinToString("") { "%02x".format(it) }
171+
val cacheFileName = "$name.$hash.cache.jar"
174172
// Purging old cache files with different hashes (not the current one).
175173
cacheDirectory.listFiles()
176174
?.filter {
177-
it.name.endsWith(".cache.jar") &&
178-
it.name.split(".").first() == compilationConfiguration[displayName] &&
175+
it.name.endsWith(".cache.jar") &&
176+
it.name.split(".").first() == name &&
179177
it.name != cacheFileName
180178
}
181179
?.forEach { it.delete() }
182-
183180
return@CompiledScriptJarsCache File(cacheDirectory, cacheFileName)
184181
})
185182
}

0 commit comments

Comments
 (0)