Skip to content

Commit 9b5a834

Browse files
committed
base for a new storage system
1 parent 13b659b commit 9b5a834

File tree

6 files changed

+188
-74
lines changed

6 files changed

+188
-74
lines changed

common/src/main/java/xyz/jpenilla/squaremap/common/config/Config.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,10 @@
22

33
import java.util.ArrayList;
44
import java.util.List;
5+
import java.util.Locale;
56
import org.spongepowered.configurate.NodePath;
67
import org.spongepowered.configurate.transformation.ConfigurationTransformation;
8+
import xyz.jpenilla.squaremap.common.Logging;
79

810
@SuppressWarnings("unused")
911
public final class Config extends AbstractConfig {
@@ -114,6 +116,22 @@ private static void progressLogging() {
114116
PROGRESS_LOGGING_INTERVAL = config.getInt("settings.render-progress-logging.interval-seconds", 1);
115117
}
116118

119+
public static DataFacilityType DATA_FACILITY_TYPE;
120+
121+
private static void storage() {
122+
try {
123+
DATA_FACILITY_TYPE = DataFacilityType.valueOf(
124+
config.getString("settings.storage.type", "flatfile").toUpperCase(Locale.ROOT)
125+
);
126+
} catch (IllegalArgumentException e) {
127+
Logging.logger().error("Invalid storage type '"
128+
+ config.getString("settings.storage.type", "flatfile")
129+
+ "', falling back to flatfile."
130+
);
131+
DATA_FACILITY_TYPE = DataFacilityType.FLATFILE;
132+
}
133+
}
134+
117135
public static Config config() {
118136
return config;
119137
}
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
package xyz.jpenilla.squaremap.common.config;

/**
 * The available storage backends for squaremap's persisted render data.
 * Selected via {@code settings.storage.type} in {@link Config} (default: flatfile).
 */
public enum DataFacilityType {
    // JSON files on disk (see FlatfileDataFacility); the default backend.
    FLATFILE
}

common/src/main/java/xyz/jpenilla/squaremap/common/data/MapWorldInternal.java

Lines changed: 9 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,8 @@
11
package xyz.jpenilla.squaremap.common.data;
22

3-
import com.google.gson.Gson;
4-
import com.google.gson.GsonBuilder;
5-
import com.google.gson.JsonIOException;
6-
import com.google.gson.JsonSyntaxException;
7-
import com.google.gson.reflect.TypeToken;
8-
import java.io.BufferedReader;
9-
import java.io.IOException;
10-
import java.lang.reflect.Type;
11-
import java.nio.file.Files;
123
import java.nio.file.Path;
134
import java.util.HashMap;
145
import java.util.Iterator;
15-
import java.util.LinkedHashMap;
16-
import java.util.List;
176
import java.util.Map;
187
import java.util.Set;
198
import java.util.concurrent.ConcurrentHashMap;
@@ -32,33 +21,26 @@
3221
import xyz.jpenilla.squaremap.api.Registry;
3322
import xyz.jpenilla.squaremap.api.WorldIdentifier;
3423
import xyz.jpenilla.squaremap.common.LayerRegistry;
35-
import xyz.jpenilla.squaremap.common.Logging;
36-
import xyz.jpenilla.squaremap.common.SquaremapCommon;
3724
import xyz.jpenilla.squaremap.common.config.WorldAdvanced;
3825
import xyz.jpenilla.squaremap.common.config.WorldConfig;
26+
import xyz.jpenilla.squaremap.common.data.facilities.DataFacility;
27+
import xyz.jpenilla.squaremap.common.data.facilities.DataFacilityFactory;
3928
import xyz.jpenilla.squaremap.common.layer.SpawnIconProvider;
4029
import xyz.jpenilla.squaremap.common.layer.WorldBorderProvider;
4130
import xyz.jpenilla.squaremap.common.task.render.AbstractRender;
4231
import xyz.jpenilla.squaremap.common.task.render.BackgroundRender;
4332
import xyz.jpenilla.squaremap.common.task.render.FullRender;
4433
import xyz.jpenilla.squaremap.common.util.Colors;
4534
import xyz.jpenilla.squaremap.common.util.FileUtil;
46-
import xyz.jpenilla.squaremap.common.util.RecordTypeAdapterFactory;
4735
import xyz.jpenilla.squaremap.common.util.Util;
4836
import xyz.jpenilla.squaremap.common.visibilitylimit.VisibilityLimitImpl;
4937

5038
@DefaultQualifier(NonNull.class)
5139
public abstract class MapWorldInternal implements MapWorld {
52-
private static final String DIRTY_CHUNKS_FILE_NAME = "dirty_chunks.json";
53-
private static final String RENDER_PROGRESS_FILE_NAME = "resume_render.json";
54-
private static final Gson GSON = new GsonBuilder()
55-
.registerTypeAdapterFactory(new RecordTypeAdapterFactory())
56-
.enableComplexMapKeySerialization()
57-
.create();
5840
private static final Map<WorldIdentifier, LayerRegistry> LAYER_REGISTRIES = new HashMap<>();
5941

6042
private final ServerLevel level;
61-
private final Path dataPath;
43+
private final DataFacility dataFacility;
6244
private final Path tilesPath;
6345
private final ExecutorService imageIOexecutor;
6446
private final ScheduledExecutorService executor;
@@ -90,16 +72,7 @@ protected MapWorldInternal(final ServerLevel level) {
9072
this.blockColors = new BlockColors(this);
9173
this.levelBiomeColorData = LevelBiomeColorData.create(this);
9274

93-
this.dataPath = SquaremapCommon.instance().platform().dataDirectory().resolve("data").resolve(
94-
Util.levelWebName(this.level)
95-
);
96-
try {
97-
if (!Files.exists(this.dataPath)) {
98-
Files.createDirectories(this.dataPath);
99-
}
100-
} catch (final IOException e) {
101-
throw this.failedToCreateDataDirectory(e);
102-
}
75+
this.dataFacility = DataFacilityFactory.getDataFacility(this.identifier(), Util.levelWebName(this.level));
10376

10477
this.tilesPath = FileUtil.getAndCreateTilesDirectory(this.serverLevel());
10578

@@ -124,53 +97,19 @@ protected MapWorldInternal(final ServerLevel level) {
12497
}
12598

12699
/**
 * Loads previously saved render progress, if any, from the configured storage backend.
 *
 * @return region-to-completion map for a resumable render, or {@code null} when
 *     no saved progress exists (or it could not be read — see the facility impl)
 */
public @Nullable Map<RegionCoordinate, Boolean> getRenderProgress() {
    return this.dataFacility.getRenderProgress();
}
141102

142103
/**
 * Persists render progress through the configured storage backend so an
 * interrupted render can be resumed later.
 *
 * @param regions region-to-completion map to persist
 */
public void saveRenderProgress(Map<RegionCoordinate, Boolean> regions) {
    this.dataFacility.saveRenderProgress(regions);
}
149106

150107
/** Persists this world's modified-chunk set through the configured storage backend. */
private void serializeDirtyChunks() {
    this.dataFacility.saveDirtyChunks(this.modifiedChunks);
}
157110

158111
/**
 * Restores previously persisted dirty chunks into this world's modified-chunk set.
 * The facility returns an empty set when nothing was stored, so this is additive-only.
 */
private void deserializeDirtyChunks() {
    this.modifiedChunks.addAll(this.dataFacility.getDirtyChunks());
}
175114

176115
private void startBackgroundRender() {
@@ -255,11 +194,7 @@ public void pauseRenders(boolean pauseRenders) {
255194
}
256195

257196
/** Clears saved render progress once a render has completed successfully. */
public void finishedRender() {
    this.dataFacility.deleteRenderProgress();
}
264199

265200
public void stopRender() {
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
package xyz.jpenilla.squaremap.common.data.facilities;

import java.util.Map;
import java.util.Set;
import org.checkerframework.checker.nullness.qual.Nullable;
import xyz.jpenilla.squaremap.common.data.ChunkCoordinate;
import xyz.jpenilla.squaremap.common.data.RegionCoordinate;

/**
 * Storage backend for a single world's persisted render state: resumable render
 * progress and the set of chunks modified since the last render pass.
 * Implementations are created per world via {@link DataFacilityFactory}.
 */
public interface DataFacility {

    /**
     * Loads saved render progress.
     *
     * @return region-to-completion map, or {@code null} if none is stored
     */
    @Nullable Map<RegionCoordinate, Boolean> getRenderProgress();

    /**
     * Persists render progress for later resumption.
     *
     * @param renderProgress region-to-completion map to store
     */
    void saveRenderProgress(Map<RegionCoordinate, Boolean> renderProgress);

    /** Removes any stored render progress (e.g. after a render finishes). */
    void deleteRenderProgress();

    /**
     * Loads the stored dirty-chunk set.
     *
     * @return stored dirty chunks; empty when nothing is stored
     */
    Set<ChunkCoordinate> getDirtyChunks();

    /**
     * Persists the dirty-chunk set.
     *
     * @param dirtyChunks chunks modified since the last render
     */
    void saveDirtyChunks(Set<ChunkCoordinate> dirtyChunks);

}
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
package xyz.jpenilla.squaremap.common.data.facilities;
2+
3+
import org.checkerframework.checker.nullness.qual.NonNull;
4+
import org.checkerframework.framework.qual.DefaultQualifier;
5+
import xyz.jpenilla.squaremap.api.WorldIdentifier;
6+
import xyz.jpenilla.squaremap.common.config.Config;
7+
8+
@DefaultQualifier(NonNull.class)
9+
public final class DataFacilityFactory {
10+
11+
public static DataFacility getDataFacility(WorldIdentifier identifier, String worldName) {
12+
// todo
13+
switch (Config.DATA_FACILITY_TYPE) {
14+
case FLATFILE -> new FlatfileDataFacility(identifier, worldName);
15+
}
16+
return null;
17+
}
18+
19+
}
Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
package xyz.jpenilla.squaremap.common.data.facilities;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import com.google.gson.reflect.TypeToken;
import java.io.BufferedReader;
import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
import xyz.jpenilla.squaremap.api.WorldIdentifier;
import xyz.jpenilla.squaremap.common.Logging;
import xyz.jpenilla.squaremap.common.SquaremapCommon;
import xyz.jpenilla.squaremap.common.data.ChunkCoordinate;
import xyz.jpenilla.squaremap.common.data.RegionCoordinate;
import xyz.jpenilla.squaremap.common.util.RecordTypeAdapterFactory;

/**
 * {@link DataFacility} that persists render progress and dirty chunks as JSON
 * files inside {@code <dataDirectory>/data/<worldName>}.
 */
@DefaultQualifier(NonNull.class)
public class FlatfileDataFacility implements DataFacility {
    private static final String DIRTY_CHUNKS_FILE_NAME = "dirty_chunks.json";
    private static final String RENDER_PROGRESS_FILE_NAME = "resume_render.json";
    // Shared Gson: RecordTypeAdapterFactory handles record (de)serialization;
    // complex map key serialization is needed for RegionCoordinate map keys.
    private static final Gson GSON = new GsonBuilder()
        .registerTypeAdapterFactory(new RecordTypeAdapterFactory())
        .enableComplexMapKeySerialization()
        .create();

    private final WorldIdentifier identifier; // used only for log messages
    private final Path dataPath;

    /**
     * @param identifier identifier of the world data is stored for
     * @param worldName  web-safe world name, used as the storage directory name
     * @throws IllegalStateException if the data directory cannot be created
     */
    public FlatfileDataFacility(final WorldIdentifier identifier, final String worldName) {
        this.identifier = identifier;
        this.dataPath = SquaremapCommon.instance().platform().dataDirectory().resolve("data").resolve(worldName);
        try {
            if (!Files.exists(this.dataPath)) {
                Files.createDirectories(this.dataPath);
            }
        } catch (final IOException e) {
            throw new IllegalStateException(String.format("Failed to create data directory for world '%s'", this.identifier), e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * <p>Read failures are logged and reported as {@code null} (treated the same
     * as "no saved progress").</p>
     */
    @Override
    public @Nullable Map<RegionCoordinate, Boolean> getRenderProgress() {
        try {
            final Path file = this.dataPath.resolve(RENDER_PROGRESS_FILE_NAME);
            if (Files.isRegularFile(file)) {
                // LinkedHashMap preserves the stored region order on resume.
                final Type type = new TypeToken<LinkedHashMap<RegionCoordinate, Boolean>>() {
                }.getType();
                try (final BufferedReader reader = Files.newBufferedReader(file)) {
                    return GSON.fromJson(reader, type);
                }
            }
        } catch (final JsonIOException | JsonSyntaxException | IOException e) {
            Logging.logger().warn("Failed to deserialize render progress for world '{}'", this.identifier.asString(), e);
        }
        return null;
    }

    /** {@inheritDoc} Write failures are logged, not thrown. */
    @Override
    public void saveRenderProgress(final Map<RegionCoordinate, Boolean> renderProgress) {
        try {
            Files.writeString(this.dataPath.resolve(RENDER_PROGRESS_FILE_NAME), GSON.toJson(renderProgress));
        } catch (final IOException e) {
            Logging.logger().warn("Failed to serialize render progress for world '{}'", this.identifier.asString(), e);
        }
    }

    /** {@inheritDoc} Delete failures are logged, not thrown. */
    @Override
    public void deleteRenderProgress() {
        try {
            Files.deleteIfExists(this.dataPath.resolve(RENDER_PROGRESS_FILE_NAME));
        } catch (final IOException e) {
            Logging.logger().warn("Failed to delete render progress data for world '{}'", this.identifier.asString(), e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns an empty (concurrent) set when no file exists, the file is
     * empty, or deserialization fails.</p>
     */
    @Override
    public Set<ChunkCoordinate> getDirtyChunks() {
        final Set<ChunkCoordinate> ret = ConcurrentHashMap.newKeySet();
        try {
            final Path file = this.dataPath.resolve(DIRTY_CHUNKS_FILE_NAME);
            if (Files.isRegularFile(file)) {
                try (final BufferedReader reader = Files.newBufferedReader(file)) {
                    final @Nullable List<ChunkCoordinate> chunks = GSON.fromJson(
                        reader,
                        TypeToken.getParameterized(List.class, ChunkCoordinate.class).getType()
                    );
                    // Gson returns null for an empty file or a literal "null";
                    // the previous code passed that straight to addAll -> NPE
                    // (which the multi-catch below would not have caught).
                    if (chunks != null) {
                        ret.addAll(chunks);
                    }
                }
            }
        } catch (final JsonIOException | JsonSyntaxException | IOException e) {
            Logging.logger().warn("Failed to deserialize dirty chunks for world '{}'", this.identifier.asString(), e);
        }
        return ret;
    }

    /** {@inheritDoc} Write failures are logged, not thrown. */
    @Override
    public void saveDirtyChunks(final Set<ChunkCoordinate> dirtyChunks) {
        try {
            Files.writeString(this.dataPath.resolve(DIRTY_CHUNKS_FILE_NAME), GSON.toJson(dirtyChunks));
        } catch (final IOException e) {
            Logging.logger().warn("Failed to serialize dirty chunks for world '{}'", this.identifier.asString(), e);
        }
    }
}

0 commit comments

Comments
 (0)