Skip to content

Commit e3f2999

Browse files
authored
Merge pull request #5 from Quantum-Accelerators/hanaol/mp-dataloader
Fixed the sample_map.py script and some syntax issues.
2 parents 8275f28 + b02d590 commit e3f2999

File tree

5 files changed

+11
-9
lines changed

5 files changed

+11
-9
lines changed
-23 Bytes
Binary file not shown.

src/electrai/data/MP/map/script.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
import json, gzip
2+
3+
sample_map = {"GGA": ["mp-2355719", "mp-1933176", "mp-2507978", "mp-2255579", "mp-1800415", "mp-1923722", "mp-2452291", "mp-1790998", "mp-2632472", "mp-1802556"]}
4+
5+
with gzip.open("map_sample.json.gz", "wt") as f:
6+
json.dump(sample_map, f)

src/electrai/dataloader/chgcar_read.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ def read_data(self, data_dir): # will later change it to input directory
5858
return charge.flatten(), gridsize
5959

6060
def data_split(self):
61-
mapping = loadfn(f)
61+
mapping = loadfn(self.map_dir)
6262
data_list, label_list = [], []
6363
gs_data_list, gs_label_list = [], []
6464

src/electrai/examples/config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
dataset_name: "chgcar_data"
33
data_dir: ./data/MP/chgcars
44
label_dir: ./data/MP/chgcars
5-
map_dir: ./data/MP/chgcars/map_sample.json.gz
5+
map_dir: ./data/MP/map/map_sample.json.gz
66
rho_type: chgcar
77
functional: GGA
88
normalize_data: True

src/electrai/scripts/train.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,9 @@
3232
assert 2**cfg.n_upscale_layers == cfg.downsample_data / cfg.downsample_label
3333

3434

35-
-------------------------------
36-
Dataset / DataLoader
37-
-------------------------------
35+
# -------------------------------
36+
# Dataset / DataLoader
37+
# -------------------------------
3838
train_sets, test_sets = get_dataset(cfg)
3939

4040
train_data = RhoData(*train_sets,
@@ -53,10 +53,6 @@
5353
# -------------------------------
5454
# Model / Optimizer / Scheduler
5555
# -------------------------------
56-
torch.cuda.empty_cache()
57-
print(cfg.device)
58-
print(torch.cuda.memory_allocated(cfg.device)/1e9, "GB allocated")
59-
print(torch.cuda.memory_reserved(cfg.device)/1e9, "GB reserved")
6056
model = GeneratorResNet(
6157
n_residual_blocks=int(cfg.n_residual_blocks),
6258
n_upscale_layers=int(cfg.n_upscale_layers),

0 commit comments

Comments
 (0)