-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathattribute_write_integration_test.go
More file actions
137 lines (115 loc) · 4.71 KB
/
attribute_write_integration_test.go
File metadata and controls
137 lines (115 loc) · 4.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
// Copyright (c) 2025 SciGo HDF5 Library Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
package hdf5
import (
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
// TestDenseAttributes_Integration_SmallAttributes writes many small attributes.
// Note: Object header size limits compact storage to ~3-5 attributes in practice,
// even though MaxCompactAttributes = 8. The transition happens when header space runs out.
func TestDenseAttributes_Integration_SmallAttributes(t *testing.T) {
	// t.TempDir() is removed automatically when the test ends, so no explicit
	// os.Remove cleanup is needed.
	testFile := filepath.Join(t.TempDir(), "dense_attrs_small.h5")
	// Create file and dataset.
	fw, err := CreateForWrite(testFile, CreateTruncate)
	require.NoError(t, err)
	ds, err := fw.CreateDataset("/data", Int32, []uint64{10})
	require.NoError(t, err)
	// Write small attributes until the object header is full (triggers the
	// compact->dense transition). Short names and int32 values make the
	// smallest attributes possible.
	var transitionedAt int
	var successfulWrites int
	for i := 0; i < MaxCompactAttributes+5; i++ {
		name := fmt.Sprintf("a%d", i) // short name keeps the attribute small
		err := ds.WriteAttribute(name, int32(i))
		if err != nil {
			if strings.Contains(err.Error(), "existing dense storage not yet implemented") {
				// Transition happened! This is expected in Phase 2 MVP.
				t.Logf("Transition successful at attribute %d", i)
				t.Logf("MVP limitation: cannot add more attributes after transition (Phase 2)")
				transitionedAt = i
				break
			}
			require.NoError(t, err, "unexpected error at attribute %d", i)
		} else {
			successfulWrites++
		}
	}
	// Verify that we wrote at least some attributes and hit the Phase 2 limitation.
	require.Greater(t, successfulWrites, 3, "should have written at least 3-4 attributes successfully")
	if transitionedAt > 0 {
		t.Logf("Successfully transitioned to dense storage at attribute %d", transitionedAt)
		require.Greater(t, transitionedAt, 3, "transition should happen after at least 3-4 compact attributes")
		require.LessOrEqual(t, transitionedAt, MaxCompactAttributes+1, "transition should happen around MaxCompactAttributes")
	}
	// Close exactly once with an error check. (A deferred second Close here
	// would fire after this checked Close and close the writer twice.)
	err = fw.Close()
	require.NoError(t, err)
	// Verify the file was created and is large enough to hold dense attributes.
	info, err := os.Stat(testFile)
	require.NoError(t, err)
	require.Greater(t, info.Size(), int64(512), "file should be >512B with dense attributes")
}
// TestDenseAttributes_Integration_Transition covers the automatic
// compact-to-dense storage transition. Skipped for now: the transition itself
// works, but appending to already-dense storage is unimplemented (Phase 2 MVP).
func TestDenseAttributes_Integration_Transition(t *testing.T) {
	t.Skip("Phase 2 MVP: Transition works but adding to existing dense storage not yet implemented")
}
// TestDenseAttributes_Integration_UTF8 covers dense attributes whose names are
// UTF-8. Skipped for now: UTF-8 names already work in compact storage and the
// transition succeeds, but the full dense workflow is unimplemented (Phase 2 MVP).
func TestDenseAttributes_Integration_UTF8(t *testing.T) {
	t.Skip("Phase 2 MVP: UTF-8 names work but full dense workflow not yet implemented")
}
// TestDenseAttributes_Integration_UpsertSemantics tests upsert (modify) semantics for attributes.
// Phase 1: Upsert is supported in compact storage - writing to existing attribute modifies it.
func TestDenseAttributes_Integration_UpsertSemantics(t *testing.T) {
	// t.TempDir() is removed automatically when the test ends, so no explicit
	// os.Remove cleanup is needed.
	testFile := filepath.Join(t.TempDir(), "dense_upsert.h5")
	fw, err := CreateForWrite(testFile, CreateTruncate)
	require.NoError(t, err)
	ds, err := fw.CreateDataset("/data", Int32, []uint64{10})
	require.NoError(t, err)
	// Write a few attributes.
	for i := 0; i < 3; i++ {
		err := ds.WriteAttribute(fmt.Sprintf("attr%d", i), int32(i))
		require.NoError(t, err)
	}
	// Writing a duplicate attribute name should succeed with upsert semantics
	// (modify in place rather than error or append).
	err = ds.WriteAttribute("attr1", int32(999))
	require.NoError(t, err, "upsert should succeed in Phase 1")
	// Close and verify.
	err = fw.Close()
	require.NoError(t, err)
	// Reopen and verify the modified value survived the round trip.
	f, err := Open(testFile)
	require.NoError(t, err)
	defer f.Close()
	// Find the dataset. The callback variable is named d (not ds) to avoid
	// shadowing the writer-side ds handle declared above.
	var dataset *Dataset
	f.Walk(func(_ string, obj Object) {
		if d, ok := obj.(*Dataset); ok && d.Name() == "data" {
			dataset = d
		}
	})
	require.NotNil(t, dataset, "dataset should exist")
	// Verify the attribute count: the upsert must not have added a 4th entry.
	attrs, err := dataset.Attributes()
	require.NoError(t, err)
	require.Equal(t, 3, len(attrs), "should still have 3 attributes (not 4)")
	// Verify attr1 carries the overwritten value (999, not the original 1).
	for _, attr := range attrs {
		if attr.Name == "attr1" {
			val, err := attr.ReadValue()
			require.NoError(t, err)
			require.Equal(t, int32(999), val, "attr1 should have modified value")
			return
		}
	}
	t.Fatal("attr1 not found")
}