Commit 1749dfe (1 parent: 1aeb456)
Author: TianyuZhang1214

fix: import real data from Postgres system tables. (#299)

File tree: 13 files changed, +4873 -21 lines

catalog/initial_data.go

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+package catalog
+
+var InitialDataTables = struct {
+    PGNamespace [][]any
+    PGRange     [][]any
+}{
+    PGNamespace: [][]any{
+        {"99", "pg_toast", "10", ""},
+        {"11", "pg_catalog", "10", "{postgres=UC/postgres,=U/postgres}"},
+        {"2200", "public", "6171", "{pg_database_owner,=UC/pg_database_owner,=U/pg_database_owner}"},
+        {"13219", "information_schema", "10", "{postgres=UC/postgres,=U/postgres}"},
+        {"16395", "test_schema", "10", ""},
+    },
+    PGRange: [][]any{
+        {"3904", "23", "4451", "0", "1978", "int4range_canonical", "int4range_subdiff"},
+        {"3906", "1700", "4532", "0", "3125", "-", "numrange_subdiff"},
+        {"3908", "1114", "4533", "0", "3128", "-", "tsrange_subdiff"},
+        {"3910", "1184", "4534", "0", "3127", "-", "tstzrange_subdiff"},
+        {"3912", "1082", "4535", "0", "3122", "daterange_canonical", "daterange_subdiff"},
+        {"3926", "20", "4536", "0", "3124", "int8range_canonical", "int8range_subdiff"},
+    },
+}
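
These PGNamespace and PGRange row sets appear to mirror the pg_catalog.pg_namespace and pg_catalog.pg_range contents of a stock PostgreSQL server. As an illustration only, here is a minimal sketch of how row sets shaped like these could be inserted through database/sql; the actual consumption of InitialDataTables happens in catalog/internal_tables.go, whose large diff is not rendered here, and the package name, helper name, and placeholder syntax below are assumptions rather than the repository's API.

// Package seedexample is a hypothetical illustration, not part of this commit.
package seedexample

import (
	"database/sql"
	"fmt"
	"strings"
)

// seedRows inserts every row of data into table using positional "?"
// placeholders; the placeholder style ultimately depends on the SQL driver.
func seedRows(db *sql.DB, table string, data [][]any) error {
	for _, row := range data {
		placeholders := strings.TrimSuffix(strings.Repeat("?, ", len(row)), ", ")
		stmt := fmt.Sprintf("INSERT INTO %s VALUES (%s)", table, placeholders)
		if _, err := db.Exec(stmt, row...); err != nil {
			return fmt.Errorf("failed to seed %s: %w", table, err)
		}
	}
	return nil
}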

catalog/internal_tables.go

Lines changed: 331 additions & 8 deletions
Large diffs are not rendered by default.

catalog/provider.go

Lines changed: 38 additions & 0 deletions
@@ -18,6 +18,7 @@ import (
 
     "github.com/apecloud/myduckserver/adapter"
     "github.com/apecloud/myduckserver/configuration"
+    "github.com/apecloud/myduckserver/initialdata"
 )
 
 type DatabaseProvider struct {
@@ -143,6 +144,43 @@ func (prov *DatabaseProvider) initCatalog() error {
                 return fmt.Errorf("failed to insert initial data into internal table %q: %w", t.Name, err)
             }
         }
+
+        initialFileContent := initialdata.InitialTableDataMap[t.Name]
+        if initialFileContent != "" {
+            var count int
+            // Count rows in the internal table
+            if err := prov.storage.QueryRow(t.CountAllStmt()).Scan(&count); err != nil {
+                return fmt.Errorf("failed to count rows in internal table %q: %w", t.Name, err)
+            }
+
+            if count == 0 {
+                // Create temporary file to store initial data
+                tmpFile, err := os.CreateTemp("", "initial-data-"+t.Name+".csv")
+                if err != nil {
+                    return fmt.Errorf("failed to create temporary file for initial data: %w", err)
+                }
+                // Ensure the temporary file is removed after usage
+                defer os.Remove(tmpFile.Name())
+                defer tmpFile.Close()
+
+                // Write the initial data to the temporary file
+                if _, err := tmpFile.WriteString(initialFileContent); err != nil {
+                    return fmt.Errorf("failed to write initial data to temporary file: %w", err)
+                }
+
+                if err = tmpFile.Sync(); err != nil {
+                    return fmt.Errorf("failed to sync initial data file: %w", err)
+                }
+
+                // Execute the COPY command to insert data into the table
+                if _, err := prov.storage.ExecContext(
+                    context.Background(),
+                    fmt.Sprintf("COPY %s FROM '%s' (DELIMITER ',', HEADER)", t.QualifiedName(), tmpFile.Name()),
+                ); err != nil {
+                    return fmt.Errorf("failed to insert initial data from file into internal table %q: %w", t.Name, err)
+                }
+            }
+        }
     }
 
     if _, err := prov.pool.ExecContext(context.Background(), "PRAGMA enable_checkpoint_on_shutdown"); err != nil {
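
The block added above boils down to a seed-once pattern: tables that already contain rows are left untouched; otherwise the embedded CSV is staged in a temporary file and bulk-loaded with DuckDB's COPY, which reads from a file path rather than from an in-memory string. Below is a minimal standalone sketch of that pattern over a plain *sql.DB, assuming a hypothetical seedFromCSV helper; prov.storage, CountAllStmt, and QualifiedName are the repository's own APIs and are not reproduced here.

// Package seedexample is a hypothetical illustration, not part of this commit.
package seedexample

import (
	"database/sql"
	"fmt"
	"os"
)

// seedFromCSV bulk-loads csvContent into table only when the table is empty.
func seedFromCSV(db *sql.DB, table, csvContent string) error {
	var count int
	// Only seed empty tables so that restarts do not duplicate rows.
	if err := db.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM %s", table)).Scan(&count); err != nil {
		return fmt.Errorf("count %s: %w", table, err)
	}
	if count > 0 {
		return nil
	}

	// Stage the embedded CSV on disk, since COPY reads from a file path.
	tmp, err := os.CreateTemp("", "seed-*.csv")
	if err != nil {
		return err
	}
	defer os.Remove(tmp.Name())
	defer tmp.Close()

	if _, err := tmp.WriteString(csvContent); err != nil {
		return err
	}
	if err := tmp.Sync(); err != nil {
		return err
	}

	// HEADER makes COPY skip the header line written by \COPY ... WITH CSV HEADER.
	_, err = db.Exec(fmt.Sprintf("COPY %s FROM '%s' (DELIMITER ',', HEADER)", table, tmp.Name()))
	return err
}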

compatibility/pg/csharp/PGTest.cs

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ public class Tests
 
     public void Connect(string ip, int port, string user, string password)
     {
-        string connectionString = $"Host={ip};Port={port};Username={user};Password={password};Database=postgres;";
+        string connectionString = $"Host={ip};Port={port};Username={user};Password={password};Database=postgres;Timeout=300;CommandTimeout=600;";
         try
         {
             conn = new NpgsqlConnection(connectionString);

compatibility/pg/test.bats

Lines changed: 5 additions & 7 deletions
@@ -41,13 +41,11 @@ start_process() {
     start_process $BATS_TEST_DIRNAME/c/pg_test 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
 }
 
-# Failed because of the following error:
-# > Catalog Error: Table with name pg_range does not exist!
-# @test "pg-csharp" {
-#     set_custom_teardown "sudo pkill -f dotnet"
-#     start_process dotnet build $BATS_TEST_DIRNAME/csharp/PGTest.csproj -o $BATS_TEST_DIRNAME/csharp/bin
-#     start_process dotnet $BATS_TEST_DIRNAME/csharp/bin/PGTest.dll 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
-# }
+@test "pg-csharp" {
+    set_custom_teardown "sudo pkill -f dotnet"
+    start_process dotnet build $BATS_TEST_DIRNAME/csharp/PGTest.csproj -o $BATS_TEST_DIRNAME/csharp/bin
+    start_process dotnet $BATS_TEST_DIRNAME/csharp/bin/PGTest.dll 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
+}
 
 @test "pg-go" {
     start_process go build -o $BATS_TEST_DIRNAME/go/pg $BATS_TEST_DIRNAME/go/pg.go

initialdata/file_content.go

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+package initialdata
+
+import _ "embed"
+
+//go:embed pg_class.csv
+var pgClassContent string
+
+//go:embed pg_proc.csv
+var pgProcContent string
+
+//go:embed pg_type.csv
+var pgTypeContent string
+
+var InitialTableDataMap = map[string]string{
+    "pg_class": pgClassContent,
+    "pg_proc":  pgProcContent,
+    "pg_type":  pgTypeContent,
+}
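
Because catalog/provider.go issues COPY with the HEADER option, each embedded CSV is expected to be non-empty and to start with a header row. A hypothetical sanity-check test, not part of this commit, that asserts exactly that over InitialTableDataMap might look like the following.

// Hypothetical test sketch; it lives in package initialdata so it can read
// the unexported embedded strings through InitialTableDataMap.
package initialdata

import (
	"strings"
	"testing"
)

func TestInitialTableDataMapHasHeaders(t *testing.T) {
	for name, content := range InitialTableDataMap {
		if strings.TrimSpace(content) == "" {
			t.Fatalf("embedded CSV for %s is empty", name)
		}
		header := strings.SplitN(content, "\n", 2)[0]
		// Every system table exported by initial_data.sh has many columns,
		// so the header line must contain at least one comma.
		if !strings.Contains(header, ",") {
			t.Errorf("embedded CSV for %s does not start with a CSV header: %q", name, header)
		}
	}
}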

initialdata/initial_data.sh

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+# Start a PostgreSQL container and mount the current directory
+CONTAINER_ID=$(docker run --rm -d -e POSTGRES_PASSWORD=postgres -v "$(pwd):/data" postgres)
+sleep 5
+
+# Set file paths within the container
+PG_CLASS_FILE="/data/pg_class.csv"
+PG_PROC_FILE="/data/pg_proc.csv"
+PG_TYPE_FILE="/data/pg_type.csv"
+
+# Define SQL queries
+PG_CLASS_QUERY="SELECT oid, relname, relnamespace, reltype, reloftype, relowner, relam, relfilenode, reltablespace, relpages, reltuples, relallvisible, reltoastrelid, relhasindex, relisshared, relpersistence, relkind, relnatts, relchecks, relhasrules, relhastriggers, relhassubclass, relrowsecurity, relforcerowsecurity, relispopulated, relreplident, relispartition, relrewrite, relfrozenxid, relminmxid, relacl, reloptions, relpartbound FROM pg_class"
+PG_PROC_QUERY="SELECT oid, proname, pronamespace, proowner, prolang, procost, prorows, provariadic, prosupport::regproc::oid, prokind, prosecdef, proleakproof, proisstrict, proretset, provolatile, proparallel, pronargs, pronargdefaults, prorettype, proargtypes, proallargtypes, proargmodes, proargnames, proargdefaults, protrftypes, prosrc, probin, prosqlbody, proconfig, proacl FROM pg_proc"
+PG_TYPE_QUERY="SELECT oid, typname, typnamespace, typowner, typlen, typbyval, typtype, typcategory, typispreferred, typisdefined, typdelim, typrelid, typsubscript::regproc::oid, typelem, typarray, typinput::regproc::oid, typoutput::regproc::oid, typreceive::regproc::oid, typsend::regproc::oid, typmodin::regproc::oid, typmodout::regproc::oid, typanalyze::regproc::oid, typalign, typstorage, typnotnull, typbasetype, typtypmod, typndims, typcollation, typdefaultbin, typdefault, typacl FROM pg_type"
+
+# Execute queries and export data to mounted files
+docker exec -i $CONTAINER_ID psql -U postgres -c "\COPY ($PG_CLASS_QUERY) TO '$PG_CLASS_FILE' WITH CSV HEADER"
+docker exec -i $CONTAINER_ID psql -U postgres -c "\COPY ($PG_PROC_QUERY) TO '$PG_PROC_FILE' WITH CSV HEADER"
+docker exec -i $CONTAINER_ID psql -U postgres -c "\COPY ($PG_TYPE_QUERY) TO '$PG_TYPE_FILE' WITH CSV HEADER"
+
+# Stop the container
+docker kill $CONTAINER_ID
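
The script leaves pg_class.csv, pg_proc.csv, and pg_type.csv in the working directory, ready to be embedded by initialdata/file_content.go. Below is a hypothetical Go helper, not part of this commit, for spot-checking those exports before committing them; it assumes the files sit in the current directory and simply reports how many data rows and columns each one contains.

// Hypothetical inspection tool for the CSVs produced by initial_data.sh.
package main

import (
	"encoding/csv"
	"fmt"
	"log"
	"os"
)

func main() {
	for _, path := range []string{"pg_class.csv", "pg_proc.csv", "pg_type.csv"} {
		f, err := os.Open(path)
		if err != nil {
			log.Fatalf("open %s: %v", path, err)
		}
		records, err := csv.NewReader(f).ReadAll()
		f.Close()
		if err != nil {
			log.Fatalf("parse %s: %v", path, err)
		}
		if len(records) == 0 {
			log.Fatalf("%s is empty", path)
		}
		// The first record is the header written by \COPY ... WITH CSV HEADER.
		fmt.Printf("%s: %d data rows, %d columns\n", path, len(records)-1, len(records[0]))
	}
}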
