Skip to content

Commit acfeca5

Browse files
committed
Revert "Merge pull request #904 from dolthub/jennifer/type"
This reverts commit 40dba4f, reversing changes made to 165c30e.
1 parent 40dba4f commit acfeca5

File tree

196 files changed

+10730
-8106
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

196 files changed

+10730
-8106
lines changed

core/dataloader/csvdataloader.go

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ import (
2424
"github.com/dolthub/go-mysql-server/sql"
2525
"github.com/sirupsen/logrus"
2626

27-
"github.com/dolthub/doltgresql/server/functions/framework"
2827
"github.com/dolthub/doltgresql/server/types"
2928
)
3029

@@ -70,7 +69,7 @@ func NewCsvDataLoader(ctx *sql.Context, table sql.InsertableTable, delimiter str
7069

7170
// LoadChunk implements the DataLoader interface
7271
func (cdl *CsvDataLoader) LoadChunk(ctx *sql.Context, data *bufio.Reader) error {
73-
combinedReader := NewStringPrefixReader(cdl.partialRecord, data)
72+
combinedReader := newStringPrefixReader(cdl.partialRecord, data)
7473
cdl.partialRecord = ""
7574

7675
reader, err := newCsvReaderWithDelimiter(combinedReader, cdl.delimiter)
@@ -135,7 +134,7 @@ func (cdl *CsvDataLoader) LoadChunk(ctx *sql.Context, data *bufio.Reader) error
135134
if record[i] == nil {
136135
row[i] = nil
137136
} else {
138-
row[i], err = framework.IoInput(ctx, cdl.colTypes[i], fmt.Sprintf("%v", record[i]))
137+
row[i], err = cdl.colTypes[i].IoInput(ctx, fmt.Sprintf("%v", record[i]))
139138
if err != nil {
140139
return err
141140
}

testing/dataloader/csvdataloader_test.go renamed to core/dataloader/csvdataloader_test.go

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
// See the License for the specific language governing permissions and
1313
// limitations under the License.
1414

15-
package _dataloader
15+
package dataloader
1616

1717
import (
1818
"bufio"
@@ -25,16 +25,13 @@ import (
2525
"github.com/dolthub/go-mysql-server/sql"
2626
"github.com/stretchr/testify/require"
2727

28-
"github.com/dolthub/doltgresql/core/dataloader"
29-
"github.com/dolthub/doltgresql/server/initialization"
3028
"github.com/dolthub/doltgresql/server/types"
3129
)
3230

3331
// TestCsvDataLoader tests the CsvDataLoader implementation.
3432
func TestCsvDataLoader(t *testing.T) {
3533
db := memory.NewDatabase("mydb")
3634
provider := memory.NewDBProvider(db)
37-
initialization.Initialize(nil)
3835

3936
ctx := &sql.Context{
4037
Context: context.Background(),
@@ -50,7 +47,7 @@ func TestCsvDataLoader(t *testing.T) {
5047
// Tests that a basic CSV document can be loaded as a single chunk.
5148
t.Run("basic case", func(t *testing.T) {
5249
table := memory.NewTable(db, "myTable", pkSchema, nil)
53-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, ",", false)
50+
dataLoader, err := NewCsvDataLoader(ctx, table, ",", false)
5451
require.NoError(t, err)
5552

5653
// Load all the data as a single chunk
@@ -72,7 +69,7 @@ func TestCsvDataLoader(t *testing.T) {
7269
// partial record must be buffered and prepended to the next chunk.
7370
t.Run("record split across two chunks", func(t *testing.T) {
7471
table := memory.NewTable(db, "myTable", pkSchema, nil)
75-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, ",", false)
72+
dataLoader, err := NewCsvDataLoader(ctx, table, ",", false)
7673
require.NoError(t, err)
7774

7875
// Load the first chunk
@@ -101,7 +98,7 @@ func TestCsvDataLoader(t *testing.T) {
10198
// header row is present.
10299
t.Run("record split across two chunks, with header", func(t *testing.T) {
103100
table := memory.NewTable(db, "myTable", pkSchema, nil)
104-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, ",", true)
101+
dataLoader, err := NewCsvDataLoader(ctx, table, ",", true)
105102
require.NoError(t, err)
106103

107104
// Load the first chunk
@@ -130,7 +127,7 @@ func TestCsvDataLoader(t *testing.T) {
130127
// across two chunks.
131128
t.Run("quoted newlines across two chunks", func(t *testing.T) {
132129
table := memory.NewTable(db, "myTable", pkSchema, nil)
133-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, ",", false)
130+
dataLoader, err := NewCsvDataLoader(ctx, table, ",", false)
134131
require.NoError(t, err)
135132

136133
// Load the first chunk
@@ -158,7 +155,7 @@ func TestCsvDataLoader(t *testing.T) {
158155
// Test that calling Abort() does not insert any data into the table.
159156
t.Run("abort cancels data load", func(t *testing.T) {
160157
table := memory.NewTable(db, "myTable", pkSchema, nil)
161-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, ",", false)
158+
dataLoader, err := NewCsvDataLoader(ctx, table, ",", false)
162159
require.NoError(t, err)
163160

164161
// Load the first chunk
@@ -183,7 +180,7 @@ func TestCsvDataLoader(t *testing.T) {
183180
// and a header row is present.
184181
t.Run("delimiter='|', record split across two chunks, with header", func(t *testing.T) {
185182
table := memory.NewTable(db, "myTable", pkSchema, nil)
186-
dataLoader, err := dataloader.NewCsvDataLoader(ctx, table, "|", true)
183+
dataLoader, err := NewCsvDataLoader(ctx, table, "|", true)
187184
require.NoError(t, err)
188185

189186
// Load the first chunk

core/dataloader/csvreader.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,15 +67,15 @@ type csvReader struct {
6767
fieldsPerRecord int
6868
}
6969

70-
// NewCsvReader creates a csvReader from a given ReadCloser.
70+
// newCsvReader creates a csvReader from a given ReadCloser.
7171
//
7272
// The interpretation of the bytes of the supplied reader is a little murky. If
7373
// there is a UTF8, UTF16LE or UTF16BE BOM as the first bytes read, then the
7474
// BOM is stripped and the remaining contents of the reader are treated as that
7575
// encoding. If we are not in any of those marked encodings, then some of the
7676
// bytes go uninterpreted until we get to the SQL layer. It is currently the
7777
// case that newlines must be encoded as a '0xa' byte.
78-
func NewCsvReader(r io.ReadCloser) (*csvReader, error) {
78+
func newCsvReader(r io.ReadCloser) (*csvReader, error) {
7979
return newCsvReaderWithDelimiter(r, ",")
8080
}
8181

testing/dataloader/csvreader_test.go renamed to core/dataloader/csvreader_test.go

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,15 +12,13 @@
1212
// See the License for the specific language governing permissions and
1313
// limitations under the License.
1414

15-
package _dataloader
15+
package dataloader
1616

1717
import (
1818
"bytes"
1919
"io"
2020
"testing"
2121

22-
"github.com/dolthub/doltgresql/core/dataloader"
23-
2422
"github.com/stretchr/testify/assert"
2523
"github.com/stretchr/testify/require"
2624
)
@@ -74,7 +72,7 @@ bash"
7472
// TestCsvReader tests various cases of CSV data parsing.
7573
func TestCsvReader(t *testing.T) {
7674
t.Run("basic CSV data", func(t *testing.T) {
77-
csvReader, err := dataloader.NewCsvReader(newReader(basicCsvData))
75+
csvReader, err := newCsvReader(newReader(basicCsvData))
7876
require.NoError(t, err)
7977

8078
// Read the first row
@@ -97,7 +95,7 @@ func TestCsvReader(t *testing.T) {
9795
})
9896

9997
t.Run("wrong number of fields", func(t *testing.T) {
100-
csvReader, err := dataloader.NewCsvReader(newReader(wrongNumberOfFieldsCsvData))
98+
csvReader, err := newCsvReader(newReader(wrongNumberOfFieldsCsvData))
10199
require.NoError(t, err)
102100

103101
// Read the first row
@@ -116,7 +114,7 @@ func TestCsvReader(t *testing.T) {
116114
})
117115

118116
t.Run("incomplete line, no newline ending", func(t *testing.T) {
119-
csvReader, err := dataloader.NewCsvReader(newReader(partialLineErrorCsvData))
117+
csvReader, err := newCsvReader(newReader(partialLineErrorCsvData))
120118
require.NoError(t, err)
121119

122120
// Read the first row
@@ -144,7 +142,7 @@ func TestCsvReader(t *testing.T) {
144142
})
145143

146144
t.Run("null and empty string quoting", func(t *testing.T) {
147-
csvReader, err := dataloader.NewCsvReader(newReader(nullAndEmptyStringQuotingCsvData))
145+
csvReader, err := newCsvReader(newReader(nullAndEmptyStringQuotingCsvData))
148146
require.NoError(t, err)
149147

150148
// Read the first row
@@ -162,7 +160,7 @@ func TestCsvReader(t *testing.T) {
162160
})
163161

164162
t.Run("quote escaping", func(t *testing.T) {
165-
csvReader, err := dataloader.NewCsvReader(newReader(escapedQuotesCsvData))
163+
csvReader, err := newCsvReader(newReader(escapedQuotesCsvData))
166164
require.NoError(t, err)
167165

168166
// Read the first row
@@ -181,7 +179,7 @@ func TestCsvReader(t *testing.T) {
181179
})
182180

183181
t.Run("quoted newlines", func(t *testing.T) {
184-
csvReader, err := dataloader.NewCsvReader(newReader(newLineInQuotedFieldCsvData))
182+
csvReader, err := newCsvReader(newReader(newLineInQuotedFieldCsvData))
185183
require.NoError(t, err)
186184

187185
// Read the first row
@@ -197,7 +195,7 @@ func TestCsvReader(t *testing.T) {
197195
})
198196

199197
t.Run("quoted end of data marker", func(t *testing.T) {
200-
csvReader, err := dataloader.NewCsvReader(newReader(endOfDataMarkerCsvData))
198+
csvReader, err := newCsvReader(newReader(endOfDataMarkerCsvData))
201199
require.NoError(t, err)
202200

203201
// Read the first row

core/dataloader/string_prefix_reader.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ type stringPrefixReader struct {
2727

2828
var _ io.ReadCloser = (*stringPrefixReader)(nil)
2929

30-
// NewStringPrefixReader creates a new stringPrefixReader that first returns the data in |prefix| and
30+
// newStringPrefixReader creates a new stringPrefixReader that first returns the data in |prefix| and
3131
// then returns data from |reader|.
32-
func NewStringPrefixReader(prefix string, reader io.Reader) *stringPrefixReader {
32+
func newStringPrefixReader(prefix string, reader io.Reader) *stringPrefixReader {
3333
return &stringPrefixReader{
3434
prefix: prefix,
3535
reader: reader,

testing/dataloader/string_prefix_reader_test.go renamed to core/dataloader/string_prefix_reader_test.go

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,23 +12,21 @@
1212
// See the License for the specific language governing permissions and
1313
// limitations under the License.
1414

15-
package _dataloader
15+
package dataloader
1616

1717
import (
1818
"bytes"
1919
"io"
2020
"testing"
2121

22-
"github.com/dolthub/doltgresql/core/dataloader"
23-
2422
"github.com/stretchr/testify/require"
2523
)
2624

2725
func TestStringPrefixReader(t *testing.T) {
2826
t.Run("Read prefix and all data in single call", func(t *testing.T) {
2927
prefix := "prefix"
3028
reader := bytes.NewReader([]byte("0123456789"))
31-
prefixReader := dataloader.NewStringPrefixReader(prefix, reader)
29+
prefixReader := newStringPrefixReader(prefix, reader)
3230

3331
data := make([]byte, 100)
3432
bytesRead, err := prefixReader.Read(data)
@@ -44,7 +42,7 @@ func TestStringPrefixReader(t *testing.T) {
4442
t.Run("Read part of prefix", func(t *testing.T) {
4543
prefix := "prefix"
4644
reader := bytes.NewReader([]byte("0123456789"))
47-
prefixReader := dataloader.NewStringPrefixReader(prefix, reader)
45+
prefixReader := newStringPrefixReader(prefix, reader)
4846

4947
data := make([]byte, 5)
5048
bytesRead, err := prefixReader.Read(data)
@@ -79,7 +77,7 @@ func TestStringPrefixReader(t *testing.T) {
7977
t.Run("Read to prefix boundary", func(t *testing.T) {
8078
prefix := "prefix"
8179
reader := bytes.NewReader([]byte("0123456789"))
82-
prefixReader := dataloader.NewStringPrefixReader(prefix, reader)
80+
prefixReader := newStringPrefixReader(prefix, reader)
8381

8482
data := make([]byte, 6)
8583
bytesRead, err := prefixReader.Read(data)

core/dataloader/tabdataloader.go

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ import (
2323
"github.com/dolthub/go-mysql-server/sql"
2424
"github.com/sirupsen/logrus"
2525

26-
"github.com/dolthub/doltgresql/server/functions/framework"
2726
"github.com/dolthub/doltgresql/server/types"
2827
)
2928

@@ -133,7 +132,7 @@ func (tdl *TabularDataLoader) LoadChunk(ctx *sql.Context, data *bufio.Reader) er
133132
if values[i] == tdl.nullChar {
134133
row[i] = nil
135134
} else {
136-
row[i], err = framework.IoInput(ctx, tdl.colTypes[i], values[i])
135+
row[i], err = tdl.colTypes[i].IoInput(ctx, values[i])
137136
if err != nil {
138137
return err
139138
}

testing/dataloader/tabdataloader_test.go renamed to core/dataloader/tabdataloader_test.go

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
// See the License for the specific language governing permissions and
1313
// limitations under the License.
1414

15-
package _dataloader
15+
package dataloader
1616

1717
import (
1818
"bufio"
@@ -24,7 +24,6 @@ import (
2424
"github.com/dolthub/go-mysql-server/sql"
2525
"github.com/stretchr/testify/require"
2626

27-
"github.com/dolthub/doltgresql/core/dataloader"
2827
"github.com/dolthub/doltgresql/server/types"
2928
)
3029

@@ -46,7 +45,7 @@ func TestTabDataLoader(t *testing.T) {
4645
// Tests that a basic tab delimited doc can be loaded as a single chunk.
4746
t.Run("basic case", func(t *testing.T) {
4847
table := memory.NewTable(db, "myTable", pkSchema, nil)
49-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "\t", "\\N", false)
48+
dataLoader, err := NewTabularDataLoader(ctx, table, "\t", "\\N", false)
5049
require.NoError(t, err)
5150

5251
// Load all the data as a single chunk
@@ -68,7 +67,7 @@ func TestTabDataLoader(t *testing.T) {
6867
// partial record must be buffered and prepended to the next chunk.
6968
t.Run("record split across two chunks", func(t *testing.T) {
7069
table := memory.NewTable(db, "myTable", pkSchema, nil)
71-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "\t", "\\N", false)
70+
dataLoader, err := NewTabularDataLoader(ctx, table, "\t", "\\N", false)
7271
require.NoError(t, err)
7372

7473
// Load the first chunk
@@ -97,7 +96,7 @@ func TestTabDataLoader(t *testing.T) {
9796
// header row is present.
9897
t.Run("record split across two chunks, with header", func(t *testing.T) {
9998
table := memory.NewTable(db, "myTable", pkSchema, nil)
100-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "\t", "\\N", true)
99+
dataLoader, err := NewTabularDataLoader(ctx, table, "\t", "\\N", true)
101100
require.NoError(t, err)
102101

103102
// Load the first chunk
@@ -126,7 +125,7 @@ func TestTabDataLoader(t *testing.T) {
126125
// across two chunks.
127126
t.Run("quoted newlines across two chunks", func(t *testing.T) {
128127
table := memory.NewTable(db, "myTable", pkSchema, nil)
129-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "\t", "\\N", false)
128+
dataLoader, err := NewTabularDataLoader(ctx, table, "\t", "\\N", false)
130129
require.NoError(t, err)
131130

132131
// Load the first chunk
@@ -155,7 +154,7 @@ func TestTabDataLoader(t *testing.T) {
155154
// header row is present.
156155
t.Run("delimiter='|', record split across two chunks, with header", func(t *testing.T) {
157156
table := memory.NewTable(db, "myTable", pkSchema, nil)
158-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "|", "\\N", true)
157+
dataLoader, err := NewTabularDataLoader(ctx, table, "|", "\\N", true)
159158
require.NoError(t, err)
160159

161160
// Load the first chunk
@@ -183,7 +182,7 @@ func TestTabDataLoader(t *testing.T) {
183182
// Test that calling Abort() does not insert any data into the table.
184183
t.Run("abort cancels data load", func(t *testing.T) {
185184
table := memory.NewTable(db, "myTable", pkSchema, nil)
186-
dataLoader, err := dataloader.NewTabularDataLoader(ctx, table, "\t", "\\N", false)
185+
dataLoader, err := NewTabularDataLoader(ctx, table, "\t", "\\N", false)
187186
require.NoError(t, err)
188187

189188
// Load the first chunk

core/typecollection/merge.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,15 @@ import (
2121
"github.com/dolthub/doltgresql/server/types"
2222
)
2323

24-
// Merge handles merging types on our root and their root.
24+
// Merge handles merging sequences on our root and their root.
2525
func Merge(ctx context.Context, ourCollection, theirCollection, ancCollection *TypeCollection) (*TypeCollection, error) {
2626
mergedCollection := ourCollection.Clone()
27-
err := theirCollection.IterateTypes(func(schema string, theirType types.DoltgresType) error {
27+
err := theirCollection.IterateTypes(func(schema string, theirType *types.Type) error {
2828
// If we don't have the type, then we simply add it
2929
mergedType, exists := mergedCollection.GetType(schema, theirType.Name)
3030
if !exists {
31-
return mergedCollection.CreateType(schema, theirType)
31+
newSeq := *theirType
32+
return mergedCollection.CreateType(schema, &newSeq)
3233
}
3334

3435
// Different types with the same name cannot be merged. (e.g.: 'domain' type and 'base' type with the same name)

0 commit comments

Comments
 (0)