Compare commits

41 Commits

Author  SHA1  Message  Date
Tim Vaillancourt  0eb308dac8  Merge remote-tracking branch 'origin/master' into enum-to-varchar  2021-05-31 14:18:27 +02:00
Shlomi Noach  c90556e29b  Merge branch 'master' into enum-to-varchar  2021-05-25 09:11:15 +03:00
Shlomi Noach  48b02833ba  gofmt  2021-05-04 18:46:20 +03:00
Shlomi Noach  91dec1a0f2  Merge branch 'master' into enum-to-varchar  2021-05-04 18:45:20 +03:00
Shlomi Noach  82bdf066e9  fix compilation error  2021-05-02 20:59:50 +03:00
Shlomi Noach  6e5b665c55  apply EnumValues to mapped column  2021-05-02 20:56:02 +03:00
Shlomi Noach  e80ddb42c9  store enum values, use when populating  2021-05-02 20:44:39 +03:00
Shlomi Noach  95ee9e2144  fix insert query  2021-05-02 19:37:25 +03:00
Shlomi Noach  939b898ea9  first attempt at setting enum-to-string right  2021-05-02 19:28:19 +03:00
Shlomi Noach  9bb2daaf15  test: not null  2021-05-02 18:35:52 +03:00
Shlomi Noach  c1bfe94b0f  Convering enum to varchar  2021-05-02 18:12:35 +03:00
Shlomi Noach  33516f4955  Merge pull request #17 from openark/hooks-eta-seconds (hooks: reporting GH_OST_ETA_SECONDS. ETA as part of migration context)  2021-03-07 14:55:31 +02:00
Shlomi Noach  b688c58a45  ETAUnknown constant  2021-03-07 14:16:04 +02:00
Shlomi Noach  76b9c16a68  N/A denoted by negative value  2021-03-07 11:27:50 +02:00
Shlomi Noach  51719a2b76  GH_OST_ETA_NANOSECONDS  2021-03-07 11:11:50 +02:00
Shlomi Noach  54000ab516  hooks: reporting GH_OST_ETA_SECONDS. ETA stored as part of migration context  2021-03-07 10:47:40 +02:00
Shlomi Noach  2b5cf78b4d  Merge pull request #15 from openark/limit-mysql-connetions (All MySQL DBs limited to max 3 concurrent/idle connections)  2021-02-22 12:31:18 +02:00
Shlomi Noach  dea8d54be0  Merge branch 'master' into limit-mysql-connetions  2021-02-22 12:31:07 +02:00
Shlomi Noach  710c9ddda5  All MySQL DBs limited to max 3 concurrent/idle connections  2021-02-18 10:44:47 +02:00
Shlomi Noach  4a36e246c0  Merge pull request #14 from ccoffey/cathal/safer_cut_over (Cut-over should wait for heartbeat lag to be low enough to succeed)  2021-02-07 17:41:59 +02:00
Shlomi Noach  253658d46b  Merge pull request #13 from openark/unique-key-generated-column (Generated column as part of UNIQUE (or PRIMARY) KEY)  2021-01-27 09:49:21 +02:00
Shlomi Noach  b7b3bfbd34  skip analysis of generated column data type in unique key  2021-01-19 13:42:45 +02:00
Shlomi Noach  7202076c77  Generated column as part of UNIQUE (or PRIMARY) KEY  2021-01-19 13:27:00 +02:00
Shlomi Noach  ff82140597  Merge pull request #12 from openark/copy-auto-increment (Copying AUTO_INCREMENT value to ghost table)  2021-01-05 09:37:59 +02:00
Shlomi Noach  525a80d62e  Merge branch 'master' into copy-auto-increment  2021-01-05 09:08:33 +02:00
Shlomi Noach  63219ab3e3  adding test for user defined AUTO_INCREMENT statement  2020-12-31 11:48:46 +02:00
Shlomi Noach  3d4dfaafd9  minor update to test  2020-12-31 11:06:53 +02:00
Shlomi Noach  31069ae4f2  support GetUint64  2020-12-31 11:06:37 +02:00
Shlomi Noach  af20211629  parsing AUTO_INCREMENT from alter query, reading AUTO_INCREMENT from original table, applying AUTO_INCREMENT value onto ghost table if applicable and user has not specified AUTO_INCREMENT in alter statement  2020-12-31 11:01:13 +02:00
Shlomi Noach  2d0281f29b  clear event beforehand  2020-12-31 10:06:12 +02:00
Shlomi Noach  eeab264eb2  adding tests for AUTO_INCREMENT value after row deletes. Should initially fail  2020-12-31 09:53:37 +02:00
Shlomi Noach  75009db849  Adding simple test for 'expect_table_structure' scenario  2020-12-31 09:50:56 +02:00
Shlomi Noach  26f76027b2  greping for 'expect_table_structure' content  2020-12-31 09:48:08 +02:00
Shlomi Noach  294d43b4f6  WIP: copying AUTO_INCREMENT value to ghost table (Initial commit: towards setting up a test suite)  2020-12-31 09:29:38 +02:00
Shlomi Noach  e9f9af2ef2  Merge pull request #11 from openark/updates-from-upstream-2020-10 (Updates from upstream 2020 10)  2020-10-18 12:32:36 +03:00
Shlomi Noach  ca0ca5ab73  Merge remote-tracking branch 'upstream/master' into updates-from-upstream-2020-10  2020-10-18 12:02:20 +03:00
Shlomi Noach  ae22d84ef0  v1.1.0  2020-08-05 12:23:41 +03:00
Shlomi Noach  6012e8072d  Merge pull request #8 from openark/ajm188-handle_driver_timeout_error (handle driver timeout error)  2020-08-02 11:57:54 +03:00
Shlomi Noach  b59a8ed9da  merged conflict  2020-08-02 09:57:02 +03:00
Shlomi Noach  9ccde4f4cd  Merge pull request #5 from openark/parse-alter-statement (Support a complete ALTER TABLE statement in --alter)  2020-07-29 15:34:00 +03:00
Shlomi Noach  9b2a04d454  Merge pull request #2 from openark/workflow-upload-artifact (Actions/Workflow: upload artifact)  2020-07-28 12:10:37 +03:00
7 changed files with 82 additions and 7 deletions

View File

@@ -187,6 +187,10 @@ func (this *Inspector) inspectOriginalAndGhostTables() (err error) {
 		if column.Name == mappedColumn.Name && column.Type == sql.DateTimeColumnType && mappedColumn.Type == sql.TimestampColumnType {
 			this.migrationContext.MappedSharedColumns.SetConvertDatetimeToTimestamp(column.Name, this.migrationContext.ApplierTimeZone)
 		}
+		if column.Name == mappedColumn.Name && column.Type == sql.EnumColumnType && mappedColumn.Charset != "" {
+			this.migrationContext.MappedSharedColumns.SetEnumToTextConversion(column.Name)
+			this.migrationContext.MappedSharedColumns.SetEnumValues(column.Name, column.EnumValues)
+		}
 	}
 	for _, column := range this.migrationContext.UniqueKey.Columns.Columns() {
@@ -590,6 +594,7 @@ func (this *Inspector) applyColumnTypes(databaseName, tableName string, columnsL
 		}
 		if strings.HasPrefix(columnType, "enum") {
 			column.Type = sql.EnumColumnType
+			column.EnumValues = sql.ParseEnumValues(m.GetString("COLUMN_TYPE"))
 		}
 		if strings.HasPrefix(columnType, "binary") {
 			column.Type = sql.BinaryColumnType

View File

@@ -38,6 +38,8 @@ func buildColumnsPreparedValues(columns *ColumnList) []string {
 		var token string
 		if column.timezoneConversion != nil {
 			token = fmt.Sprintf("convert_tz(?, '%s', '%s')", column.timezoneConversion.ToTimezone, "+00:00")
+		} else if column.enumToTextConversion {
+			token = fmt.Sprintf("ELT(?, %s)", column.EnumValues)
 		} else if column.Type == JSONColumnType {
 			token = "convert(? using utf8mb4)"
 		} else {
@@ -108,6 +110,8 @@ func BuildSetPreparedClause(columns *ColumnList) (result string, err error) {
 		var setToken string
 		if column.timezoneConversion != nil {
 			setToken = fmt.Sprintf("%s=convert_tz(?, '%s', '%s')", EscapeName(column.Name), column.timezoneConversion.ToTimezone, "+00:00")
+		} else if column.enumToTextConversion {
+			setToken = fmt.Sprintf("%s=ELT(?, %s)", EscapeName(column.Name), column.EnumValues)
 		} else if column.Type == JSONColumnType {
 			setToken = fmt.Sprintf("%s=convert(? using utf8mb4)", EscapeName(column.Name))
 		} else {
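
Why ELT(): when gh-ost applies a row event, the value bound for an ENUM column is the enum's numeric, 1-based index as it arrives from the binary log, while the ghost column is now textual. ELT(index, val1, val2, ...) returns the index-th value, so the bound index is translated back into its string form on the MySQL side. Below is a minimal, self-contained sketch of the token this builder code produces; the column name `e` and the value list are illustrative examples, not taken from this diff.

package main

import "fmt"

func main() {
	// Illustrative inputs (hypothetical): an ENUM column `e` whose value list
	// was extracted from the COLUMN_TYPE "enum('red','green','blue','orange')".
	escapedName := "`e`"
	enumValues := "'red','green','blue','orange'"

	// Same shape of token as BuildSetPreparedClause builds above.
	setToken := fmt.Sprintf("%s=ELT(?, %s)", escapedName, enumValues)
	fmt.Println(setToken)
	// Output: `e`=ELT(?, 'red','green','blue','orange')
	//
	// With a bind value of 2 (the enum index coming off the binlog), MySQL
	// evaluates ELT(2, 'red','green','blue','orange') and assigns 'green',
	// so the varchar ghost column receives the text rather than the number.
}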

View File

@@ -33,6 +33,7 @@ var (
 		// ALTER TABLE tbl something
 		regexp.MustCompile(`(?i)\balter\s+table\s+([\S]+)\s+(.*$)`),
 	}
+	enumValuesRegexp = regexp.MustCompile("^enum[(](.*)[)]$")
 )

 type AlterTableParser struct {
@@ -205,3 +206,10 @@ func (this *AlterTableParser) HasExplicitTable() bool {
 func (this *AlterTableParser) GetAlterStatementOptions() string {
 	return this.alterStatementOptions
 }
+
+func ParseEnumValues(enumColumnType string) string {
+	if submatch := enumValuesRegexp.FindStringSubmatch(enumColumnType); len(submatch) > 0 {
+		return submatch[1]
+	}
+	return enumColumnType
+}

View File

@@ -322,3 +322,21 @@ func TestParseAlterStatementExplicitTable(t *testing.T) {
 		test.S(t).ExpectTrue(reflect.DeepEqual(parser.alterTokens, []string{"drop column b", "add index idx(i)"}))
 	}
 }
+
+func TestParseEnumValues(t *testing.T) {
+	{
+		s := "enum('red','green','blue','orange')"
+		values := ParseEnumValues(s)
+		test.S(t).ExpectEquals(values, "'red','green','blue','orange'")
+	}
+	{
+		s := "('red','green','blue','orange')"
+		values := ParseEnumValues(s)
+		test.S(t).ExpectEquals(values, "('red','green','blue','orange')")
+	}
+	{
+		s := "zzz"
+		values := ParseEnumValues(s)
+		test.S(t).ExpectEquals(values, "zzz")
+	}
+}

View File

@@ -37,11 +37,12 @@ type Column struct {
 	IsUnsigned bool
 	Charset    string
 	Type       ColumnType
+	EnumValues string
+	timezoneConversion   *TimezoneConversion
+	enumToTextConversion bool
 	// add Octet length for binary type, fix bytes with suffix "00" get clipped in mysql binlog.
 	// https://github.com/github/gh-ost/issues/909
 	BinaryOctetLength uint
-	timezoneConversion *TimezoneConversion
 }

 func (this *Column) convertArg(arg interface{}, isUniqueKeyColumn bool) interface{} {
@@ -198,6 +199,18 @@ func (this *ColumnList) HasTimezoneConversion(columnName string) bool {
 	return this.GetColumn(columnName).timezoneConversion != nil
 }
+
+func (this *ColumnList) SetEnumToTextConversion(columnName string) {
+	this.GetColumn(columnName).enumToTextConversion = true
+}
+
+func (this *ColumnList) IsEnumToTextConversion(columnName string) bool {
+	return this.GetColumn(columnName).enumToTextConversion
+}
+
+func (this *ColumnList) SetEnumValues(columnName string, enumValues string) {
+	this.GetColumn(columnName).EnumValues = enumValues
+}

 func (this *ColumnList) String() string {
 	return strings.Join(this.Names(), ",")
 }

View File

@@ -0,0 +1,26 @@
+drop table if exists gh_ost_test;
+create table gh_ost_test (
+  id int auto_increment,
+  i int not null,
+  e enum('red', 'green', 'blue', 'orange') null default null collate 'utf8_bin',
+  primary key(id)
+) auto_increment=1;
+
+insert into gh_ost_test values (null, 7, 'red');
+
+drop event if exists gh_ost_test;
+delimiter ;;
+create event gh_ost_test
+  on schedule every 1 second
+  starts current_timestamp
+  ends current_timestamp + interval 60 second
+  on completion not preserve
+  enable
+  do
+begin
+  insert into gh_ost_test values (null, 11, 'red');
+  insert into gh_ost_test values (null, 13, 'green');
+  insert into gh_ost_test values (null, 17, 'blue');
+  set @last_insert_id := last_insert_id();
+  update gh_ost_test set e='orange' where id = @last_insert_id;
+end ;;

View File

@@ -0,0 +1 @@
+--alter="change e e varchar(32) not null default ''"