Merge pull request #269 from github/unified-tz-solution
Unified tz solution
commit 24af2e3d05
build.sh (2 changed lines)
@@ -2,7 +2,7 @@
 #
 #

-RELEASE_VERSION="1.0.21"
+RELEASE_VERSION="1.0.23"

 function build {
 	osname=$1
@@ -114,6 +114,7 @@ type MigrationContext struct {

 	Hostname string
 	AssumeMasterHostname string
+	ApplierTimeZone string
 	TableEngine string
 	RowsEstimate int64
 	RowsDeltaEstimate int64
@@ -99,8 +99,8 @@ func main() {
 	flag.StringVar(&migrationContext.HooksHintMessage, "hooks-hint", "", "arbitrary message to be injected to hooks via GH_OST_HOOKS_HINT, for your convenience")

 	maxLoad := flag.String("max-load", "", "Comma delimited status-name=threshold. e.g: 'Threads_running=100,Threads_connected=500'. When status exceeds threshold, app throttles writes")
-	criticalLoad := flag.String("critical-load", "", "Comma delimited status-name=threshold, same format as `--max-load`. When status exceeds threshold, app panics and quits")
-	flag.Int64Var(&migrationContext.CriticalLoadIntervalMilliseconds, "critical-load-interval-millis", 0, "When 0, migration bails out upon meeting critical-load immediately. When non-zero, a second check is done after given interval, and migration only bails out if 2nd check still meets critical load")
+	criticalLoad := flag.String("critical-load", "", "Comma delimited status-name=threshold, same format as --max-load. When status exceeds threshold, app panics and quits")
+	flag.Int64Var(&migrationContext.CriticalLoadIntervalMilliseconds, "critical-load-interval-millis", 0, "When 0, migration immediately bails out upon meeting critical-load. When non-zero, a second check is done after given interval, and migration only bails out if 2nd check still meets critical load")
 	quiet := flag.Bool("quiet", false, "quiet")
 	verbose := flag.Bool("verbose", false, "verbose")
 	debug := flag.Bool("debug", false, "debug mode (very verbose)")
@@ -59,6 +59,9 @@ func (this *Applier) InitDBConnections() (err error) {
 	if err := this.validateConnection(this.singletonDB); err != nil {
 		return err
 	}
+	if err := this.validateAndReadTimeZone(); err != nil {
+		return err
+	}
 	if impliedKey, err := mysql.GetInstanceKey(this.db); err != nil {
 		return err
 	} else {
@@ -81,6 +84,17 @@ func (this *Applier) validateConnection(db *gosql.DB) error {
 	return nil
 }

+// validateAndReadTimeZone potentially reads server time-zone
+func (this *Applier) validateAndReadTimeZone() error {
+	query := `select @@global.time_zone`
+	if err := this.db.QueryRow(query).Scan(&this.migrationContext.ApplierTimeZone); err != nil {
+		return err
+	}
+
+	log.Infof("will use time_zone='%s' on applier", this.migrationContext.ApplierTimeZone)
+	return nil
+}
+
 // showTableStatus returns the output of `show table status like '...'` command
 func (this *Applier) showTableStatus(tableName string) (rowMap sqlutils.RowMap) {
 	rowMap = nil
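A note on the query above: @@global.time_zone usually reports either SYSTEM or an explicit offset such as +00:00, and whatever string it returns is what lands in ApplierTimeZone. A minimal standalone sketch of the same read (not gh-ost code; the DSN is hypothetical):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/test") // hypothetical DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Same query the applier issues; the result is usually "SYSTEM" or an
	// offset such as "+00:00".
	var tz string
	if err := db.QueryRow(`select @@global.time_zone`).Scan(&tz); err != nil {
		log.Fatal(err)
	}
	fmt.Println("applier global time_zone:", tz)
}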
@@ -414,7 +428,29 @@ func (this *Applier) ApplyIterationInsertQuery() (chunkSize int64, rowsAffected
 	if err != nil {
 		return chunkSize, rowsAffected, duration, err
 	}
-	sqlResult, err := sqlutils.Exec(this.db, query, explodedArgs...)
+
+	sqlResult, err := func() (gosql.Result, error) {
+		tx, err := this.db.Begin()
+		if err != nil {
+			return nil, err
+		}
+		sessionQuery := fmt.Sprintf(`SET
+			SESSION time_zone = '%s',
+			sql_mode = CONCAT(@@session.sql_mode, ',STRICT_ALL_TABLES')
+			`, this.migrationContext.ApplierTimeZone)
+		if _, err := tx.Exec(sessionQuery); err != nil {
+			return nil, err
+		}
+		result, err := tx.Exec(query, explodedArgs...)
+		if err != nil {
+			return nil, err
+		}
+		if err := tx.Commit(); err != nil {
+			return nil, err
+		}
+		return result, nil
+	}()
+
 	if err != nil {
 		return chunkSize, rowsAffected, duration, err
 	}
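The wrapper above pins session state to the row-copy statements: because database/sql binds a transaction to a single connection, the SET SESSION only affects statements issued inside that transaction. A minimal sketch of the same pattern in isolation (assumed DSN and table name, not gh-ost's implementation):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

// execWithSession runs one statement inside a transaction after pinning the
// session time_zone and sql_mode, mirroring the approach in the hunk above.
func execWithSession(db *sql.DB, timeZone string, query string, args ...interface{}) (sql.Result, error) {
	tx, err := db.Begin()
	if err != nil {
		return nil, err
	}
	sessionQuery := fmt.Sprintf(`SET SESSION time_zone = '%s', sql_mode = CONCAT(@@session.sql_mode, ',STRICT_ALL_TABLES')`, timeZone)
	if _, err := tx.Exec(sessionQuery); err != nil {
		tx.Rollback()
		return nil, err
	}
	result, err := tx.Exec(query, args...)
	if err != nil {
		tx.Rollback()
		return nil, err
	}
	if err := tx.Commit(); err != nil {
		return nil, err
	}
	return result, nil
}

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/test") // hypothetical DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	// hypothetical table `t` with a datetime column `dt`
	if _, err := execWithSession(db, "+00:00", "insert into t (dt) values (now())"); err != nil {
		log.Fatal(err)
	}
}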
@@ -871,10 +907,11 @@ func (this *Applier) ApplyDMLEventQuery(dmlEvent *binlog.BinlogDMLEvent) error {
 	if err != nil {
 		return err
 	}
-	if _, err := tx.Exec(`SET
+	sessionQuery := `SET
 		SESSION time_zone = '+00:00',
 		sql_mode = CONCAT(@@session.sql_mode, ',STRICT_ALL_TABLES')
-		`); err != nil {
+		`
+	if _, err := tx.Exec(sessionQuery); err != nil {
 		return err
 	}
 	if _, err := tx.Exec(query, args...); err != nil {
@@ -138,6 +138,20 @@ func (this *Inspector) InspectOriginalAndGhostTables() (err error) {
 	this.applyColumnTypes(this.migrationContext.DatabaseName, this.migrationContext.OriginalTableName, this.migrationContext.OriginalTableColumns, this.migrationContext.SharedColumns)
 	this.applyColumnTypes(this.migrationContext.DatabaseName, this.migrationContext.GetGhostTableName(), this.migrationContext.GhostTableColumns, this.migrationContext.MappedSharedColumns)

+	for i := range this.migrationContext.SharedColumns.Columns() {
+		column := this.migrationContext.SharedColumns.Columns()[i]
+		mappedColumn := this.migrationContext.MappedSharedColumns.Columns()[i]
+		if column.Name == mappedColumn.Name && column.Type == sql.DateTimeColumnType && mappedColumn.Type == sql.TimestampColumnType {
+			this.migrationContext.MappedSharedColumns.SetConvertDatetimeToTimestamp(column.Name, this.migrationContext.ApplierTimeZone)
+		}
+	}
+
+	for _, column := range this.migrationContext.UniqueKey.Columns.Columns() {
+		if this.migrationContext.MappedSharedColumns.HasTimezoneConversion(column.Name) {
+			return fmt.Errorf("No support at this time for converting a column from DATETIME to TIMESTAMP that is also part of the chosen unique key. Column: %s, key: %s", column.Name, this.migrationContext.UniqueKey.Name)
+		}
+	}
+
 	return nil
 }

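For intuition on why the conversion is registered: DATETIME values are stored as-is, while TIMESTAMP values are normalized to UTC through the session time_zone, so a DATETIME can only be copied into a TIMESTAMP column faithfully if it is first converted from the applier's time zone to +00:00, which is what convert_tz() does in the generated queries. An illustrative sketch (hypothetical DSN; the offset '+05:00' stands in for ApplierTimeZone):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/test") // hypothetical DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Same conversion the generated queries perform: interpret a DATETIME
	// literal in the applier's zone and express it in UTC. Offset arguments
	// do not require the mysql time zone tables.
	var converted string
	query := `select convert_tz('2010-10-20 10:20:30', '+05:00', '+00:00')`
	if err := db.QueryRow(query).Scan(&converted); err != nil {
		log.Fatal(err)
	}
	fmt.Println("UTC value to write into the TIMESTAMP column:", converted) // 2010-10-20 05:20:30
}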
@@ -502,11 +516,22 @@ func (this *Inspector) applyColumnTypes(databaseName, tableName string, columnsL
 		`
 	err := sqlutils.QueryRowsMap(this.db, query, func(m sqlutils.RowMap) error {
 		columnName := m.GetString("COLUMN_NAME")
-		if strings.Contains(m.GetString("COLUMN_TYPE"), "unsigned") {
+		columnType := m.GetString("COLUMN_TYPE")
+		if strings.Contains(columnType, "unsigned") {
 			for _, columnsList := range columnsLists {
 				columnsList.SetUnsigned(columnName)
 			}
 		}
+		if strings.Contains(columnType, "timestamp") {
+			for _, columnsList := range columnsLists {
+				columnsList.GetColumn(columnName).Type = sql.TimestampColumnType
+			}
+		}
+		if strings.Contains(columnType, "datetime") {
+			for _, columnsList := range columnsLists {
+				columnsList.GetColumn(columnName).Type = sql.DateTimeColumnType
+			}
+		}
 		if charset := m.GetString("CHARACTER_SET_NAME"); charset != "" {
 			for _, columnsList := range columnsLists {
 				columnsList.SetCharset(columnName, charset)
@@ -204,7 +204,7 @@ func (this *EventsStreamer) StreamEvents(canStopStreaming func() bool) error {
 			successiveFailures = 0
 		}
 		if successiveFailures > this.migrationContext.MaxRetries() {
-			return fmt.Errorf("%d successive failures in streamer reconnect at coordinates %+v", lastAppliedRowsEventHint)
+			return fmt.Errorf("%d successive failures in streamer reconnect at coordinates %+v", successiveFailures, this.GetReconnectBinlogCoordinates())
 		}

 		// Reposition at same binlog file.
@@ -32,6 +32,20 @@ func EscapeName(name string) string {
 	return fmt.Sprintf("`%s`", name)
 }

+func buildColumnsPreparedValues(columns *ColumnList) []string {
+	values := make([]string, columns.Len(), columns.Len())
+	for i, column := range columns.Columns() {
+		var token string
+		if column.timezoneConversion != nil {
+			token = fmt.Sprintf("convert_tz(?, '%s', '%s')", column.timezoneConversion.ToTimezone, "+00:00")
+		} else {
+			token = "?"
+		}
+		values[i] = token
+	}
+	return values
+}
+
 func buildPreparedValues(length int) []string {
 	values := make([]string, length, length)
 	for i := 0; i < length; i++ {
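As an illustration of the tokens this helper emits (a hypothetical sketch, not gh-ost code; the schema and column names are invented):

package main

import "fmt"

func main() {
	// Placeholder tokens as buildColumnsPreparedValues would emit them for
	// columns (id, t), where only `t` has a DATETIME-to-TIMESTAMP conversion
	// registered from applier zone '+05:00' (hypothetical values):
	tokens := []string{"?", fmt.Sprintf("convert_tz(?, '%s', '%s')", "+05:00", "+00:00")}

	// A row-copy statement built from these tokens (illustrative only):
	query := fmt.Sprintf("replace into mydb._mytable_gho (`id`, `t`) values (%s, %s)", tokens[0], tokens[1])
	fmt.Println(query)
	// Output: replace into mydb._mytable_gho (`id`, `t`) values (?, convert_tz(?, '+05:00', '+00:00'))
}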
@@ -83,13 +97,19 @@ func BuildEqualsPreparedComparison(columns []string) (result string, err error)
 	return BuildEqualsComparison(columns, values)
 }

-func BuildSetPreparedClause(columns []string) (result string, err error) {
-	if len(columns) == 0 {
+func BuildSetPreparedClause(columns *ColumnList) (result string, err error) {
+	if columns.Len() == 0 {
 		return "", fmt.Errorf("Got 0 columns in BuildSetPreparedClause")
 	}
 	setTokens := []string{}
-	for _, column := range columns {
-		setTokens = append(setTokens, fmt.Sprintf("%s=?", EscapeName(column)))
+	for _, column := range columns.Columns() {
+		var setToken string
+		if column.timezoneConversion != nil {
+			setToken = fmt.Sprintf("%s=convert_tz(?, '%s', '%s')", EscapeName(column.Name), column.timezoneConversion.ToTimezone, "+00:00")
+		} else {
+			setToken = fmt.Sprintf("%s=?", EscapeName(column.Name))
+		}
+		setTokens = append(setTokens, setToken)
 	}
 	return strings.Join(setTokens, ", "), nil
 }
@@ -354,7 +374,7 @@ func BuildDMLInsertQuery(databaseName, tableName string, tableColumns, sharedCol
 	for i := range mappedSharedColumnNames {
 		mappedSharedColumnNames[i] = EscapeName(mappedSharedColumnNames[i])
 	}
-	preparedValues := buildPreparedValues(mappedSharedColumns.Len())
+	preparedValues := buildColumnsPreparedValues(mappedSharedColumns)

 	result = fmt.Sprintf(`
 		replace /* gh-ost %s.%s */ into
@@ -404,11 +424,7 @@ func BuildDMLUpdateQuery(databaseName, tableName string, tableColumns, sharedCol
 		uniqueKeyArgs = append(uniqueKeyArgs, arg)
 	}

-	mappedSharedColumnNames := duplicateNames(mappedSharedColumns.Names())
-	for i := range mappedSharedColumnNames {
-		mappedSharedColumnNames[i] = EscapeName(mappedSharedColumnNames[i])
-	}
-	setClause, err := BuildSetPreparedClause(mappedSharedColumnNames)
+	setClause, err := BuildSetPreparedClause(mappedSharedColumns)

 	equalsComparison, err := BuildEqualsPreparedComparison(uniqueKeyColumns.Names())
 	result = fmt.Sprintf(`
@@ -79,19 +79,19 @@ func TestBuildEqualsPreparedComparison(t *testing.T) {

 func TestBuildSetPreparedClause(t *testing.T) {
 	{
-		columns := []string{"c1"}
+		columns := NewColumnList([]string{"c1"})
 		clause, err := BuildSetPreparedClause(columns)
 		test.S(t).ExpectNil(err)
 		test.S(t).ExpectEquals(clause, "`c1`=?")
 	}
 	{
-		columns := []string{"c1", "c2"}
+		columns := NewColumnList([]string{"c1", "c2"})
 		clause, err := BuildSetPreparedClause(columns)
 		test.S(t).ExpectNil(err)
 		test.S(t).ExpectEquals(clause, "`c1`=?, `c2`=?")
 	}
 	{
-		columns := []string{}
+		columns := NewColumnList([]string{})
 		_, err := BuildSetPreparedClause(columns)
 		test.S(t).ExpectNotNil(err)
 	}
@@ -12,10 +12,24 @@ import (
 	"strings"
 )

+type ColumnType int
+
+const (
+	UnknownColumnType ColumnType = iota
+	TimestampColumnType = iota
+	DateTimeColumnType = iota
+)
+
+type TimezoneConvertion struct {
+	ToTimezone string
+}
+
 type Column struct {
-	Name       string
-	IsUnsigned bool
-	Charset    string
+	Name               string
+	IsUnsigned         bool
+	Charset            string
+	Type               ColumnType
+	timezoneConversion *TimezoneConvertion
 }

 func (this *Column) convertArg(arg interface{}) interface{} {
@@ -112,20 +126,43 @@ func (this *ColumnList) Names() []string {
 	return names
 }

+func (this *ColumnList) GetColumn(columnName string) *Column {
+	if ordinal, ok := this.Ordinals[columnName]; ok {
+		return &this.columns[ordinal]
+	}
+	return nil
+}
+
 func (this *ColumnList) SetUnsigned(columnName string) {
-	this.columns[this.Ordinals[columnName]].IsUnsigned = true
+	this.GetColumn(columnName).IsUnsigned = true
 }

 func (this *ColumnList) IsUnsigned(columnName string) bool {
-	return this.columns[this.Ordinals[columnName]].IsUnsigned
+	return this.GetColumn(columnName).IsUnsigned
 }

 func (this *ColumnList) SetCharset(columnName string, charset string) {
-	this.columns[this.Ordinals[columnName]].Charset = charset
+	this.GetColumn(columnName).Charset = charset
 }

 func (this *ColumnList) GetCharset(columnName string) string {
-	return this.columns[this.Ordinals[columnName]].Charset
+	return this.GetColumn(columnName).Charset
 }

+func (this *ColumnList) SetColumnType(columnName string, columnType ColumnType) {
+	this.GetColumn(columnName).Type = columnType
+}
+
+func (this *ColumnList) GetColumnType(columnName string) ColumnType {
+	return this.GetColumn(columnName).Type
+}
+
+func (this *ColumnList) SetConvertDatetimeToTimestamp(columnName string, toTimezone string) {
+	this.GetColumn(columnName).timezoneConversion = &TimezoneConvertion{ToTimezone: toTimezone}
+}
+
+func (this *ColumnList) HasTimezoneConversion(columnName string) bool {
+	return this.GetColumn(columnName).timezoneConversion != nil
+}
+
 func (this *ColumnList) String() string {
@@ -28,3 +28,17 @@ func TestParseColumnList(t *testing.T) {
 	test.S(t).ExpectEquals(columnList.Ordinals["category"], 1)
 	test.S(t).ExpectEquals(columnList.Ordinals["max_len"], 2)
 }
+
+func TestGetColumn(t *testing.T) {
+	names := "id,category,max_len"
+	columnList := ParseColumnList(names)
+	{
+		column := columnList.GetColumn("category")
+		test.S(t).ExpectTrue(column != nil)
+		test.S(t).ExpectEquals(column.Name, "category")
+	}
+	{
+		column := columnList.GetColumn("no_such_column")
+		test.S(t).ExpectTrue(column == nil)
+	}
+}
localtests/datetime-to-timestamp-pk-fail/create.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int unsigned auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 timestamp,
  dt2 datetime,
  t datetime,
  updated tinyint unsigned default 0,
  primary key(id, t),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 7, null, now(), now(), '2010-10-20 10:20:30', 0);

  insert into gh_ost_test values (null, 11, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set t=t + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;
end ;;
localtests/datetime-to-timestamp-pk-fail/expect_failure (new file, 1 line)
@@ -0,0 +1 @@
No support at this time for converting a column from DATETIME to TIMESTAMP that is also part of the chosen unique key
localtests/datetime-to-timestamp-pk-fail/extra_args (new file, 1 line)
@@ -0,0 +1 @@
--alter="change column t t timestamp not null"
localtests/datetime-to-timestamp/create.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int unsigned auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 timestamp,
  dt2 datetime,
  t datetime,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 7, null, now(), now(), '2010-10-20 10:20:30', 0);

  insert into gh_ost_test values (null, 11, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set t=t + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;
end ;;
localtests/datetime-to-timestamp/extra_args (new file, 1 line)
@@ -0,0 +1 @@
--alter="change column t t timestamp not null"
localtests/datetime/create.sql (new file, 37 lines)
@@ -0,0 +1,37 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  dt0 datetime default current_timestamp,
  dt1 datetime,
  dt2 datetime,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 11, null, now(), now(), 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;

  insert into gh_ost_test values (null, 17, null, now(), now(), 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 17 order by id desc limit 1;

  insert into gh_ost_test values (null, 19, null, now(), now(), 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 19 order by id desc limit 1;

  insert into gh_ost_test values (null, 23, null, now(), now(), 0);
  update gh_ost_test set dt2=now() + interval 1 minute, updated = 1 where i = 23 order by id desc limit 1;
end ;;
localtests/fail-fk-parent/destroy.sql (new file, 1 line)
@@ -0,0 +1 @@
drop table if exists gh_ost_test_child;
@@ -0,0 +1 @@
Parent-side foreign keys are not supported
@@ -0,0 +1 @@
Child-side foreign keys are not supported. Bailing out
@@ -9,6 +9,7 @@

 tests_path=$(dirname $0)
 test_logfile=/tmp/gh-ost-test.log
+ghost_binary=/tmp/gh-ost-test
 exec_command_file=/tmp/gh-ost-test.bash

 test_pattern="${1:-.}"
@@ -68,7 +69,7 @@ test_single() {
   echo_dot
   sleep 1
   #
-  cmd="go run go/cmd/gh-ost/main.go \
+  cmd="$ghost_binary \
     --user=gh-ost \
     --password=gh-ost \
     --host=$replica_host \
@@ -97,12 +98,27 @@ test_single() {

   execution_result=$?

+  if [ -f $tests_path/$test_name/destroy.sql ] ; then
+    gh-ost-test-mysql-master --default-character-set=utf8mb4 test < $tests_path/$test_name/destroy.sql
+  fi
+
   if [ -f $tests_path/$test_name/expect_failure ] ; then
     if [ $execution_result -eq 0 ] ; then
       echo
       echo "ERROR $test_name execution was expected to exit on error but did not. cat $test_logfile"
       return 1
     fi
+    if [ -s $tests_path/$test_name/expect_failure ] ; then
+      # 'expect_failure' file has content. We expect to find this content in the log.
+      expected_error_message="$(cat $tests_path/$test_name/expect_failure)"
+      if grep -q "$expected_error_message" $test_logfile ; then
+        return 0
+      fi
+      echo
+      echo "ERROR $test_name execution was expected to exit with error message '${expected_error_message}' but did not. cat $test_logfile"
+      return 1
+    fi
+    # 'expect_failure' file has no content. We generally agree that the failure is correct
     return 0
   fi

@@ -126,7 +142,13 @@ test_single() {
   fi
 }

+build_binary() {
+  echo "Building"
+  go build -o $ghost_binary go/cmd/gh-ost/main.go
+}
+
 test_all() {
+  build_binary
   find $tests_path ! -path . -type d -mindepth 1 -maxdepth 1 | cut -d "/" -f 3 | egrep "$test_pattern" | while read test_name ; do
     test_single "$test_name"
     if [ $? -ne 0 ] ; then
localtests/timestamp-datetime/create.sql (new file, 33 lines)
@@ -0,0 +1,33 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  ts timestamp default current_timestamp,
  dt datetime,
  ts2ts timestamp null,
  ts2dt datetime null,
  dt2ts timestamp null,
  dt2dt datetime null,
  updated tinyint unsigned default 0,
  primary key(id)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 11, now(), now(),null, null, null, null, 0);
  update gh_ost_test set ts2ts=ts, ts2dt=ts, dt2ts=dt, dt2dt=dt where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), 0);
  update gh_ost_test set ts2ts=ts, ts2dt=ts, dt2ts=dt, dt2dt=dt where i = 13 order by id desc limit 1;

  insert into gh_ost_test values (null, 17, null, '2016-07-06 10:20:30', '2016-07-06 10:20:30', 0);
  update gh_ost_test set ts2ts=ts, ts2dt=ts, dt2ts=dt, dt2dt=dt where i = 17 order by id desc limit 1;
end ;;
localtests/timestamp-to-datetime/create.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 timestamp,
  dt2 datetime,
  t datetime,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 7, null, now(), now(), '2010-10-20 10:20:30', 0);

  insert into gh_ost_test values (null, 11, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;
end ;;
localtests/timestamp-to-datetime/extra_args (new file, 1 line)
@@ -0,0 +1 @@
--alter="change column t t datetime not null"
localtests/timestamp/create.sql (new file, 37 lines)
@@ -0,0 +1,37 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 timestamp,
  ts2 timestamp,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 11, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  insert into gh_ost_test values (null, 13, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;

  insert into gh_ost_test values (null, 17, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 17 order by id desc limit 1;

  insert into gh_ost_test values (null, 19, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 19 order by id desc limit 1;

  insert into gh_ost_test values (null, 23, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 23 order by id desc limit 1;
end ;;
localtests/tz-datetime-ts/create.sql (new file, 44 lines)
@@ -0,0 +1,44 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 timestamp,
  dt2 datetime,
  t datetime,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 7, null, now(), now(), '2010-10-20 10:20:30', 0);

  insert into gh_ost_test values (null, 11, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 11 order by id desc limit 1;

  set session time_zone='system';
  insert into gh_ost_test values (null, 13, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 13 order by id desc limit 1;

  set session time_zone='+00:00';
  insert into gh_ost_test values (null, 17, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 17 order by id desc limit 1;

  set session time_zone='-03:00';
  insert into gh_ost_test values (null, 19, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 19 order by id desc limit 1;

  set session time_zone='+05:00';
  insert into gh_ost_test values (null, 23, null, now(), now(), '2010-10-20 10:20:30', 0);
  update gh_ost_test set ts2=now() + interval 1 minute, updated = 1 where i = 23 order by id desc limit 1;
end ;;
localtests/tz-datetime-ts/extra_args (new file, 1 line)
@@ -0,0 +1 @@
--alter="change column t t timestamp not null"
localtests/tz-datetime/create.sql (new file, 41 lines)
@@ -0,0 +1,41 @@
drop table if exists gh_ost_test;
create table gh_ost_test (
  id int auto_increment,
  i int not null,
  ts0 timestamp default current_timestamp,
  ts1 datetime,
  ts2 datetime,
  updated tinyint unsigned default 0,
  primary key(id),
  key i_idx(i)
) auto_increment=1;

drop event if exists gh_ost_test;
delimiter ;;
create event gh_ost_test
  on schedule every 1 second
  starts current_timestamp
  ends current_timestamp + interval 60 second
  on completion not preserve
  enable
  do
begin
  insert into gh_ost_test values (null, 11, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 10 minute, updated = 1 where i = 11 order by id desc limit 1;

  set session time_zone='system';
  insert into gh_ost_test values (null, 13, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 10 minute, updated = 1 where i = 13 order by id desc limit 1;

  set session time_zone='+00:00';
  insert into gh_ost_test values (null, 17, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 10 minute, updated = 1 where i = 17 order by id desc limit 1;

  set session time_zone='-03:00';
  insert into gh_ost_test values (null, 19, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 10 minute, updated = 1 where i = 19 order by id desc limit 1;

  set session time_zone='+05:00';
  insert into gh_ost_test values (null, 23, null, now(), now(), 0);
  update gh_ost_test set ts2=now() + interval 10 minute, updated = 1 where i = 23 order by id desc limit 1;
end ;;
vendor/github.com/siddontang/go-mysql/replication/row_event.go (generated, vendored; 10 changed lines)
@@ -591,7 +591,7 @@ func decodeBit(data []byte, nbits int, length int) (value int64, err error) {
 	return
 }

-func decodeTimestamp2(data []byte, dec uint16) (string, int, error) {
+func decodeTimestamp2(data []byte, dec uint16) (interface{}, int, error) {
 	//get timestamp binary length
 	n := int(4 + (dec+1)/2)
 	sec := int64(binary.BigEndian.Uint32(data[0:4]))
@@ -609,13 +609,13 @@ func decodeTimestamp2(data []byte, dec uint16) (string, int, error) {
 		return "0000-00-00 00:00:00", n, nil
 	}

-	t := time.Unix(sec, usec*1000).UTC() // .UTC() converted by shlomi-noach
-	return t.Format(TimeFormat), n, nil
+	t := time.Unix(sec, usec*1000)
+	return t, n, nil
 }

 const DATETIMEF_INT_OFS int64 = 0x8000000000

-func decodeDatetime2(data []byte, dec uint16) (string, int, error) {
+func decodeDatetime2(data []byte, dec uint16) (interface{}, int, error) {
 	//get datetime binary length
 	n := int(5 + (dec+1)/2)

@@ -657,7 +657,7 @@ func decodeDatetime2(data []byte, dec uint16) (string, int, error) {
 	minute := int((hms >> 6) % (1 << 6))
 	hour := int((hms >> 12))

-	return fmt.Sprintf("%04d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second), n, nil
+	return fmt.Sprintf("%04d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second), n, nil // commented by Shlomi Noach. Yes I know about `git blame`
 }

 const TIMEF_OFS int64 = 0x800000000000
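With decodeTimestamp2 now returning a time.Time (as interface{}) rather than a pre-formatted string, the consumer decides the zone and layout. A minimal sketch of such downstream handling (the epoch value is arbitrary; this is not the vendored library's code):

package main

import (
	"fmt"
	"time"
)

func main() {
	// A TIMESTAMP binlog value now arrives as a time.Time wrapped in
	// interface{}; format it explicitly in UTC at the point of use.
	v := interface{}(time.Unix(1468750830, 0))
	if t, ok := v.(time.Time); ok {
		fmt.Println(t.UTC().Format("2006-01-02 15:04:05"))
	}
}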