Merge pull request #21 from openark/fetch-upstream-202105
Fetch upstream 2021-05
commit 1876a90818
.github/workflows/ci.yml (4 changed lines)

@@ -10,10 +10,10 @@ jobs:
     steps:
     - uses: actions/checkout@v2

-    - name: Set up Go 1.15
+    - name: Set up Go 1.16
       uses: actions/setup-go@v1
       with:
-        go-version: 1.15
+        go-version: 1.16

     - name: Build
      run: script/cibuild
.github/workflows/replica-tests.yml (9 changed lines)

@@ -6,14 +6,19 @@ jobs:
   build:

     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        version: [mysql-5.5.62,mysql-5.6.43,mysql-5.7.25,mysql-8.0.16]

     steps:
     - uses: actions/checkout@v2

-    - name: Set up Go 1.14
+    - name: Set up Go 1.16
       uses: actions/setup-go@v1
       with:
-        go-version: 1.14
+        go-version: 1.16

     - name: migration tests
+      env:
+        TEST_MYSQL_VERSION: ${{ matrix.version }}
       run: script/cibuild-gh-ost-replica-tests
@@ -1,6 +1,4 @@
-#
-
-FROM golang:1.15.6
+FROM golang:1.16.4

 RUN apt-get update
 RUN apt-get install -y ruby ruby-dev rubygems build-essential

@@ -1,4 +1,4 @@
-FROM golang:1.15.6
+FROM golang:1.16.4
 LABEL maintainer="github@github.com"

 RUN apt-get update
@@ -1,6 +1,6 @@
 # gh-ost

-[![build status](https://travis-ci.org/github/gh-ost.svg)](https://travis-ci.org/github/gh-ost) [![downloads](https://img.shields.io/github/downloads/github/gh-ost/total.svg)](https://github.com/github/gh-ost/releases) [![release](https://img.shields.io/github/release/github/gh-ost.svg)](https://github.com/github/gh-ost/releases)
+[![ci](https://github.com/github/gh-ost/actions/workflows/ci.yml/badge.svg)](https://github.com/github/gh-ost/actions/workflows/ci.yml) [![replica-tests](https://github.com/github/gh-ost/actions/workflows/replica-tests.yml/badge.svg)](https://github.com/github/gh-ost/actions/workflows/replica-tests.yml) [![downloads](https://img.shields.io/github/downloads/github/gh-ost/total.svg)](https://github.com/github/gh-ost/releases) [![release](https://img.shields.io/github/release/github/gh-ost.svg)](https://github.com/github/gh-ost/releases)

 #### GitHub's online schema migration for MySQL <img src="doc/images/gh-ost-logo-light-160.png" align="right">

@@ -95,7 +95,7 @@ Please see [Coding gh-ost](doc/coding-ghost.md) for a guide to getting started d

 [Download latest release here](https://github.com/github/gh-ost/releases/latest)

-`gh-ost` is a Go project; it is built with Go `1.14` and above. To build on your own, use either:
+`gh-ost` is a Go project; it is built with Go `1.15` and above. To build on your own, use either:
 - [script/build](https://github.com/github/gh-ost/blob/master/script/build) - this is the same build script used by CI hence the authoritative; artifact is `./bin/gh-ost` binary.
 - [build.sh](https://github.com/github/gh-ost/blob/master/build.sh) for building `tar.gz` artifacts in `/tmp/gh-ost`
@@ -1 +1 @@
-1.1.3
+1.1.1
build.sh (9 changed lines)

@@ -18,15 +18,16 @@ function build {
   GOOS=$3
   GOARCH=$4

-  if ! go version | egrep -q 'go(1\.1[456])' ; then
-    echo "go version must be 1.14 or above"
+  if ! go version | egrep -q 'go(1\.1[56])' ; then
+    echo "go version must be 1.15 or above"
     exit 1
   fi

+  # TODO: remove GO111MODULE once gh-ost uses Go modules
   echo "Building ${osname} binary"
   export GOOS
   export GOARCH
-  go build -ldflags "$ldflags" -o $buildpath/$target go/cmd/gh-ost/main.go
+  GO111MODULE=off go build -ldflags "$ldflags" -o $buildpath/$target go/cmd/gh-ost/main.go

   if [ $? -ne 0 ]; then
     echo "Build failed for ${osname}"

@@ -40,7 +41,7 @@ function build {
   builddir=$(setuptree)
   cp $buildpath/$target $builddir/gh-ost/usr/bin
   cd $buildpath
-  fpm -v "${RELEASE_VERSION}" --epoch 1 -f -s dir -n gh-ost -m 'shlomi-noach <shlomi-noach+gh-ost-deb@github.com>' --description "GitHub's Online Schema Migrations for MySQL " --url "https://github.com/github/gh-ost" --vendor "GitHub" --license "Apache 2.0" -C $builddir/gh-ost --prefix=/ -t rpm .
+  fpm -v "${RELEASE_VERSION}" --epoch 1 -f -s dir -n gh-ost -m 'shlomi-noach <shlomi-noach+gh-ost-deb@github.com>' --description "GitHub's Online Schema Migrations for MySQL " --url "https://github.com/github/gh-ost" --vendor "GitHub" --license "Apache 2.0" -C $builddir/gh-ost --prefix=/ -t rpm --rpm-rpmbuild-define "_build_id_links none" .
   fpm -v "${RELEASE_VERSION}" --epoch 1 -f -s dir -n gh-ost -m 'shlomi-noach <shlomi-noach+gh-ost-deb@github.com>' --description "GitHub's Online Schema Migrations for MySQL " --url "https://github.com/github/gh-ost" --vendor "GitHub" --license "Apache 2.0" -C $builddir/gh-ost --prefix=/ -t deb --deb-no-default-config-files .
 fi
 }
@@ -181,6 +181,9 @@ Optionally involve the process ID, for example: `--replica-server-id=$((10000000
 It's on you to choose a number that does not collide with another `gh-ost` or another running replica.
 See also: [`concurrent-migrations`](cheatsheet.md#concurrent-migrations) on the cheatsheet.

+### serve-socket-file
+
+Defaults to an auto-determined and advertised upon startup file. Defines Unix socket file to serve on.
 ### skip-foreign-key-checks

 By default `gh-ost` verifies no foreign keys exist on the migrated table. On servers with large number of tables this check can take a long time. If you're absolutely certain no foreign keys exist (table does not reference other table nor is referenced by other tables) and wish to save the check time, provide with `--skip-foreign-key-checks`.
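As an aside on the foreign-key check described above, here is a minimal standalone Go sketch of an equivalent lookup against `information_schema` (not gh-ost's actual query): it counts foreign keys that either originate from or point at the given table, which is the kind of work `--skip-foreign-key-checks` bypasses. The DSN, schema, and table names are placeholders.

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

// countForeignKeys counts foreign-key columns where the table either
// references another table or is referenced by another table.
func countForeignKeys(db *sql.DB, schema, table string) (int, error) {
	query := `
		select count(*)
		from information_schema.key_column_usage
		where referenced_table_name is not null
		  and ((table_schema = ? and table_name = ?)
		    or (referenced_table_schema = ? and referenced_table_name = ?))`
	var count int
	err := db.QueryRow(query, schema, table, schema, table).Scan(&count)
	return count, err
}

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	count, err := countForeignKeys(db, "test", "sample_table") // placeholder names
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("foreign keys touching table: %d\n", count)
}
```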
@@ -18,6 +18,8 @@ Both interfaces may serve at the same time. Both respond to simple text command,
 - `status`: returns a detailed status summary of migration progress and configuration
 - `sup`: returns a brief status summary of migration progress
 - `coordinates`: returns recent (though not exactly up to date) binary log coordinates of the inspected server
+- `applier`: returns the hostname of the applier
+- `inspector`: returns the hostname of the inspector
 - `chunk-size=<newsize>`: modify the `chunk-size`; applies on next running copy-iteration
 - `dml-batch-size=<newsize>`: modify the `dml-batch-size`; applies on next applying of binary log events
 - `max-lag-millis=<max-lag>`: modify the maximum replication lag threshold (milliseconds, minimum value is `100`, i.e. `0.1` second)
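For the new `applier` and `inspector` commands, a minimal client sketch in Go that sends one plain-text command over the Unix socket, in the spirit of the interface described above. The socket path below is an assumption; gh-ost advertises the real path on startup (see `--serve-socket-file`). The sketch also assumes the server replies and then closes the connection.

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net"
)

// sendCommand writes a single plain-text command to gh-ost's serving socket
// and returns whatever the server writes back.
func sendCommand(socketPath, command string) (string, error) {
	conn, err := net.Dial("unix", socketPath)
	if err != nil {
		return "", err
	}
	defer conn.Close()

	if _, err := fmt.Fprintf(conn, "%s\n", command); err != nil {
		return "", err
	}
	// Read until the server closes the connection.
	reply, err := io.ReadAll(conn)
	return string(reply), err
}

func main() {
	// Hypothetical socket path, for illustration only.
	reply, err := sendCommand("/tmp/gh-ost.test.sample_table.sock", "applier")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(reply)
}
```

Equivalently, any tool that can write a line to a Unix socket (for example `nc -U <socket>`) can drive the same commands.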
@@ -540,7 +540,7 @@ func (this *Inspector) CountTableRows() error {

 	this.migrationContext.Log.Infof("As instructed, I'm issuing a SELECT COUNT(*) on the table. This may take a while")

-	query := fmt.Sprintf(`select /* gh-ost */ count(*) as rows from %s.%s`, sql.EscapeName(this.migrationContext.DatabaseName), sql.EscapeName(this.migrationContext.OriginalTableName))
+	query := fmt.Sprintf(`select /* gh-ost */ count(*) as count_rows from %s.%s`, sql.EscapeName(this.migrationContext.DatabaseName), sql.EscapeName(this.migrationContext.OriginalTableName))
 	var rowsEstimate int64
 	if err := this.db.QueryRow(query).Scan(&rowsEstimate); err != nil {
 		return err
@@ -567,6 +567,7 @@ func (this *Inspector) applyColumnTypes(databaseName, tableName string, columnsL
 	err := sqlutils.QueryRowsMap(this.db, query, func(m sqlutils.RowMap) error {
 		columnName := m.GetString("COLUMN_NAME")
 		columnType := m.GetString("COLUMN_TYPE")
+		columnOctetLength := m.GetUint("CHARACTER_OCTET_LENGTH")
 		for _, columnsList := range columnsLists {
 			column := columnsList.GetColumn(columnName)
 			if column == nil {

@@ -595,6 +596,10 @@ func (this *Inspector) applyColumnTypes(databaseName, tableName string, columnsL
 				column.Type = sql.EnumColumnType
 				column.EnumValues = sql.ParseEnumValues(m.GetString("COLUMN_TYPE"))
 			}
+			if strings.HasPrefix(columnType, "binary") {
+				column.Type = sql.BinaryColumnType
+				column.BinaryOctetLength = columnOctetLength
+			}
 			if charset := m.GetString("CHARACTER_SET_NAME"); charset != "" {
 				column.Charset = charset
 			}
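For context on the new `CHARACTER_OCTET_LENGTH` lookup, a standalone sketch (not gh-ost code) showing where that value comes from in `information_schema.columns`; a `BINARY(16)` column, for instance, reports an octet length of 16. Connection details and table names are placeholders.

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	rows, err := db.Query(`
		select COLUMN_NAME, COLUMN_TYPE, CHARACTER_OCTET_LENGTH
		from information_schema.columns
		where TABLE_SCHEMA = ? and TABLE_NAME = ?`, "test", "sample_table")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name, columnType string
		var octetLength sql.NullInt64 // NULL for non-string column types
		if err := rows.Scan(&name, &columnType, &octetLength); err != nil {
			log.Fatal(err)
		}
		// e.g. a BINARY(16) column reports COLUMN_TYPE "binary(16)" and octet length 16.
		fmt.Println(name, columnType, octetLength.Int64)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}
```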
@@ -1,5 +1,5 @@
 /*
-   Copyright 2016 GitHub Inc.
+   Copyright 2021 GitHub Inc.
    See https://github.com/github/gh-ost/blob/master/LICENSE
 */

@@ -147,6 +147,8 @@ func (this *Server) applyServerCommand(command string, writer *bufio.Writer) (pr
 status                               # Print a detailed status message
 sup                                  # Print a short status message
 coordinates                          # Print the currently inspected coordinates
+applier                              # Print the hostname of the applier
+inspector                            # Print the hostname of the inspector
 chunk-size=<newsize>                 # Set a new chunk-size
 dml-batch-size=<newsize>             # Set a new dml-batch-size
 nice-ratio=<ratio>                   # Set a new nice-ratio, immediate sleep after each row-copy operation, float (examples: 0 is aggressive, 0.7 adds 70% runtime, 1.0 doubles runtime, 2.0 triples runtime, ...)

@@ -177,6 +179,22 @@ help                                 # This message
 			}
 			return NoPrintStatusRule, fmt.Errorf("coordinates are read-only")
 		}
+	case "applier":
+		if this.migrationContext.ApplierConnectionConfig != nil && this.migrationContext.ApplierConnectionConfig.ImpliedKey != nil {
+			fmt.Fprintf(writer, "Host: %s, Version: %s\n",
+				this.migrationContext.ApplierConnectionConfig.ImpliedKey.String(),
+				this.migrationContext.ApplierMySQLVersion,
+			)
+		}
+		return NoPrintStatusRule, nil
+	case "inspector":
+		if this.migrationContext.InspectorConnectionConfig != nil && this.migrationContext.InspectorConnectionConfig.ImpliedKey != nil {
+			fmt.Fprintf(writer, "Host: %s, Version: %s\n",
+				this.migrationContext.InspectorConnectionConfig.ImpliedKey.String(),
+				this.migrationContext.InspectorMySQLVersion,
+			)
+		}
+		return NoPrintStatusRule, nil
 	case "chunk-size":
 		{
 			if argIsQuestion {
@@ -400,7 +400,7 @@ func BuildDMLDeleteQuery(databaseName, tableName string, tableColumns, uniqueKey
 	}
 	for _, column := range uniqueKeyColumns.Columns() {
 		tableOrdinal := tableColumns.Ordinals[column.Name]
-		arg := column.convertArg(args[tableOrdinal])
+		arg := column.convertArg(args[tableOrdinal], true)
 		uniqueKeyArgs = append(uniqueKeyArgs, arg)
 	}
 	databaseName = EscapeName(databaseName)

@@ -437,7 +437,7 @@ func BuildDMLInsertQuery(databaseName, tableName string, tableColumns, sharedCol

 	for _, column := range sharedColumns.Columns() {
 		tableOrdinal := tableColumns.Ordinals[column.Name]
-		arg := column.convertArg(args[tableOrdinal])
+		arg := column.convertArg(args[tableOrdinal], false)
 		sharedArgs = append(sharedArgs, arg)
 	}

@@ -485,13 +485,13 @@ func BuildDMLUpdateQuery(databaseName, tableName string, tableColumns, sharedCol

 	for _, column := range sharedColumns.Columns() {
 		tableOrdinal := tableColumns.Ordinals[column.Name]
-		arg := column.convertArg(valueArgs[tableOrdinal])
+		arg := column.convertArg(valueArgs[tableOrdinal], false)
 		sharedArgs = append(sharedArgs, arg)
 	}

 	for _, column := range uniqueKeyColumns.Columns() {
 		tableOrdinal := tableColumns.Ordinals[column.Name]
-		arg := column.convertArg(whereArgs[tableOrdinal])
+		arg := column.convertArg(whereArgs[tableOrdinal], true)
 		uniqueKeyArgs = append(uniqueKeyArgs, arg)
 	}
@@ -6,6 +6,7 @@
 package sql

 import (
+	"bytes"
 	"fmt"
 	"reflect"
 	"strconv"

@@ -22,6 +23,7 @@ const (
 	MediumIntColumnType
 	JSONColumnType
 	FloatColumnType
+	BinaryColumnType
 )

 const maxMediumintUnsigned int32 = 16777215

@@ -38,14 +40,31 @@ type Column struct {
 	EnumValues           string
 	timezoneConversion   *TimezoneConversion
 	enumToTextConversion bool
+	// add Octet length for binary type, fix bytes with suffix "00" get clipped in mysql binlog.
+	// https://github.com/github/gh-ost/issues/909
+	BinaryOctetLength uint
 }

-func (this *Column) convertArg(arg interface{}) interface{} {
+func (this *Column) convertArg(arg interface{}, isUniqueKeyColumn bool) interface{} {
 	if s, ok := arg.(string); ok {
 		// string, charset conversion
 		if encoding, ok := charsetEncodingMap[this.Charset]; ok {
 			arg, _ = encoding.NewDecoder().String(s)
 		}
+
+		if this.Type == BinaryColumnType && isUniqueKeyColumn {
+			arg2Bytes := []byte(arg.(string))
+			size := len(arg2Bytes)
+			if uint(size) < this.BinaryOctetLength {
+				buf := bytes.NewBuffer(arg2Bytes)
+				for i := uint(0); i < (this.BinaryOctetLength - uint(size)); i++ {
+					buf.Write([]byte{0})
+				}
+				arg = buf.String()
+			}
+		}
+
 		return arg
 	}
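The `BinaryOctetLength` and `convertArg` changes address github/gh-ost#909: per the comment in the diff, trailing `0x00` bytes of a fixed-length `BINARY` value can get clipped in the binlog row image, so a value used to locate a row by its unique key (the `true` argument in the `builder.go` hunks above) is re-padded to the column's declared width before being bound into a WHERE clause; values that are only being written are left alone, since MySQL right-pads `BINARY` columns on storage anyway. A small standalone illustration of that padding (not the gh-ost type itself):

```go
package main

import (
	"bytes"
	"fmt"
)

// padBinaryValue mirrors the new convertArg behaviour for BinaryColumnType
// when isUniqueKeyColumn is true: right-pad with 0x00 up to the declared
// octet length of the BINARY(n) column.
func padBinaryValue(value string, octetLength uint) string {
	valueBytes := []byte(value)
	if uint(len(valueBytes)) >= octetLength {
		return value
	}
	buf := bytes.NewBuffer(valueBytes)
	for i := uint(len(valueBytes)); i < octetLength; i++ {
		buf.WriteByte(0)
	}
	return buf.String()
}

func main() {
	// A BINARY(6) value whose trailing zero bytes were stripped, e.g. as read
	// from a binlog row image.
	v := "ab\x00c"
	padded := padBinaryValue(v, 6)
	fmt.Printf("%d bytes -> %d bytes: %q\n", len(v), len(padded), padded)
	// Prints: 4 bytes -> 6 bytes: "ab\x00c\x00\x00"
}
```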
@@ -242,7 +242,10 @@ build_binary() {
     echo "Using binary: $ghost_binary"
     return 0
   fi
-  go build -o $ghost_binary go/cmd/gh-ost/main.go
+
+  # TODO: remove GO111MODULE once gh-ost uses Go modules
+  GO111MODULE=off go build -o $ghost_binary go/cmd/gh-ost/main.go
+
   if [ $? -ne 0 ] ; then
     echo "Build failure"
     exit 1
@@ -17,4 +17,5 @@ export GOPATH="$PWD/.gopath"
 cd .gopath/src/github.com/github/gh-ost

 # We put the binaries directly into the bindir, because we have no need for shim wrappers
-go build -o "$bindir/gh-ost" -ldflags "-X main.AppVersion=${version} -X main.BuildDescribe=${describe}" ./go/cmd/gh-ost/main.go
+# TODO: remove GO111MODULE once gh-ost uses Go modules
+GO111MODULE=off go build -o "$bindir/gh-ost" -ldflags "-X main.AppVersion=${version} -X main.BuildDescribe=${describe}" ./go/cmd/gh-ost/main.go
@@ -4,19 +4,25 @@ set -e

 whoami

+fetch_ci_env() {
 # Clone gh-ost-ci-env
 # Only clone if not already running locally at latest commit
 remote_commit=$(git ls-remote https://github.com/github/gh-ost-ci-env.git HEAD | cut -f1)
 local_commit="unknown"
 [ -d "gh-ost-ci-env" ] && local_commit=$(cd gh-ost-ci-env && git log --format="%H" -n 1)

 echo "remote commit is: $remote_commit"
 echo "local commit is: $local_commit"

 if [ "$remote_commit" != "$local_commit" ] ; then
   rm -rf ./gh-ost-ci-env
   git clone https://github.com/github/gh-ost-ci-env.git
 fi
+}
+
+test_dbdeployer() {
+  gh-ost-ci-env/bin/linux/dbdeployer --version
+}

 test_mysql_version() {
   local mysql_version

@@ -30,17 +36,18 @@ test_mysql_version() {

   mkdir -p sandbox/binary
   rm -rf sandbox/binary/*
-  gh-ost-ci-env/bin/linux/dbdeployer unpack gh-ost-ci-env/mysql-tarballs/"$mysql_version".tar.gz --unpack-version="$mysql_version" --sandbox-binary ${PWD}/sandbox/binary
+  gh-ost-ci-env/bin/linux/dbdeployer unpack gh-ost-ci-env/mysql-tarballs/"$mysql_version".tar.xz --sandbox-binary ${PWD}/sandbox/binary

   mkdir -p sandboxes
   rm -rf sandboxes/*

-  if echo "$mysql_version" | egrep "5[.]5[.]" ; then
+  local mysql_version_num=${mysql_version#*-}
+  if echo "$mysql_version_num" | egrep "5[.]5[.]" ; then
     gtid=""
   else
     gtid="--gtid"
   fi
-  gh-ost-ci-env/bin/linux/dbdeployer deploy replication "$mysql_version" --nodes 2 --sandbox-binary ${PWD}/sandbox/binary --sandbox-home ${PWD}/sandboxes ${gtid} --my-cnf-options log_slave_updates --my-cnf-options log_bin --my-cnf-options binlog_format=ROW --sandbox-directory rsandbox
+  gh-ost-ci-env/bin/linux/dbdeployer deploy replication "$mysql_version_num" --nodes 2 --sandbox-binary ${PWD}/sandbox/binary --sandbox-home ${PWD}/sandboxes ${gtid} --my-cnf-options log_slave_updates --my-cnf-options log_bin --my-cnf-options binlog_format=ROW --sandbox-directory rsandbox

   sed '/sandboxes/d' -i gh-ost-ci-env/bin/gh-ost-test-mysql-master
   echo 'sandboxes/rsandbox/m "$@"' >> gh-ost-ci-env/bin/gh-ost-test-mysql-master

@@ -59,12 +66,26 @@ test_mysql_version() {
   find sandboxes -name "stop_all" | bash
 }

+main() {
+fetch_ci_env
+test_dbdeployer
+
 echo "Building..."
 . script/build
+
+# TEST_MYSQL_VERSION is set by the replica-tests CI job
+if [ -z "$TEST_MYSQL_VERSION" ]; then
 # Test all versions:
-find gh-ost-ci-env/mysql-tarballs/ -name "*.tar.gz" | while read f ; do basename $f ".tar.gz" ; done | sort -r | while read mysql_version ; do
+find gh-ost-ci-env/mysql-tarballs/ -name "*.tar.xz" | while read f ; do basename $f ".tar.xz" ; done | sort -r | while read mysql_version ; do
   echo "found MySQL version: $mysql_version"
 done
-find gh-ost-ci-env/mysql-tarballs/ -name "*.tar.gz" | while read f ; do basename $f ".tar.gz" ; done | sort -r | while read mysql_version ; do
+find gh-ost-ci-env/mysql-tarballs/ -name "*.tar.xz" | while read f ; do basename $f ".tar.xz" ; done | sort -r | while read mysql_version ; do
   test_mysql_version "$mysql_version"
 done
+else
+  echo "found MySQL version: $TEST_MYSQL_VERSION"
+  test_mysql_version "$TEST_MYSQL_VERSION"
+fi
+}
+
+main
@@ -1,7 +1,7 @@
 #!/bin/bash

-PREFERRED_GO_VERSION=go1.14.7
-SUPPORTED_GO_VERSIONS='go1.1[456]'
+PREFERRED_GO_VERSION=go1.16.4
+SUPPORTED_GO_VERSIONS='go1.1[56]'

 GO_PKG_DARWIN=${PREFERRED_GO_VERSION}.darwin-amd64.pkg
 GO_PKG_DARWIN_SHA=0f215de06019a054a3da46a0722989986c956d719c7a0a8fc38a5f3c216d6f6b
@@ -13,5 +13,6 @@ script/build

 cd .gopath/src/github.com/github/gh-ost

+# TODO: remove GO111MODULE once gh-ost uses Go modules
 echo "Running unit tests"
-go test ./go/...
+GO111MODULE=off go test ./go/...
test.sh (5 changed lines)

@@ -5,7 +5,10 @@ retval=0
 for testsuite in base mysql sql
 do
   pushd go/${testsuite} > /dev/null;
-  go test $*;
+
+  # TODO: remove GO111MODULE once gh-ost uses Go modules
+  GO111MODULE=off go test $*;
+
   [ $? -ne 0 ] && retval=1
   popd > /dev/null;
 done