提交 3fda3fb6 编写于 作者: martianzhang

Fix concurrent map writes bug in Tokenize (make tokenCache function-local instead of a shared package-level map)

  Test Function:
  var i = 1
  // infinite loop
  for {
	for _, sql := range TestSQLs {
		go ast.QueryType(sql)
	}
	i = i + 1
	fmt.Println("loop: ", i)
  }
上级 ee6cfd0c
......@@ -45,7 +45,6 @@ const (
var maxCachekeySize = 15
var cacheHits int
var cacheMisses int
var tokenCache map[string]Token
var tokenBoundaries = []string{
// multi character
......@@ -790,7 +789,7 @@ func Tokenize(sql string) []Token {
var token Token
var tokenLength int
var tokens []Token
tokenCache = make(map[string]Token)
tokenCache := make(map[string]Token)
// Used to make sure the string keeps shrinking on each iteration
oldStringLen := len(sql) + 1
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册