提交 ddd8b4a9 编写于 作者: O openeuler-ci-bot 提交者: Gitee

!137 fix bug: when engine is abtest, the tuning progress is not reported correctly

Merge pull request !137 from lixiaoguang/dev_fix_display
...@@ -69,8 +69,12 @@ class Optimizer(Resource): ...@@ -69,8 +69,12 @@ class Optimizer(Resource):
value[self.pipe] = parent_conn value[self.pipe] = parent_conn
task_cache.TasksCache.get_instance().set(task_id, value) task_cache.TasksCache.get_instance().set(task_id, value)
iters = args.get("max_eval")
if args.get("engine") == "abtest":
iters = parent_conn.recv()
result["task_id"] = task_id result["task_id"] = task_id
result["status"] = "OK" result["status"] = "OK"
result["iters"] = iters
return result, 200 return result, 200
def put(self, task_id): def put(self, task_id):
......
...@@ -64,9 +64,12 @@ class ABtestTuningManager: ...@@ -64,9 +64,12 @@ class ABtestTuningManager:
self._best_val_vec = [] self._best_val_vec = []
self._performance = [] self._performance = []
self._options = [] self._options = []
self._max_eval = 0
for i in range(self._abtuning_num): for i in range(self._abtuning_num):
self._max_eval += len(self._abtuning_list[i].items)
self._default_val_vec.append(self._abtuning_list[i].items[0]) self._default_val_vec.append(self._abtuning_list[i].items[0])
self._best_val_vec.append(self._abtuning_list[i].items[0]) self._best_val_vec.append(self._abtuning_list[i].items[0])
self._child_conn.send(self._max_eval)
def construct_set_knob_val_vec(self, item): def construct_set_knob_val_vec(self, item):
"""construct set knob val vec""" """construct set knob val vec"""
......
...@@ -50,6 +50,7 @@ type RespPostBody struct { ...@@ -50,6 +50,7 @@ type RespPostBody struct {
TaskID string `json:"task_id"` TaskID string `json:"task_id"`
Status string `json:"status"` Status string `json:"status"`
Message string `json:"message"` Message string `json:"message"`
Iters int `json:"iters"`
} }
// OptimizerPutBody send to the optimizer service when iterations // OptimizerPutBody send to the optimizer service when iterations
......
...@@ -70,6 +70,7 @@ type YamlPrjCli struct { ...@@ -70,6 +70,7 @@ type YamlPrjCli struct {
EvalCurrent float64 `yaml:"-"` EvalCurrent float64 `yaml:"-"`
StartIters int32 `yaml:"-"` StartIters int32 `yaml:"-"`
Params string `yaml:"-"` Params string `yaml:"-"`
FeatureFilter bool `yaml:"-"`
} }
// YamlPrjSvr :store the server yaml project // YamlPrjSvr :store the server yaml project
...@@ -113,7 +114,7 @@ type RelationShip struct { ...@@ -113,7 +114,7 @@ type RelationShip struct {
} }
// BenchMark method call the benchmark script // BenchMark method call the benchmark script
func (y *YamlPrjCli) BenchMark(featureFilter bool) (string, error) { func (y *YamlPrjCli) BenchMark() (string, error) {
benchStr := make([]string, 0) benchStr := make([]string, 0)
benchOutByte, err := ExecCommand(y.Benchmark) benchOutByte, err := ExecCommand(y.Benchmark)
...@@ -147,15 +148,13 @@ func (y *YamlPrjCli) BenchMark(featureFilter bool) (string, error) { ...@@ -147,15 +148,13 @@ func (y *YamlPrjCli) BenchMark(featureFilter bool) (string, error) {
benchStr = append(benchStr, evaluation.Name+"="+out) benchStr = append(benchStr, evaluation.Name+"="+out)
} }
if !featureFilter {
if sum < y.EvalMin {
y.EvalMin = sum
}
}
if utils.IsEquals(y.EvalBase, 0.0) { if utils.IsEquals(y.EvalBase, 0.0) {
y.EvalBase = sum y.EvalBase = sum
y.EvalMin = sum y.EvalMin = sum
} }
if !y.FeatureFilter && sum < y.EvalMin {
y.EvalMin = sum
}
y.EvalCurrent = sum y.EvalCurrent = sum
return strings.Join(benchStr, ","), nil return strings.Join(benchStr, ","), nil
} }
......
...@@ -146,12 +146,18 @@ func (o *Optimizer) createOptimizerTask(ch chan *PB.TuningMessage, iters int32, ...@@ -146,12 +146,18 @@ func (o *Optimizer) createOptimizerTask(ch chan *PB.TuningMessage, iters int32,
if err != nil { if err != nil {
return err return err
} }
log.Infof("create optimizer task response body is: %+v", respPostIns)
if respPostIns.Status != "OK" { if respPostIns.Status != "OK" {
err := fmt.Errorf("create task failed: %s", respPostIns.Status) err := fmt.Errorf("create task failed: %s", respPostIns.Status)
log.Errorf(err.Error()) log.Errorf(err.Error())
return err return err
} }
ch <- &PB.TuningMessage{
State: PB.TuningMessage_JobInit,
FeatureFilter: o.FeatureFilter,
Content: []byte(strconv.Itoa(respPostIns.Iters)),
}
url := config.GetURL(config.OptimizerURI) url := config.GetURL(config.OptimizerURI)
o.OptimizerPutURL = fmt.Sprintf("%s/%s", url, respPostIns.TaskID) o.OptimizerPutURL = fmt.Sprintf("%s/%s", url, respPostIns.TaskID)
log.Infof("optimizer put url is: %s", o.OptimizerPutURL) log.Infof("optimizer put url is: %s", o.OptimizerPutURL)
...@@ -285,10 +291,6 @@ func (o *Optimizer) DynamicTuned(ch chan *PB.TuningMessage, stopCh chan int) err ...@@ -285,10 +291,6 @@ func (o *Optimizer) DynamicTuned(ch chan *PB.TuningMessage, stopCh chan int) err
} }
o.Iter++ o.Iter++
if int32(o.Iter) <= o.MaxIter {
message := fmt.Sprintf("Current Tuning Progress.....(%d/%d)", o.Iter, o.MaxIter)
ch <- &PB.TuningMessage{State: PB.TuningMessage_Display, Content: []byte(message)}
}
evalMinSum := fmt.Sprintf("%s=%.2f", project.MIN_BENCHMARK_VALUE, o.MinEvalSum) evalMinSum := fmt.Sprintf("%s=%.2f", project.MIN_BENCHMARK_VALUE, o.MinEvalSum)
log.Infof("send back to client to start benchmark") log.Infof("send back to client to start benchmark")
ch <- &PB.TuningMessage{State: PB.TuningMessage_BenchMark, ch <- &PB.TuningMessage{State: PB.TuningMessage_BenchMark,
......
...@@ -110,7 +110,7 @@ func profileTunning(ctx *cli.Context) error { ...@@ -110,7 +110,7 @@ func profileTunning(ctx *cli.Context) error {
go func() { go func() {
if !ctx.Bool("restart") { if !ctx.Bool("restart") {
fmt.Println("Start to benchmark baseline...") fmt.Println("Start to benchmark baseline...")
benchmarkByte, err := prj.BenchMark(true) benchmarkByte, err := prj.BenchMark()
if err != nil { if err != nil {
fmt.Println("benchmark result:", benchmarkByte) fmt.Println("benchmark result:", benchmarkByte)
errors <- err errors <- err
...@@ -148,6 +148,13 @@ func profileTunning(ctx *cli.Context) error { ...@@ -148,6 +148,13 @@ func profileTunning(ctx *cli.Context) error {
state := reply.GetState() state := reply.GetState()
switch state { switch state {
case PB.TuningMessage_JobInit: case PB.TuningMessage_JobInit:
prj.FeatureFilter = reply.GetFeatureFilter()
iterations, err := strconv.Atoi(string(reply.GetContent()))
if err != nil {
return err
}
prj.Iterations = int32(iterations)
case PB.TuningMessage_JobRestart:
prj.StartIters = 1 prj.StartIters = 1
if err := stream.Send(&PB.TuningMessage{State: PB.TuningMessage_JobRestart, Content: []byte(strconv.Itoa(int(prj.Iterations)))}); err != nil { if err := stream.Send(&PB.TuningMessage{State: PB.TuningMessage_JobRestart, Content: []byte(strconv.Itoa(int(prj.Iterations)))}); err != nil {
return fmt.Errorf("client sends failure, error: %v", err) return fmt.Errorf("client sends failure, error: %v", err)
...@@ -166,25 +173,26 @@ func profileTunning(ctx *cli.Context) error { ...@@ -166,25 +173,26 @@ func profileTunning(ctx *cli.Context) error {
prj.SetHistoryEvalBase(reply.GetTuningLog()) prj.SetHistoryEvalBase(reply.GetTuningLog())
} }
prj.Params = string(reply.GetContent()) prj.Params = string(reply.GetContent())
benchmarkByte, err := prj.BenchMark(reply.GetFeatureFilter()) benchmarkByte, err := prj.BenchMark()
if err != nil { if err != nil {
fmt.Println("benchmark result:", benchmarkByte) fmt.Println("benchmark result:", benchmarkByte)
return err return err
} }
currentTime := time.Now() currentTime := time.Now()
if reply.GetFeatureFilter() { if prj.FeatureFilter {
fmt.Printf(" Used time: %s, Total Time: %s, Current Progress......(%d/%d)\n", fmt.Printf(" Used time: %s, Total Time: %s, Current Progress......(%d/%d)\n",
currentTime.Sub(prj.StartsTime).Round(time.Second).String(), currentTime.Sub(prj.StartsTime).Round(time.Second).String(),
time.Duration(int64(currentTime.Sub(prj.StartsTime).Round(time.Second).Seconds())+prj.TotalTime)*time.Second, time.Duration(int64(currentTime.Sub(prj.StartsTime).Round(time.Second).Seconds())+prj.TotalTime)*time.Second,
prj.StartIters, prj.FeatureFilterIters) prj.StartIters, prj.Iterations)
} else { } else {
fmt.Printf(" Current Tuning Progress......(%d/%d)\n", prj.StartIters, prj.Iterations)
fmt.Printf(" Used time: %s, Total Time: %s, Best Performance: %.2f, Performance Improvement Rate: %s%%\n", fmt.Printf(" Used time: %s, Total Time: %s, Best Performance: %.2f, Performance Improvement Rate: %s%%\n",
currentTime.Sub(prj.StartsTime).Round(time.Second).String(), currentTime.Sub(prj.StartsTime).Round(time.Second).String(),
time.Duration(int64(currentTime.Sub(prj.StartsTime).Round(time.Second).Seconds())+prj.TotalTime)*time.Second, time.Duration(int64(currentTime.Sub(prj.StartsTime).Round(time.Second).Seconds())+prj.TotalTime)*time.Second,
math.Abs(prj.EvalMin), prj.ImproveRateString(prj.EvalMin)) math.Abs(prj.EvalMin), prj.ImproveRateString(prj.EvalMin))
} }
if ctx.Bool("detail") && !reply.GetFeatureFilter() { if ctx.Bool("detail") && !prj.FeatureFilter {
fmt.Printf(" The %dth recommand parameters is: %s\n"+ fmt.Printf(" The %dth recommand parameters is: %s\n"+
" The %dth evaluation value: %s(%s%%)\n", prj.StartIters, prj.Params, prj.StartIters, strings.Replace(benchmarkByte, "-", "", -1), prj.ImproveRateString(prj.EvalCurrent)) " The %dth evaluation value: %s(%s%%)\n", prj.StartIters, prj.Params, prj.StartIters, strings.Replace(benchmarkByte, "-", "", -1), prj.ImproveRateString(prj.EvalCurrent))
} }
...@@ -210,7 +218,7 @@ func profileTunning(ctx *cli.Context) error { ...@@ -210,7 +218,7 @@ func profileTunning(ctx *cli.Context) error {
case PB.TuningMessage_Ending: case PB.TuningMessage_Ending:
fmt.Printf(" Baseline Performance is: %.2f\n", math.Abs(prj.EvalBase)) fmt.Printf(" Baseline Performance is: %.2f\n", math.Abs(prj.EvalBase))
fmt.Printf(" %s\n", string(reply.GetContent())) fmt.Printf(" %s\n", string(reply.GetContent()))
fmt.Printf(" tuning finished\n") fmt.Printf(" Tuning Finished\n")
goto End goto End
} }
......
...@@ -528,7 +528,7 @@ func (s *ProfileServer) Tuning(stream PB.ProfileMgr_TuningServer) error { ...@@ -528,7 +528,7 @@ func (s *ProfileServer) Tuning(stream PB.ProfileMgr_TuningServer) error {
select { select {
case <-stopCh: case <-stopCh:
if cycles > 0 { if cycles > 0 {
_ = stream.Send(&PB.TuningMessage{State: PB.TuningMessage_JobInit}) _ = stream.Send(&PB.TuningMessage{State: PB.TuningMessage_JobRestart})
} else { } else {
_ = stream.Send(&PB.TuningMessage{State: PB.TuningMessage_Ending}) _ = stream.Send(&PB.TuningMessage{State: PB.TuningMessage_Ending})
} }
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册