Commit 5beed1ec authored by baiyfbupt

make loss function program arg dispensable

Parent 0073f078
@@ -95,7 +95,7 @@ def merge(teacher_program,
 
 
 def fsp_loss(teacher_var1_name, teacher_var2_name, student_var1_name,
-             student_var2_name, program):
+             student_var2_name, program=fluid.default_main_program()):
     """
     Combine variables from student model and teacher model by fsp-loss.
     Args:
@@ -121,7 +121,8 @@ def fsp_loss(teacher_var1_name, teacher_var2_name, student_var1_name,
     return fsp_loss
 
 
-def l2_loss(teacher_var_name, student_var_name, program):
+def l2_loss(teacher_var_name, student_var_name,
+            program=fluid.default_main_program()):
     """
     Combine variables from student model and teacher model by l2-loss.
     Args:
@@ -139,7 +140,7 @@ def l2_loss(teacher_var_name, student_var_name, program):
 
 def soft_label_loss(teacher_var_name,
                     student_var_name,
-                    program,
+                    program=fluid.default_main_program(),
                     teacher_temperature=1.,
                     student_temperature=1.):
     """
@@ -165,7 +166,7 @@ def soft_label_loss(teacher_var_name,
     return soft_label_loss
 
 
-def loss(program, loss_func, **kwargs):
+def loss(loss_func, program=fluid.default_main_program(), **kwargs):
     """
     Combine variables from student model and teacher model by self defined loss.
     Args:
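With the default in place, callers that build the student network in the default main program can drop the explicit `program` argument. Below is a minimal usage sketch (not part of this commit), assuming the `merge` and `l2_loss` helpers from this module; the variable names passed to `l2_loss` are hypothetical.

```python
# Minimal sketch of calling l2_loss after this change; names below are
# illustrative, not taken from the commit.
import paddle.fluid as fluid
from paddleslim.dist import merge, l2_loss

teacher_program = fluid.Program()
# Build the teacher network inside `teacher_program` and the student network
# inside fluid.default_main_program() (network definitions omitted here).

place = fluid.CPUPlace()
data_name_map = {'image': 'image'}  # map the teacher's input feed to the student's

# Copy the teacher's variables into the student's program (prefixed with
# "teacher_") so one loss can reference both models.
merge(teacher_program, fluid.default_main_program(), data_name_map, place)

# After this commit the `program` argument is optional and defaults to
# fluid.default_main_program(), so it no longer has to be passed explicitly.
distill_loss = l2_loss('teacher_fc_0.tmp_0', 'fc_0.tmp_0')
```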