From 8307b70f2e85c394864106d2c88bf0f375c8933f Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Tue, 20 Apr 2021 08:18:22 +0000 Subject: [PATCH] fix log --- .notebook/compute_cmvn_loader_test.ipynb | 180 ++++++++++------------- deepspeech/exps/deepspeech2/model.py | 14 +- deepspeech/exps/u2/model.py | 7 +- 3 files changed, 87 insertions(+), 114 deletions(-) diff --git a/.notebook/compute_cmvn_loader_test.ipynb b/.notebook/compute_cmvn_loader_test.ipynb index 91f1480f..916f7c41 100644 --- a/.notebook/compute_cmvn_loader_test.ipynb +++ b/.notebook/compute_cmvn_loader_test.ipynb @@ -50,83 +50,44 @@ "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/layers/utils.py:26: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", " def convert_to_list(value, n, name, dtype=np.int):\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:93] register user softmax to paddle, remove this when fixed!\n", - "2021-04-16 15:30:29,345 - WARNING - register user softmax to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:97] register user log_softmax to paddle, remove this when fixed!\n", - "2021-04-16 15:30:29,346 - WARNING - register user log_softmax to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:101] register user sigmoid to paddle, remove this when fixed!\n", - "2021-04-16 15:30:29,347 - WARNING - register user sigmoid to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:105] register user log_sigmoid to paddle, remove this when fixed!\n", - "2021-04-16 15:30:29,348 - WARNING - register user log_sigmoid to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:109] register user relu to paddle, remove this when fixed!\n", - "2021-04-16 15:30:29,349 - WARNING - register user relu to paddle, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:119] override cat of paddle if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,349 - WARNING - override cat of paddle if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:133] override item of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,350 - WARNING - override item of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:144] override long of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,351 - WARNING - override long of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:164] override new_full of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,352 - WARNING - override new_full of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:179] override eq of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,353 - WARNING - override eq of paddle.Tensor if exists or register, remove this when 
fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:185] override eq of paddle if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,354 - WARNING - override eq of paddle if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:195] override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n", - "2021-04-16 15:30:29,355 - WARNING - override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:212] override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n", - "2021-04-16 15:30:29,356 - WARNING - override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:223] register user view to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,357 - WARNING - register user view to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:233] register user view_as to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,361 - WARNING - register user view_as to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:259] register user masked_fill to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,362 - WARNING - register user masked_fill to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:277] register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,363 - WARNING - register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:288] register user fill_ to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,364 - WARNING - register user fill_ to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:298] register user repeat to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,365 - WARNING - register user repeat to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:303] register user softmax to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,366 - WARNING - register user softmax to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:308] register user sigmoid to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,366 - WARNING - register user sigmoid to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:312] register user relu to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,367 - WARNING - register user relu to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:322] register user type_as to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,368 - WARNING - register user type_as to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:337] register user to to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,369 - WARNING - register user to to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:346] register user float to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,370 - WARNING - register user float to paddle.Tensor, remove this when 
fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:356] register user tolist to paddle.Tensor, remove this when fixed!\n", - "2021-04-16 15:30:29,370 - WARNING - register user tolist to paddle.Tensor, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:371] register user glu to paddle.nn.functional, remove this when fixed!\n", - "2021-04-16 15:30:29,371 - WARNING - register user glu to paddle.nn.functional, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:422] override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n", - "2021-04-16 15:30:29,372 - WARNING - override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:428] register user Module to paddle.nn, remove this when fixed!\n", - "2021-04-16 15:30:29,377 - WARNING - register user Module to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:434] register user ModuleList to paddle.nn, remove this when fixed!\n", - "2021-04-16 15:30:29,378 - WARNING - register user ModuleList to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:450] register user GLU to paddle.nn, remove this when fixed!\n", - "2021-04-16 15:30:29,379 - WARNING - register user GLU to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:483] register user ConstantPad2d to paddle.nn, remove this when fixed!\n", - "2021-04-16 15:30:29,380 - WARNING - register user ConstantPad2d to paddle.nn, remove this when fixed!\n", - "[WARNING 2021/04/16 15:30:29 __init__.py:489] register user export to paddle.jit, remove this when fixed!\n", - "2021-04-16 15:30:29,381 - WARNING - register user export to paddle.jit, remove this when fixed!\n", + "register user softmax to paddle, remove this when fixed!\n", + "register user log_softmax to paddle, remove this when fixed!\n", + "register user sigmoid to paddle, remove this when fixed!\n", + "register user log_sigmoid to paddle, remove this when fixed!\n", + "register user relu to paddle, remove this when fixed!\n", + "override cat of paddle if exists or register, remove this when fixed!\n", + "override item of paddle.Tensor if exists or register, remove this when fixed!\n", + "override long of paddle.Tensor if exists or register, remove this when fixed!\n", + "override new_full of paddle.Tensor if exists or register, remove this when fixed!\n", + "override eq of paddle.Tensor if exists or register, remove this when fixed!\n", + "override eq of paddle if exists or register, remove this when fixed!\n", + "override contiguous of paddle.Tensor if exists or register, remove this when fixed!\n", + "override size of paddle.Tensor (`to_static` do not process `size` property, maybe some `paddle` api dependent on it), remove this when fixed!\n", + "register user view to paddle.Tensor, remove this when fixed!\n", + "register user view_as to paddle.Tensor, remove this when fixed!\n", + "register user masked_fill to paddle.Tensor, remove this when fixed!\n", + "register user masked_fill_ to paddle.Tensor, remove this when fixed!\n", + "register user fill_ to paddle.Tensor, remove this when fixed!\n", + "register user repeat to paddle.Tensor, remove this when fixed!\n", + "register user softmax to paddle.Tensor, remove this when fixed!\n", + "register user sigmoid to paddle.Tensor, remove this when fixed!\n", + "register user relu to paddle.Tensor, remove this when fixed!\n", + "register user type_as to paddle.Tensor, remove this 
when fixed!\n", + "register user to to paddle.Tensor, remove this when fixed!\n", + "register user float to paddle.Tensor, remove this when fixed!\n", + "register user tolist to paddle.Tensor, remove this when fixed!\n", + "register user glu to paddle.nn.functional, remove this when fixed!\n", + "override ctc_loss of paddle.nn.functional if exists, remove this when fixed!\n", + "register user Module to paddle.nn, remove this when fixed!\n", + "register user ModuleList to paddle.nn, remove this when fixed!\n", + "register user GLU to paddle.nn, remove this when fixed!\n", + "register user ConstantPad2d to paddle.nn, remove this when fixed!\n", + "register user export to paddle.jit, remove this when fixed!\n", "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/fftpack/__init__.py:103: DeprecationWarning: The module numpy.dual is deprecated. Instead of using dual, use the functions directly from numpy or scipy.\n", " from numpy.dual import register_func\n", "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/scipy/special/orthogonal.py:81: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ + " from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n", "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/numba/core/types/__init__.py:108: DeprecationWarning: `np.long` is a deprecated alias for `np.compat.long`. To silence this warning, use `np.compat.long` by itself. In the likely event your code does not need to work on Python 2 you can use the builtin `int` for which `np.compat.long` is itself an alias. Doing this will not modify any behaviour and is safe. When replacing `np.long`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n", "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", " long_ = _make_signed(np.long)\n", @@ -185,19 +146,10 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 5, "id": "enormous-currency", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/ipykernel/ipkernel.py:283: DeprecationWarning: `should_run_async` will not call `transform_cell` automatically in the future. 
Please pass the result to `transformed_cell` argument and any exception that happen during thetransform in `preprocessing_exc_tuple` in IPython 7.17 and above.\n", - " and should_run_async(code)\n" - ] - } - ], + "outputs": [], "source": [ "import random\n", "\n", @@ -234,7 +186,8 @@ " var_stat += square_sums\n", "\n", " number += feat.shape[1]\n", - " return paddle.to_tensor(number), paddle.to_tensor(mean_stat), paddle.to_tensor(var_stat)\n", + " #return paddle.to_tensor(number), paddle.to_tensor(mean_stat), paddle.to_tensor(var_stat)\n", + " return number, mean_stat, var_stat\n", "\n", "\n", "class AudioDataset(Dataset):\n", @@ -260,7 +213,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "id": "armed-semester", "metadata": {}, "outputs": [ @@ -268,20 +221,20 @@ "name": "stderr", "output_type": "stream", "text": [ - "Exception ignored in: \n", + "Process Process-2:\n", "Traceback (most recent call last):\n", - " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 763, in __del__\n", - " self._try_shutdown_all()\n", - " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 590, in _try_shutdown_all\n", - " w.join()\n", - " File \"/usr/local/lib/python3.7/multiprocessing/process.py\", line 140, in join\n", - " res = self._popen.wait(timeout)\n", - " File \"/usr/local/lib/python3.7/multiprocessing/popen_fork.py\", line 48, in wait\n", - " return self.poll(os.WNOHANG if timeout == 0.0 else 0)\n", - " File \"/usr/local/lib/python3.7/multiprocessing/popen_fork.py\", line 28, in poll\n", - " pid, sts = os.waitpid(self.pid, flag)\n", - "KeyboardInterrupt: \n", - "2021-04-16 15:44:43,413 - ERROR - DataLoader reader thread raised an exception!\n" + " File \"/usr/local/lib/python3.7/multiprocessing/process.py\", line 297, in _bootstrap\n", + " self.run()\n", + " File \"/usr/local/lib/python3.7/multiprocessing/process.py\", line 99, in run\n", + " self._target(*self._args, **self._kwargs)\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 463, in _worker_loop\n", + " six.reraise(*sys.exc_info())\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/six.py\", line 703, in reraise\n", + " raise value\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 446, in _worker_loop\n", + " for s in sample:\n", + "TypeError: 'int' object is not iterable\n", + "2021-04-20 07:43:09,866 - ERROR - DataLoader reader thread raised an exception!\n" ] }, { @@ -291,10 +244,37 @@ "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mSystemError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0mwav_number\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;31m# for i, batch in enumerate(data_loader()):\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 40\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mbatch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdata_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 41\u001b[0m 
\u001b[0mnumber\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmean_stat\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_stat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mi\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0mwav_number\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;31m# for i, batch in enumerate(data_loader()):\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 40\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mbatch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdata_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 41\u001b[0m \u001b[0mnumber\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmean_stat\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_stat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mi\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\u001b[0m in \u001b[0;36m__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 777\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 778\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0min_dygraph_mode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 779\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_next_var_list\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 780\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 781\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_return_list\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mSystemError\u001b[0m: (Fatal) Blocking queue is killed because the data reader raises an exception.\n [Hint: Expected killed_ != true, but received killed_:1 == true:1.] 
(at /paddle/paddle/fluid/operators/reader/blocking_queue.h:158)\n" ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Exception in thread Thread-5:\n", + "Traceback (most recent call last):\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 684, in _get_data\n", + " data = self._data_queue.get(timeout=self._timeout)\n", + " File \"/usr/local/lib/python3.7/multiprocessing/queues.py\", line 105, in get\n", + " raise Empty\n", + "_queue.Empty\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"/usr/local/lib/python3.7/threading.py\", line 917, in _bootstrap_inner\n", + " self.run()\n", + " File \"/usr/local/lib/python3.7/threading.py\", line 865, in run\n", + " self._target(*self._args, **self._kwargs)\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 616, in _thread_loop\n", + " batch = self._get_data()\n", + " File \"/workspace/DeepSpeech-2.x/tools/venv/lib/python3.7/site-packages/paddle/fluid/dataloader/dataloader_iter.py\", line 700, in _get_data\n", + " \"pids: {}\".format(len(failed_workers), pids))\n", + "RuntimeError: DataLoader 1 workers exit unexpectedly, pids: 40561\n", + "\n" + ] } ], "source": [ diff --git a/deepspeech/exps/deepspeech2/model.py b/deepspeech/exps/deepspeech2/model.py index ca9fff2b..e6bdb1d5 100644 --- a/deepspeech/exps/deepspeech2/model.py +++ b/deepspeech/exps/deepspeech2/model.py @@ -80,12 +80,11 @@ class DeepSpeech2Trainer(Trainer): num_utts = batch[0].shape[0] num_seen_utts += num_utts total_loss += float(loss) * num_utts - valid_losses['val_loss'].append(float(loss)) + valid_losses['val_loss'].append(float(loss)) if (i + 1) % self.config.training.log_interval == 0: - valid_losses = {k: np.mean(v) for k, v in valid_losses.items()} - - valid_losses['val_history_loss'] = total_loss / num_seen_utts + valid_dump = {k: np.mean(v) for k, v in valid_losses.items()} + valid_dump['val_history_loss'] = total_loss / num_seen_utts # logging msg = f"Valid: Rank: {dist.get_rank()}, " @@ -93,14 +92,9 @@ class DeepSpeech2Trainer(Trainer): msg += "step: {}, ".format(self.iteration) msg += "batch : {}/{}, ".format(i + 1, len(self.valid_loader)) msg += ', '.join('{}: {:>.6f}'.format(k, v) - for k, v in valid_losses.items()) + for k, v in valid_dump.items()) logger.info(msg) - if self.visualizer: - for k, v in valid_losses.items(): - self.visualizer.add_scalar("valid/{}".format(k), v, - self.iteration) - logger.info('Rank {} Val info val_loss {}'.format( dist.get_rank(), total_loss / num_seen_utts)) return total_loss, num_seen_utts diff --git a/deepspeech/exps/u2/model.py b/deepspeech/exps/u2/model.py index 9948c30e..2dffc182 100644 --- a/deepspeech/exps/u2/model.py +++ b/deepspeech/exps/u2/model.py @@ -129,9 +129,8 @@ class U2Trainer(Trainer): valid_losses['val_ctc_loss'].append(float(ctc_loss)) if (i + 1) % self.config.training.log_interval == 0: - valid_losses = {k: np.mean(v) for k, v in valid_losses.items()} - - valid_losses['val_history_loss'] = total_loss / num_seen_utts + valid_dump = {k: np.mean(v) for k, v in valid_losses.items()} + valid_dump['val_history_loss'] = total_loss / num_seen_utts # logging msg = f"Valid: Rank: {dist.get_rank()}, " @@ -139,7 +138,7 @@ class U2Trainer(Trainer): msg += "step: {}, ".format(self.iteration) msg += "batch : {}/{}, ".format(i + 1, len(self.valid_loader)) msg 
+= ', '.join('{}: {:>.6f}'.format(k, v) - for k, v in valid_losses.items()) + for k, v in valid_dump.items()) logger.info(msg) logger.info('Rank {} Val info val_loss {}'.format( -- GitLab
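
For context on the trainer hunks above: inside the validation loop, the per-interval summary used to be assigned back to `valid_losses`, which replaced its list values with scalar means, so the next batch's `valid_losses['val_loss'].append(...)` no longer had a list to append to; the patch writes the summary into a separate `valid_dump` dict instead and logs from that. Below is a minimal, self-contained sketch of the pattern; the loss values, `log_interval`, batch size, and the use of `print` in place of `logger.info` are illustrative assumptions, not taken from the repository.

# Sketch of the validation-logging fix (illustrative values only).
from collections import defaultdict

import numpy as np

fake_losses = [0.9, 0.8, 0.85, 0.7]   # hypothetical per-batch validation losses
log_interval = 2                       # hypothetical config.training.log_interval
num_utts = 8                           # hypothetical batch size

valid_losses = defaultdict(list)
total_loss, num_seen_utts = 0.0, 0

for i, loss in enumerate(fake_losses):
    num_seen_utts += num_utts
    total_loss += float(loss) * num_utts
    valid_losses['val_loss'].append(float(loss))

    if (i + 1) % log_interval == 0:
        # Before the patch this assignment re-bound `valid_losses` itself:
        #     valid_losses = {k: np.mean(v) for k, v in valid_losses.items()}
        # turning its list values into scalars, so the next iteration's
        # `.append` failed. Dumping into a separate dict keeps `valid_losses`
        # accumulating across the whole loop.
        valid_dump = {k: np.mean(v) for k, v in valid_losses.items()}
        valid_dump['val_history_loss'] = total_loss / num_seen_utts
        print(', '.join('{}: {:>.6f}'.format(k, v)
                        for k, v in valid_dump.items()))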