class TestGetProfiler(unittest.TestCase):
    """Tests for ``paddle.profiler.profiler.get_profiler`` (JSON-config entry).

    The first config is valid and the resulting profiler is exercised end to
    end; the remaining configs are deliberately malformed and exist only for
    coverage of ``get_profiler``'s fallback branches.
    """

    @staticmethod
    def _profiler_from_config(config_content):
        """Write *config_content* to a temp file and build a profiler from it.

        The ``NamedTemporaryFile`` stays open (and therefore on disk) until
        this helper returns, which is long enough for ``get_profiler`` to
        read it.
        """
        filehandle = tempfile.NamedTemporaryFile(mode='w')
        filehandle.write(config_content)
        filehandle.flush()
        import paddle.profiler.profiler as profiler_module
        return profiler_module.get_profiler(filehandle.name)

    def test_getprofiler(self):
        # Valid config: [ready, record] list scheduler plus an
        # export_chrome_tracing on_trace_ready built via kwargs.
        config_content = '''
        {
            "targets": ["CPU"],
            "scheduler": [3,4],
            "on_trace_ready": {
                "export_chrome_tracing":{
                    "module": "paddle.profiler",
                    "use_direct": false,
                    "args": [],
                    "kwargs": {
                        "dir_name": "testdebug/"
                    }
                }
            },
            "timer_only": false
        }
        '''
        prof = self._profiler_from_config(config_content)
        x_value = np.random.randn(2, 3, 3)
        x = paddle.to_tensor(
            x_value, stop_gradient=False, place=paddle.CPUPlace())
        with prof:
            for i in range(5):
                y = x / 2.0
                ones_like_y = paddle.ones_like(y)
                prof.step()

        # Below tests are just for coverage: deliberately wrong configs.
        # test use_direct (bogus module "paddle.profiler1" should fail)
        config_content = '''
        {
            "targets": ["Cpu", "Gpu"],
            "scheduler": {
                "make_scheduler":{
                    "module": "paddle.profiler",
                    "use_direct": true,
                    "args": [],
                    "kwargs": {}
                }
            },
            "on_trace_ready": {
                "export_chrome_tracing":{
                    "module": "paddle.profiler1",
                    "use_direct": true,
                    "args": [],
                    "kwargs": {
                    }
                }
            },
            "timer_only": false
        }
        '''
        try:
            self._profiler_from_config(config_content)
        except Exception:
            # The broken module path is expected to raise; only the
            # attempt itself matters for coverage.
            pass

        # test scheduler built through make_scheduler kwargs
        config_content = '''
        {
            "targets": ["Cpu", "Gpu"],
            "scheduler": {
                "make_scheduler":{
                    "module": "paddle.profiler",
                    "use_direct": false,
                    "args": [],
                    "kwargs": {
                        "closed": 1,
                        "ready": 1,
                        "record": 2
                    }
                }
            },
            "on_trace_ready": {
                "export_chrome_tracing":{
                    "module": "paddle.profiler",
                    "use_direct": true,
                    "args": [],
                    "kwargs": {
                    }
                }
            },
            "timer_only": false
        }
        '''
        self._profiler_from_config(config_content)

        # test exception paths: wrong target type, unknown callables,
        # non-bool timer_only -- get_profiler should fall back to defaults.
        config_content = '''
        {
            "targets": [1],
            "scheduler": {
                "make_scheduler1":{
                    "module": "paddle.profiler",
                    "use_direct": false,
                    "args": [],
                    "kwargs": {
                        "closed": 1,
                        "ready": 1,
                        "record": 2
                    }
                }
            },
            "on_trace_ready": {
                "export_chrome_tracing1":{
                    "module": "paddle.profiler",
                    "use_direct": false,
                    "args": [],
                    "kwargs": {
                        "dir_name": "testdebug/"
                    }
                }
            },
            "timer_only": 1
        }
        '''
        self._profiler_from_config(config_content)

        # test path error: a missing file must yield a default Profiler.
        import paddle.profiler.profiler as profiler_module
        profiler_module.get_profiler('nopath.json')
def _resolve_config_callable(name, spec):
    """Resolve a callable described by a profiler-config entry.

    *spec* is a dict with keys ``module`` (dotted import path), ``use_direct``
    (bool), plus ``args``/``kwargs`` consumed by the caller.  Returns the
    ``(callable, use_direct)`` pair; any import/lookup error propagates so the
    caller can fall back to defaults.
    """
    module = importlib.import_module(spec['module'])
    return getattr(module, name), spec['use_direct']


def get_profiler(config_path):
    """Create a :class:`Profiler` from a JSON configuration file.

    The config may contain ``targets``, ``scheduler``, ``on_trace_ready`` and
    ``timer_only``.  Each key is translated independently; when a key is
    missing or malformed a message is printed and the corresponding
    ``Profiler`` default is used instead, so a bad config never breaks the
    profiled program.

    Args:
        config_path (str): path to the JSON config file.

    Returns:
        Profiler: a profiler configured from the file, or a default
        ``Profiler()`` when the file cannot be read or parsed.
    """
    try:
        with open(config_path, 'r') as filehandle:
            config_dict = json.load(filehandle)
    except Exception as e:
        print('Load config file for profiler error: {}'.format(e))
        print('Use default parameters instead.')
        return Profiler()

    translated_config_dict = {}

    if "targets" in config_dict:
        try:
            translated_config_dict['targets'] = []
            for target in config_dict['targets']:
                # Target names are matched case-insensitively; unknown
                # entries are silently ignored, non-strings raise and
                # trigger the fallback below.
                if target.lower() == "cpu":
                    translated_config_dict['targets'].append(
                        ProfilerTarget.CPU)
                elif target.lower() == 'gpu':
                    translated_config_dict['targets'].append(
                        ProfilerTarget.GPU)
        except Exception:
            print('Set targets parameter error, use default parameter instead.')
            translated_config_dict['targets'] = None

    if "scheduler" in config_dict:
        try:
            if isinstance(config_dict['scheduler'], dict):
                # {"make_scheduler": {"module": ..., "use_direct": ...,
                #  "args": [...], "kwargs": {...}}} - build (or pass through)
                # the scheduler callable.
                for key, value in config_dict['scheduler'].items():
                    method, use_direct = _resolve_config_callable(key, value)
                    if not use_direct:
                        translated_config_dict['scheduler'] = method(
                            *value['args'], **value['kwargs'])
                    else:
                        translated_config_dict['scheduler'] = method
            else:
                # Plain two-element [ready, record] pair.
                translated_config_dict['scheduler'] = [
                    config_dict['scheduler'][0], config_dict['scheduler'][1]
                ]
        except Exception:
            print(
                'Set scheduler parameter error, use default parameter instead.')
            translated_config_dict['scheduler'] = None

    if "on_trace_ready" in config_dict:
        try:
            if isinstance(config_dict['on_trace_ready'], dict):
                # Same callable-spec shape as "scheduler" above.
                for key, value in config_dict['on_trace_ready'].items():
                    method, use_direct = _resolve_config_callable(key, value)
                    if not use_direct:
                        translated_config_dict['on_trace_ready'] = method(
                            *value['args'], **value['kwargs'])
                    else:
                        translated_config_dict['on_trace_ready'] = method
        except Exception:
            print(
                'Set on_trace_ready parameter error, use default parameter instead.'
            )
            translated_config_dict['on_trace_ready'] = None

    if "timer_only" in config_dict:
        if isinstance(config_dict['timer_only'], bool):
            translated_config_dict['timer_only'] = config_dict['timer_only']
        else:
            print(
                'Set timer_only parameter error, use default parameter instead.')

    return Profiler(**translated_config_dict)