Commit bb6c627b authored by barriery

update log

Parent 47d71a5c
@@ -6,3 +6,5 @@ dag:
     client_type: brpc
     retry: 1
     use_profile: false
+    tracer:
+        interval_s: 10
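The two added lines enable the pipeline's performance tracer for this example config; `interval_s` is presumably the reporting interval in seconds used by the `PerformanceTracer` (see the profiler change below). A minimal sketch of reading the block back out of the YAML, assuming the file is saved as `config.yml` and PyYAML is available (both are assumptions for illustration, not part of this commit):

```python
import yaml  # PyYAML, assumed to be installed

# Load the example pipeline config and pull out the tracer settings;
# note that the tracer block is nested under the dag section.
with open("config.yml") as f:
    conf = yaml.safe_load(f)

tracer_conf = conf["dag"].get("tracer", {})
interval_s = tracer_conf.get("interval_s")  # None if the block is absent
print("tracer interval:", interval_s)
```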
@@ -12,20 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # pylint: disable=doc-string-missing
+import paddle_serving_server.pipeline as pipeline
-import logging
-logging.basicConfig(
-    format="[%(process)d](%(threadName)s) %(levelname)s %(asctime)s [%(filename)s:%(lineno)d] %(message)s",
-    level=logging.INFO)
 from paddle_serving_server.pipeline import Op, RequestOp, ResponseOp
 from paddle_serving_server.pipeline import PipelineServer
 from paddle_serving_server.pipeline.proto import pipeline_service_pb2
 from paddle_serving_server.pipeline.channel import ChannelDataEcode
 import numpy as np
 from paddle_serving_app.reader import IMDBDataset
+import logging

 _LOGGER = logging.getLogger()
+console_handler = pipeline.logger.StreamHandler()
+console_handler.setLevel(logging.INFO)
+console_handler.setFormatter(
+    logging.Formatter(
+        "%(levelname)s %(asctime)s [%(filename)s:%(lineno)d] %(message)s"))
+_LOGGER.addHandler(console_handler)


 class ImdbRequestOp(RequestOp):
......
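With the file handlers now owned by the new `pipeline.logger` module (added later in this commit), the example server stops calling `logging.basicConfig` and only attaches a console handler for interactive runs. A minimal sketch of the same pattern for any launcher script, assuming the package is installed and that importing `paddle_serving_server.pipeline` also runs the logger setup:

```python
import logging
import paddle_serving_server.pipeline as pipeline

# File handlers (PipelineServingLogs/INFO.log and ERROR.log) are assumed to be
# installed when the pipeline package is imported; the console handler below
# only controls what is echoed to the terminal.
_LOGGER = logging.getLogger()
console_handler = pipeline.logger.StreamHandler()  # drops "pipeline.profiler" records
console_handler.setLevel(logging.WARNING)  # keep the terminal quiet; full detail stays in the log files
console_handler.setFormatter(
    logging.Formatter(
        "%(levelname)s %(asctime)s [%(filename)s:%(lineno)d] %(message)s"))
_LOGGER.addHandler(console_handler)
```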
@@ -17,7 +17,7 @@ import copy
 import re
 import logging

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.analyse")


 class Analyst(object):
......
@@ -29,7 +29,7 @@ import enum
 import os
 import copy

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.channel")


 class ChannelDataEcode(enum.Enum):
......
@@ -32,7 +32,7 @@ from .profiler import TimeProfiler, PerformanceTracer
 from .util import NameGenerator
 from .proto import pipeline_service_pb2

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.dag")


 class DAGExecutor(object):
......
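The pattern in these hunks repeats throughout the package: each module now logs through a named child of the `pipeline` logger instead of the root logger, so records still propagate to the root handlers installed by the new logger module, but verbosity can be tuned per module. A small illustration (logger names are taken from this commit; the levels chosen are only an example):

```python
import logging

# Quiet the channel machinery but keep DAG scheduling messages verbose.
logging.getLogger("pipeline.channel").setLevel(logging.WARNING)
logging.getLogger("pipeline.dag").setLevel(logging.DEBUG)

# Records from both loggers still propagate to the root handlers
# (PipelineServingLogs/INFO.log and ERROR.log) configured in the logger module.
```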
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import logging.handlers
import os


class SectionLevelFilter(object):
    # pass only records whose level is in the given whitelist
    def __init__(self, levels):
        self._levels = levels

    def filter(self, logRecord):
        return logRecord.levelno in self._levels


class OutOfMouduleFilter(object):
    # drop records emitted by the named loggers
    def __init__(self, out_names):
        self._out_names = out_names

    def filter(self, logRecord):
        return logRecord.name not in self._out_names


class OutOfMouduleAndSectionLevelFilter(object):
    # drop records from the named loggers, then keep only the whitelisted levels
    def __init__(self, out_names, levels):
        self._out_names = out_names
        self._levels = levels

    def filter(self, logRecord):
        if logRecord.name in self._out_names:
            return False
        return logRecord.levelno in self._levels


class StreamHandler(logging.StreamHandler):
    # console handler that excludes tracer output
    def __init__(self, *args, **kwargs):
        super(StreamHandler, self).__init__(*args, **kwargs)
        self.addFilter(OutOfMouduleFilter(["pipeline.profiler"]))


log_dir = "PipelineServingLogs"
if not os.path.exists(log_dir):
    os.makedirs(log_dir)

# root logger
_LOGGER = logging.getLogger()
_LOGGER.setLevel(logging.DEBUG)

formatter = logging.Formatter(
    "%(levelname)s %(asctime)s [%(filename)s:%(lineno)d] %(message)s")

# info and warn
file_info = logging.handlers.RotatingFileHandler(
    os.path.join(log_dir, "INFO.log"))
file_info.addFilter(OutOfMouduleFilter(["pipeline.profiler"]))
file_info.addFilter(SectionLevelFilter([logging.INFO, logging.WARNING]))
file_info.setFormatter(formatter)

# err and critical
file_err = logging.handlers.RotatingFileHandler(
    os.path.join(log_dir, "ERROR.log"))
file_err.addFilter(OutOfMouduleFilter(["pipeline.profiler"]))
file_err.setLevel(logging.ERROR)
file_err.setFormatter(formatter)

_LOGGER.addHandler(file_info)
_LOGGER.addHandler(file_err)

# tracer logger
_TRACER = logging.getLogger("pipeline.profiler")
_TRACER.setLevel(logging.INFO)
_TRACER.addFilter(logging.Filter("pipeline.profiler"))

# tracer
tracer_formatter = logging.Formatter("%(asctime)s %(message)s")
file_trace = logging.handlers.RotatingFileHandler(
    os.path.join(log_dir, "TRACE.log"))
file_trace.setFormatter(tracer_formatter)
_TRACER.addHandler(file_trace)
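To summarize what the new module above does at import time: the root logger gets two file handlers under `PipelineServingLogs/`, and a dedicated `pipeline.profiler` logger gets its own `TRACE.log` handler while being filtered out of the general logs. A small sketch of the resulting routing, assuming the module has been imported (e.g. via `paddle_serving_server.pipeline`):

```python
import logging
import paddle_serving_server.pipeline as pipeline  # assumed to import the logger module above

worker_log = logging.getLogger("pipeline.dag")
tracer_log = logging.getLogger("pipeline.profiler")

worker_log.info("scheduling info")    # -> PipelineServingLogs/INFO.log (INFO/WARNING only)
worker_log.error("channel error")     # -> PipelineServingLogs/ERROR.log (ERROR and above)
tracer_log.info("op latency stats")   # -> PipelineServingLogs/TRACE.log; kept out of the
                                      #    other files by OutOfMouduleFilter
```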
@@ -32,7 +32,7 @@ from .channel import (ThreadChannel, ProcessChannel, ChannelDataEcode,
 from .util import NameGenerator
 from .profiler import UnsafeTimeProfiler as TimeProfiler

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.operator")
 _op_name_gen = NameGenerator("Op")
......
@@ -22,7 +22,7 @@ from .channel import ChannelDataEcode
 from .proto import pipeline_service_pb2
 from .proto import pipeline_service_pb2_grpc

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.pipeline_client")


 class PipelineClient(object):
......
@@ -26,7 +26,7 @@ from .proto import pipeline_service_pb2_grpc
 from .operator import ResponseOp
 from .dag import DAGExecutor

-_LOGGER = logging.getLogger()
+_LOGGER = logging.getLogger("pipeline.pipeline_server")


 class PipelineServicer(pipeline_service_pb2_grpc.PipelineServiceServicer):
......
@@ -27,7 +27,7 @@ import time
 import threading
 import multiprocessing

-_TRACER = logging.getLogger("tracer")
+_TRACER = logging.getLogger("pipeline.profiler")


 class PerformanceTracer(object):
......