Commit cbb7f5eb authored by SunAhong1993

Use only the compiled caffe.proto (drop the pycaffe code path)

Parent df8da649
@@ -49,7 +49,7 @@ def arg_parser():
         "--caffe_proto",
         "-c",
         type=_text_type,
-        default=None,
+        default='./x2paddle/decoder/caffe_pb2.py',
         help="the .py file compiled by caffe proto file of caffe model")
     parser.add_argument("--version",
                         "-v",
@@ -91,24 +91,10 @@ def tf2paddle(model_path, save_dir):
 def caffe2paddle(proto, weight, save_dir, caffe_proto):
-    if caffe_proto is not None:
-        import os
-        if caffe_proto is not None and not os.path.isfile(caffe_proto):
-            print("The .py file compiled by caffe.proto is not exist.")
-            return
-    else:
-        try:
-            import caffe
-            version = caffe.__version__
-            if version != '1.0.0':
-                print("caffe == 1.0.0 is required")
-                return
-        except:
-            print("Caffe is not installed.")
-            print(
-                "You have 2 options: 1. install caffe 2. compile the caffe.proto"
-            )
-            return
+    import os
+    if caffe_proto is not None and not os.path.isfile(caffe_proto):
+        print("The .py file compiled by caffe.proto is not exist.")
+        return
     from x2paddle.decoder.caffe_decoder import CaffeDecoder
     from x2paddle.op_mapper.caffe_op_mapper import CaffeOpMapper
...
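For context, a minimal sketch of calling the simplified entry point directly; the file paths are hypothetical and the `x2paddle.convert` module location is an assumption, not taken from this diff:

```python
# Hypothetical direct invocation of the converter entry point shown above;
# the only Caffe-related prerequisite is now the compiled caffe_pb2.py module.
from x2paddle.convert import caffe2paddle

caffe2paddle(
    proto="alexnet.prototxt",        # network definition (hypothetical path)
    weight="alexnet.caffemodel",     # trained weights (hypothetical path)
    save_dir="inference_model",      # output directory for the Paddle model
    caffe_proto="./x2paddle/decoder/caffe_pb2.py",  # the new default value
)
```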
@@ -23,43 +23,21 @@ from x2paddle.op_mapper import caffe_shape
 class CaffeResolver(object):
     def __init__(self, caffe_proto):
-        self.proto_path = caffe_proto
-        if self.proto_path is None:
-            self.use_default = True
-        else:
-            self.use_default = False
+        self.caffe_proto = caffe_proto
         self.import_caffe()

     def import_caffepb(self):
         (filepath,
-         tempfilename) = os.path.split(os.path.abspath(self.proto_path))
+         tempfilename) = os.path.split(os.path.abspath(self.caffe_proto))
         (filename, extension) = os.path.splitext(tempfilename)
         sys.path.append(filepath)
         out = __import__(filename)
         return out

     def import_caffe(self):
-        self.caffe = None
-        self.caffepb = None
-        if self.use_default:
-            try:
-                # Try to import PyCaffe first
-                import caffe
-                self.caffe = caffe
-            except ImportError:
-                # Fall back to the protobuf implementation
-                self.caffepb = self.import_caffepb()
-        else:
-            self.caffepb = self.import_caffepb()
-        if self.caffe:
-            # Use the protobuf code from the imported distribution.
-            # This way, Caffe variants with custom layers will work.
-            self.caffepb = self.caffe.proto.caffe_pb2
+        self.caffepb = self.import_caffepb()
         self.NetParameter = self.caffepb.NetParameter

-    def has_pycaffe(self):
-        return self.caffe is not None
-

 class CaffeGraphNode(GraphNode):
     def __init__(self, layer, layer_name=None):
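The slimmed-down resolver now always goes through `import_caffepb()`. A standalone sketch of the same dynamic-import pattern, for readers who want to try it outside the class (the helper name and file path are hypothetical):

```python
import os
import sys

def load_compiled_proto(caffe_proto_path):
    # Mirror CaffeResolver.import_caffepb(): put the directory of the compiled
    # module on sys.path and import it by bare file name (e.g. "caffe_pb2").
    filepath, tempfilename = os.path.split(os.path.abspath(caffe_proto_path))
    filename, _extension = os.path.splitext(tempfilename)
    sys.path.append(filepath)
    return __import__(filename)

# Hypothetical usage: grab the NetParameter message class used to parse prototxt files.
caffepb = load_compiled_proto("./x2paddle/decoder/caffe_pb2.py")
NetParameter = caffepb.NetParameter
```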
@@ -125,62 +103,6 @@ class CaffeGraph(Graph):
         layers = self.model.layers or self.model.layer
         layers = self.filter_layers(layers)

-        inputs_num = len(self.model.input)
-        if inputs_num != 0:
-            input_dims_num = len(self.model.input_dim)
-            if input_dims_num != 0:
-                if input_dims_num > 0 and input_dims_num != inputs_num * 4:
-                    raise Error('invalid input_dim[%d] param in prototxt' %
-                                (input_dims_num))
-                for i in range(inputs_num):
-                    dims = self.model.input_dim[i * 4:(i + 1) * 4]
-                    data = self.model.layer.add()
-                    try:
-                        from caffe import layers as L
-                        data.CopyFrom(
-                            L.Input(input_param=dict(shape=dict(
-                                dim=[dims[0], dims[1], dims[2], dims[3]
-                                     ]))).to_proto().layer[0])
-                    except:
-                        print(
-                            "The .py file compiled by .proto file does not work for the old style prototxt. "
-                        )
-                        print("There are 2 solutions for you as below:")
-                        print(
-                            "1. install caffe and don\'t set \'--caffe_proto\'."
-                        )
-                        print(
-                            "2. modify your .prototxt from the old style to the new style."
-                        )
-                        sys.exit(-1)
-                    data.name = self.model.input[i]
-                    data.top[0] = self.model.input[i]
-            else:
-                for i in range(inputs_num):
-                    dims = self.model.input_shape[i].dim[0:4]
-                    data = self.model.layer.add()
-                    try:
-                        from caffe import layers as L
-                        data.CopyFrom(
-                            L.Input(input_param=dict(shape=dict(
-                                dim=[dims[0], dims[1], dims[2], dims[3]
-                                     ]))).to_proto().layer[0])
-                    except:
-                        print(
-                            "The .py file compiled by .proto file does not work for the old style prototxt. "
-                        )
-                        print("There are 2 solutions for you as below:")
-                        print(
-                            "1. install caffe and don\'t set \'--caffe_proto\'."
-                        )
-                        print(
-                            "2. modify your .prototxt from the old style to the new style."
-                        )
-                        sys.exit(-1)
-                    data.name = self.model.input[i]
-                    data.top[0] = self.model.input[i]
-            layers = [data] + layers
-
         top_layer = {}
         for layer in layers:
             self.node_map[layer.name] = CaffeGraphNode(layer)
@@ -202,7 +124,7 @@ class CaffeGraph(Graph):
             node = self.node_map[layer_name]
             node.set_params(data)
         else:
-            raise Exception('Ignoring parameters for non-existent layer: %s' % \
+            print('Ignoring parameters for non-existent layer: %s' % \
                   layer_name)

         super(CaffeGraph, self).build()
@@ -221,35 +143,20 @@ class CaffeGraph(Graph):
 class CaffeDecoder(object):
-    def __init__(self, proto_path, model_path, caffe_proto=None):
+    def __init__(self, proto_path, model_path, caffe_proto):
         self.proto_path = proto_path
         self.model_path = model_path

         self.resolver = CaffeResolver(caffe_proto=caffe_proto)
         self.net = self.resolver.NetParameter()
         with open(proto_path, 'rb') as proto_file:
-            proto_str = proto_file.read()
+            proto_str = self.old2new(proto_file)
             text_format.Merge(proto_str, self.net)
-        self.load()
+        self.load_using_pb()

         self.caffe_graph = CaffeGraph(self.net, self.params)
         self.caffe_graph.build()

-    def load(self):
-        if self.resolver.has_pycaffe():
-            self.load_using_caffe()
-        else:
-            self.load_using_pb()
-
-    def load_using_caffe(self):
-        caffe = self.resolver.caffe
-        caffe.set_mode_cpu()
-        print(self.proto_path)
-        print(self.model_path)
-        net = caffe.Net(self.proto_path, self.model_path, caffe.TEST)
-        data = lambda blob: blob.data
-        self.params = [(k, list(map(data, v))) for k, v in net.params.items()]
-
     def load_using_pb(self):
         data = self.resolver.NetParameter()
         data.MergeFromString(open(self.model_path, 'rb').read())
@@ -271,3 +178,69 @@ class CaffeDecoder(object):
             data = np.array(blob.data, dtype=np.float32).reshape(c_o, c_i, h, w)
             transformed.append(data)
         return transformed
+
+    def old2new(self, proto_file):
+        part1_str = ''
+        part2_str = ''
+        part3_str = ''
+        is_input = False
+        dims = []
+        line = proto_file.readline()
+        print('Check if it is a new style of caffe...')
+        while line:
+            l_str = bytes.decode(line)
+            if l_str.replace(' ', '').startswith('input:'):
+                part2_str += 'layer {\n'
+                part2_str += (
+                    ' name: ' +
+                    l_str.strip().replace(' ', '').split('input:')[-1] + '\n')
+                part2_str += ' type: \"Input\"\n'
+                part2_str += (
+                    ' top: ' +
+                    l_str.strip().replace(' ', '').split('input:')[-1] + '\n')
+                is_input = True
+                line = proto_file.readline()
+                continue
+            elif l_str.replace(' ', '').startswith('input_dim:'):
+                dims.append(
+                    int(l_str.strip().replace(' ', '').split('input_dim:')[-1]))
+                if len(dims) == 4:
+                    part2_str += ' input_param { shape: { dim: ' + str(dims[0]) + \
+                        ' dim: ' + str(dims[1]) + \
+                        ' dim: ' + str(dims[2]) + \
+                        ' dim: ' + str(dims[3]) + ' } }\n'
+                    dims = []
+                    part2_str += '}\n'
+                line = proto_file.readline()
+                if bytes.decode(line).replace(' ', '').startswith('}'):
+                    line = proto_file.readline()
+                continue
+            elif l_str.replace(' ', '').startswith('input_shape'):
+                part2_str += l_str.replace('input_shape',
+                                           'input_param { shape: ')
+                l_str = bytes.decode(proto_file.readline())
+                while l_str:
+                    if '}' in l_str:
+                        part2_str += l_str + '\n}\n}'
+                        break
+                    else:
+                        part2_str += l_str
+                    l_str = bytes.decode(proto_file.readline())
+                line = proto_file.readline()
+                continue
+
+            if not is_input:
+                part1_str += bytes.decode(line)
+            else:
+                part3_str += bytes.decode(line)
+            line = proto_file.readline()
+
+        out = part1_str + part2_str + part3_str
+        layer_str = 'layer{'
+        part = out.split(layer_str)
+        if len(part) == 1:
+            layer_str = 'layer {'
+            part = out.split(layer_str)
+        for i in range(len(part)):
+            if part[i].strip().replace(' ', '') == '' or part[i].count(':') > 1:
+                continue
+            out = out.replace(layer_str + part[i], part[i].replace(' ', ''))
+        return str.encode(out)
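To make the intent of `old2new()` concrete, here is a rough before/after of the legacy input declaration it rewrites; the network name and shape are made up for illustration, and the emitted text is only approximate:

```python
# Old-style prototxt head that old2new() detects via the "input:" / "input_dim:" keys.
old_style = b'''name: "demo_net"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 224
input_dim: 224
'''

# Roughly the new-style block it emits instead, so text_format.Merge() can parse
# the inputs as an explicit Input layer without needing pycaffe.
new_style = '''layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 1 dim: 3 dim: 224 dim: 224 } }
}
'''
```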
@@ -27,10 +27,6 @@ class CaffeOpMapper(OpMapper):
         self.weights = dict()
         resolver = decoder.resolver
         self.used_custom_layers = {}
-        if resolver.has_pycaffe():
-            self.did_use_pb = False
-        else:
-            self.did_use_pb = True

         print("Total nodes: {}".format(len(self.graph.topo_sort)))
         for node_name in self.graph.topo_sort:
@@ -79,8 +75,6 @@ class CaffeOpMapper(OpMapper):
     def adjust_parameters(self, node):
         data = node.data
-        if not self.did_use_pb:
-            return data
         # When using the protobuf-backend, each parameter initially has four dimensions.
         # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
         # This implementation takes care of the common cases. However, it does leave the
@@ -93,6 +87,8 @@ class CaffeOpMapper(OpMapper):
             squeeze_indices.append(0)  # Squeeze FC.

         for idx in squeeze_indices:
+            print('Transform the weights of {}...'.format(node.layer_name +
+                                                          str(idx)))
             if idx >= len(data):
                 continue
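As the comments in `adjust_parameters` note, the protobuf backend hands every parameter over as a 4-D blob. A small illustration of the squeeze it performs on an FC weight (the shapes are hypothetical):

```python
import numpy as np

# An FC weight as delivered by the protobuf backend: two leading singleton axes.
fc_weight = np.zeros((1, 1, 4096, 9216), dtype=np.float32)

# Dropping the singleton dimensions yields the 2-D matrix the op mapper expects.
squeezed = np.squeeze(fc_weight)
print(squeezed.shape)  # (4096, 9216)
```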
@@ -892,7 +888,7 @@ class CaffeOpMapper(OpMapper):
             output=node,
             param_attr=attr)

-    def Flatten(self, noed):
+    def Flatten(self, node):
         assert len(
             node.inputs
         ) == 1, 'The count of DetectionOutput node\'s input is not 1.'
...