Commit f5d93368 authored by Liu Yiqun

Unify the definition of kFeedOpType and kFetchOpType.

Parent 119da449
@@ -33,9 +33,6 @@ DEFINE_bool(check_nan_inf, false,
 namespace paddle {
 namespace framework {
 
-const std::string kFeedOpType = "feed";
-const std::string kFetchOpType = "fetch";
-
 Executor::Executor(const platform::Place& place) : place_(place) {}
 
 static void CreateTensor(Variable* var, proto::VarDesc::VarType var_type) {
......
@@ -13,6 +13,7 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #pragma once
 
+#include <string>
 #include <vector>
 #include "paddle/framework/lod_tensor.h"
@@ -20,5 +21,8 @@ namespace paddle {
 namespace framework {
 using FeedFetchType = LoDTensor;
 using FeedFetchList = std::vector<FeedFetchType>;
+
+static const std::string kFeedOpType = "feed";
+static const std::string kFetchOpType = "fetch";
 }  // namespace framework
 }  // namespace paddle
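With the two constants now declared once in this header, any translation unit can include it and refer to the shared names instead of redefining the string literals. A minimal sketch of the intended usage (the helper function below is illustrative only, not part of this commit):

    #include "paddle/framework/feed_fetch_type.h"
    #include "paddle/framework/op_desc.h"

    // Illustrative helper: identify feed/fetch ops by comparing the op type
    // against the shared constants rather than hard-coded "feed"/"fetch" strings.
    inline bool IsFeedOrFetchOp(const paddle::framework::OpDesc& op) {
      return op.Type() == paddle::framework::kFeedOpType ||
             op.Type() == paddle::framework::kFetchOpType;
    }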
@@ -14,13 +14,11 @@ limitations under the License. */
 #include "paddle/framework/program_desc.h"
 
 #include "paddle/framework/block_desc.h"
+#include "paddle/framework/feed_fetch_type.h"
 
 namespace paddle {
 namespace framework {
 
-const std::string kFeedOpType = "feed";
-const std::string kFetchOpType = "fetch";
-
 BlockDesc *ProgramDesc::AppendBlock(const BlockDesc &parent) {
   auto *b = desc_.add_blocks();
   b->set_parent_idx(parent.ID());
@@ -67,26 +65,26 @@ ProgramDesc::ProgramDesc(const std::string &binary_str) {
   }
 }
 
-const std::vector<std::string> ProgramDesc::GetFeedVarNames() {
+const std::vector<std::string> ProgramDesc::GetFeedTargetNames() {
   BlockDesc *global_block = blocks_[0].get();
-  std::vector<std::string> feed_var_names;
+  std::vector<std::string> feed_target_names;
   for (auto *op : global_block->AllOps()) {
-    if (op->Type() == "feed") {
-      feed_var_names.insert(feed_var_names.begin(), op->Output("Out")[0]);
+    if (op->Type() == kFeedOpType) {
+      feed_target_names.insert(feed_target_names.begin(), op->Output("Out")[0]);
     }
   }
-  return feed_var_names;
+  return feed_target_names;
 }
 
-const std::vector<std::string> ProgramDesc::GetFetchVarNames() {
+const std::vector<std::string> ProgramDesc::GetFetchTargetNames() {
   BlockDesc *global_block = blocks_[0].get();
-  std::vector<std::string> fetch_var_names;
+  std::vector<std::string> fetch_target_names;
   for (auto *op : global_block->AllOps()) {
-    if (op->Type() == "fetch") {
-      fetch_var_names.push_back(op->Input("X")[0]);
+    if (op->Type() == kFetchOpType) {
+      fetch_target_names.push_back(op->Input("X")[0]);
     }
   }
-  return fetch_var_names;
+  return fetch_target_names;
 }
 
 }  // namespace framework
......
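After the rename, callers obtain the feed/fetch variable names through the new accessors. A rough usage sketch, assuming a ProgramDesc deserialized from its protobuf binary string (the DumpTargets helper is hypothetical, for illustration only):

    #include <iostream>
    #include <string>
    #include "paddle/framework/program_desc.h"

    // Sketch: list the feed/fetch targets of a deserialized program.
    void DumpTargets(const std::string& binary_str) {
      paddle::framework::ProgramDesc program(binary_str);
      for (const auto& name : program.GetFeedTargetNames()) {
        std::cout << "feed target (Out of a feed op): " << name << "\n";
      }
      for (const auto& name : program.GetFetchTargetNames()) {
        std::cout << "fetch target (X of a fetch op): " << name << "\n";
      }
    }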
@@ -45,9 +45,8 @@ class ProgramDesc {
   proto::ProgramDesc *Proto();
 
-  const std::vector<std::string> GetFeedVarNames();
-
-  const std::vector<std::string> GetFetchVarNames();
+  const std::vector<std::string> GetFeedTargetNames();
+  const std::vector<std::string> GetFetchTargetNames();
 
  private:
  proto::ProgramDesc desc_;
......
@@ -21,12 +21,11 @@ limitations under the License. */
 #include <vector>
 
 #include <glog/logging.h>
 
+#include "paddle/framework/feed_fetch_type.h"
+
 namespace paddle {
 namespace framework {
 
-const std::string kFeedOpType = "feed";
-const std::string kFetchOpType = "fetch";
-
 const std::string kDropOutOpType = "dropout";
 const std::string kBatchNormOpType = "batch_norm";
......
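The pruning code keeps using the same constants when deciding which operators to retain; a hedged fragment of the kind of check involved (the IsFetchOp helper and the generated-proto include path are assumptions for illustration, not code from this diff):

    #include "paddle/framework/feed_fetch_type.h"
    #include "paddle/framework/framework.pb.h"

    // Sketch: an op counts as a fetch target if its type matches kFetchOpType.
    inline bool IsFetchOp(const paddle::framework::proto::OpDesc& op_desc) {
      return op_desc.type() == paddle::framework::kFetchOpType;
    }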
@@ -13,13 +13,14 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #include "paddle/inference/io.h"
 
 #include <fstream>
 
+#include "paddle/framework/block_desc.h"
+#include "paddle/framework/feed_fetch_type.h"
+
 namespace paddle {
 namespace inference {
 
-const std::string kFeedOpType = "feed";
-
 bool IsParameter(const framework::VarDesc* var,
                  const framework::ProgramDesc* main_program) {
   if (var->Persistable()) {
@@ -27,7 +28,7 @@ bool IsParameter(const framework::VarDesc* var,
   for (size_t i = 0; i < main_program->Size(); ++i) {
     const framework::BlockDesc& block = main_program->Block(i);
     for (auto* op : block.AllOps()) {
-      if (op->Type() == kFeedOpType) {
+      if (op->Type() == framework::kFeedOpType) {
        continue;
      }
      for (auto input_argument_name : op->InputArgumentNames()) {
......
@@ -16,18 +16,13 @@ limitations under the License. */
 
 #include <string>
 #include <vector>
-#include "paddle/framework/block_desc.h"
 #include "paddle/framework/executor.h"
 #include "paddle/framework/program_desc.h"
 #include "paddle/framework/scope.h"
-#include "paddle/framework/var_desc.h"
 
 namespace paddle {
 namespace inference {
 
-bool IsParameter(const framework::VarDesc* var,
-                 const framework::ProgramDesc* main_program);
-
 void LoadPersistables(framework::Executor& executor,
                       framework::Scope& scope,
                       const std::string& dirname,
......
@@ -33,11 +33,11 @@ void TestInference(const std::string& dirname,
   // 2. Initialize the inference_program and load all parameters from file
   auto* inference_program = paddle::inference::Load(executor, *scope, dirname);
 
-  // 3. Get the feed_var_names and fetch_var_names
+  // 3. Get the feed_target_names and fetch_target_names
   const std::vector<std::string>& feed_target_names =
-      inference_program->GetFeedVarNames();
+      inference_program->GetFeedTargetNames();
   const std::vector<std::string>& fetch_target_names =
-      inference_program->GetFetchVarNames();
+      inference_program->GetFetchTargetNames();
 
   // 4. Prepare inputs
   std::map<std::string, const paddle::framework::LoDTensor*> feed_targets;
......
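The map declared in step 4 is keyed by the names collected in step 3. A hedged sketch of how the test presumably continues (the tensor setup and the fetch map's exact type are assumptions, not shown in this diff):

    // Sketch: bind the first feed target to an input tensor and reserve an
    // output tensor for the first fetch target.
    paddle::framework::LoDTensor input;
    // ... fill `input` with test data ...
    feed_targets[feed_target_names[0]] = &input;

    paddle::framework::LoDTensor output;
    std::map<std::string, paddle::framework::LoDTensor*> fetch_targets;
    fetch_targets[fetch_target_names[0]] = &output;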