提交 388e699b 编写于 作者: qnqinan

Merge branch 'develop' of https://github.com/PaddlePaddle/paddle-mobile into develop

......@@ -197,5 +197,17 @@ uint64_t vaddr_to_paddr(void *address) {
return 0;
#endif
}
// Returns the paddle-mobile FPGA library version packed into a uint32_t as
// four bytes: major.minor.patch.build -> 0xMMmmPPBB.
// Current version is 1.2.34 build 2 (the "slave" build), i.e. 0x01022202.
// NOTE(review): the original code also computed an unused "master" variant
// (build id 1) that was never returned; that dead computation is removed here.
uint32_t paddle_mobile_version() {
  uint32_t major = 1;
  uint32_t minor = 2;
  uint32_t patch = 34;
  uint32_t build = 2;  // slave build id
  return major << 24 | minor << 16 | patch << 8 | build;
}
} // namespace fpga
} // namespace paddle_mobile
......@@ -30,6 +30,7 @@ limitations under the License. */
namespace paddle_mobile {
namespace fpga {
enum DataType {
DATA_TYPE_INT8 = 2,
DATA_TYPE_FP32 = 1,
......@@ -275,5 +276,8 @@ uint64_t vaddr_to_paddr(void* address);
void expand_conv_arg(ConvArgs* arg);
void expand_EW_arg(EWAddArgs* arg);
inline int32_t convertmantissa(int32_t i);
uint32_t paddle_mobile_version();
} // namespace fpga
} // namespace paddle_mobile
......@@ -30,10 +30,12 @@ limitations under the License. */
namespace paddle_mobile {
#ifdef PADDLE_MOBILE_FPGA
namespace fpga {
int open_device();
void* fpga_malloc(size_t size);
void fpga_free(void* ptr);
uint32_t paddle_mobile_version();
} // namespace fpga
#endif
......
......@@ -139,6 +139,9 @@ PaddleMobileConfig GetConfig1() {
int main() {
open_device();
timeval start11, end11;
long dif_sec, dif_usec; // NOLINT
PaddleMobileConfig config = GetConfig();
auto predictor =
CreatePaddlePredictor<PaddleMobileConfig,
......@@ -172,8 +175,6 @@ int main() {
std::cout << "Finishing feeding data " << std::endl;
timeval start11, end11;
long dif_sec, dif_usec; // NOLINT
gettimeofday(&start11, NULL);
predictor->Predict_From_To(0, -1);
gettimeofday(&end11, NULL);
......@@ -189,8 +190,9 @@ int main() {
std::cout << "Output number is " << v.size() << std::endl;
for (int fetchNum = 0; fetchNum < v.size(); fetchNum++) {
std::string dumpName = "marker_api_fetch_" + std::to_string(fetchNum);
dump_stride(dumpName, v[fetchNum]);
// dump_stride(dumpName, v[fetchNum]);
}
fpga_free(img);
PaddleMobileConfig config1 = GetConfig1();
auto predictor1 =
......@@ -233,6 +235,7 @@ int main() {
std::string dumpName = "marker2_api_fetch_" + std::to_string(fetchNum);
dump_stride(dumpName, v1[fetchNum]);
}
fpga_free(img1);
}
return 0;
}
......@@ -16,6 +16,7 @@ limitations under the License. */
#define PADDLE_MOBILE_FPGA
#endif
#include <fstream>
#include <iomanip>
#include <iostream>
#include "../../src/io/paddle_inference_api.h"
......@@ -69,7 +70,7 @@ PaddleMobileConfig GetConfig1() {
int main() {
open_device();
#if 0
PaddleMobileConfig config1 = GetConfig1();
auto predictor1 =
CreatePaddlePredictor<PaddleMobileConfig,
......@@ -98,7 +99,8 @@ int main() {
predictor1->FetchPaddleTensors(&v1); // Old data in v will be cleared
std::cout << "Output number is " << v1.size() << std::endl;
std::cout << "out[0] length " << v1[0].data.length() << std::endl;
fpga_free(img1);
#endif
////////////////////////////
PaddleMobileConfig config = GetConfig();
......@@ -160,6 +162,11 @@ int main() {
}
}
std::cout << "Finish getting vector values" << std::endl;
fpga_free(img);
auto version = fpga::paddle_mobile_version();
std::cout << "0X0" << std::hex << version << std::endl;
return 0;
}
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册