/**
 * \file src/gopt/test/network.h
 * MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
 *
 * Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 */

#pragma once

#include "megbrain/test/helper.h"

#include "megbrain/gopt/framework.h"
#include "megbrain/opr/basic_arith_wrapper.h"
#include "megbrain/opr/blas.h"
#include "megbrain/opr/dnn/convolution.h"
#include "megbrain/opr/dnn/pooling.h"
#include "megbrain/opr/imgproc.h"
#include "megbrain/opr/nn_int.h"
#include "megbrain/opr/tensor_gen.h"
#include "megbrain/opr/tensor_manip.h"
#include "megbrain/opr/utility.h"

namespace mgb {
class Network {
private:
    HostTensorGenerator<> gen;
    CompNode cn;

public:
    std::shared_ptr<ComputingGraph> graph = ComputingGraph::make();
    Network(CompNode cn_) : cn{cn_} {}
    ~Network() noexcept = default;
    using KernSize = SmallVector<size_t, 2>;
    using Stride = SmallVector<size_t, 2>;
    using Padding = SmallVector<size_t, 2>;
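    //! add a graph input: a Host2DeviceCopy var fed with generated host data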
    SymbolVar add_var(const char* name, const TensorShape& shp = {1}) {
        return opr::Host2DeviceCopy::make(*graph, gen(shp), cn).rename(name);
    }
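    //! add a constant: a SharedDeviceTensor var holding generated values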
    SymbolVar add_cvar(const char* name, const TensorShape& shp = {1}) {
        return opr::SharedDeviceTensor::make(*graph, *gen(shp), cn).rename(name);
    }

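    //! add a convolution with \p output_channels filters of size \p kern_size,
    //! optionally followed by a ReLU activation when \p has_relu is set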
    SymbolVar add_conv(
            SymbolVar f, size_t output_channels, KernSize kern_size,
            DType out_dtype = dtype::Float32(), bool has_relu = true,
            Stride stride = {1, 1}, Padding padding = {0, 0});
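    //! add a transposed convolution that enlarges the spatial size by \p ratio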
    SymbolVar add_deconv(
            SymbolVar f, size_t ratio, size_t output_channels, DType out_dtype);
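    //! add an elementwise opr (\p mode, ADD by default) over \p inps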
    SymbolVar add_elemwise(
            const SymbolVarArray inps, DType out_dtype = dtype::Float32(),
            opr::Elemwise::Param::Mode mode = opr::Elemwise::Param::Mode::ADD);
    using Window = SmallVector<size_t, 2>;
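    //! add a pooling opr over \p window (MAX pooling by default)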
    SymbolVar add_pooling(
            SymbolVar f, Window window, Stride stride = {1, 1},
            Padding padding = {0, 0},
            opr::Pooling::Param::Mode mode = opr::Pooling::Param::Mode::MAX);
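    //! add a TypeCvt converting \p f to \p out_dtype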
    SymbolVar add_type_cvt(SymbolVar f, DType out_dtype = dtype::Float32());
};
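
// Usage sketch (illustrative only, not used by the tests): the comp node name
// and tensor shapes below are arbitrary; only the helpers declared above are
// assumed.
//
//     Network network{CompNode::load("cpu0")};
//     auto data = network.add_var("data", {1, 3, 32, 32});
//     auto conv = network.add_conv(data, 16, {3, 3}, dtype::Float32(), true,
//                                  {1, 1}, {1, 1});
//     auto pool = network.add_pooling(conv, {2, 2}, {2, 2});
//     auto out = network.add_type_cvt(pool, dtype::Float32());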

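//! build a residual-style block of stacked convolutions on top of \p f; when
//! \p has_proj is set, the shortcut branch goes through a projection conv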
SymbolVar create_block(
        Network& network, SymbolVar f, size_t stride, size_t num_outputs1,
        bool has_proj = false, DType out_dtype = dtype::Float32());

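//! build a ResNet-18 style graph on \p network with batch size \p batch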
SymbolVar make_resnet18(
        Network& network, size_t batch = 16, DType out_dtype = dtype::Float32());

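//! build a small detection-style graph; returns all of its output vars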
SymbolVarArray make_det(
        Network& network, size_t batch = 16, DType out_dtype = dtype::Float32());
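
// Usage sketch (illustrative only, assuming the make_callback_copy test helper
// and an available "cpu0" comp node):
//
//     Network network{CompNode::load("cpu0")};
//     auto out = make_resnet18(network, 16, dtype::Float32());
//     HostTensorND host_out;
//     auto func = network.graph->compile({make_callback_copy(out, host_out)});
//     func->execute();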

}  // namespace mgb

// vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}}