| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | #include "pass_level1.h" |
| |
|
| | #include "../utils.h" |
| |
|
| | namespace pnnx { |
| |
|
| | class Linear : public FuseModulePass |
| | { |
| | public: |
| | const char* match_type_str() const |
| | { |
| | return "__torch__.torch.nn.modules.linear.Linear"; |
| | } |
| |
|
| | const char* type_str() const |
| | { |
| | return "nn.Linear"; |
| | } |
| |
|
| | void write(Operator* op, const std::shared_ptr<torch::jit::Graph>& graph, const torch::jit::Module& mod) const |
| | { |
| | const torch::jit::Node* addmm = find_node_by_kind(graph, "aten::addmm"); |
| |
|
| | const auto& weight = mod.attr("weight").toTensor(); |
| |
|
| | op->params["in_features"] = weight.size(1); |
| | op->params["out_features"] = weight.size(0); |
| | op->params["bias"] = mod.hasattr("bias") && mod.attr("bias").isTensor(); |
| |
|
| | op->attrs["weight"] = weight; |
| | if (mod.hasattr("bias") && mod.attr("bias").isTensor()) |
| | { |
| | op->attrs["bias"] = mod.attr("bias").toTensor(); |
| | } |
| | } |
| | }; |
| |
|
// Register this pass with the global fuse-module pass table so the
// level-1 pass runner can match and rewrite nn.Linear modules.
REGISTER_GLOBAL_PNNX_FUSE_MODULE_PASS(Linear)
| |
|
| | } |
| |
|