Skip to content

Commit fd24387

Browse files
deivanayakisankaralingam
authored and committed
fixed failing checks issue
1 parent fc13455 commit fd24387

File tree

2 files changed

+2
-1
lines changed

2 files changed

+2
-1
lines changed

python/tvm/relax/frontend/torch/fx_translator.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -108,7 +108,7 @@ def _prelu_module(self, node: fx.Node) -> relax.Var:
108108
module = self.named_modules[node.target]
109109
alpha_tensor = module.weight.numpy()
110110
alpha = relax.const(alpha_tensor, dtype="float32")
111-
axis = 0 if len(x.struct_info.shape) == 1 else 1 # Extract Channel size
111+
axis = 0 if len(x.struct_info.shape) == 1 else 1 # Extract Channel size
112112
return self.block_builder.emit(relax.op.nn.prelu(x, alpha, axis))
113113

114114
def _softmax_module(self, node: fx.Node) -> relax.Var:

src/relax/op/nn/nn.cc

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -79,6 +79,7 @@ TVM_REGISTER_OP("relax.nn.softplus")
7979
.set_attrs_type<SoftplusAttrs>()
8080
.set_attr<FInferStructInfo>("FInferStructInfo",
8181
InferStructInfoUnaryArith</*require_float_dtype=*/true>)
82+
.set_attr<Bool>("FPurity", Bool(true));
8283

8384
/* relax.nn.prelu */
8485
TVM_REGISTER_NODE_TYPE(PReluAttrs);

0 commit comments

Comments (0)