[Torch] emit aten.log_sigmoid and decompose it to log(sigmoid) (#3246)
qingyunqu authored Apr 28, 2024
1 parent a339d7b commit 46c0f3c
Showing 8 changed files with 95 additions and 0 deletions.
23 changes: 23 additions & 0 deletions include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td
@@ -4949,6 +4949,29 @@ def Torch_AtenGluOp : Torch_Op<"aten.glu", [
}];
}

def Torch_AtenLogSigmoidOp : Torch_Op<"aten.log_sigmoid", [
AllowsTypeRefinement,
HasValueSemantics,
ReadOnly
]> {
let summary = "Generated op for `aten::log_sigmoid : (Tensor) -> (Tensor)`";
let arguments = (ins
AnyTorchTensorType:$self
);
let results = (outs
AnyTorchOptionalTensorType:$result
);
let hasCustomAssemblyFormat = 1;
let extraClassDefinition = [{
ParseResult AtenLogSigmoidOp::parse(OpAsmParser &parser, OperationState &result) {
return parseDefaultTorchOp(parser, result, 1, 1);
}
void AtenLogSigmoidOp::print(OpAsmPrinter &printer) {
printDefaultTorchOp(printer, *this, 1, 1);
}
}];
}

def Torch_AtenUnbindCopyIntOp : Torch_Op<"aten.unbind_copy.int", [
AllowsTypeRefinement,
HasValueSemantics,
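The TableGen entry above registers `aten.log_sigmoid` with the signature `aten::log_sigmoid : (Tensor) -> (Tensor)`. For orientation, this is the same ATen op reachable from eager PyTorch; a minimal sketch (plain PyTorch, independent of torch-mlir):

```python
import torch

x = torch.rand(3, 4)
# aten::log_sigmoid is the op behind torch.nn.LogSigmoid / F.logsigmoid.
y = torch.ops.aten.log_sigmoid(x)
assert y.shape == x.shape and y.dtype == x.dtype
```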
19 changes: 19 additions & 0 deletions lib/Dialect/Torch/Transforms/AbstractInterpLibrary.cpp
@@ -6510,6 +6510,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() {
" %0 = call @__torch__.torch.jit._shape_functions.unary(%arg0) : (!torch.list<int>) -> !torch.list<int>\n"
" return %0 : !torch.list<int>\n"
" }\n"
" func.func @\"__torch_mlir_shape_fn.aten.log_sigmoid\"(%arg0: !torch.list<int>) -> !torch.list<int> {\n"
" %0 = call @__torch__.torch.jit._shape_functions.unary(%arg0) : (!torch.list<int>) -> !torch.list<int>\n"
" return %0 : !torch.list<int>\n"
" }\n"
" func.func @\"__torch_mlir_shape_fn.aten.mish\"(%arg0: !torch.list<int>) -> !torch.list<int> {\n"
" %0 = call @__torch__.torch.jit._shape_functions.unary(%arg0) : (!torch.list<int>) -> !torch.list<int>\n"
" return %0 : !torch.list<int>\n"
@@ -9771,6 +9775,21 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() {
" %1 = call @__torch__._get_dtype_of_floating_point_op(%0#1) : (!torch.int) -> !torch.int\n"
" return %1 : !torch.int\n"
" }\n"
" func.func @\"__torch_mlir_dtype_fn.aten.log_sigmoid\"(%arg0: !torch.tuple<int, int>) -> !torch.int {\n"
" %none = torch.constant.none\n"
" %str = torch.constant.str \"AssertionError: \"\n"
" %int11 = torch.constant.int 11\n"
" %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple<int, int> -> !torch.int, !torch.int\n"
" %1 = torch.aten.eq.int %0#1, %int11 : !torch.int, !torch.int -> !torch.bool\n"
" %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n"
" torch.prim.If %2 -> () {\n"
" torch.prim.If.yield\n"
" } else {\n"
" torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n"
" torch.prim.If.yield\n"
" }\n"
" return %0#1 : !torch.int\n"
" }\n"
" func.func @\"__torch_mlir_dtype_fn.aten.logit\"(%arg0: !torch.tuple<int, int>, %arg1: !torch.optional<float>) -> !torch.int {\n"
" %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple<int, int> -> !torch.int, !torch.int\n"
" %1 = call @__torch__._get_dtype_of_floating_point_op(%0#1) : (!torch.int) -> !torch.int\n"
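A note on the generated dtype function above: `%int11` is PyTorch's ScalarType code for `torch.bool`, so the IR asserts the input is not boolean and otherwise propagates the input dtype — the same rule as the Python source (abstract_interp_lib_gen.py) later in this commit. A hedged eager-mode illustration (the exact error text varies across builds):

```python
import torch
import torch.nn.functional as F

assert F.logsigmoid(torch.rand(2, 2)).dtype == torch.float32
try:
    F.logsigmoid(torch.ones(2, 2, dtype=torch.bool))
except RuntimeError:
    pass  # bool inputs are rejected, matching error_types={torch.bool}
```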
15 changes: 15 additions & 0 deletions lib/Dialect/Torch/Transforms/DecomposeComplexOps.cpp
@@ -1900,6 +1900,20 @@ class DecomposeAten_LogSoftmaxOp : public OpRewritePattern<Aten_LogSoftmaxOp> {
};
} // namespace

// Decompose aten.log_sigmoid into: aten.log(aten.sigmoid(self)).
namespace {
class DecomposeAtenLogSigmoidOp : public OpRewritePattern<AtenLogSigmoidOp> {
public:
using OpRewritePattern<AtenLogSigmoidOp>::OpRewritePattern;
LogicalResult matchAndRewrite(AtenLogSigmoidOp op,
PatternRewriter &rewriter) const override {
Value sigmoid =
rewriter.create<AtenSigmoidOp>(op.getLoc(), op.getType(), op.getSelf());
rewriter.replaceOpWithNewOp<AtenLogOp>(op, op.getType(), sigmoid);
return success();
}
};
} // namespace

// Decompose aten.matmul into: aten.mm and aten.bmm according to ranks.
namespace {
class DecomposeAtenMatmulOp : public OpRewritePattern<AtenMatmulOp> {
@@ -7606,6 +7620,7 @@ class DecomposeComplexOpsPass
addPatternIfTargetOpIsIllegal<DecomposeAten_SoftmaxOp>(patterns);
addPatternIfTargetOpIsIllegal<DecomposeAten_LogSoftmaxOp>(patterns);
addPatternIfTargetOpIsIllegal<DecomposeAtenLogSoftmaxIntOp>(patterns);
addPatternIfTargetOpIsIllegal<DecomposeAtenLogSigmoidOp>(patterns);
addPatternIfTargetOpIsIllegal<DecomposeAtenEmptyLikeOp>(patterns);
addPatternIfTargetOpIsIllegal<
DecomposeConstantTensorAllocLikeOp<AtenOnesLikeOp, 1>>(patterns);
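The pattern is a direct function composition: it materializes `sigmoid(self)` with the op's result type and replaces `log_sigmoid` with `log` of that value. A numeric sketch of the identity being relied on (eager PyTorch); note that a fused log-sigmoid kernel is more numerically stable for large-magnitude negative inputs than the two-step form, which this decomposition does not attempt to preserve:

```python
import torch
import torch.nn.functional as F

x = torch.randn(3, 4)
decomposed = torch.log(torch.sigmoid(x))  # what the rewrite emits
assert torch.allclose(F.logsigmoid(x), decomposed, atol=1e-6)

# Caveat: sigmoid underflows to 0 for very negative inputs, so the
# two-step form yields -inf where the fused kernel stays finite.
x_extreme = torch.tensor([-200.0])
print(torch.log(torch.sigmoid(x_extreme)))  # tensor([-inf])
print(F.logsigmoid(x_extreme))              # tensor([-200.])
```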
1 change: 1 addition & 0 deletions lib/Dialect/Torch/Transforms/LowerToBackendContract.cpp
@@ -370,6 +370,7 @@ static void markDecomposedOpsAsIllegal(MLIRContext *context,
target.addIllegalOp<Aten_SoftmaxOp>();
target.addIllegalOp<Aten_LogSoftmaxOp>();
target.addIllegalOp<AtenLogSoftmaxIntOp>();
target.addIllegalOp<AtenLogSigmoidOp>();
target.addIllegalOp<AtenEmptyLikeOp>();
target.addIllegalOp<AtenOnesLikeOp>();
target.addIllegalOp<AtenZerosLikeOp>();
3 changes: 3 additions & 0 deletions projects/pt1/e2e_testing/xfail_sets.py
@@ -1431,6 +1431,7 @@
"ElementwiseSinhModule_basic",
"ElementwiseTruncIntModule_basic",
"ElementwiseTruncModule_basic",
"ElementwiseLogSigmoidModule_basic",
}

STABLEHLO_CRASHING_SET = {
@@ -1440,6 +1441,7 @@
# Write the TOSA set as a "passing" set as it is very early in development
# and very few tests work yet.
TOSA_PASS_SET = {
"ElementwiseLogSigmoidModule_basic",
"ElementwiseTruncModule_basic",
"ElementwiseTruncIntModule_basic",
"ElementwiseSgnModule_basic",
@@ -1935,6 +1937,7 @@
# failed to legalize operation 'torch.operator'
"ElementwisePreluModule_basic",
"ElementwisePreluStaticModule_basic",
"ElementwiseLogSigmoidModule_basic",
# Shape Related failures
"PrimListUnpackNumMismatchModule_basic",
"ReshapeExpandModule_basic",
9 changes: 9 additions & 0 deletions projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/abstract_interp_lib_gen.py
@@ -251,6 +251,9 @@ def aten〇trunc〡shape(self: List[int]) -> List[int]:
def aten〇log〡shape(self: List[int]) -> List[int]:
return upstream_shape_functions.unary(self)

def aten〇log_sigmoid〡shape(self: List[int]) -> List[int]:
return upstream_shape_functions.unary(self)

def aten〇mish〡shape(self: List[int]) -> List[int]:
return upstream_shape_functions.unary(self)

@@ -2083,6 +2086,12 @@ def aten〇log1p〡dtype(self_rank_dtype: Tuple[int, int]) -> int:
self_rank, self_dtype = self_rank_dtype
return _get_dtype_of_floating_point_op(self_dtype)

@check_dtype_function(_check_tensors_with_the_same_dtype(num_of_tensors=1, error_types={torch.bool}))
def aten〇log_sigmoid〡dtype(self_rank_dtype: Tuple[int, int]) -> int:
self_rank, self_dtype = self_rank_dtype
assert not self_dtype == torch.bool
return self_dtype

@check_dtype_function(_check_tensors_with_the_same_dtype(num_of_tensors=1))
def aten〇logit〡dtype(self_rank_dtype: Tuple[int, int], eps: Optional[float] = None) -> int:
self_rank, self_dtype = self_rank_dtype
1 change: 1 addition & 0 deletions projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/torch_ods_gen.py
@@ -478,6 +478,7 @@ def emit_with_mutating_variants(key, **kwargs):
emit("aten::view_as_real : (Tensor) -> (Tensor)")
emit("aten::isclose : (Tensor, Tensor, float, float, bool) -> (Tensor)")
emit("aten::glu : (Tensor, int) -> (Tensor)")
emit("aten::log_sigmoid : (Tensor) -> (Tensor)")

# Ops with dynamic number of outputs
emit("aten::unbind_copy.int : (Tensor, int) -> (Tensor[])")
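For context: both GeneratedTorchOps.td and AbstractInterpLibrary.cpp are generated artifacts. The `emit(...)` line here and the shape/dtype functions in abstract_interp_lib_gen.py are the hand-edited sources; the generated files are refreshed from them by the repository's build_tools update scripts (update_torch_ods.sh and update_abstract_interp_lib.sh at the time of this commit), which is why the same change appears in both the generator and generated files.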
24 changes: 24 additions & 0 deletions projects/pt1/python/torch_mlir_e2e_test/test_suite/elementwise.py
@@ -2109,6 +2109,30 @@ def ElementwiseLogitModule_basic(module, tu: TestUtils):
# ==============================================================================


class ElementwiseLogSigmoidModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.m = torch.nn.LogSigmoid()

@export
@annotate_args(
[
None,
([-1, -1], torch.float32, True),
]
)
def forward(self, a):
return self.m(a)


@register_test_case(module_factory=lambda: ElementwiseLogSigmoidModule())
def ElementwiseLogSigmoidModule_basic(module, tu: TestUtils):
module.forward(tu.rand(3, 4))


# ==============================================================================


class ElementwiseErfModule(torch.nn.Module):
def __init__(self):
super().__init__()
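The new test case computes a golden value by running `forward` eagerly and compares it against the compiled output under each configured backend; the xfail_sets.py edits above enroll it in the appropriate per-backend pass/xfail sets. To run just this case, the pt1 e2e harness accepts a name filter, e.g. `python -m e2e_testing.main -f ElementwiseLogSigmoidModule_basic` from projects/pt1 (flag spelling from memory — check the harness's --help).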
