kazum commented on a change in pull request #5502:
URL: https://github.com/apache/incubator-tvm/pull/5502#discussion_r422493215
##########
File path: python/tvm/relay/op/_tensor_grad.py
##########
@@ -110,18 +110,66 @@ def sin_grad(orig, grad):
x = orig.args[0]
return [grad * cos(x)]
+
@register_gradient("sinh")
def sinh_grad(orig, grad):
"""Returns [grad * cosh(x)]"""
x = orig.args[0]
return [grad * cosh(x)]
+
+@register_gradient("acos")
+def acos_grad(orig, grad):
+ """Returns [grad * -1/((1 - (x ^ 2)) ^ 1/2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * (-ones / sqrt(ones - power(x, a)))]
Review comment:
Replace `power(x,a)` with `x*x`, and remove `a = const(2.0)`.
##########
File path: python/tvm/relay/op/_tensor_grad.py
##########
@@ -110,18 +110,66 @@ def sin_grad(orig, grad):
x = orig.args[0]
return [grad * cos(x)]
+
@register_gradient("sinh")
def sinh_grad(orig, grad):
"""Returns [grad * cosh(x)]"""
x = orig.args[0]
return [grad * cosh(x)]
+
+@register_gradient("acos")
+def acos_grad(orig, grad):
+ """Returns [grad * -1/((1 - (x ^ 2)) ^ 1/2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * (-ones / sqrt(ones - power(x, a)))]
+
+
+@register_gradient("acosh")
+def acosh_grad(orig, grad):
+ """Returns [grad * 1/((x - 1) ^ 1/2 * (x + 1) ^ 1/2)]"""
+ x = orig.args[0]
+ ones = ones_like(x)
+ return [grad * ones / sqrt((x * x) - ones)]
+
+
+@register_gradient("asin")
+def asin_grad(orig, grad):
+ """Returns [grad * 1/((1 - (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones - power(x, a))]
+
+
+@register_gradient("asinh")
+def asinh_grad(orig, grad):
+ """Returns [grad * 1/((1 + (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones + power(x, a))]
Review comment:
Same here: replace `power(x, a)` with `x * x` and remove `a = const(2.0)` in `asinh_grad`.
##########
File path: python/tvm/relay/op/_tensor_grad.py
##########
@@ -110,18 +110,66 @@ def sin_grad(orig, grad):
x = orig.args[0]
return [grad * cos(x)]
+
@register_gradient("sinh")
def sinh_grad(orig, grad):
"""Returns [grad * cosh(x)]"""
x = orig.args[0]
return [grad * cosh(x)]
+
+@register_gradient("acos")
+def acos_grad(orig, grad):
+ """Returns [grad * -1/((1 - (x ^ 2)) ^ 1/2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * (-ones / sqrt(ones - power(x, a)))]
+
+
+@register_gradient("acosh")
+def acosh_grad(orig, grad):
+ """Returns [grad * 1/((x - 1) ^ 1/2 * (x + 1) ^ 1/2)]"""
+ x = orig.args[0]
+ ones = ones_like(x)
+ return [grad * ones / sqrt((x * x) - ones)]
+
+
+@register_gradient("asin")
+def asin_grad(orig, grad):
+ """Returns [grad * 1/((1 - (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones - power(x, a))]
Review comment:
Same here: replace `power(x, a)` with `x * x` and remove `a = const(2.0)` in `asin_grad`.
##########
File path: python/tvm/relay/op/_tensor_grad.py
##########
@@ -110,18 +110,66 @@ def sin_grad(orig, grad):
x = orig.args[0]
return [grad * cos(x)]
+
@register_gradient("sinh")
def sinh_grad(orig, grad):
"""Returns [grad * cosh(x)]"""
x = orig.args[0]
return [grad * cosh(x)]
+
+@register_gradient("acos")
+def acos_grad(orig, grad):
+ """Returns [grad * -1/((1 - (x ^ 2)) ^ 1/2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * (-ones / sqrt(ones - power(x, a)))]
+
+
+@register_gradient("acosh")
+def acosh_grad(orig, grad):
+ """Returns [grad * 1/((x - 1) ^ 1/2 * (x + 1) ^ 1/2)]"""
+ x = orig.args[0]
+ ones = ones_like(x)
+ return [grad * ones / sqrt((x * x) - ones)]
+
+
+@register_gradient("asin")
+def asin_grad(orig, grad):
+ """Returns [grad * 1/((1 - (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones - power(x, a))]
+
+
+@register_gradient("asinh")
+def asinh_grad(orig, grad):
+ """Returns [grad * 1/((1 + (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones + power(x, a))]
+
+
@register_gradient("atan")
def atan_grad(orig, grad):
"""Returns [grad * 1 / (1 + x ^ 2)]"""
x = orig.args[0]
a = const(2.0)
- return [grad * ones_like(x) / (ones_like(x) + power(x, a))]
+ ones = ones_like(x)
+ return [grad * ones / (ones + power(x, a))]
Review comment:
Same here: replace `power(x, a)` with `x * x` and remove `a = const(2.0)` in `atan_grad`.
##########
File path: python/tvm/relay/op/_tensor_grad.py
##########
@@ -110,18 +110,66 @@ def sin_grad(orig, grad):
x = orig.args[0]
return [grad * cos(x)]
+
@register_gradient("sinh")
def sinh_grad(orig, grad):
"""Returns [grad * cosh(x)]"""
x = orig.args[0]
return [grad * cosh(x)]
+
+@register_gradient("acos")
+def acos_grad(orig, grad):
+ """Returns [grad * -1/((1 - (x ^ 2)) ^ 1/2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * (-ones / sqrt(ones - power(x, a)))]
+
+
+@register_gradient("acosh")
+def acosh_grad(orig, grad):
+ """Returns [grad * 1/((x - 1) ^ 1/2 * (x + 1) ^ 1/2)]"""
+ x = orig.args[0]
+ ones = ones_like(x)
+ return [grad * ones / sqrt((x * x) - ones)]
+
+
+@register_gradient("asin")
+def asin_grad(orig, grad):
+ """Returns [grad * 1/((1 - (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones - power(x, a))]
+
+
+@register_gradient("asinh")
+def asinh_grad(orig, grad):
+ """Returns [grad * 1/((1 + (x ^ 2)) ^ (1/2))]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / sqrt(ones + power(x, a))]
+
+
@register_gradient("atan")
def atan_grad(orig, grad):
"""Returns [grad * 1 / (1 + x ^ 2)]"""
x = orig.args[0]
a = const(2.0)
- return [grad * ones_like(x) / (ones_like(x) + power(x, a))]
+ ones = ones_like(x)
+ return [grad * ones / (ones + power(x, a))]
+
+
+@register_gradient("atanh")
+def atanh_grad(orig, grad):
+ """Returns [grad * 1 / (1 - x ^ 2)]"""
+ x = orig.args[0]
+ a = const(2.0)
+ ones = ones_like(x)
+ return [grad * ones / (ones - power(x, a))]
Review comment:
Same here: replace `power(x, a)` with `x * x` and remove `a = const(2.0)` in `atanh_grad`.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]