Though very useful in simple cases, symbolic differentiation often leads to complex and redundant expressions. In addition, black-box routines (functions defined only by their code, with loops and branches) cannot be differentiated symbolically.
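To make the first point concrete, the short sketch below (our own illustration, not one of the original notebook cells; the nesting depth is chosen arbitrarily) composes a sigmoid with itself a few times and compares the operation count of the expression with that of its symbolic derivative.

# Illustration of expression swell in symbolic differentiation (assumed example).
from sympy import symbols, exp, diff, count_ops

x = symbols('x')
expr = x
for _ in range(3):                 # arbitrary nesting depth for illustration
    expr = 1 / (1 + exp(-expr))    # sigmoid composed with itself

print(count_ops(expr))             # size of the expression itself
print(count_ops(diff(expr, x)))    # its derivative is considerably larger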
In [2]:
from sympy import *

x = symbols('x')
diff(cos(x), x)
\(\displaystyle - \sin{\left(x \right)}\)
In [3]:
# a more complicated expression
def sigmoid(x):
    return 1 / (1 + exp(-x))

diff(sigmoid(x), x)
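The black-box limitation can also be made concrete. The following sketch (our addition, not an original cell) passes a routine containing an ordinary Python branch to SymPy; the call fails because a comparison against a symbolic variable has no definite truth value.

from sympy import symbols, diff

def leaky(z):
    # ordinary Python control flow: opaque to symbolic manipulation
    if z > 0:
        return z
    return 0.01 * z

x = symbols('x')
try:
    diff(leaky(x), x)
except TypeError as err:
    # evaluating `x > 0` on a symbolic x fails before diff ever runs
    print("symbolic differentiation failed:", err)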
Here we show the simplicity and efficiency of automatic differentiation with the autograd package, which provides a thin wrapper around NumPy.
In [6]:
import autograd.numpy as np
import matplotlib.pyplot as plt
from autograd import elementwise_grad as egrad  # for functions that vectorize over inputs

# We could use np.tanh, but let's write our own as an example.
def tanh(x):
    return (1.0 - np.exp(-x)) / (1.0 + np.exp(-x))

x = np.linspace(-7, 7, 200)
plt.plot(x, tanh(x),
         x, egrad(tanh)(x),                                      # first derivative
         x, egrad(egrad(tanh))(x),                               # second derivative
         x, egrad(egrad(egrad(tanh)))(x),                        # third derivative
         x, egrad(egrad(egrad(egrad(tanh))))(x),                 # fourth derivative
         x, egrad(egrad(egrad(egrad(egrad(tanh)))))(x),          # fifth derivative
         x, egrad(egrad(egrad(egrad(egrad(egrad(tanh))))))(x))   # sixth derivative
plt.axis('off')
plt.savefig("tanh.png")
plt.show()
In [7]:
from autograd import grad

grad_tanh = grad(tanh)                   # Obtain its gradient function
gA = grad_tanh(1.0)                      # Evaluate the gradient at x = 1.0
gN = (tanh(1.01) - tanh(0.99)) / 0.02    # Compare to finite differences
print(gA, gN)
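Because autograd traces the numerical execution of a program rather than manipulating expressions, it also handles routines with Python branches and loops that defeat the symbolic approach. A minimal sketch, assuming the same autograd imports as above (the functions leaky and iterate are our own illustrative choices):

import autograd.numpy as np
from autograd import grad

def leaky(z):
    # the same kind of Python branch that symbolic differentiation cannot handle
    if z > 0:
        return z
    return 0.01 * z

def iterate(z, n=5):
    # a loop makes the routine an effective black box for symbolic tools
    for _ in range(n):
        z = np.tanh(z) + 0.1 * leaky(z)
    return z

g = grad(iterate)                  # gradient with respect to the first argument
h = 1e-4
print(g(0.5), (iterate(0.5 + h) - iterate(0.5 - h)) / (2 * h))

The two printed numbers should agree to several decimal places, just as in the finite-difference check above.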