1. First derivative of a function
import tensorflow as tf
import numpy as np
from math import pi

tf.enable_eager_execution()
tfe = tf.contrib.eager

def f(x):
    return tf.square(tf.sin(x))

assert f(pi / 2).numpy() == 1.0  # sin(pi/2)^2 == 1.0

grad_f = tfe.gradients_function(f)  # returns a function that evaluates df/dx
print(grad_f(np.zeros(1))[0].numpy())  # f'(0) = sin(2*0) = 0 -> [0.]
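The result can be sanity-checked against the closed form: for f(x) = sin²(x), f'(x) = 2 sin(x) cos(x) = sin(2x). A minimal check reusing grad_f from above (the evaluation point pi/4 is an arbitrary choice):

# f'(pi/4) = sin(pi/2) = 1
print(grad_f(np.array(pi / 4))[0].numpy())  # ~1.0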
2. Higher-order derivatives
import matplotlib.pyplot as plt

def f(x):
    return tf.square(tf.sin(x))

def grad(f):
    # wrap gradients_function so derivatives can be composed, e.g. grad(grad(f))
    return lambda x: tfe.gradients_function(f)(x)[0]

x = tf.lin_space(-2 * pi, 2 * pi, 100)
# print(grad(f)(x).numpy())
x = x.numpy()

plt.plot(x, f(x).numpy(), label="f")
plt.plot(x, grad(f)(x).numpy(), label="first derivative")
plt.plot(x, grad(grad(f))(x).numpy(), label="second derivative")
plt.plot(x, grad(grad(grad(f)))(x).numpy(), label="third derivative")
plt.legend()
plt.show()
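The stacked grad calls can be spot-checked against the closed forms: f'(x) = sin(2x), f''(x) = 2 cos(2x), f'''(x) = -4 sin(2x). A minimal sketch reusing grad and f from above (the point pi/3 is arbitrary):

x0 = np.array(pi / 3)
print(grad(f)(x0).numpy())              # sin(2*pi/3)   ~  0.866
print(grad(grad(f))(x0).numpy())        # 2*cos(2*pi/3)  = -1.0
print(grad(grad(grad(f)))(x0).numpy())  # -4*sin(2*pi/3) ~ -3.464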
def f(x, y):
    # compute x ** y by repeated multiplication (y is a non-negative integer)
    output = 1
    for i in range(int(y)):
        output = tf.multiply(output, x)
    return output

def g(x, y):
    # gradient of f with respect to its first argument, x
    return tfe.gradients_function(f)(x, y)[0]

print(f(3.0, 2).numpy())  # f(x) = x^2 -> 9.0
print(g(3.0, 2).numpy())  # f'(x) = 2x -> 6.0
print(f(4.0, 3).numpy())  # f(x) = x^3 -> 64.0
print(g(4.0, 3).numpy())  # f'(x) = 3x^2 -> 48.0
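Because g is an ordinary eager function, gradients_function can be applied to it once more to obtain the second derivative, mirroring the grad(grad(f)) composition above. A sketch under that assumption (d2f is an illustrative name, not from the original):

def d2f(x, y):
    # differentiate g, which is itself a first derivative
    return tfe.gradients_function(g)(x, y)[0]

print(d2f(4.0, 3).numpy())  # f''(x) = 6x -> 24.0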
3. First-order partial derivatives
x = tf.ones((2, 2))

with tf.GradientTape(persistent=True) as t:
    t.watch(x)  # plain tensors must be watched explicitly
    y = tf.reduce_sum(x)   # y = 4
    z = tf.multiply(y, y)  # z = y^2 = 16

# persistent=True lets gradient() be called more than once on the same tape
dz_dy = t.gradient(z, y)
print(dz_dy.numpy())  # dz/dy = 2y = 8.0

dz_dx = t.gradient(z, x)
print(dz_dx.numpy())  # every entry equals dz/dy * dy/dx_ij = 8.0

for i in [0, 1]:
    for j in [0, 1]:
        print(dz_dx[i][j].numpy())  # 8.0

del t  # release the resources held by the persistent tape
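The watch(x) call is needed above only because x is a plain tensor; a tape tracks trainable variables automatically. A minimal sketch of the same computation with an eager variable (tfe.Variable is the TF 1.x eager-compatible variable class):

v = tfe.Variable(tf.ones((2, 2)))  # variables are watched automatically
with tf.GradientTape() as tape:
    z2 = tf.reduce_sum(v) * tf.reduce_sum(v)
print(tape.gradient(z2, v).numpy())  # again a 2x2 matrix of 8.0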
4. Second-order partial derivatives
x = tf.constant(2.0)

with tf.GradientTape() as t:
    t.watch(x)  # the outer tape must also watch x
    with tf.GradientTape() as t2:
        t2.watch(x)
        y = x * x * x
    # computed inside the outer tape's context, so the gradient
    # computation itself is recorded and can be differentiated again
    dy_dx = t2.gradient(y, x)
d2y_dx2 = t.gradient(dy_dx, x)

print(dy_dx.numpy())    # dy/dx = 3x^2 = 12.0
print(d2y_dx2.numpy())  # d2y/dx2 = 6x = 12.0
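The same nesting pattern extends to higher orders; a sketch of the third derivative (the names t1/t2/t3 are illustrative):

x = tf.constant(2.0)
with tf.GradientTape() as t1:
    t1.watch(x)
    with tf.GradientTape() as t2:
        t2.watch(x)
        with tf.GradientTape() as t3:
            t3.watch(x)
            y = x * x * x
        dy = t3.gradient(y, x)
    d2y = t2.gradient(dy, x)
d3y = t1.gradient(d2y, x)
print(d3y.numpy())  # d3y/dx3 of x^3 is the constant 6.0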