
1. First derivative of a function

import tensorflow as tf
import numpy as np
tf.enable_eager_execution()
tfe = tf.contrib.eager
from math import pi

def f(x):
  return tf.square(tf.sin(x))

assert f(pi/2).numpy() == 1.0
grad_f = tfe.gradients_function(f)  # returns a function that computes df/dx
print(grad_f(np.zeros(1))[0].numpy())  # f'(x) = sin(2x), so f'(0) = 0 -> [0.]
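
Since f(x) = sin^2(x), the analytic derivative is f'(x) = 2 sin(x) cos(x) = sin(2x), which is why the call above prints [0.]. A minimal sanity check of the computed gradient against that closed form (a sketch reusing f and grad_f; the sample points and tolerance are my own choices):

xs = tf.lin_space(0.0, pi, 5)
print(np.allclose(grad_f(xs)[0].numpy(), tf.sin(2 * xs).numpy(), atol=1e-6))  # True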

2. Higher-order derivatives

import matplotlib.pyplot as plt

def f(x):
  return tf.square(tf.sin(x))

def grad(f):
  # wrapping gradients_function makes grad(f) an ordinary function again,
  # so it composes: grad(grad(f)) is the second derivative, and so on
  return lambda x: tfe.gradients_function(f)(x)[0]

x = tf.lin_space(-2*pi, 2*pi, 100)
x_np = x.numpy()
plt.plot(x_np, f(x).numpy(), label="f")
plt.plot(x_np, grad(f)(x).numpy(), label="first derivative")
plt.plot(x_np, grad(grad(f))(x).numpy(), label="second derivative")
plt.plot(x_np, grad(grad(grad(f)))(x).numpy(), label="third derivative")
plt.legend()
plt.show()
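
Because grad returns an ordinary Python function, the compositions plotted above can be checked against the closed forms f'(x) = sin(2x), f''(x) = 2 cos(2x), f'''(x) = -4 sin(2x). A minimal sketch (the test points are an arbitrary choice of mine):

xt = tf.constant([0.5, 1.0, 1.5])
print(np.allclose(grad(grad(f))(xt).numpy(), 2 * np.cos(2 * xt.numpy()), atol=1e-5))         # True
print(np.allclose(grad(grad(grad(f)))(xt).numpy(), -4 * np.sin(2 * xt.numpy()), atol=1e-5))  # True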
 

def f(x, y):
  # computes x**y by repeated multiplication; y is used as a Python int
  output = 1
  for i in range(int(y)):
    output = tf.multiply(output, x)
  return output

def g(x, y):
  # gradient of f with respect to its first argument, x
  return tfe.gradients_function(f)(x, y)[0]

print(f(3.0, 2).numpy())  # f(x) = x^2   -> 9.0
print(g(3.0, 2).numpy())  # f'(x) = 2x   -> 6.0
print(f(4.0, 3).numpy())  # f(x) = x^3   -> 64.0
print(g(4.0, 3).numpy())  # f'(x) = 3x^2 -> 48.0
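
The same trick nests: differentiating g once more gives f''(x) = y(y-1)x^(y-2). A minimal sketch reusing f and g (the helper name g2 is my own):

def g2(x, y):
  # second derivative of f with respect to x
  return tfe.gradients_function(g)(x, y)[0]

print(g2(4.0, 3).numpy())  # f''(x) = 6x -> 24.0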

3. First-order partial derivatives

x = tf.ones((2, 2))
with tf.GradientTape(persistent=True) as t:
  t.watch(x)             # x is a constant, not a Variable, so watch it explicitly
  y = tf.reduce_sum(x)   # y = 4.0
  z = tf.multiply(y, y)  # z = y^2 = 16.0

# persistent=True lets the tape be queried more than once
dz_dy = t.gradient(z, y)
print(dz_dy.numpy())     # dz/dy = 2y = 8.0
dz_dx = t.gradient(z, x)
print(dz_dx.numpy())     # by the chain rule, every entry is dz/dy * dy/dx_ij = 8.0
for i in [0, 1]:
  for j in [0, 1]:
    print(dz_dx[i][j].numpy())
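
A persistent tape holds its resources until the reference is dropped, so it is worth deleting once the gradients are out. A minimal sketch of the cleanup, with a numeric check of the chain-rule values above:

assert dz_dy.numpy() == 8.0
assert np.all(dz_dx.numpy() == 8.0)
del t  # release the resources held by the persistent tape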

4. Second-order partial derivatives

x = tf.constant(2.0)
with tf.GradientTape() as t:
  t.watch(x)  # the outer tape must also watch x, or t.gradient returns None
  with tf.GradientTape() as t2:
    t2.watch(x)
    y = x * x * x
  dy_dx = t2.gradient(y, x)  # this gradient computation is recorded by the outer tape
d2y_dx2 = t.gradient(dy_dx, x)

print(dy_dx.numpy())    # dy/dx = 3x^2 = 12.0
print(d2y_dx2.numpy())  # d2y/dx2 = 6x = 12.0
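
Nesting one level deeper yields the third derivative, d3y/dx3 = 6. A minimal sketch following the same pattern:

x = tf.constant(2.0)
with tf.GradientTape() as t3:
  t3.watch(x)
  with tf.GradientTape() as t2:
    t2.watch(x)
    with tf.GradientTape() as t1:
      t1.watch(x)
      y = x * x * x
    dy_dx = t1.gradient(y, x)      # 3x^2 = 12.0
  d2y_dx2 = t2.gradient(dy_dx, x)  # 6x = 12.0
d3y_dx3 = t3.gradient(d2y_dx2, x)
print(d3y_dx3.numpy())  # 6.0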

That's all for these TensorFlow differentiation and gradient-computation examples. I hope they give you a useful reference, and thank you for your continued support.
