赞
踩
- import numpy as np
- import mindspore.nn as nn
- from mindspore import Parameter, Tensor
-
class Net(nn.Cell):
    """Linear model f(x) = w * x + b with trainable scalar weight and bias."""

    def __init__(self):
        super(Net, self).__init__()
        # Wrap the initial values in float32 Tensors so the parameter dtype
        # matches the float32 input `x`. A raw np.array([6.0]) is float64,
        # which can trigger a dtype mismatch when multiplied with a float32
        # Tensor in graph mode.
        self.w = Parameter(Tensor(np.array([6.0], np.float32)), name='w')
        self.b = Parameter(Tensor(np.array([1.0], np.float32)), name='b')

    def construct(self, x):
        # Forward pass of the first-order polynomial.
        f = self.w * x + self.b
        return f
- from mindspore import dtype as mstype
- import mindspore.ops as ops
-
class GradNet(nn.Cell):
    """Wrap a network and compute the gradient of its output w.r.t. its input."""

    def __init__(self, net):
        super(GradNet, self).__init__()
        self.net = net
        # Default GradOperation: first-order derivative w.r.t. the first input.
        self.grad_op = ops.GradOperation()

    def construct(self, x):
        # Build the gradient function from the wrapped net, then evaluate it.
        return self.grad_op(self.net)(x)
# Evaluate df/dx at x = 100; for f = w * x + b the input gradient equals w.
x = Tensor([100], dtype=mstype.float32)
output = GradNet(Net())(x)

print(output)  # [6.]
- from mindspore import ParameterTuple
-
class GradNet(nn.Cell):
    """Wrap a network and compute gradients w.r.t. its trainable weights."""

    def __init__(self, net):
        super(GradNet, self).__init__()
        self.net = net
        # Collect the parameters to differentiate against.
        self.params = ParameterTuple(net.trainable_params())
        # get_by_list=True: take first-order derivatives w.r.t. the weights
        # in `self.params` instead of the network inputs.
        self.grad_op = ops.GradOperation(get_by_list=True)

    def construct(self, x):
        # NOTE(review): the original source was truncated mid-expression here;
        # restored from the standard MindSpore weight-gradient pattern, where
        # GradOperation(get_by_list=True) is called with (net, params).
        gradient_function = self.grad_op(self.net, self.params)
        return gradient_function(x)
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。