1, "dimension must be larger than 1") # print(np.max(x, axis = 1, keepdims = True)) # axis = 1..._np.max(x, axis=self.dim, keepdims=true)">
当前位置:   article > 正文

NumPy softmax function: caveats and implementation

import numpy as np

def softmax(x):
    """Softmax function."""
    # assert len(x.shape) > 1, "dimension must be larger than 1"
    # print(np.max(x, axis=1, keepdims=True))  # axis=1: reduce across each row
    x = x - np.max(x, axis=1, keepdims=True)  # subtract each row's maximum for numerical stability (avoids modifying the caller's array in place)
    print("After subtracting the row maximum:\n", x)
    x = np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)
    return x

x = np.random.randint(low=1, high=5, size=(2, 3))  # 2x3 matrix of random integers in [1, 5)
print("Original:\n", x)
x_ = softmax(x)
print("After softmax:\n", x_)


Mathematically, the softmax of a vector x is defined as

\mathrm{softmax}(x)_i = \frac{e^{x_i}}{\sum_j e^{x_j}}
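Subtracting the row maximum is safe because softmax is shift-invariant: adding the same constant to every component of a row cancels between numerator and denominator. With c = max_j x_j, as in the code above, every exponent x_i - c is at most 0, so np.exp cannot overflow:

\mathrm{softmax}(x - c)_i = \frac{e^{x_i - c}}{\sum_j e^{x_j - c}} = \frac{e^{-c}\, e^{x_i}}{e^{-c} \sum_j e^{x_j}} = \frac{e^{x_i}}{\sum_j e^{x_j}} = \mathrm{softmax}(x)_i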

Things to note:

  1. To compute the softmax probabilities stably, the maximum element of each row is subtracted first; this keeps np.exp from overflowing for large inputs.
  2. In NumPy, axis=1 reduces along each row (one result per row), while axis=0 reduces along each column (one result per column) (demonstrated in the sketch after this list).
  3. np.exp(x) applied to a matrix (or a higher-dimensional array) takes the exponential of every element individually.
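A minimal sketch illustrating points 2 and 3; the example array a and the allclose comparison are illustrative additions, not from the original post:

import numpy as np

a = np.array([[1, 4, 2],
              [3, 1, 1]])

# axis=1 reduces across the columns of each row -> one value per row
print(np.max(a, axis=1, keepdims=True))   # [[4]
                                          #  [3]]
# axis=0 reduces across the rows of each column -> one value per column
print(np.max(a, axis=0, keepdims=True))   # [[3 4 2]]

# np.exp works element-wise on arrays of any shape
print(np.exp(a))

# Subtracting the per-row maximum does not change the result,
# it only prevents overflow in np.exp for large inputs.
shifted = a - np.max(a, axis=1, keepdims=True)
stable = np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)
naive = np.exp(a) / np.sum(np.exp(a), axis=1, keepdims=True)
print(np.allclose(stable, naive))         # True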

The result is as follows:
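Since the input is random, the exact numbers differ from run to run; with the hypothetical input [[1, 4, 2], [3, 1, 1]], the script would print approximately:

Original:
 [[1 4 2]
 [3 1 1]]
After subtracting the row maximum:
 [[-3  0 -2]
 [ 0 -2 -2]]
After softmax:
 [[0.0420 0.8438 0.1142]
 [0.7870 0.1065 0.1065]]

Each row of the softmax output sums to 1.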
