Function Samples
The samples below were run on CentOS 7: $ python3 xxxx.py

functions
#!/usr/bin/env python3
# functions
import numpy as np
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
def softmax(x):
    if x.ndim == 2:
        x = x.T
        x = x - np.max(x, axis=0)  # subtract per-column max for stability
        y = np.exp(x) / np.sum(np.exp(x), axis=0)
        return y.T
    x = x - np.max(x)  # guard against overflow
    return np.exp(x) / np.sum(np.exp(x))
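
A minimal usage check (values are illustrative, not from the original page): both code paths should return probabilities that sum to 1, even for large inputs.

# Example: softmax sanity check (illustrative values)
x1 = np.array([0.3, 2.9, 4.0])
print(softmax(x1))               # 1-D input: probabilities summing to 1
X2 = np.array([[1010.0, 1000.0, 990.0],
               [0.3, 2.9, 4.0]])
print(softmax(X2).sum(axis=1))   # 2-D input: [1. 1.], stable despite large values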
def numerical_gradient(f, x):
    h = 1e-4
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        idx = it.multi_index
        tmp_val = x[idx]  # save the original value
        x[idx] = tmp_val + h
        fxh1 = f(x)
        x[idx] = tmp_val - h
        fxh2 = f(x)
        grad[idx] = (fxh1 - fxh2) / (2 * h)
        x[idx] = tmp_val  # restore the value
        it.iternext()
    return grad
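
A quick sanity check with illustrative values: for f(x) = Σ x², the analytic gradient is 2x, so the result should be close to [6, 8].

# Example: numerical_gradient sanity check (illustrative values)
f = lambda w: np.sum(w ** 2)      # gradient of the sum of squares is 2x
x = np.array([3.0, 4.0])
print(numerical_gradient(f, x))   # approximately [6. 8.]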
def cross_entropy_error(y, t):
    if y.ndim == 1:
        t = t.reshape(1, t.size)
        y = y.reshape(1, y.size)
    # If the targets are one-hot vectors, convert them to class-label indices
    if t.size == y.size:
        t = t.argmax(axis=1)
    batch_size = y.shape[0]
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size
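
The function accepts both one-hot targets and class-label indices; a minimal check with illustrative values shows both forms give the same loss.

# Example: cross_entropy_error with both target formats (illustrative values)
y = np.array([0.1, 0.8, 0.1])                       # predicted probabilities
print(cross_entropy_error(y, np.array([0, 1, 0])))  # one-hot: -log(0.8) ~ 0.223
print(cross_entropy_error(y, np.array([1])))        # class index: same value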
def sigmoid_grad(x):  # used by backpropagation
    return (1.0 - sigmoid(x)) * sigmoid(x)
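
As a consistency check (illustrative), the analytic derivative can be compared against numerical_gradient applied to sigmoid at a point:

# Example: sigmoid_grad vs. numerical differentiation (illustrative values)
x = np.array([0.5])
print(sigmoid_grad(x))                                      # ~ [0.2350]
print(numerical_gradient(lambda v: np.sum(sigmoid(v)), x))  # ~ [0.2350]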
def identity_function(x):
    return x
def step_function(x):
    return np.array(x > 0, dtype=int)  # np.int was removed in NumPy 1.24
def relu(x):
    return np.maximum(0, x)
def relu_grad(x):
    grad = np.zeros_like(x)  # np.zeros(x) would fail for array input
    grad[x >= 0] = 1
    return grad
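
Note that relu_grad uses the x >= 0 mask, so the gradient at x == 0 is taken as 1, a common convention for the non-differentiable point. A minimal check with illustrative values:

# Example: relu and relu_grad (illustrative values)
x = np.array([-2.0, 0.0, 3.0])
print(relu(x))       # [0. 0. 3.]
print(relu_grad(x))  # [0. 1. 1.] -- gradient taken as 1 at x == 0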
def sum_squared_error(y, t):
    return 0.5 * np.sum((y - t) ** 2)
def softmax_loss(X, t):
    y = softmax(X)
    return cross_entropy_error(y, t)
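
Putting the pieces together, a tiny end-to-end check with illustrative scores and a one-hot target:

# Example: softmax_loss end to end (illustrative values)
X = np.array([1.0, 2.0, 3.0])   # raw scores (logits)
t = np.array([0, 0, 1])         # one-hot target for class 2
print(softmax_loss(X, t))       # -log(softmax(X)[2]) ~ 0.408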
All Rights Reserved. Copyright (C) ITCL