import numpy as np
def init_network():
    # Weights and biases for a 3-layer network: 2 inputs -> 3 -> 2 -> 2 outputs
    network = {}
    network['w1'] = np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6]])
    network['w2'] = np.array([[0.1, 0.4], [0.2, 0.5], [0.3, 0.6]])
    network['w3'] = np.array([[0.1, 0.3], [0.2, 0.4]])
    network['b1'] = np.array([0.1, 0.2, 0.3])
    network['b2'] = np.array([0.1, 0.2])
    network['b3'] = np.array([0.1, 0.2])
    return network
def sigmoid(x):
    # Sigmoid activation: squashes inputs into the range (0, 1)
    return 1 / (1 + np.exp(-x))
def softmax(a):
    # Subtract the maximum before exponentiating for numerical stability;
    # the shift cancels out, so the result is unchanged
    c = np.max(a)
    exp_a = np.exp(a - c)
    sum_exp = np.sum(exp_a)
    y = exp_a / sum_exp
    return y
def forward(network, x):
    # Forward pass through all three layers
    w1, w2, w3 = network['w1'], network['w2'], network['w3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']
    a1 = np.dot(x, w1) + b1
    z1 = sigmoid(a1)
    a2 = np.dot(z1, w2) + b2
    z2 = sigmoid(a2)
    a3 = np.dot(z2, w3) + b3
    y = sigmoid(a3)  # sigmoid is also applied at the output layer here
    return y
network = init_network()
x = np.array([1.0, 0.5])
y = forward(network, x)
print(y)           # raw outputs of the final layer
print(softmax(y))  # outputs normalized into a probability distribution
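
# Illustrative sanity check (not part of the original script): softmax should
# produce a probability distribution, i.e. non-negative entries summing to 1.
# `probs` is just a local name used for this check.
probs = softmax(y)
assert np.all(probs >= 0) and np.isclose(probs.sum(), 1.0)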