import math
import random

InputN = 2		#number of neurons in the input layer
HN = 5			#number of neurons in the hidden layer
OutN = 2		#number of neurons in the output layer
datanum = 500	#number of training samples

result = ""
buffer = ""

# Declare and initialise weights, inputs and outputs.
# NOTE: the original code built these matrices as `[[x] * cols] * rows`,
# which replicates a REFERENCE to a single inner list — every row aliased
# the same storage, so writing w[0][j] silently changed every other row.
# The nested comprehensions below create one independent row per neuron.
x_out = [0.0] * InputN				#input layer activations
hn_out = [0.0] * HN					#hidden layer activations
y_out = [0.0] * OutN				#output layer activations
y = [0.0] * OutN					#expected (target) output layer
w = [[0.2] * InputN for _ in range(HN)]			#weights from input layer to hidden layer
v = [[0.2] * HN for _ in range(OutN)]			#weights from hidden layer to output layer
deltaw = [[0.0] * InputN for _ in range(HN)]	#momentum terms for w weights
deltav = [[0.0] * HN for _ in range(OutN)]		#momentum terms for v weights
hn_delta  = [0.0] * HN	 			#delta (error term) of hidden layer
y_delta = [0.0] * OutN				#delta (error term) of output layer
inp = [[0.0] * InputN for _ in range(datanum)]	#training inputs
teach = [[0.0] * OutN for _ in range(datanum)]	#training targets


error = 0.0			#accumulated squared error of one epoch
errlimit = 0.001	#training stops once the epoch error falls below this
alpha = 0.1			#momentum coefficient
beta = 0.1			#learning rate
loop = 0			#epoch counter
times = 50000		#maximum number of training epochs
i = 0 
j = 0 
m = 0
max = 0		# NOTE(review): shadows builtin max(); kept because later code reads these globals
min = 0		# NOTE(review): shadows builtin min(); kept for the same reason
sumtemp = 0.0
errtemp = 0.0

def sigmoid(x):
	"""Logistic activation: map any real x into the open interval (0, 1)."""
	return 1.0 / (1.0 + math.exp(-x))
	
	
def esegui(x_out, hn_out, y_out, w, v):
	"""Run one forward pass of the trained network.

	Reads InputN values from the console into x_out, propagates them
	through the hidden and output layers, and prints every output
	activation.  hn_out and y_out are updated in place.
	"""
	# Read the input vector.  float() instead of int(): the network is
	# trained on fractional values in [0, 1], so integer-only parsing
	# needlessly rejected them (float() still accepts plain integers,
	# so previously valid inputs keep working).
	for i in range(InputN):
		x_out[i] = float(input())
	# Input -> hidden layer.  (The original zeroed hn_out/y_out first,
	# but those values were immediately overwritten below, so the
	# clearing loops were dead code and have been removed.)
	for i in range(HN):
		sumtemp = 0.0
		for j in range(InputN):
			sumtemp += w[i][j] * x_out[j]
		hn_out[i] = sigmoid(sumtemp)		# sigmoid serves as the activation function
	# Hidden -> output layer.
	for i in range(OutN):
		sumtemp = 0.0
		for j in range(HN):
			sumtemp += v[i][j] * hn_out[j]
		y_out[i] = sigmoid(sumtemp)
	# NOTE(review): training min/max-normalises its inputs before the
	# forward pass; this inference path does not — confirm intent.
	# Print the resulting output activations.
	for i in range(OutN):
		print('y_out[{}]={}\n'.format(i, y_out[i]))
			
			
	
# Build the training set.  Each input component is a random multiple of
# 0.1 drawn from [0, 1]; target 0 is the sum of the two inputs and
# target 1 is always zero.  (Swap in your own data here if desired.)
for sample in range(datanum):
	for col in range(InputN):
		inp[sample][col] = random.randint(0, 10) / 10
	teach[sample][0] = inp[sample][0] + inp[sample][1]
	teach[sample][1] = 0.0

# Initialise both weight matrices with random values in [-1, 1]
# (steps of 0.2) and clear the momentum buffers.
for col in range(InputN):
	for row in range(HN):
		w[row][col] = (random.randint(0, 10) / 10) * 2 - 1
		deltaw[row][col] = 0.0
for col in range(HN):
	for row in range(OutN):
		v[row][col] = (random.randint(0, 10) / 10) * 2 - 1
		deltav[row][col] = 0.0

# Training: plain backpropagation with momentum, run for at most
# `times` epochs or until the epoch error drops below `errlimit`.
while loop < times:
	loop = loop + 1
	error = 0.0
	for m in range(datanum):
		# ---- Feedforward ----
		# Min/max normalisation of the current sample.  The range
		# updates must happen INSIDE the scan loop: the original if-
		# blocks sat after the for-loop at the wrong indentation level,
		# so only the LAST input component was ever tested.
		hi = 0.2
		lo = 0.1
		for i in range(InputN):
			x_out[i] = inp[m][i]
			if hi < x_out[i]:
				hi = x_out[i]
			if lo > x_out[i]:
				lo = x_out[i]
		for i in range(InputN):
			x_out[i] = (x_out[i] - lo) / (hi - lo)
		for i in range(OutN):
			y[i] = teach[m][i]	
		# Input -> hidden layer.
		for i in range(HN):
			sumtemp = 0.0
			for j in range(InputN):
				sumtemp += w[i][j] * x_out[j]
			hn_out[i] = sigmoid(sumtemp)		# sigmoid serves as the activation function
		# Hidden -> output layer.
		for i in range(OutN):
			sumtemp = 0.0
			for j in range(HN):
				sumtemp += v[i][j] * hn_out[j]
			y_out[i] = sigmoid(sumtemp)

		# ---- Backpropagation ----
		# y_out[i] is already sigmoid(net), so the activation derivative
		# is y_out[i] * (1 - y_out[i]); the original applied sigmoid a
		# second time.  The squared error must be accumulated for EVERY
		# output (the original summed only the last one).
		for i in range(OutN):
			errtemp = y[i] - y_out[i]
			y_delta[i] = -errtemp * y_out[i] * (1.0 - y_out[i])
			error += errtemp * errtemp
		# Hidden deltas: same sigmoid derivative h*(1-h).  The original
		# used (1+h)*(1-h), which is the tanh derivative, although the
		# hidden activation is a sigmoid.
		for i in range(HN):
			errtemp = 0.0
			for j in range(OutN):
				errtemp += y_delta[j] * v[j][i]
			hn_delta[i] = errtemp * hn_out[i] * (1.0 - hn_out[i])

		# ---- Stochastic gradient descent with momentum ----
		for i in range(OutN):
			for j in range(HN):
				deltav[i][j] = alpha * deltav[i][j] + beta * y_delta[i] * hn_out[j]
				v[i][j] -= deltav[i][j]
		for i in range(HN):
			for j in range(InputN):
				deltaw[i][j] = alpha * deltaw[i][j] + beta * hn_delta[i] * x_out[j]
				w[i][j] -= deltaw[i][j]

	# Global (epoch) error: half the summed squared error.
	error = error / 2
	if loop % 1000 == 0:
		# The original printed the empty `buffer` string and appended it
		# to `result`, so the log never contained the error value.
		result += "Global Error = {}\r\n".format(error)
		print("Global Error =", error)
	if error < errlimit:
		break

	print("The {} th training, error: {}\n". format(loop, error))

# Interactive command loop:
#   1 = run the network, 2 = show weights, 3 = save weights,
#   4 = load weights, 5 = quit.
while 1:
	execute = input('comando> ')
	# Non-numeric input used to crash int(); treat it as "no command".
	try:
		cmd = int(execute)
	except ValueError:
		continue
	# quit
	if cmd == 5:
		break
	# load weights
	if cmd == 4:
		# NOTE(review): `carica` and the w1/w2 variables are not defined
		# anywhere in this file — the original call raised NameError.
		print('carica() non disponibile')
	# save weights
	if cmd == 3:
		# NOTE(review): `salva_pesi` is not defined in this file either.
		print('salva_pesi() non disponibile')
	# show weights
	if cmd == 2:
		for k in range(HN):
			for i in range (InputN):
				print('w[{},{}]={}'.format(k, i, w[k][i]))
		for i in range(OutN):
			for k in range(HN):
				print('v[{},{}]={}'.format(i, k, v[i][k]))
	# run the network.  esegui() reads its own inputs from the console,
	# so the redundant x_out prompts it immediately overwrote are gone.
	if cmd == 1:
		esegui(x_out, hn_out, y_out, w, v)
			


