#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 18 15:32:51 2021

This is the "linear algebra" version of the linear neural net.
Simple classifier using the symbols T, G, F.

@author: doug
"""

import sys

import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix


def WidHoff2(X, T, NumEpochs):
    """Train a single-layer linear network with the Widrow-Hoff (LMS) rule.

    X : (inputs x points) data matrix, one pattern per column.
    T : (outputs x points) target matrix, one target per column.
    Returns the weights W, biases b, and the per-sample error history.
    """
    NumInputs, NumPts = np.shape(X)
    NumOutputs, NumTargets = np.shape(T)
    if NumPts != NumTargets:
        sys.exit("Dimensional mismatch")

    # Random initial weights and biases
    W = np.random.randn(NumOutputs, NumInputs)
    b = np.random.randn(NumOutputs, 1)
    err = np.zeros((NumEpochs, NumPts))

    for i in range(NumEpochs):
        for j in range(NumPts):
            x = X[:, j][:, np.newaxis]                 # current input as a column vector
            ThisOut = (W @ x) + b                      # network output for this input
            ThisErr = T[:, j][:, np.newaxis] - ThisOut # target minus output
            err[i, j] = np.linalg.norm(ThisErr)
            # Normalized LMS step: just under 1/||x||^2, inside the stable range
            alpha = 0.9999 / np.sum(x**2)
            W += alpha * (ThisErr @ x.T)
            b += alpha * ThisErr
    return W, b, err


# Define the data: 6 patterns of 16 pixels each (two examples per symbol)
X = np.array([[ 1, 1, 1, -1, -1,  1, -1, -1, -1,  1, -1, -1, -1,  1, -1, -1],
              [-1, 1, 1,  1, -1, -1,  1, -1, -1, -1,  1, -1, -1, -1,  1, -1],
              [ 1, 1, 1, -1,  1, -1, -1, -1,  1,  1,  1, -1,  1,  1,  1, -1],
              [-1, 1, 1,  1, -1,  1, -1, -1, -1,  1,  1,  1, -1,  1,  1,  1],
              [ 1, 1, 1, -1,  1,  1, -1, -1,  1, -1, -1, -1,  1, -1, -1, -1],
              [-1, 1, 1,  1, -1,  1,  1, -1, -1,  1, -1, -1, -1,  1, -1, -1]])
X = X.T   # X is 16 x 6: one pattern per column

# One-hot targets: columns 0-1 are class 0, columns 2-3 class 1, columns 4-5 class 2
T = np.array([[1, 1, 0, 0, 0, 0],
              [0, 0, 1, 1, 0, 0],
              [0, 0, 0, 0, 1, 1]])

NumPoints = 6
NumEpochs = 60
W, b, EpochErr = WidHoff2(X, T, NumEpochs)

# Output results
Z = W @ X + b

# Convert output vectors to integer class labels (index of the largest entry)
Zout = np.argmax(Z, axis=0)
Tout = np.argmax(T, axis=0)
C = confusion_matrix(Tout, Zout)   # rows = true classes, columns = predictions
print(C)

# Plot the total error per epoch
plt.plot(EpochErr.sum(axis=1))
plt.xlabel("Epoch")
plt.ylabel("Total error")
plt.show()
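
# ---------------------------------------------------------------------------
# Optional cross-check (a minimal sketch, not part of the original script):
# since this is the "linear algebra" view of the network, the LMS result can
# be compared with the closed-form least-squares solution obtained from the
# pseudoinverse of a bias-augmented data matrix. Xaug, Wls, and Zls are
# illustrative names introduced here.
Xaug = np.vstack([X, np.ones((1, X.shape[1]))])   # 17 x 6: append a row of ones for the bias
Wls = T @ np.linalg.pinv(Xaug)                    # 3 x 17 least-squares weight matrix
Zls = Wls @ Xaug                                  # closed-form network outputs
# If the six patterns are linearly independent, this should reproduce Tout exactly.
print(np.argmax(Zls, axis=0))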