#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 18 15:32:51 2021

This is the "linear algebra" version of the linear neural net.
Simple classifier using symbols TGF.

Trains a single-layer linear predictor (Widrow-Hoff / LMS rule) on lagged
samples of a piecewise-sinusoidal signal and plots the one-step-ahead
prediction against the original signal.

@author: doug
"""
import numpy as np
from sklearn.metrics import confusion_matrix  # NOTE(review): unused in this file
import matplotlib.pyplot as plt


def WidHoff2(X, T, NumEpochs, alpha=0.153):
    """Train a linear layer y = W @ x + b with the Widrow-Hoff (LMS) rule.

    Parameters
    ----------
    X : ndarray, shape (n_features, n_points)
        Input patterns, one per column.
    T : ndarray, shape (n_outputs, n_points)
        Target outputs, one per column.
    NumEpochs : int
        Number of full passes over the data.
    alpha : float, optional
        Learning rate. Default 0.153 (hand-tuned for this example; the
        theoretical bound 0.9999 / sum(x**2) was too large here).

    Returns
    -------
    W : ndarray, shape (n_outputs, n_features)
        Learned weight matrix.
    b : ndarray, shape (n_outputs, 1)
        Learned bias column vector.
    err : ndarray, shape (NumEpochs, n_points)
        Euclidean norm of the per-sample error, recorded during training.

    Raises
    ------
    ValueError
        If X and T do not have the same number of columns.
    """
    n_features, NumPts = np.shape(X)
    n_outputs, n_targets = np.shape(T)
    if NumPts != n_targets:
        # BUG FIX: the original called sys.exit("Dimensional mismatch")
        # without importing sys, which raised NameError instead of
        # reporting the real problem. Raise a proper exception.
        raise ValueError("Dimensional mismatch")
    # Random initialization of weights and bias.
    W = np.random.randn(n_outputs, n_features)
    b = np.random.randn(n_outputs, 1)
    err = np.zeros((NumEpochs, NumPts))
    for i in range(NumEpochs):
        for j in range(NumPts):
            x = X[:, j:j + 1]           # current input as a column vector
            this_err = T[:, j:j + 1] - (W @ x + b)
            err[i, j] = np.linalg.norm(this_err)
            # LMS / delta-rule update.
            W += alpha * (this_err @ x.T)
            b += alpha * this_err
    return W, b, err


def MakeData():
    """Build the demo signal: sin(4*pi*t) for t < 4, sin(8*pi*t) after.

    Returns
    -------
    signal : ndarray, shape (1, n)
        Piecewise sinusoid sampled every 0.025 s.
    time : ndarray, shape (1, n)
        Matching sample times on [0, 6).
    """
    time1 = np.arange(0, 4, 0.025)
    time2 = np.arange(4.025, 6, 0.025)
    time = np.concatenate((time1, time2), axis=None)
    kk = len(time)
    time = time.reshape(1, kk)
    signal1 = np.sin(4 * np.pi * time1)
    signal2 = np.sin(8 * np.pi * time2)
    signal = np.concatenate((signal1, signal2), axis=None)
    signal = signal.reshape(1, kk)
    return signal, time


def lagX(S, k):
    """Turn a (1, p) series into k-lag inputs and one-step-ahead targets.

    Parameters
    ----------
    S : ndarray, shape (1, p)
        The time series as a row vector.
    k : int
        Number of lagged samples per input column.

    Returns
    -------
    X : ndarray, shape (k, p - k)
        Column j holds S[0, j : j + k] (the k samples preceding the target).
    T : ndarray, shape (1, p - k)
        T[0, j] = S[0, j + k], i.e. the next sample after each window.
    """
    p1, p = S.shape
    X = np.zeros((k, p - k))
    for j in range(k):
        # Row j is the series shifted left by j samples.
        X[j, :] = S[0, j:p - (k - j)]
    T = S[0, k:p].reshape(1, p - k)
    return X, T


# Main program below:
if __name__ == "__main__":
    signal, time = MakeData()
    lag = 5
    X, T = lagX(signal, lag)
    W, b, EpochErr = WidHoff2(X, T, 1)
    yout = W @ X + b

    # BUG FIX: the point count was hard-coded as 234 (valid only for this
    # exact sampling grid and lag); compute it from the data instead.
    n_pts = X.shape[1]

    # Per-sample training error over the single epoch.
    plt.plot(EpochErr.reshape(n_pts, 1))
    plt.show()

    # Predicted signal vs. the original.
    lagged_time = time[0, lag:].reshape(n_pts, 1)
    plt.figure()  # BUG FIX: original had `plt.figure` (no call parens, a no-op)
    plt.scatter(time, signal, color='k')
    plt.plot(lagged_time, yout.reshape(n_pts, 1), color='r')
    plt.show()