Tuesday, May 29, 2018

Recursive and iterative BFS, DFS, and inorder traversal of graphs and binary trees in Python

from collections import defaultdict
import queue

####### DFS RECURSIVE
class node:
    # simple binary tree node
    def __init__(self, value):
        self.data = value
        self.left = None
        self.right = None
class DFS:
    def __init__(self):
        self.graph = defaultdict(list)

    def dfs_call(self, visited, val):
        # visit val, then recurse into its unvisited neighbours
        if val is None:
            return
        visited[val] = True
        print(val)
        for i in self.graph[val]:
            if i is not None and not visited[i]:
                self.dfs_call(visited, i)

    def dfs_recursive(self, val):
        # visited is indexed by vertex, so vertices must be numbered 0..n-1
        visited = [False] * len(self.graph)
        self.dfs_call(visited, val)

###### DFS RECURSIVE

####### DFS ITERATIVE for binary tree
def dfs_iter(root):
    stack = [root]
    while stack:
        val = stack.pop()
        print(val.data)
        # push right before left so the left subtree is processed first
        if val.right is not None:
            stack.append(val.right)
        if val.left is not None:
            stack.append(val.left)

### DFS ITERATIVE FOR GRAPH
def dfs_iter_graph(g, n):
    # n is the number of vertices
    visited = [False] * n
    stack = [0]
    visited[0] = True
    while stack:
        val = stack.pop()
        print(val)
        for i in g.graph[val]:
            if i is not None and not visited[i]:
                stack.append(i)
                visited[i] = True


## INORDER RECURSIVE AND ITERATIVE ###########
def inorder(root):
    if root:
        inorder(root.left)
        print(root.data)
        inorder(root.right)



def inorder_iter(root):
    current = root
    stack = []
    done = False
    while not done:
        if current is not None:
            # descend left, saving the path on the stack
            stack.append(current)
            current = current.left
        elif stack:
            # visit the saved node, then move to its right subtree
            current = stack.pop()
            print(current.data)
            current = current.right
        else:
            done = True





###### BFS WITH QUEUE

def BFS(g, length):
    q = queue.Queue()
    visited = [False] * length
    q.put(0)
    visited[0] = True
    while not q.empty():
        val = q.get()
        print(val)
        for i in g.graph[val]:
            if i is not None and not visited[i]:
                q.put(i)
                visited[i] = True

root = node(5)
root.left = node(4)
root.right = node(10)
root.left.left = node(3)
root.right.right = node(15)
# inorder_iter(root)
# dfs_iter(root)

# build a small directed graph; None entries mark the children of leaf vertices
g = DFS()
g.graph[0].append(1)
g.graph[0].append(2)
g.graph[1].append(3)
g.graph[1].append(4)
g.graph[2].append(5)
g.graph[2].append(6)
g.graph[3].append(None)
g.graph[3].append(None)
g.graph[4].append(None)
g.graph[4].append(None)
g.graph[5].append(None)
g.graph[5].append(None)
g.graph[6].append(None)
g.graph[6].append(None)
# g.dfs_recursive(0)
n = len(g.graph)
print(n)

# dfs_iter_graph(g, n)
BFS(g, n)   # prints vertices in breadth-first order: 0 1 2 3 4 5 6

Saturday, May 19, 2018

Matlab Multilayer Perceptron (MLP) with the backpropagation algorithm to solve a 3-class classification problem.

Specification:

Number of layers: 2 (hidden + output) 
Number of input units: 13 (+ 1 for bias)
Number of hidden units: 8 (+1 for bias) 
Number of output units: 3 
Activation functions: sigmoid for hidden units; softmax for output units 
Initial weights: uniform random numbers between 0 and 1




Code:






Output:


The following steps are performed:
1: Forward pass to compute an, zn, bn and yn.
2: Compute the error signal at the output.
3: Pass the error backward to compute the error for the hidden layer.
4: Compute the gradients.
5: Update the weights.
*********************************************
Total number of instances = 178 
learning rate = 0.500000 
Total number of iterations = 5000 
cost before training = 1.971846
Total number of correctly classified instances before training = 71 
Start Training
Total number of correctly classified instances after training = 178 
Cost after Training = 0.020612 
Total time = 22.546547 sec
*********************************************

Matlab program to implement the gradient descent algorithm to fit linear regression parameters

Data: fisherIrisSetosaData

Implementation of linear regression to predict sepal length given sepal width.



This program finds the regression parameters w0 and w1. With these parameters, the output for a test instance can be obtained, as shown in the short example after the code.
Code:



clc
clear all

fisherIrisSetosaData = importdata("fisherIrisSetosaData.txt");

Y = fisherIrisSetosaData(1:end,1);%sepal length
X = fisherIrisSetosaData(1:end,2);%sepal Width

w0_old = 0;
w1_old = 0;
w0diff =1;
w1diff = 1;
alpha = 0.1;
count =0;
tic;
while (w0diff > 0.000001 || w1diff > 0.000001)
    count = count + 1;
    sum_w0 = 0;
    sum_w1 = 0;
    for i = 1:50
        sum_w0 = sum_w0 + ((w0_old + w1_old.*X(i,1)) - Y(i,1));
        sum_w1 = sum_w1 + ((w0_old + w1_old.*X(i,1)) - Y(i,1)).*X(i,1);
    end
    % vectorized equivalents:
    % sum_w0 = sum((w0_old+w1_old.*X)-Y);
    % sum_w1 = sum(((w0_old+w1_old.*X)-Y).*X);
    step_w0 = alpha/50 * sum_w0;   % learning-rate-scaled mean gradient for w0
    step_w1 = alpha/50 * sum_w1;   % learning-rate-scaled mean gradient for w1
    w0_new = w0_old - step_w0;
    w1_new = w1_old - step_w1;
    w0diff = abs(w0_old - w0_new);
    w1diff = abs(w1_old - w1_new);
    w0_old = w0_new;
    w1_old = w1_new;
end

time_descent = toc;


fprintf('Parameters w0 and w1 with alpha = %f:\n', alpha)
fprintf('w0 = %f\n', w0_old);
fprintf('w1 = %f\n', w1_old);

plot(X, Y, "g*");
hold on
calculated_y = w0_old + w1_old.*X;
plot(X, calculated_y, 'r')
legend("real data", "calculated line", 'Location', 'NorthWest');

xlabel("Sepal Width");
ylabel("Sepal Length");
title("Linear regression with gradient descent");
fprintf("Solving the linear function takes %f seconds.\n", time_descent);
fprintf("Total number of iterations = %d.\n", count);