SOFT COMPUTING 1

0201CS081076, JABALPUR ENGINEERING COLLEGE



    PROGRAM NO.1

OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURAL NETWORK USING THE THRESHOLD FUNCTION.
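THEORY: The neuron forms the weighted sum net = x1*w1 + x2*w2 + x3*w3 + x4*w4 and passes it through a hard threshold: the output is 1 if net >= t and 0 otherwise. In the run recorded below, with the inputs fixed at (2, 1, 3, 2), threshold t = 50 and weights (12, 9, 6, 5), net = 24 + 9 + 18 + 10 = 61 >= 50, so the neuron fires.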

    PROGRAM:

#include <iostream.h>   /* Turbo C++ headers; the "#inlude" typo and missing header names are restored */
#include <conio.h>

void main()
{
    clrscr();
    int x1, x2, x3, x4, w1, w2, w3, w4, t, net;
    t = 50;                           /* threshold */
    x1 = 2; x2 = 1; x3 = 3; x4 = 2;   /* fixed inputs */
    cout << "enter weights:\n";
    cout << "enter value of w1:\n";
    cin >> w1;
    cout << "enter value of w2:\n";
    cin >> w2;
    cout << "enter value of w3:\n";
    cin >> w3;
    cout << "enter value of w4:\n";
    cin >> w4;
    net = x1*w1 + x2*w2 + x3*w3 + x4*w4;
    cout << "the value of net is= " << net << "\n";
    /* hard threshold; this tail is cut off in the transcript and is
       reconstructed to match the OUTPUT below */
    if (net >= t)
        cout << "value of threshold is :1";
    else
        cout << "value of threshold is :0";
    getch();
}


    OUTPUT

    enter weights:

    enter value of w1:

    12

    enter value of w2:

    9

    enter value of w3:

    6

    enter value of w4:

    5

    the value of net is= 61

    value of threshold is :1


    PROGRAM NO.2

OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURAL NETWORK USING THE SIGMOIDAL FUNCTION.
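THEORY: The sigmoidal (logistic) activation maps the weighted sum smoothly into the interval (0, 1): f(net) = 1 / (1 + e^(-net)). Unlike the hard threshold of Program No.1 it is differentiable everywhere, which is what gradient-based training such as the back propagation of Program No.6 relies on.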

    PROGRAM:

#include <iostream.h>
#include <conio.h>
#include <graphics.h>
#include <math.h>
#include <stdlib.h>

void main()
{
    clrscr();
    int gdriver = DETECT, gmode, errorcode;
    initgraph(&gdriver, &gmode, "");   /* "" = look for the BGI driver in the current directory */
    errorcode = graphresult();
    if (errorcode != grOk)
    {
        cout << "Graphics error: " << grapherrormsg(errorcode);
        getch();
        exit(1);
    }
    clearviewport();
    int xmax, ymax;
    xmax = getmaxx();
    ymax = getmaxy();
    line(0, 0, 0, ymax);        /* y-axis */
    line(xmax, ymax, 0, ymax);  /* x-axis */
    lineto(0, ymax);
    /* plot the sigmoid y = 1/(1 + e^(-x)); the loop body is cut off in the
       transcript after "for(i=1;i", so the scaling below is reconstructed
       on the pattern of Program No.3 */
    float y;
    for (int i = 1; i < xmax; i++)
    {
        y = 1.0 / (1.0 + exp(-(i - xmax/2) * (3.14) / 180));
        putpixel(i, ymax - (100 * y) - 10, 6);
    }
    getch();
    closegraph();
}


OUTPUT

[Graph of the sigmoidal activation function drawn by the program; the figure is not preserved in the transcript.]


    PROGRAM NO.3

OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURAL NETWORK USING THE HYPERBOLIC TANGENT FUNCTION.
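THEORY: The hyperbolic tangent activation f(net) = tanh(net) = (e^net - e^(-net)) / (e^net + e^(-net)) is the bipolar counterpart of the sigmoid: it is equally smooth, but its output lies in (-1, 1) and is centred on zero.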

    PROGRAM:

#include <iostream.h>
#include <conio.h>
#include <graphics.h>
#include <math.h>
#include <stdlib.h>

void main()
{
    clrscr();
    int i, j, n, x, axisx, axisy;
    int a[10], th;
    float w[10], net, out, y;
    char ch;
    cout << "enter no of input vectors\n";
    cin >> n;
    for (j = 0; j < n; j++)
    {
        cout << "enter value of input vectors (must be 0 or 1)\n";
        cin >> a[j];
        cout << "enter value of weight of input vectors (must be between 0 and 1)\n";
        cin >> w[j];
    }
    net = 0;
    for (j = 0; j < n; j++)           /* weighted sum of the inputs */
        net = net + a[j] * w[j];
    out = tanh(net);                  /* hyperbolic tangent activation */
    cout << "output value =" << out << "\n";
    cout << "press G to view the graph\n";
    cin >> ch;
    /* several lines are cut off in the transcript; the graph setup below is
       reconstructed around the surviving putpixel call */
    if (ch == 'G' || ch == 'g')
    {
        int gdriver = DETECT, gmode;
        initgraph(&gdriver, &gmode, "");
        axisx = getmaxx() / 2;
        axisy = getmaxy() / 2;
        for (x = -axisx; x < axisx; x++)
        {
            y = tanh(x * (3.14) / 180);
            putpixel((axisx + x), axisy - (100 * y) - 10, 6);
        }
    }
    getch();
}


    OUTPUT

enter no of input vectors

2

    enter value of input vectors (must be 0 or 1)

    1

enter value of weight of input vectors (must be between 0 and 1)

    .7

    enter value of input vectors (must be 0 or 1)

    0

enter value of weight of input vectors (must be between 0 and 1)

    .4

    output value =0.905148

    press G to view the graph


    PROGRAM NO.4

OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ADALINE NETWORK.
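THEORY: An ADALINE (ADAptive LINear Element) computes the linear combination net = sum of a[i]*w[i] over its inputs and thresholds the result; each elementary unit below outputs 1 when net >= th and 0 otherwise, as the recorded run shows.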

    PROGRAM:

#include <iostream.h>
#include <conio.h>

int adaline();   /* prototype (the transcript's "#int adaline();" is a typo) */

void main()
{
    int n, t[5], i;
    clrscr();
    cout << "enter no of elementary networks ";
    cin >> n;
    for (i = 0; i < n; i++)
        t[i] = adaline();
    getch();
}

int adaline()
{
    int i, n, a[10], th, x = 0;
    float w[10], net = 0;
    cout << "enter no of input vectors ";
    cin >> n;
    for (i = 0; i < n; i++)
    {
        cout << "enter value of input vectors ";
        cin >> a[i];
        cout << "enter value of weight of input vector (must be between 0 and 1) ";
        cin >> w[i];
        net = net + a[i] * w[i];   /* linear combiner */
    }
    cout << "enter threshold value:";
    cin >> th;
    if (net >= th)                 /* threshold unit; reconstructed to match the OUTPUT below */
        x = 1;
    cout << "output is:" << x << "\n";
    return x;
}


    OUTPUT

enter no of elementary networks 2
enter no of input vectors 2
enter value of input vectors 1
enter value of weight of input vector (must be between 0 and 1) .5
enter value of input vectors 1
enter value of weight of input vector (must be between 0 and 1) .8
enter threshold value:1
output is:1
enter no of input vectors 2
enter value of input vectors 1
enter value of weight of input vector (must be between 0 and 1) .7
enter value of input vectors 0
enter value of weight of input vector (must be between 0 and 1) .5
enter threshold value:1
output is:0


    PROGRAM NO.5

OBJECT: WRITE A PROGRAM TO IMPLEMENT A MADALINE NETWORK.
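THEORY: A MADALINE (Many ADALINEs) is a single layer of ADALINE units whose individual outputs are combined into one network output. The combining rule is cut off in the transcript below, so the listing assumes a logical AND of the unit outputs.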

    PROGRAM:

#include <iostream.h>
#include <conio.h>

int madaline();   /* one elementary ADALINE unit of the MADALINE */

void main()
{
    int n, s = 1, t[5], i;
    clrscr();
    cout << "enter no of elementary networks ";
    cin >> n;
    for (i = 0; i < n; i++)
    {
        t[i] = madaline();
        s = s && t[i];   /* combine the unit outputs; the combining rule is
                            cut off in the transcript, a logical AND is assumed */
    }
    cout << "madaline output is:" << s << "\n";
    getch();
}

int madaline()
{
    int i, n, a[10], th, x = 0;
    float w[10], net = 0;
    cout << "enter no of input vectors ";
    cin >> n;
    for (i = 0; i < n; i++)
    {
        cout << "enter value of input vectors ";
        cin >> a[i];
        cout << "enter value of weight of input vector (must be between 0 and 1) ";
        cin >> w[i];
        net = net + a[i] * w[i];
    }
    cout << "enter threshold value:";
    cin >> th;
    if (net >= th)
        x = 1;
    cout << "output is:" << x << "\n";
    return x;
}


    PROGRAM NO.6

OBJECT: WRITE A PROGRAM TO TRAIN A NETWORK USING THE BACK PROPAGATION ALGORITHM.
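THEORY: Back propagation trains the network by propagating the output error backwards. For a sigmoid output unit the listing computes delta2 = (desired - actual) * actual * (1 - actual); each hidden unit then receives delta1 = out * (1 - out) * sum over the output units of delta2 * weight, and every weight is adjusted by the learning constant times its delta times its input.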

    PROGRAM:

    function [layer1,layer2] = nn_train(data, output)

    % data(N,:) is training data set N

    % output(N,:) is the corresponding output

    input_count=2;

layer1_count=2;
output_count=2;

    layer1=rand(2,2);

    layer2= rand(2,2);

    learn = 0.1;

    [n,m] = size(data);

    for data_set=1:n

% nn_eval (the forward pass) is a separate function not shown in this record
[out,inputs1,inputs2,outs1,outs2] = nn_eval(transpose(data(data_set,:)), layer1, layer2, 1.0);

    % compute deltas for each layer

    delta2 = zeros(output_count);

    delta1 = zeros(layer1_count);

    for i=1:output_count

    % delta2 is (desired - actual) * actual * (1 - actual)

    delta2(i) = (output(data_set,i) - out(i,1)) * out(i,1) * (1 - out(i,1));

    end


    for i=1:layer1_count

    d_sum = 0;

% Sum up the previous deltas*inputs
for j=1:output_count

    d_sum = d_sum - delta2(j)*inputs2(i,j);

    end

    % delta1 is output * (1 - output) * sum above

    delta1(i) = outs2(i,1) * (1 - outs2(i,1)) * d_sum;

    end

    % second layer weights

    [p,q] = size(inputs2);

    for k=1:output_count

    for j=1:p

    % Adjust the weights by -learning constant * delta * input

    layer2(j,k) = layer2(j,k) + -learn * delta2(k) * inputs2(j,k);

    end

    end

    % first layer weights

    [p,q] = size(inputs1);

    for k=1:q

    for j=1:p

% Adjust the weights by -learning constant * output * (1 - output) * delta * input
layer1(j,k) = layer1(j,k) + -learn * outs2(k,1) * (1 - outs2(k,1)) * delta1(k) * inputs1(j,k);

    end

    end

    end


    OUTPUT

    data =

1.0000 1.0000

0.9000 0.9000

    1.4000 1.4000

    1.2000 1.2000

    1.0000 1.0000

    1.0000 1.0000

    1.0000 1.0000

    1.0000 1.0000

    output =

    0.1000 0.1000

    0.0900 0.0900

    0.1400 0.1400

    0.1200 0.1200

    0.1000 0.1000

    0.1000 0.1000

    0.1000 0.1000

    0.1000 0.1000

    ans =

    0.5248

    0.5714


    PROGRAM NO.7

OBJECT: WRITE A PROGRAM TO TRAIN A COUNTER PROPAGATION NETWORK.
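THEORY: In the counter propagation (Kohonen) step below, the winning unit J is the column whose weights lie closest to the training pair, D(j) = sum_i (x(i) - v(i,j))^2 + sum_k (y(k) - w(k,j))^2, and only the winner's weights move a fraction alpha toward the inputs: v(:,J) = v(:,J) + alpha*(x' - v(:,J)), and likewise for w.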

    PROGRAM:

    clear;

    %set initial weights

    v=[0.6 0.2;0.6 0.2;0.2 0.6; 0.2 0.6];

    w=[0.4 0.3;0.4 0.3];

    x=[0 1 1 0];

    y=[1 0];

    alpha=0.3;

    for j=1:2

    D(j)=0;

    for i=1:4

    D(j)=D(j)+(x(i)-v(i,j))^2;

    end

    for k=1:2

    D(j)=D(j)+(y(k)-w(k,j))^2;

    end

    end

    for j=1:2

    if D(j)==min(D)

    J=j;


    end

    end

disp('After one step the weight matrices are');

    v(:,J)=v(:,J)+alpha*(x'-v(:,J))

    w(:,J)=w(:,J)+alpha*(y'-w(:,J))


    OUTPUT

After one step the weight matrices are v =

    0.4200 0.2000

    0.7200 0.2000

    0.4400 0.6000

    0.1400 0.6000

    w =

    0.5800 0.3000

    0.2800 0.3000


    PROGRAM NO.8

OBJECT: WRITE A PROGRAM TO IMPLEMENT THE ART 1 ALGORITHM.
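THEORY: ART 1 clusters binary vectors under a vigilance test. The winning unit J maximises y = x*b; the input is then masked by that unit's top-down weights, x = s.*t(J,:), and the match is accepted only if nx/ns >= vp (here vp = 0.4). On acceptance the bottom-up weights become b(:,J) = L*x/(L - 1 + nx); otherwise the unit is suppressed (y(J) = -1) and the search continues.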

    PROGRAM:

    clc;

    clear;

    b=[0.57 0.0 0.3;0.0 0.0 0.3;0.0 0.57 0.3;0.0 0.47 0.3];

    t=[1 1 0 0;1 0 0 1;1 1 1 1];

    vp=0.4;

    L=2;

    x=[1 0 1 1];

    s=x;

    ns=sum(s);

    y=x*b;

    con=1;

    while con

    for i=1:3

    if y(i)==max(y)

    J=i;

    end

    end

    x=s.*t(J,:);

    nx=sum(x);


    if nx/ns >= vp

    b(:,J)=L*x(:)/(L-1+nx);

    t(J,:)=x(1,:);

    con=0;

    else

    y(J)=-1;

    con=1;

    end

    if y+1==0

    con=0;

    end

    end

disp('Top-down Weights');

    disp(t);

disp('Bottom-up Weights');

    disp(b);


    OUTPUT

    Top-down Weights

    1 1 0 0

    1 0 0 1

    1 1 1 1

    Bottom-up Weights

    0.5700 0.6667 0.3000

    0 0 0.3000

    0 0 0.3000

    0 0.6667 0.3000


    PROGRAM NO.9

OBJECT: WRITE A PROGRAM FOR THE DELTA LEARNING ALGORITHM.
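THEORY: The delta (LMS) rule performs gradient descent on the squared error E = (1/2) * sum((y - t).^2), adjusting every weight by w = w - eta * dE/dw. The listing below applies it to a one-hidden-layer tanh network on a housing data set (13 inputs, 1 target), after standardising the inputs and targets to zero mean and unit variance.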

    PROGRAM:

    % - - - - - - - - - - - - - - - - - - - - -

    % Main Program

    % - - - - - - - - - - - - - - - - - - - - -

    % Load data

    load housing.txt

    X = housing(:,1:13);

    t = housing(:,14);

    % Scale to zero mean, unit variance and introduce bias on input.

    xmean = mean(X);

xstd = std(X);
X = (X-ones(size(X,1),1)*xmean)./(ones(size(X,1),1)*xstd);
X = [ones(size(X,1),1) X];

    tmean = mean(t);

    tstd = std(t);

    t = (t-tmean)/tstd;

% Iterate over a number of hidden nodes
maxHidden = 2;

    for numHidden=1:maxHidden

    % Initialise random weight vector.

% Wh are hidden weights, wo are output weights.
randn('seed', 123456);

    Wh = 0.1*randn(size(X,2),numHidden);


    wo = 0.1*randn(numHidden+1,1);

    numPatterns = size(X,1);

eta = 0.05/numPatterns;
numEpochs = 2000;   % never defined in the transcript; 2000 is assumed from the plot's 0:500:2000 epoch axis

    for i=1:numEpochs

% Calculate outputs, errors, and gradients.
phi = [ones(size(X,1),1) tanh(X*Wh)];

    y = phi*wo;

    err = y-t;

go = phi'*err;
Gh = X'*((1-phi(:,2:numHidden+1).^2).*(err*wo(2:numHidden+1)'));

    % Perform gradient descent.

wo = wo - eta*go;
Wh = Wh - eta*Gh;

% Update performance statistics.
mse(i) = var(err);

    end

plot(1:numEpochs, mse, '-')
hold on
end
fsize = 15;
set(gca,'xtick',[0:500:2000],'FontSize',fsize)
set(gca,'ytick',[0:0.5:1],'FontSize',fsize)
xlabel('Number of Epochs','FontSize',fsize)
ylabel('Mean Squared Error','FontSize',fsize)
hold off

    % - - - - - - - - - - - - - - - - - - - - -

    % End of Main

    % - - - - - - - - - - - - - - - - - - - - -


OUTPUT

[Plot of mean squared error against number of epochs for 1 and 2 hidden nodes; the figure is not preserved in the transcript.]


    PROGRAM NO.10

OBJECT: WRITE A PROGRAM FOR THE HEBBIAN LEARNING ALGORITHM.
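THEORY: Hebbian learning strengthens a weight whenever its input and the neuron's output are active together: for each training pattern the update is w(new) = w(old) + x*y, with no error term involved.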

    PROGRAM:

#include <iostream>
using namespace std;

int main()
{
    int x[4][3], w[3] = {0, 0, 0}, y[4][1], i, j;
    /* the transcript is cut off after "for(i=0;i"; the loops below are a
       reconstruction of the standard Hebb procedure for these declarations */
    for (i = 0; i < 4; i++) {                  /* read 4 patterns of 3 inputs, then the target */
        for (j = 0; j < 3; j++) cin >> x[i][j];
        cin >> y[i][0];
    }
    for (i = 0; i < 4; i++)                    /* Hebb rule: w = w + x*y, once per pattern */
        for (j = 0; j < 3; j++) w[j] += x[i][j] * y[i][0];
    for (j = 0; j < 3; j++) cout << "w[" << j << "] = " << w[j] << "\n";
    return 0;
}