
  float Target;
  float TotalRes;   /* total probability */
  int   Flag;       /* Flag = 1 if the vector was classified with error, 0 otherwise */
  float *result;    /* result of testing the vector on the current iteration */
  int   *TmpFlag;   /* analog of 'Flag' for the current iteration */
  int   *NumIter;   /* number of the learning iteration at which the learning cycle stopped */
  int   **NumLE;    /* error vectors found when testing after a learning cycle */
} STAT;

/* structure holding the results of one learning cycle */
typedef struct ResLearning {
  int NumIter;
  int LearnError[NMAXPAT+1];  /* A[0] - error count,
                                 A[1] - ID1, A[2] - ID2, ...,
                                 A[NMAXRL] - ID? */
} RL;

/* function prototypes */
void OnlyTestVector(void);
void TestAfterLearn(void);
void CheckOneVector(void);
void CrossValidation(void);

DEF  **defbuild(char *filename);
DEF  *defread(FILE *fp);
FILE *defopen(char *filename);
char *defvalue(DEF **deflist, const char *name);
int  defclose(FILE *fp);
void defdestroy(DEF **, int);
void getvalues(void);

void Debug(char *fmt, ...);
void Report(char *fmt, ...);

void Widrow_Init(void);
int  Init_W(void);
float RavnRaspr(float A, float B);
float NormRaspr(float B, float A);
void ShufflePat(int *INP, int Koll_El);

float F_Act(float x);
float Forward(PAT src);
int  LearnFunc(void);
int  Reset(float ResErr, int Cnt, int N_Err);
void Update_Last(int n, float Total_Out);
void Update_Prom1(int n);
void Prom_to_W(void);
void Update_All_W(int num, float err_cur);
void Init_PromW(void);
void Prom_to_OLD(void);
int  CheckVector(float Res, PAT src);
int  *TestLearn(int *src);
RL   FurtherLearning(int NumIteration, float StartLearnTolerans,
                     float EndLearnTolerans, RL src);

STAT *definestat(PAT src);
STAT **DefineAllStat(PAT *src, int Num);
void FillStatForm(STAT *st, int iteration, float res, RL lr);
void FillSimpleStatForm(STAT *st, float res);
void destroystat(STAT *st, int param);
void DestroyAllStat(STAT **st, int Num);
void PrintStatHeader(void);
void printstat(STAT *st);
void PrintStatLearn(RL src);
void PrintTestStat(STAT **st, int len);
void PrintErrorStat(STAT **st, int Len);

int  DefineNetStructure(char *ptr);
void getStructure(char buf[20]);
PAT  patcpy(PAT dest, PAT src);
PAT  *LocPatMemory(int num);
void ReadPattern(PAT *input, char *name, int Len);
void FreePatMemory(PAT *src, int num);
void ShowPattern(char *fname, PAT *src, int len);
void ShowVector(char *fname, PAT src);
float getPatTarget(float res);
PAT  *DataOrder(PAT *src, int Len, int Ubit, PAT *dest, PAT *test);
void FindMinMax(PAT *src, int Dimens, int Num_elem, float **Out_Array);
void ConvX_AB_01(PAT src);
int  *DefineCN(int len);
int  getPosition(int Num, int *src, int Len);
void DestroyCN(int *src);
void ShowCurN(int LEN);
float **LocateMemAMM(void);
void FreeAMM(float **src);
void WriteHeaderNet(char *fname, float **src);
void WriteNet(char *fname, int It);
void ReadHeaderNet(char *fname, float **src);
int  ReadNet(char *fname, int It);
FILE *OpenFile(char *name);
int  CloseFile(FILE *fp);

/* End of common file */

6. The file for automatic compilation of the program under Unix - "Makefile".

CC= cc
LIBS= -lm
OBJ= nvclass.o

nvclass: $(OBJ)
	$(CC) -o nvclass $(OBJ) $(LIBS)

nvclass.o: nvclass.c
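Assuming a Unix system with cc and make available, the program is built by running "make nvclass" (or simply "make") in the directory containing nvclass.c, common.h and this Makefile, and then started as "./nvclass"; the recipe line must be indented with a tab, and the math library is listed after the object file so that single-pass linkers resolve calls such as fabs(). The program takes its settings from the input file read by getvalues(), not from command-line arguments.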

7. The main module - "nvclass.c"

/*
 * Neuron Classificator ver 1.0
 */
#include "common.h"

/* =========================
 * MAIN MODULE
 * =========================
 */
void main (int argc, char *argv[])
{ int i;
  char buf[MAXLINE], PrName[20], *ptr;
  time_t tim;

  time(&tim);
  /* UNIX module */
  Dfp = OpenFile(DebugFile);
  strcpy(buf, argv[0]);
  ptr = strrchr(buf, '/');
  ptr++;
  strcpy(PrName, ptr);
  Debug("\n\n'%s' - Started %s", PrName, ctime(&tim));
  getvalues();
  Rfp = OpenFile(ReportFile);
  DefineNetStructure(NetStr);      /* NetStr string from the input file */
  getStructure(buf);
  Debug("\nNeural net %s", buf);
  Input = LocPatMemory(NPATTERN);
  Work  = LocPatMemory(NPATTERN);
  Array_MinMax = LocateMemAMM();
  Cur_Number = DefineCN(NPATTERN);
  printf("\nMetka - 1");
  if (Type == TYPE_ONE)
    OnlyTestVector();
  if (Type == TYPE_TWO)
    TestAfterLearn();
  if (Type == TYPE_THREE)
    CheckOneVector();
  if (Type == TYPE_FOUR)
    CrossValidation();
  time(&tim);
  Debug("\n\n%s - Normal Stopped %s", PrName, ctime(&tim));
  CloseFile(Dfp);
  CloseFile(Rfp);
  FreeAMM(Array_MinMax);
  DestroyCN(Cur_Number);
  FreePatMemory(Input, NPATTERN);
  FreePatMemory(Work, NPATTERN);
}

/*
 * ^OnlyTestVector - read the net from (NetworkFile) and test the TestVector(s)
 */
void OnlyTestVector(void)
{ char buf[MAXLINE+1];
  STAT **st, *stat;
  int i, j;
  float Res;

  Debug("\nOnlyTestVector proc start");
  Debug("\n NPATTERN = %d", NPATTERN);
  Debug("\n NTEST = %d", NTEST);
  Test = LocPatMemory(NTEST);
  ReadPattern(Test, TestVector, NTEST);
  /* ShowPattern("1.tst", Test, NTEST); */
  PrintStatHeader();
  st = DefineAllStat(Test, NTEST);
  ReadHeaderNet(NetworkFile, Array_MinMax);
  if (Scaling == Yes)
  { for (i = 0; i < NTEST; i++)
      ConvX_AB_01(Test[i]);
  }
  for (i = 0; i < Loop; i++)
  { Debug("\n----/ STEP = %d /-----", i+1);
    Report("\n < Loop %d > ", i+1);
    ReadNet(NetworkFile, i+1);
    for (j = 0; j < NTEST; j++)
    { Res = Forward(Test[j]);
      CheckVector(Res, Test[j]);
      FillSimpleStatForm(st[j], Res);
    }
    PrintTestStat(st, NTEST);
  }
  DestroyAllStat(st, 1);
  FreePatMemory(Test, NTEST);
}

/* ---------------------------------
 * Debug to LOG_FILE and to CONSOLE
 */

/* debug for UNIX */
void Debug (char *fmt, ...)
{ va_list argptr;
  int cnt = 0;

  if ((Dfp != NULL) && (DEBUG == Yes))
  {
    va_start(argptr, fmt);
    vfprintf(Dfp, fmt, argptr);
    fflush(Dfp);
    va_end(argptr);
  }
}

void Report (char *fmt, ...)
{ va_list argptr;
  int cnt = 0;

  if (Rfp != NULL)
  {
    va_start(argptr, fmt);
    vprintf(fmt, argptr);
    va_end(argptr);
    va_start(argptr, fmt);      /* re-initialise the va_list before reusing it */
    vfprintf(Rfp, fmt, argptr);
    fflush(Rfp);
    va_end(argptr);
  }
}

/* debug for DOS */
/*
void Debug (char *fmt, ...)
{ FILE *file;
  va_list argptr;

  if (DEBUG == Yes)
  { if ((file = fopen(DebugFile, "a+")) == NULL)
    { fprintf(stderr, "\nCannot open DEBUG file.\n");
      exit(1);
    }
    va_start(argptr, fmt);
    vfprintf(file, fmt, argptr);
    va_end(argptr);
    fclose(file);
  }
}

void Report (char *fmt, ...)
{ FILE *file;
  va_list argptr;

  if ((file = fopen(ReportFile, "a+")) == NULL)
  { fprintf(stderr, "Cannot open REPORT file.\n");
    exit(1);
  }
  va_start(argptr, fmt);
  vfprintf(file, fmt, argptr);
  vprintf(fmt, argptr);
  va_end(argptr);
  fclose(file);
}
*/

/*
 * ^ReadPattern
 */
void ReadPattern (PAT *input, char *name, int Len)
{ int i = 0, j = 0, id, TmpNp = 0, TmpNd = 0, Flag = 0;
  char *buf1 = "NumOfPattern:";
  char *buf2 = "PatternDimens:";
  char str[40], str1[10];
  PAT Ptr;
  FILE *DataFile;
  float tmp;

  Debug("\nReadPattern(%s,%d) - started", name, Len);
  Ptr.A = (float*) malloc(NDATA * sizeof(float));
  if ((DataFile = fopen(name, "r")) == NULL)
  { Debug("\nCan't read the data file (%s)", name);
    exit(1);
  }
  if ((strcmp(name, TestVector)) == 0)   /* if reading the TestVector file, read */
    Flag = 1;                            /* only ID and A[i] (no Target)         */
  fscanf(DataFile, "%s %s", str, str1);
  if ((strcmp(str, buf1)) == 0)
    TmpNp = atoi(str1);
  Debug("\nNumOfPattern = %d", TmpNp);
  fscanf(DataFile, "%s %s", str, str1);
  if ((strcmp(str, buf2)) == 0)
    TmpNd = atoi(str1);
  Debug("\nPatternDimens = %d", TmpNd);
  if (TmpNp != Len)
    Debug("\n\tWARNING! - NumOfPattern NOT EQUAL Param (%d != %d)", TmpNp, Len);
  if (TmpNd != NDATA)
    Debug("\n\tWARNING! - PatternDimens NOT EQUAL NDATA (%d != %d)", TmpNd, NDATA);
  for (i = 0; i < Len; i++)
  { fscanf(DataFile, "%d", &id);
    Ptr.ID = id;
    for (j = 0; j < NDATA; j++)
    { fscanf(DataFile, "%f", &tmp);
      Ptr.A[j] = tmp;
    }
    if (Flag)
      tmp = -1;
    else
      fscanf(DataFile, "%f", &tmp);
    Ptr.Target = tmp;
    input[i] = patcpy(input[i], Ptr);
  }
  free(Ptr.A);          /* the temporary buffer is no longer needed */
  fclose(DataFile);
}
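For reference, ReadPattern expects a plain-text file with the two header lines it scans for, followed by one record per pattern: an integer ID, NDATA feature values and, for every file except the TestVector file, a target value. A hypothetical two-pattern fragment, assuming NDATA = 4 purely for illustration (the real files use the actual feature dimension), could look like this:

NumOfPattern: 2
PatternDimens: 4
101  0.12  0.34  0.56  0.78  1.0
102  0.21  0.43  0.65  0.87  0.0

Because the values are read with fscanf(), only whitespace separation matters; the line breaks are a readability convention, not a requirement.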

/*
 * ^LocPatMemory - allocate memory for (PAT *)
 */
PAT* LocPatMemory(int num)
{ int i;
  PAT *src;

  src = (PAT *) malloc(num * sizeof(PAT));
  for (i = 0; i < num; i++)
  { src[i].ID = -1;
    src[i].A = (float*) malloc(NDATA * sizeof(float));
    src[i].Target = -1.0;
  }
  return (src);
}

void FreePatMemory( PAT* src, int num )
{ int i;

  for (i = 0; i < num; i++)
    free(src[i].A);
  free(src);
}

/*
 * Copies pattern src to dest.
 * Returns dest.
 */
PAT patcpy (PAT dest, PAT src)
{ int i;

  dest.ID = src.ID;
  for (i = 0; i < NDATA; i++)
    dest.A[i] = src.A[i];
  dest.Target = src.Target;
  return dest;
}

…..

/* Uniformly distributed random value:
 * rand() returns x from [0, RAND_MAX]; x/(RAND_MAX+1) gives
 * a value from [0,1), which is then scaled to [A,B)
 */
float RavnRaspr(float A, float B)
{ float x;

  x = (B-A)*rand()/(RAND_MAX+1.0) + A;
  return x;
}

float NormRaspr(float A, float B)
{ float mat_ogidanie = A, Sigma = B;   /* mat_ogidanie = expected value (mean) */
  float Sumx = 0.0, x;
  int i;

  for (i = 0; i < 12; i++)
    Sumx = Sumx + RavnRaspr(0,1);      /* from R[0,1] -> N[a,sigma] */
  x = Sigma*(Sumx - 6) + mat_ogidanie;
  return x;
}
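NormRaspr relies on the central limit theorem: the sum of 12 independent U(0,1) values has mean 6 and variance 1, so Sigma*(Sumx-6)+A is approximately N(A, Sigma). A minimal stand-alone check of this approximation (not part of nvclass.c; the two functions are reproduced here only so that the sketch compiles on its own):

#include <stdio.h>
#include <stdlib.h>

static float RavnRaspr(float A, float B)        /* uniform value in [A,B) */
{ return (B-A)*rand()/(RAND_MAX+1.0) + A; }

static float NormRaspr(float A, float B)        /* same sum-of-12-uniforms scheme */
{ float Sumx = 0.0; int i;
  for (i = 0; i < 12; i++) Sumx += RavnRaspr(0,1);
  return B*(Sumx - 6) + A;
}

int main(void)
{ int i, n = 100000;
  double s = 0.0, s2 = 0.0, x;

  srand(1);
  for (i = 0; i < n; i++)
  { x = NormRaspr(0.0f, 1.0f);                  /* target: mean 0, sigma 1 */
    s += x;  s2 += x*x;
  }
  /* the sample mean and variance should come out close to 0 and 1 */
  printf("mean = %f  var = %f\n", s/n, s2/n - (s/n)*(s/n));
  return 0;
}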

int Init_W ( void )
{ int i, j;
  float A, B;
  time_t t, t1;

  t = time(NULL);
  t1 = t;
  /* re-seed the random generator */
  while (t == t1)
    srand((unsigned) time(&t));
  if (InitFunc == Random)
  { A = -Constant;
    B = Constant;
    Debug("\nInit_W () --- Start (%ld)", t);
    Debug("\n InitFunc=Random[%4.2f,%4.2f]", A, B);
    for (i = 0; i <= NDATA; i++)
      for (j = 0; j < NUNIT1; j++)
        W1[i][j] = RavnRaspr(A, B);
    for (j = 0; j <= NUNIT1; j++)
      W2[j] = RavnRaspr(A, B);
  }
  if (InitFunc == Gauss)
  { A = Alfa;
    B = Sigma;
    Debug("\nInit_W () --- Start (%ld)", t);
    Debug("\n InitFunc=Gauss[%4.2f,%4.2f]", A, B);
    for (i = 0; i <= NDATA; i++)
      for (j = 0; j < NUNIT1; j++)
        W1[i][j] = NormRaspr(A, B);
    for (j = 0; j <= NUNIT1; j++)
      W2[j] = NormRaspr(A, B);
  }
  if (Widrow == Yes)
    Widrow_Init();
  Debug("\nInit_W - successful");
  return OK;
}

/* LearnFunc */
int LearnFunc (void)
{ int i, j, n, K, NumErr = 0;
  int num = 0;
  float err_cur = 0.0, Res = 0;
  time_t tim;
  float ep[NMAXPAT];

  GL_Error = 1.0;
  time(&tim);
  Debug("\nLearnFunc () --- Started");
  Debug("\n eta = %4.2f", eta);
  Debug("\n LearnTolerance = %4.2f", LearnTolerance);
  Init_PromW();
  do
  { num++;
    err_cur = 0.0;
    NumErr = 0;
    for (n = 0; n < NWORK; n++)
    { K = Cur_Number[n];
      Res = Forward(Work[K]);
      ep[n] = fabs(Res - Work[K].Target);
      if (ep[n] > LearnTolerance)
      { NumErr++;
        Init_PromW();
        Update_Last(K, Res);
        Update_Prom1(K);
        Prom_to_W();
      }
      err_cur = err_cur + (ep[n]*ep[n]);
    }
    err_cur = 0.5*(err_cur/NWORK);
    result = Reset(err_cur, num, NumErr);
    if ((num % NumOut) == 0)
      Debug("\nStep :%d NumErr :%d Error:%6.4f", num, NumErr, err_cur);
  } while (result == CONT || result == RESTART);
  Debug("\nStep :%d NumErr :%d Error:%6.4f", num, NumErr, err_cur);
  return num;
}