I have a program that crashes with a segmentation fault after some computation.

I was looking for some help with efficient memory management.

My program does the following:

I have a grid with min. i,j,k coordinates -52, -34, -35 and max. i,j,k coordinates 53, 37, 33. (In i, j, k loop).

I want to compare each grid point to a *.pdb file of this sort which has ~2000 lines of x, y, z positions.

ATOM 1756 OH2 TIP3W2710 -46.420 0.150 2.420

ATOM 1757 OH2 TIP3W2753 19.800 2.170 -14.550

ATOM 1758 OH2 TIP3W2754 18.150 20.090 12.270

So basically, in the main body of the program I compare each unique i, j, k value to each x, y, z in the compare() function.

The problem is the program goes through 1000 i,j,k values and then gives segmentation fault.

Here's the program:

```
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>   /* strncpy / strncmp */
/*
 * Forward declaration of compare(), defined later in this file.
 * Double parameters are compatible with the definition after default
 * argument promotion, and give the call below a proper prototype
 * (the original relied on an implicit declaration, which is UB when
 * it mismatches a K&R float definition).
 */
int compare(double p, double q, double r);

/*
 * Walk every integer grid point with i in [-52,53), j in [-34,37),
 * k in [-35,33) and write to output.dat each point that has at least
 * one atom of test1.pdb within distance 1.0, together with the count.
 */
int main(void)
{
    FILE *fp1 = fopen("output.dat", "w");
    if (fp1 == NULL) {
        perror("output.dat");
        return EXIT_FAILURE;
    }

    for (int i = -52; i < 53; i++) {
        for (int j = -34; j < 37; j++) {
            for (int k = -35; k < 33; k++) {
                int val = compare(i, j, k);
                if (val >= 1) {
                    /* Cast to double to keep the original "%f" output format. */
                    fprintf(fp1, "%f\t %f\t %f\t %d\n",
                            (double)i, (double)j, (double)k, val);
                }
            }
        }
    }

    /* fclose flushes buffered output; check it so write errors are reported. */
    if (fclose(fp1) != 0) {
        perror("output.dat");
        return EXIT_FAILURE;
    }
    return 0;
}
compare (p,q,r)
float p,q,r ;
{
char cbuff[100];
char sx[7];
char sy[7];
char sz[7];
float a[2500];
float b[2500];
float c[2500];
float dist, num;
int count=0, i=0, j;
FILE *fp2;
fp2 = fopen("test1.pdb", "r");
while (fgets (cbuff, 80, fp2)!= NULL)
{
if (cbuff[0] =='A' && cbuff[1] == 'T' && cbuff[2] == 'O' && cbuff[3] == 'M')
{
strncpy (sx, cbuff+31, 7);
sx[7] = '\0';
strncpy (sy, cbuff+40, 7);
sy[7] = '\0';
strncpy (sz, cbuff+47, 7);
sz[7] = '\0';
a[i] = atof(sx);
b[i] = atof(sy);
c[i] = atof(sz);
/*printf("%d\t%f\t%f\t%f\n", i, a[i], b[i], c[i]);*/
i++;
}
}
for(i=0; i < 2013; i++)
{
num = (pow ((c[i]-r),2) + pow ((b[i]-q),2) + pow((a[i]-p),2));
dist = sqrt (num);
if (dist < 1 )
{
count++;
printf("%f\n", dist);
}
}
return(count);
fclose(fp2);
}
```