0

I wrote this program, but when I run it there is a problem with a pointer.
Please, can you help me find where the problem is?
To locate the problem while it runs, step through it in the debugger by pressing F10.

#include <conio.h>
#include <cctype>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <string>
//#include<stdio>
//#include<ctype>
using namespace std;


/****************************************************************
Functions prototype.
*****************************************************************/

void   Open_File();
void   Demage_Lexeme();
int    Search(char[256],int);
void   analyze();
void   Skip_Comment();
void   Read_String();
void   Is_Keyword_Or_Not();
void   Is_Identifier_Or_Not();
void   Is_Operator_Or_Not();
void   Read_Number();
void   Is_Special_Or_Not();
void   Is_Comparison_Or_Not();
void   Add_To_Lexical (char[256],int,char[256]);

void Print_result();
void    Print_tokens();
void   Token_Attribute();

/****************************************************************
Data structure used in program.
*****************************************************************/

struct lexical
{
    char    data[256];          //Value of token.
    int     line[256];          //Line # which token appear in input file.
    int     times;              //# of times that token appear in input file.
    char    type[256];           //Type of each token.
    struct  lexical *next;

	
};

 typedef struct lexical Lex;
 typedef Lex *lex;
  lexical *head=NULL;
 lexical *tail;
/****************************************************************
File pointer for accessing the file.
*****************************************************************/

//FILE *block;
//FILE *result;
//FILE *tokens;
char lexeme[256];
string ch;
char c;
int f,flag,line=1,i=1;
//lex head=NULL;
//lex tail=NULL;
/****************************************************************
Array holding all keywords for checking.
*****************************************************************/

char *keywords[]={"procedure","is","begin","end","var","cin","cout","if",
		  "then","else","and","or","not","loop","exit","when",
		  "while","until"};

/****************************************************************
Array holding all arithmetic operations for checking.
*****************************************************************/

char arithmetic_operator[]={'+','-','*','/'};

/****************************************************************
Array holding all comparison operations for checking.
*****************************************************************/

char *comparison_operator[]={"<",">","=","<=","<>",">="};

/****************************************************************
Array holding all special for checking.
*****************************************************************/

char special[]={'%','!','@','~','$'};

/****************************************************************

			**************
			*MAIN PROGRAM*
			**************

*****************************************************************/

void main()
{
  Open_File();
  analyze();
  //fclose(block);
   
Print_result();
  Print_tokens();
}

/****************************************************************
This function open input sourse file.
*****************************************************************/

void Open_File()
{ifstream block_file("block.txt");

  //block=fopen("source.txt","r");   //provide path for source.txt here

	/////if(block_file.is_open())
	////{
		///while(!block_file.eof())
		///{
			//getline(block_file,str);
			//for(int j=0;j<str.length();++j)


////////////////////////////
if(block_file.is_open()==NULL)
  {
cout<<"!!!Can't open input file - source.txt!!!";
	
  }
}

/****************************************************************
Function to add item to structure of array to store data and
information of lexical items.
*****************************************************************/

void Add_To_Lexical (char value[256],int line,char type[256])
{
lex neww_lex;

	if (!Search(value,line))    //When return 1 the token not found.
	{

	  neww_lex=new Lex[2000];
//new_lex=new lexical(2000);
	  if (neww_lex!=NULL)
	  {
		strcpy_s(neww_lex->data,value);
		neww_lex->line[0]=line;
		neww_lex->times=1;
		strcpy_s(neww_lex->type,type);
		neww_lex->next=NULL;

		if (head==NULL)
		   head=neww_lex;
		else
		   tail->next=neww_lex;

		tail=neww_lex;
	  }
	}
}

/****************************************************************
Function to search token.
*****************************************************************/

int Search (char value[256],int line)
{struct lexical *x;
x=new lexical;
x->next=NULL;
 x=head;
  int flag=0;

  //while ((x->next)!=NULL && !flag)
  while (!flag)
  {
    if (strcmp(x->data,value)==0)
    {
      x->line[x->times]=line;
      x->times++;
      flag=1;
    }
    x=x->next;
  }
  return flag;
}

/****************************************************************
Function to print the ST.TXT .
*****************************************************************/

void Print_result()
{
  lex x=head;
  
  
ofstream result("result.TXT");

if ((result.is_open()==NULL))
      cout<<"The file result.TXT can not open. \n";

  else

  {
    result<<"\t"<<"Line#"<<"\t"<<"Lexeme"<<"\t"<<"Type";
    result<<"\t ---- \t    ------ \t ---- \n";

    while (x!=NULL)
    {
      if ((strcmp(x->type,"num")==0)         ||
	 (strcmp(x->type,"keyword")==0)      ||
	 (strcmp(x->type,"identifier")==0))
      {
	 result<<"\t ";

	 for (int j=0;j<x->times;j++)
	 {
	   result<<x->line[j];
		if (j!=x->times-1)      //This condition to prevent the comma
	   result<<","<<x->line[j];  //"," to not print after last line #.
	 }

	result<<"\t"<< x->data<<"\t"<<x->type<<"\n";
      }
      x=x->next;
    }

    result.close();
  }
}

/****************************************************************
Function to print the TOKENS.TXT .
*****************************************************************/

void Print_tokens()
{
  int flag=0;
  
 
	 ifstream read("block.txt");
if(read.is_open()==NULL)
{cout<<"cant open block file\n";}
else
{	ofstream write("tokens.txt");
	
	if(write.is_open()==NULL)
	{cout<<"cant open token file\n";}
	else
		if(read.is_open())
		{read>>c;
			getchar();
			while(!read.eof())
			{
				//getline(read,ch);
				if((c==' ')&&(!flag))
				{do
					read.get(c);
					//putchar(c);
					while (c==' ');
					read.seekg(-2,1);
	                 //read.ss(read,-2,1);
	                  read>>c;
	   	              flag=1;
				}
                  if (c!='\n' && c!='\t')
				  write<<c;
				   if (c=='\n')
				   {
	   write<<"\n";
	   write.close();
	    Token_Attribute();
	    i++;
	    flag=0;
	  }
read.get(c);

	  //getline(read,ch);
			}
			
	}
		
}
	read.close();

	
	

}
    
/****************************************************************
Function to put the token and atrribute in TOKENS.TXT .
*****************************************************************/

void Token_Attribute()
{  
ofstream  tokens(" tokens.TXT");



	////////////////////////
  lex x=head;
   int j;

  while (x!=NULL)
  {
    if (x->line[0]==i)
    {
      tokens<<"token : "<<x->type<<"\t";

      if ((strcmp(x->type,"num")==0)         ||
	 (strcmp(x->type,"keyword")==0)      ||
	 (strcmp(x->type,"identifier")==0))

      {
	tokens<<"attribute : line#="<<i<<"\n";
      }

      else

      {
	tokens<<"attribute : "<<x->data<<"\n";
      }

    }
    x=x->next;
  }
  tokens<<"\n";
}

/****************************************************************
Function to create lexical analysis.
*****************************************************************/

void analyze()
{ifstream read("block.txt");

c=read.get();
                     //Read character.

  while(!read.eof())                   //While the file is not end.
  {

      if(c=='\n')                   //Compute # of lines in source.txt .
	  {
	    line++;
	  c=read.get();
	  }

      if(c==' ' && c=='\n' )
      {
	  line++;
c=read.get();
      }
      if(c==' ' && c!='\n' )          //The character is space.
c=read.get();


      if(c=='/' || c=='\"')    //Function for skipping comments in the file
	  Skip_Comment();	//and '"' with display statements.


      if(c==' ')              //The character is leter.
	{
	    Read_String();
	    Is_Keyword_Or_Not();
	    Is_Operator_Or_Not();
	    Is_Identifier_Or_Not();
	}


      if(isdigit(c))             //The character is digit.
	 Read_Number();


      if (c==';')                //The character is semicolon.
	Add_To_Lexical(";",line,"semicolon");


      if (c==':')                //The character is colon.
	Add_To_Lexical(":",line,"colon");


      if (c==',')                //The character is comma.
	Add_To_Lexical(",",line,"comma");


      if (c=='(')                //The character is parenthesis.
	Add_To_Lexical("(",line,"parenthesis");


      if (c==')')                //The character is parenthesis.
	Add_To_Lexical(")",line,"parenthesis");

				 //The character is comparison_operator
      if (c=='<' || c=='=' || c=='>')
	Is_Comparison_Or_Not();


      Is_Special_Or_Not();       //After failed scaning in before cases
				 //check the character is special or not.
      Demage_Lexeme();

      if(c==' ' && c=='\n' )
      {
	  line++;
	c=read.get();
      }
      else
     c=read.get();
  }
}

/****************************************************************
This function read all character of strings.
*****************************************************************/

void Read_String()
{ofstream block_file("block.txt");
  int j=0;

  do
  {
    lexeme[j++]=c;
  c=getchar();
  } while(isalpha(c));

  block_file.seekp(-1,1);
    lexeme[j]='\0';
}

/****************************************************************
This function demage the data stored in the lexeme to store
next word without any previous character of previous word .
*****************************************************************/

void Demage_Lexeme()
{
  int j=0;

  while (lexeme[j]!='\0')
  {
    lexeme[j]='\0';
    j++;
  }
}

/****************************************************************
This function check the string is keyword or not.
*****************************************************************/

void Is_Keyword_Or_Not()
{
  int j=0;

  flag=0;

  for (j=0;j<18;j++)         //search for keyword & # of keywords = 18.
  {
    if (strcmp(lexeme,keywords[j])==0)
	{
		Add_To_Lexical(lexeme,line,"keyword");
		flag=1;
		break;
	}
  }
}

/****************************************************************
This function check the string is indentifier or not.
*****************************************************************/

void Is_Identifier_Or_Not()
{
    if(flag==0)   //identifier
       Add_To_Lexical(lexeme,line,"identifier");
}

/*****************************************************************
This function check if the character is operator or not.
******************************************************************/

void Is_Operator_Or_Not()
{
  

      for (int j=0;j<4;j++)               //The # of arithmetic_operator is 4.
	if (c==arithmetic_operator[j])
	{
	  lexeme[0]=c;
	  lexeme[1]='\0';
	  Add_To_Lexical(lexeme,line,"operator");
	  break;
	}
}

/*****************************************************************
This function check if the character is special symbol or not.
******************************************************************/

void Is_Special_Or_Not()
{
  int j;

  for (j=0;j<5;j++)               //The # of special symbol is 5.
    if (c==special[j] )
      {
	lexeme[0]=c;
	lexeme[1]='\0';
	Add_To_Lexical(lexeme,line,"special");
	break;
      }

}

/*****************************************************************
This function check if the character is special symbol or not.
******************************************************************/

void Is_Comparison_Or_Not()
{ofstream block_file("block.txt");
    int j,flag=0;

    char k=c;

	
c=getchar();
if(isalpha(c)&&k=='<')
{do
c=getchar();

      while (c!='>');
      flag=1;


    }

    else

    if (c=='<' && k=='<')            //Skip cout <<.
      flag=1;

    else

    if (c=='>' && k=='>')            //Skip cin >>.
      flag=1;

    if (!flag)
    {

      lexeme[0]=c;          //We read here to check if the comparision
 c=getchar();         //operator is single which is only (> or < or =)
			     //or like ( >= or == or <= ).

      if (c=='<' || c=='=' || c=='>')
      {
	lexeme[1]=c;
	lexeme[2]='\0';
      }
      else
      {
	lexeme[1]='\0';
	block_file.seekp(-1,1);
      }

      for (j=0;j<6;j++)      //The # of comparison_operator is 6.
	if (strcmp(lexeme,comparison_operator[j])==0)
	  {
	    Add_To_Lexical(lexeme,line,"comparision");
	    break;
	  }
   }
}


/*****************************************************************
This function check if the character is number or not.
******************************************************************/

void Read_Number()
{

   int j=0;

   while(isdigit(c) || c=='.')  //search for digits
   {
      lexeme[j]=c;
     c=getchar();
      j++;
   }

   lexeme[j]='\0';
   Add_To_Lexical(lexeme,line,"num");

}

/****************************************************************
Function to skip comment statements.
*****************************************************************/

void Skip_Comment()
{
    if (c=='\"')             //Skipping printf statements and
    {                        //other such display statements.
	if(c=='\"')
	while((c=getchar())!='"');
    }

    else
		 //Skip comments.
    {c=getchar();
//getline(block,ch);
	if(c=='/')	//Checking single line comments
	{
		while((c=getchar())!='\n');
	}
	else if(c=='*')	//Checking multiple line comments.
	{
		while(f==0)
		{c=getchar();
			//getline(block,ch);
			if(c=='*')
			{
				c=getchar();
				if(c=='/')
					f=1;
			}
		}
		f=0;
       }
    }
}
2
Contributors
1
Reply
2
Views
8 Years
Discussion Span
Last Post by Ancient Dragon
0

Please post the input file(s) (block.txt, tokens.txt, etc.). It appears the reading of tokens.txt is doing an awful lot of unnecessary work by reading the file one character at a time instead of using getline() to read each entire line into a std::string object. If you want to strip comments, just use string's find() method to check whether a "//" or "/*" comment exists.

This topic has been dead for over six months. Start a new discussion instead.
Have something to contribute to this discussion? Please be thoughtful, detailed and courteous, and be sure to adhere to our posting rules.