In the first phase of Huffman-based text compression, the number of occurrences of each character in a text file has to be counted and recorded as that character's weight.
To do the counting, I use an array of structs to store each character that appears in the text together with its number of occurrences.
Definition:
struct chrcount
{
    char chr;
    int count;
} charcount[128];
Below is my program
#include <stdio.h>
#include <stdlib.h>
#include <conio.h>

void main()
{
    FILE *fp;
    int j = 0;
    int i;
    struct chrcount    /* stores each character in the file and its count */
    {
        char chr;
        int count;
    } charcount[128];
    char c;

    for (i = 0; i < 128; i++)
    {
        charcount[i].count = 0;
    }
    if ((fp = fopen("data.txt", "a+")) == NULL)
    {
        printf("can't open file data.txt\n");
        exit(1);
    }
    c = fgetc(fp);
    putchar(c);
    charcount[0].chr = c;
    charcount[0].count = 1;
    j++;
    while ((c = fgetc(fp)) != EOF)
    {
        putchar(c);
        for (i = 0; i < j; i++)
        {
            if (c == charcount[i].chr)
            {
                charcount[i].count++;
            }
            else
            {
                charcount[j].chr = c;
                charcount[j].count++;
                j++;
            }
            break;
        }
    }
    fclose(fp);
    printf("\n");
    for (i = 0; i < j; i++)
    {
        printf("%c: ", charcount[i].chr);
        printf("%d\n", charcount[i].count);
    }
}
The problem with this program is that it only counts the occurrences of the first character correctly; every subsequent character is recorded with a count of 1, and when a character repeats it gets recorded again as a new entry.
Where is the problem?
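For comparison, here is a minimal sketch of how such a tally loop is usually structured: the count is bumped only when a match is found, and a new entry is appended only after the scan over all recorded characters finishes without a match. The file name data.txt and the 128-entry table are kept from the post; c is read into an int so EOF can be detected reliably. This is only an illustrative sketch under those assumptions, not a verified drop-in replacement for the code above.

#include <stdio.h>
#include <stdlib.h>

struct chrcount
{
    char chr;
    int count;
};

int main(void)
{
    struct chrcount charcount[128];   /* 128-entry table, as in the post */
    FILE *fp;
    int i, j = 0, found;
    int c;                            /* int, not char, so EOF is distinguishable */

    if ((fp = fopen("data.txt", "r")) == NULL)
    {
        printf("can't open file data.txt\n");
        exit(1);
    }
    while ((c = fgetc(fp)) != EOF)
    {
        putchar(c);
        found = 0;
        for (i = 0; i < j; i++)       /* check every character recorded so far */
        {
            if (c == charcount[i].chr)
            {
                charcount[i].count++; /* already recorded: bump its count */
                found = 1;
                break;                /* break only after a match */
            }
        }
        if (!found && j < 128)        /* unseen character: append a new entry */
        {
            charcount[j].chr = (char)c;
            charcount[j].count = 1;
            j++;
        }
    }
    fclose(fp);
    printf("\n");
    for (i = 0; i < j; i++)
    {
        printf("%c: %d\n", charcount[i].chr, charcount[i].count);
    }
    return 0;
}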