I am trying to read a file of over 1 GB (1,157,421,364 bytes). It gives a memory error when I use the fread() function, but works well when I use fgets(). Note: I have intermixed C and C++.
Can someone help me overcome this memory error? Am I doing something wrong?
Thanks in advance.
The error is "Memory Error".
#include <iostream>
#include <cstdlib>
#include <cstdio>
#include <cerrno>
#include <cstring>
#include <ctime>

void read_file2(FILE* readFilePtr){
    long file_size;

    // Determine the file size by seeking to the end.
    fseek(readFilePtr, 0L, SEEK_END);
    file_size = ftell(readFilePtr);
    rewind(readFilePtr);

    // Allocate a single buffer large enough to hold the entire file.
    char *buffer = (char*) malloc(sizeof(char) * file_size);
    if (buffer == NULL) {
        fputs("Memory Error", stderr);
        exit(2);
    }

    long lines = 0;
    if (fread(buffer, 1, file_size, readFilePtr) != (size_t)file_size){
        fputs("Reading Error", stderr);
        exit(1);
    }

    // Count newline characters in the buffer.
    char *p = buffer;
    while ((p = (char*) memchr(p, '\n', (buffer + file_size) - p))){
        ++p;
        ++lines;
    }
    printf("Num of lines %ld\n", lines);
    free(buffer);
}

int main(int argc, char** argv){
    clock_t begin_time, end_time;
    float time_consumed;

    begin_time = clock();
    if (argc < 2){
        fprintf(stderr, "Usage: %s <file>\n", argv[0]);
        return 1;
    }
    FILE* inputFilePtr = fopen(argv[1], "rb");
    if (inputFilePtr == NULL){
        printf("Error Opening %s: %s (%d)\n", argv[1], strerror(errno), errno);
        return 1;
    }
    read_file2(inputFilePtr);
    end_time = clock();
    time_consumed = ((float)end_time - (float)begin_time)/CLOCKS_PER_SEC;
    printf("Time consumed is -- %f\n", time_consumed);
    return 0;
}
You can read the file in chunks instead of reading it as a whole. Reading the entire file into one allocated buffer means a huge memory allocation in your application; do you really want that? That said, this assumes you don't need to process the whole file at once, which is true in most cases. See the sketch below.
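For example, here is a minimal sketch of that approach; the 64 KiB chunk size and the helper name count_lines_chunked are my own choices, not from the original post:

#include <cstdio>
#include <cstring>

// Count newlines one fixed-size block at a time, so memory use
// stays constant no matter how large the file is.
long count_lines_chunked(FILE* fp){
    static char chunk[64 * 1024];   // 64 KiB; the size is arbitrary
    long lines = 0;
    size_t n;
    while ((n = fread(chunk, 1, sizeof(chunk), fp)) > 0){
        char *p = chunk;
        while ((p = (char*) memchr(p, '\n', (chunk + n) - p))){
            ++p;
            ++lines;
        }
    }
    return lines;
}

Calling this in place of read_file2 gives the same line count without ever allocating more than one chunk.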
You usually don't read big files in one go like that; you use something called buffered reads. Essentially, you keep calling fread in a loop until there is nothing left to read.
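A minimal sketch of such a loop, assuming all you need is to handle each block as it arrives (the function name count_bytes_buffered and the 8 KiB buffer are illustrative choices, not from the answer):

#include <cstdio>

// Keep calling fread until it returns 0, then check whether the loop
// ended because of end-of-file or because of a read error.
int count_bytes_buffered(FILE* fp, long* total_out){
    char buf[8192];              // 8 KiB; the size is arbitrary
    long total = 0;
    size_t n;
    while ((n = fread(buf, 1, sizeof(buf), fp)) > 0){
        total += (long)n;        // process the block here
    }
    if (ferror(fp)){
        fputs("Reading Error", stderr);
        return 1;
    }
    *total_out = total;
    return 0;                    // clean end-of-file
}

Note that fread returns the number of items actually read; a short or zero return by itself does not tell you whether you hit end-of-file or an error, which is why the ferror() check after the loop matters.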