How can a C/C++ program store large files on the Hadoop platform?
Which approach should I choose?
1. Read the local file, open a file in HDFS, and copy it in a loop: while (write())
2. Call the Hadoop command-line tool through a Linux system command (a rough sketch of this follows below)
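For option 2, one common route is to shell out to hadoop fs -put, which copies a local file into HDFS. Below is a minimal sketch, assuming the hadoop binary is on the PATH; the local and HDFS paths are placeholders for illustration only:

#include <stdio.h>
#include <stdlib.h>

int main(void) {
    /* Hypothetical paths, replace with your own. */
    const char *localPath = "/data/bigfile.bin";
    const char *hdfsPath = "/user/me/bigfile.bin";

    char cmd[1024];
    /* hadoop fs -put <local> <hdfs> copies a local file into HDFS. */
    snprintf(cmd, sizeof(cmd), "hadoop fs -put %s %s", localPath, hdfsPath);

    int rc = system(cmd);
    if (rc != 0) {
        fprintf(stderr, "hadoop fs -put failed (exit status %d)\n", rc);
        return 1;
    }
    return 0;
}

This is simple but gives you little control over error handling or progress; the libhdfs route in option 1 keeps everything inside your own process.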
CodePudding user response:
See http://hadoop.apache.org/docs/r0.19.1/libhdfs.html

#include "hdfs.h"

#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(int argc, char **argv) {
    /* Connect to the default namenode configured for the cluster. */
    hdfsFS fs = hdfsConnect("default", 0);
    const char *writePath = "/tmp/testfile.txt";
    hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY | O_CREAT, 0, 0, 0);
    if (!writeFile) {
        fprintf(stderr, "Failed to open %s for writing!\n", writePath);
        exit(1);
    }
    char *buffer = "Hello, World!";
    tSize num_written_bytes = hdfsWrite(fs, writeFile, (void *)buffer, strlen(buffer) + 1);
    if (hdfsFlush(fs, writeFile)) {
        fprintf(stderr, "Failed to 'flush' %s\n", writePath);
        exit(1);
    }
    hdfsCloseFile(fs, writeFile);
}
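The example above only writes a short string. For large files (the approach in option 1), the usual pattern is to read the local file in fixed-size chunks and push each chunk through hdfsWrite in a loop, so memory use stays bounded. A minimal sketch, assuming libhdfs is available and linked, with placeholder paths:

#include "hdfs.h"

#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    /* Hypothetical paths, replace with your own. */
    const char *localPath = "/data/bigfile.bin";
    const char *hdfsPath = "/tmp/bigfile.bin";

    hdfsFS fs = hdfsConnect("default", 0);
    if (!fs) {
        fprintf(stderr, "Failed to connect to HDFS\n");
        exit(1);
    }

    FILE *in = fopen(localPath, "rb");
    if (!in) {
        fprintf(stderr, "Failed to open %s\n", localPath);
        exit(1);
    }

    hdfsFile out = hdfsOpenFile(fs, hdfsPath, O_WRONLY | O_CREAT, 0, 0, 0);
    if (!out) {
        fprintf(stderr, "Failed to open %s for writing\n", hdfsPath);
        exit(1);
    }

    /* Copy the file in 1 MB chunks. */
    static char buffer[1 << 20];
    size_t n;
    while ((n = fread(buffer, 1, sizeof(buffer), in)) > 0) {
        if (hdfsWrite(fs, out, buffer, (tSize)n) < 0) {
            fprintf(stderr, "hdfsWrite failed\n");
            exit(1);
        }
    }

    hdfsFlush(fs, out);
    hdfsCloseFile(fs, out);
    fclose(in);
    hdfsDisconnect(fs);
    return 0;
}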