Test environment: CentOS 6.10, Hadoop 2.7.3, JDK 1.8
Test code: HDFSCSample.c
#include "hdfs.h"
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
/*
 * libhdfs sample: connect to the default HDFS instance, write a small
 * string to /tmp/testfile.txt, flush, and close.
 *
 * Returns EXIT_SUCCESS on success; exits with EXIT_FAILURE on any HDFS
 * error (exit(-1) would wrap to an implementation-defined status, 255 on
 * most POSIX systems, so EXIT_FAILURE is used instead).
 */
int main(int argc, char **argv) {
    (void)argc;
    (void)argv;

    /* Connect to the HDFS instance named in the client configuration. */
    hdfsFS fs = hdfsConnect("default", 0);
    if (!fs) {
        fprintf(stderr, "Failed to connect to HDFS!\n");
        exit(EXIT_FAILURE);
    }

    const char *writePath = "/tmp/testfile.txt";
    /* Buffer size, replication, and block size of 0 select the defaults. */
    hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY | O_CREAT, 0, 0, 0);
    if (!writeFile) {
        fprintf(stderr, "Failed to open %s for writing!\n", writePath);
        hdfsDisconnect(fs);
        exit(EXIT_FAILURE);
    }

    const char *buffer = "Hello, World!";
    /* +1 writes the terminating NUL too, matching the upstream sample. */
    tSize num_written_bytes =
        hdfsWrite(fs, writeFile, (void *)buffer, strlen(buffer) + 1);
    if (num_written_bytes < 0) {
        fprintf(stderr, "Failed to write to %s!\n", writePath);
        hdfsCloseFile(fs, writeFile);
        hdfsDisconnect(fs);
        exit(EXIT_FAILURE);
    }

    if (hdfsFlush(fs, writeFile)) {
        fprintf(stderr, "Failed to 'flush' %s\n", writePath);
        hdfsCloseFile(fs, writeFile);
        hdfsDisconnect(fs);
        exit(EXIT_FAILURE);
    }

    /* hdfsCloseFile flushes remaining data; a failure here loses writes. */
    if (hdfsCloseFile(fs, writeFile)) {
        fprintf(stderr, "Failed to close %s\n", writePath);
        hdfsDisconnect(fs);
        exit(EXIT_FAILURE);
    }

    hdfsDisconnect(fs);
    return EXIT_SUCCESS;
}
Compile script:
compile.sh
#!/bin/bash
# Build the libhdfs sample as ./a.out (the name execute.sh expects).
# Libraries must FOLLOW the source file on the command line: ld resolves
# -l flags left to right, so listing -lhdfs/-ljvm before the .c file
# leaves their symbols unresolved with modern (--as-needed) linkers.
export JAVA_HOME=/root/softs/jdk1.8.0_172
export HADOOP_HOME=/root/softs/hadoop-2.7.3/
gcc -I"$HADOOP_HOME/include" HDFSCSample.c \
    -L"$HADOOP_HOME/lib/native" -lhdfs \
    -L"$JAVA_HOME/jre/lib/amd64/server" -ljvm \
    -o a.out
Execute command:
# chmod +x compile.sh
#./compile.sh
Execute script:
execute.sh
#!/bin/bash
# Run the libhdfs sample: point the dynamic loader at libhdfs/libjvm and
# build a CLASSPATH that names every Hadoop common/hdfs jar individually.
export JAVA_HOME=/root/softs/jdk1.8.0_172
export HADOOP_HOME=/root/softs/hadoop-2.7.3/
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$JAVA_HOME/jre/lib/amd64/server/

CLASSPATH=./
# Same four jar directories as before, appended in the same order.
for jar_dir in \
    "$HADOOP_HOME/share/hadoop/common" \
    "$HADOOP_HOME/share/hadoop/common/lib" \
    "$HADOOP_HOME/share/hadoop/hdfs" \
    "$HADOOP_HOME/share/hadoop/hdfs/lib"
do
    for jar in "$jar_dir"/*.jar; do
        CLASSPATH=$CLASSPATH:$jar
    done
done
export CLASSPATH

./a.out
Execute command:
# chmod +x execute.sh
#./execute.sh
Open /tmp/testfile.txt and you will see that "Hello, World!" has been written inside.
Configuration and use of Libhdfs