Operating HDFS Files with the Java API
1. pom file configuration
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.test</groupId>
  <artifactId>hdfs_upload</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hdfs_upload</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.7.1</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>2.4.3</version>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
            <configuration>
              <transformers>
                <!-- the class containing main(); change this to your own main class -->
                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                  <mainClass>com.gjh.App</mainClass>
                </transformer>
              </transformers>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
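With this configuration, mvn package runs the shade goal during the package phase and produces a self-contained ("fat") jar; by Maven's default naming it lands at target/hdfs_upload-1.0-SNAPSHOT.jar. The ManifestResourceTransformer writes com.gjh.App into the jar manifest as the main class, so the jar can be launched directly, for example with hadoop jar on a node where Hadoop is installed.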
2. File operations
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import java.net.URI;

/**
 * HDFS file operations: create, upload, download, inspect attributes, and read.
 */
public class App {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // NameNode address
        URI uri = new URI("hdfs://localhost:9000");
        // Get a FileSystem handle as the specified user
        FileSystem fs = FileSystem.get(uri, conf, "gjh");
        System.out.println("Initializing...");

        // Create a file (overwrite if it already exists) and write some bytes
        FSDataOutputStream os = fs.create(new Path("/1702240034/text5.txt"), true);
        os.writeBytes("hello,hdfs my name is gjh gjh gjh gjh");
        os.close(); // close the output stream
        System.out.println("Created text5.txt");

        // Upload a local file to HDFS
        Path localPath = new Path("/home/gjh/date/text2.txt");
        Path hdfsPath = new Path("/1702240034/text2.txt");
        fs.copyFromLocalFile(localPath, hdfsPath);
        System.out.println("Uploaded text2.txt");

        // Download a file from HDFS to the local file system
        Path localPath1 = new Path("/home/gjh/date/text1.txt");
        Path hdfsPath1 = new Path("/1702240034/text1.txt");
        fs.copyToLocalFile(hdfsPath1, localPath1);
        System.out.println("Downloaded text1.txt");

        // Inspect file attributes
        System.out.println("Attributes of text1.txt:");
        FileStatus fis = fs.getFileStatus(new Path("/1702240034/text1.txt"));
        System.out.println("path:" + fis.getPath());                    // file path
        System.out.println("length:" + fis.getLen());                   // file length in bytes
        System.out.println("modify time:" + fis.getModificationTime()); // last modification time
        System.out.println("owner:" + fis.getOwner());                  // file owner
        System.out.println("blockSize:" + fis.getBlockSize());          // block size

        // Read the file and print its contents
        System.out.println("Contents of text1.txt:");
        FSDataInputStream in = fs.open(new Path("/1702240034/text1.txt"));
        IOUtils.copyBytes(in, System.out, 1024);
        in.close();

        fs.close(); // release the FileSystem
        System.out.println("Done!");
    }
}
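The same FileSystem handle also covers the other everyday operations: existence checks, creating directories, listing a directory, and deletion. The sketch below is not part of the original program; it is a minimal example assuming the same NameNode address (hdfs://localhost:9000), user (gjh), and directory (/1702240034) as above, and the class name AppExtras is made up for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;

public class AppExtras {
    public static void main(String[] args) throws Exception {
        // Same connection setup as App above (assumed NameNode address and user).
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost:9000"), conf, "gjh");

        Path dir = new Path("/1702240034");

        // Create the directory if it does not exist yet.
        if (!fs.exists(dir)) {
            fs.mkdirs(dir);
        }

        // List the directory: one FileStatus per child entry.
        for (FileStatus status : fs.listStatus(dir)) {
            System.out.println(status.getPath() + "\t" + status.getLen() + " bytes");
        }

        // Delete a single file; the second argument enables recursive delete
        // and is only needed for non-empty directories.
        fs.delete(new Path("/1702240034/text5.txt"), false);

        fs.close();
    }
}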