HDFS Single Node Java API
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.lihaozhe</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0.0</version>
    <properties>
        <jdk.version>8</jdk.version>
        <!-- Common build configuration -->
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <maven.compiler.compilerVersion>8</maven.compiler.compilerVersion>
        <maven.compiler.encoding>utf-8</maven.compiler.encoding>
        <project.build.sourceEncoding>utf-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <maven.test.failure.ignore>true</maven.test.failure.ignore>
        <maven.test.skip>true</maven.test.skip>
        <!-- Dependency versions, alphabetical -->
        <commons-io.version>2.14.0</commons-io.version>
        <commons-lang3.version>3.13.0</commons-lang3.version>
        <commons-pool2.version>2.11.1</commons-pool2.version>
        <fastjson.version>2.0.41</fastjson.version>
        <!-- fixed typo: property was previously misspelled "guaua.version" -->
        <guava.version>31.1-jre</guava.version>
        <gson.version>2.10.1</gson.version>
        <hadoop.version>3.3.5</hadoop.version>
        <hutool.version>5.8.22</hutool.version>
        <ikanalyzer.version>2012_u6</ikanalyzer.version>
        <jackson.version>2.15.3</jackson.version>
        <jieba-analysis.version>1.0.2</jieba-analysis.version>
        <junit.version>5.10.0</junit.version>
        <lombok.version>1.18.30</lombok.version>
        <log4j-slf4j.version>2.20.0</log4j-slf4j.version>
        <mysql.version>8.0.33</mysql.version>
    </properties>
    <dependencies>
        <!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-api -->
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>${junit.version}</version>
            <!-- test-only scope -->
            <scope>test</scope>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-engine -->
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.projectlombok/lombok -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>${lombok.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- https://mvnrepository.com/artifact/cn.hutool/hutool-all -->
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>${hutool.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>${commons-lang3.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>${commons-io.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>${gson.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>${fastjson.version}</version>
        </dependency>
        <!-- Jackson JSON stack -->
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jsr310</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <!-- Test-data generation utility -->
        <dependency>
            <groupId>com.github.binarywang</groupId>
            <artifactId>java-testdata-generator</artifactId>
            <version>1.1.2</version>
        </dependency>
        <!-- MySQL JDBC driver -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>${mysql.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>${log4j-slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>${guava.version}</version>
        </dependency>
        <!-- commons-pool2 -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-pool2</artifactId>
            <version>${commons-pool2.version}</version>
        </dependency>
        <!-- Chinese word segmentation -->
        <!-- IK analyzer -->
        <dependency>
            <groupId>com.janeluo</groupId>
            <artifactId>ikanalyzer</artifactId>
            <version>${ikanalyzer.version}</version>
        </dependency>
        <!-- jieba analyzer -->
        <dependency>
            <groupId>com.huaban</groupId>
            <artifactId>jieba-analysis</artifactId>
            <version>${jieba-analysis.version}</version>
        </dependency>
    </dependencies>
    <build>
        <!-- Use artifactId, not project.name: no <name> element is declared in this POM,
             so ${project.name} would not interpolate reliably. -->
        <finalName>${project.artifactId}</finalName>
        <!--<outputDirectory>../package</outputDirectory>-->
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.11.0</version>
                <configuration>
                    <!-- Compiler source-file encoding -->
                    <encoding>UTF-8</encoding>
                    <!-- Target JDK version -->
                    <source>${jdk.version}</source>
                    <target>${jdk.version}</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-clean-plugin</artifactId>
                <version>3.2.0</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-resources-plugin</artifactId>
                <version>3.3.1</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <version>3.3.2</version>
            </plugin>
            <!-- Skip unit tests during packaging -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.22.2</version>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
HDFS Single Node Java API
package com.lihaozhe.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * Exercises the HDFS Java client API against a single-node cluster:
 * existence check, listing, mkdir, rename, delete, upload, download.
 * Requires a reachable NameNode at hdfs://hadoop:9000/ — these are
 * integration tests, not unit tests.
 */
public class HdfsTest {

    /** NameNode RPC address used by every test. */
    private static final String HDFS_URI = "hdfs://hadoop:9000/";

    /** User identity presented to HDFS by the Java client. */
    private static final String HDFS_USER = "lhz";

    /**
     * Opens a fresh HDFS connection with the shared URI/user/config.
     * The caller owns the returned handle and must close it
     * (all tests below do so via try-with-resources).
     *
     * @return an open {@link FileSystem} bound to {@link #HDFS_URI}
     * @throws URISyntaxException   if {@link #HDFS_URI} is malformed
     * @throws IOException          on connection failure
     * @throws InterruptedException if the login is interrupted
     */
    private FileSystem getFileSystem() throws URISyntaxException, IOException, InterruptedException {
        // An empty Configuration is sufficient: the URI carries the NameNode address.
        return FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }

    /** Checks whether the directory /lihaozhe exists under the HDFS root. */
    @Test
    public void test01() {
        // try-with-resources guarantees the connection is closed even on failure
        // (the original called fs.close() outside any finally block, leaking on error).
        try (FileSystem fs = getFileSystem()) {
            String result = fs.exists(new Path("/lihaozhe")) ? "目录存在" : "目录不存在";
            System.out.println(result);
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Lists every entry directly under the HDFS root directory. */
    @Test
    public void test02() {
        try (FileSystem fs = getFileSystem()) {
            FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
            for (FileStatus fileStatus : fileStatuses) {
                System.out.println(fileStatus.getPath());
            }
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates the directory /xiaoshuo if it does not already exist. */
    @Test
    public void test03() {
        try (FileSystem fs = getFileSystem()) {
            Path path = new Path("/xiaoshuo");
            if (!fs.exists(path)) {
                System.out.println(fs.mkdirs(path) ? "创建成功" : "创建失败");
            } else {
                System.out.println(path.getName() + "已经存在无需重复创建");
            }
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Renames /xiaoshuo to /小说 when the source directory exists. */
    @Test
    public void test04() {
        try (FileSystem fs = getFileSystem()) {
            Path path = new Path("/xiaoshuo");
            if (fs.exists(path)) {
                System.out.println(fs.rename(path, new Path("/小说")) ? "修改成功" : "修改失败");
            } else {
                System.out.println(path.getName() + "不存在");
            }
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Recursively deletes /小说 when it exists. */
    @Test
    public void test05() {
        try (FileSystem fs = getFileSystem()) {
            Path path = new Path("/小说");
            if (fs.exists(path)) {
                // second argument 'true' = recursive delete
                System.out.println(fs.delete(path, true) ? "删除成功" : "删除失败");
            } else {
                System.out.println(path.getName() + "不存在");
            }
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Uploads the local file 三国演义.txt into /小说/ on HDFS. */
    @Test
    public void test06() {
        try (FileSystem fs = getFileSystem()) {
            // Source in the local file system (relative to the working directory).
            Path src = new Path("三国演义.txt");
            // Destination path on HDFS.
            Path dst = new Path("/小说/三国演义.txt");
            fs.copyFromLocalFile(src, dst);
            System.out.println(fs.exists(dst) ? "上传成功" : "上传失败");
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Downloads /小说/三国演义.txt from HDFS to the local file 三国.txt. */
    @Test
    public void test07() {
        try (FileSystem fs = getFileSystem()) {
            // Source path on HDFS.
            Path src = new Path("/小说/三国演义.txt");
            // Destination in the local file system.
            Path dst = new Path("三国.txt");
            fs.copyToLocalFile(src, dst);
            File file = new File(dst.getName());
            System.out.println(file.exists() ? "下载成功" : "下载失败");
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
}
版权声明:本文内容由互联网用户自发贡献,该文观点仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 举报,一经查实,本站将立刻删除。
文章由极客之音整理,本文链接:https://www.bmabk.com/index.php/post/188681.html