core-site.xml:

<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>

hdfs-site.xml:

<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
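As a quick sanity check of these settings, here is a minimal sketch, assuming core-site.xml is on the client's classpath (the class name DefaultFsDemo is just for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class DefaultFsDemo {
    public static void main(String[] args) throws Exception {
        // With no explicit URI, FileSystem.get() resolves fs.defaultFS from
        // the core-site.xml on the classpath (hdfs://localhost:9000 above)
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(configuration);
        System.out.println(fileSystem.getUri());  // prints the resolved URI
        fileSystem.close();
    }
}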
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Hadoop HDFS Java API operations
 * @author: lyj
 * @since: 2019/3/20
 */
public class HDFSApp {
    // IP of the virtual machine running HDFS; adjust to your environment
    public static final String HDFS_PATH = "hdfs://192.168.10.99:8020";
    // HDFS file system handle
    FileSystem fileSystem = null;
    // configuration object
    Configuration configuration = null;

    /**
     * Create a directory.
     * @throws IOException
     */
    @Test
    public void mkdir() throws IOException {
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }
    // verify with: hadoop fs -ls /hdfsapi/test

    /**
     * Create a file.
     * @throws IOException
     */
    @Test
    public void create() throws IOException {
        // create the file and write to it (file names below are samples; adjust them)
        FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"));
        outputStream.write("hello hadoop".getBytes());
        outputStream.flush();
        outputStream.close();
    }

    /**
     * Print a file's contents.
     * @throws IOException
     */
    @Test
    public void cat() throws IOException {
        FSDataInputStream inputStream = fileSystem.open(new Path("/hdfsapi/test/a.txt"));
        IOUtils.copyBytes(inputStream, System.out, 1024);
        inputStream.close();
    }

    /**
     * Rename a file.
     * @throws IOException
     */
    @Test
    public void rename() throws IOException {
        Path oldPath = new Path("/hdfsapi/test/a.txt");
        Path newPath = new Path("/hdfsapi/test/b.txt");
        Assert.assertTrue(fileSystem.rename(oldPath, newPath));
    }

    /**
     * Upload a local file to HDFS.
     * @throws Exception
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path oldPath = new Path("E:/test/a.txt");  // local path, adjust to your machine
        Path newPath = new Path("/hdfsapi/test");
        fileSystem.copyFromLocalFile(oldPath, newPath);
    }

    /**
     * Upload a local file to HDFS, with a progress indicator.
     * @throws Exception
     */
    @Test
    public void copyFromLocalFileWithProgress() throws Exception {
        // Path oldPath = new Path("E:/test/a.txt");
        // Path newPath = new Path("/hdfsapi/test");
        // fileSystem.copyFromLocalFile(oldPath, newPath);
        InputStream in = new BufferedInputStream(
                new FileInputStream(new File("E:/test/a.txt")));  // local path, adjust to your machine
        FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"),
                new Progressable() {
                    @Override
                    public void progress() {
                        System.out.print(".");  // progress indicator
                    }
                });
        IOUtils.copyBytes(in, outputStream, 4096);
    }

    /**
     * Download a file from HDFS to the local disk.
     * @throws Exception
     */
    @Test
    public void copyToLocalFile() throws Exception {
        // input stream from HDFS
        InputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"));
        // output stream to the local file system (adjust the path)
        OutputStream outputStream = new FileOutputStream(new File("E:/apache-activemq-5.15."));
        IOUtils.copyBytes(in, outputStream, configuration);
        in.close();
        outputStream.close();
    }

    /**
     * List all files under a directory.
     * @throws URISyntaxException
     * @throws IOException
     */
    @Test
    public void listFiles() throws Exception {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/hdfsapi/test"));
        for (FileStatus fileStatus : fileStatuses) {
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            short replication = fileStatus.getReplication();
            long len = fileStatus.getLen();
            String path = fileStatus.getPath().toString();
            System.out.println(isDir + ":" + replication + ":" + len + ":" + path);
        }
    }

    @Test
    public void upset() throws URISyntaxException, IOException {
        // upload a file; remember to change the path
        String file = "E:/hadoopTest/a.txt";
        InputStream inputStream = new FileInputStream(new File(file));
        FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        // fileSystem.copyFromLocalFile() calls IOUtils.copyBytes() under the hood
    }

    @Test
    public void download() throws URISyntaxException, IOException {
        // input stream from HDFS
        InputStream in = fileSystem.open(new Path("/a.txt"));
        // output stream to the local file system; remember to change the path
        String file = "E:/hadoopTest/a.txt";
        OutputStream outputStream = new FileOutputStream(new File(file));
        IOUtils.copyBytes(in, outputStream, configuration);
        in.close();
        outputStream.close();
    }

    @Test
    public void demo1() throws URISyntaxException, IOException {
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration);
        // 1. Create the directory teacher in HDFS.
        fileSystem.mkdirs(new Path("/hdfs/teacher"));
        // 2. Upload a file into the teacher directory (file names are samples; adjust them).
        String file = "E:/a.txt";
        InputStream inputStream = new FileInputStream(new File(file));
        OutputStream outputStream = fileSystem.create(new Path("/hdfs/teacher/a.txt"));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        // 3. Create the directory student in HDFS, with subdirectories Tom, LiMing and Jerry.
        fileSystem.mkdirs(new Path("/hdfs/student/Tom"));
        fileSystem.mkdirs(new Path("/hdfs/student/LiMing"));
        fileSystem.mkdirs(new Path("/hdfs/student/Jerry"));
        // 4. Upload a file into the Tom directory, and likewise into LiMing and Jerry.
        file = "E:/a.txt";
        inputStream = new FileInputStream(new File(file));
        outputStream = fileSystem.create(new Path("/hdfs/student/Tom/a.txt"));
        IOUtils.copyBytes(inputStream, outputStream, configuration);

        inputStream = new FileInputStream(new File(file));
        outputStream = fileSystem.create(new Path("/hdfs/student/LiMing/a.txt"));
        IOUtils.copyBytes(inputStream, outputStream, configuration);

        inputStream = new FileInputStream(new File(file));
        outputStream = fileSystem.create(new Path("/hdfs/student/Jerry/a.txt"));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        // 5. Rename student to MyStudent.
        fileSystem.rename(new Path("/hdfs/student"), new Path("/hdfs/MyStudent"));
        // 6. Download the file under Tom into the local directory E:/tom.
        file = "E:/tom/a.txt";
        inputStream = fileSystem.open(new Path("/hdfs/MyStudent/Tom/a.txt"));
        outputStream = new FileOutputStream(new File(file));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        // 7. Download the file under teacher into the same directory.
        file = "E:/tom/teacher.txt";
        inputStream = fileSystem.open(new Path("/hdfs/teacher/a.txt"));
        outputStream = new FileOutputStream(new File(file));
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        // 8. Delete the Tom and LiMing directories (under MyStudent after the rename).
        fileSystem.delete(new Path("/hdfs/MyStudent/Tom"), true);
        fileSystem.delete(new Path("/hdfs/MyStudent/LiMing"), true);
        inputStream.close();
        outputStream.close();
    }

    @Before
    public void setUp() throws Exception {
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "root");
        System.out.println("HDFSApp setUp");
    }

    @After
    public void tearDown() throws Exception {
        configuration = null;
        fileSystem = null;
        System.out.println("HDFSApp tearDown");
    }
}
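A side note on the stream handling above: every test closes its streams by hand, but IOUtils.copyBytes also has an overload whose final boolean closes both streams after the copy. A minimal sketch of that variant (CopyBytesDemo and the helper name are placeholders, not part of the original class):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class CopyBytesDemo {
    // Download an HDFS file; the final 'true' tells copyBytes to close
    // both streams itself, so no explicit close() calls are needed.
    static void download(FileSystem fileSystem, String src, String dst) throws Exception {
        InputStream in = fileSystem.open(new Path(src));
        OutputStream out = new FileOutputStream(dst);
        IOUtils.copyBytes(in, out, 4096, true);
    }
}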
Turn off the firewall on the server; otherwise the client may not be able to connect to HDFS.
If hadoop namenode -format has been run multiple times, re-format HDFS (hadoop namenode -format) and then start HDFS again: sbin/start-dfs.sh
An upload done through the hdfs shell (put) uses the replication factor configured above, i.e. 1.
An upload done through the Java API, with no replication factor set manually on the client, uses Hadoop's default factor of 3, because the factor comes from the client-side Configuration rather than the server's hdfs-site.xml.
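If the Java API upload should match the cluster's factor of 1, the client can set dfs.replication on its own Configuration before creating files. A minimal sketch under that assumption (ReplicationDemo and the target path are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class ReplicationDemo {
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        // set the client-side replication factor so Java API uploads
        // match the cluster's dfs.replication=1 instead of the default 3
        configuration.set("dfs.replication", "1");
        FileSystem fileSystem = FileSystem.get(
                new URI("hdfs://192.168.10.99:8020"), configuration, "root");
        FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/test/repl.txt"));
        out.write("replication demo".getBytes());
        out.close();
        fileSystem.close();
    }
}

After running it, hadoop fs -ls /hdfsapi/test should report a replication factor of 1 for the new file.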