1. Read
package com.sdnware.start01.hadoop;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HDFSRead {

    public static void main(String[] args) {
        read1();
    }

    // Read a single HDFS file into memory and print it.
    public static void read0() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            InputStream in = hdfs.open(new Path("/user/wordcount/My Father0.txt"));
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            IOUtils.copyBytes(in, out, 4096, true);
            String str = out.toString();
            System.out.println(str);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    // List a directory and print the content of every regular file in it.
    public static void read1() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try {
            final FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path path = new Path("/user/wordcount");
            if (hdfs.exists(path)) {
                List<FileStatus> listStatus = Arrays.asList(hdfs.listStatus(path));
                listStatus.forEach(new Consumer<FileStatus>() {
                    public void accept(FileStatus t) {
                        if (t.isFile()) {
                            FSDataInputStream in = null;
                            try {
                                Path path2 = t.getPath();
                                in = hdfs.open(path2);
                                System.out.println("Read File:" + path2.getName());
                                ByteArrayOutputStream out = new ByteArrayOutputStream();
                                IOUtils.copyBytes(in, out, 4096, true);
                                String str = out.toString();
                                System.out.println(str);
                            } catch (IOException e) {
                                e.printStackTrace();
                            } finally {
                                IOUtils.closeStream(in);
                            }
                        }
                    }
                });
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
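As a side note, on Java 8 the directory walk in read1() does not need the anonymous Consumer or the explicit closeStream() bookkeeping. The following is only a sketch of an equivalent helper (not part of the original example), assuming the same hdfs handle and directory as above:

// Sketch only: same behavior as read1(), but a plain for loop plus
// try-with-resources replaces the finally/closeStream handling.
static void readAll(FileSystem hdfs, Path dir) throws IOException {
    if (!hdfs.exists(dir)) {
        return;
    }
    for (FileStatus t : hdfs.listStatus(dir)) {
        if (t.isFile()) {
            try (FSDataInputStream in = hdfs.open(t.getPath())) {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                System.out.println("Read File:" + t.getPath().getName());
                IOUtils.copyBytes(in, out, 4096, false); // false: try-with-resources closes in
                System.out.println(out.toString());
            }
        }
    }
}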
2. Write
package com.sdnware.start01.hadoop;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSWrite {

    public static void main(String[] args) {
        HDFSWrite writer = new HDFSWrite();
        writer.writer1();
    }

    // Upload every file under a local directory with a manual read/write loop.
    public void writer0() {
        FSDataOutputStream out = null;
        FSDataInputStream in = null;
        try {
            System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
            Configuration conf = new Configuration();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            FileSystem local = FileSystem.getLocal(conf);
            Path inputDir = new Path("D:/testfile/");
            // every file under this local directory will be uploaded
            Path hdfsFile = new Path("/user/wordcount");
            hdfs.mkdirs(hdfsFile);
            FileStatus[] inputFiles = local.listStatus(inputDir);
            for (int i = 0; i < inputFiles.length; i++) {
                System.out.println(inputFiles[i].getPath().getName());
                in = local.open(inputFiles[i].getPath());
                out = hdfs.create(new Path("/user/wordcount/" + inputFiles[i].getPath().getName()));
                byte buffer[] = new byte[256];
                int bytesRead = 0;
                while ((bytesRead = in.read(buffer)) > 0) { // read up to one buffer per pass
                    out.write(buffer, 0, bytesRead);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
        }
    }

    // Same upload, but Hadoop's IOUtils.copyBytes does the buffering.
    public void writer1() {
        FSDataOutputStream out = null;
        FSDataInputStream in = null;
        try {
            System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
            System.setProperty("HADOOP_USER_NAME", "root");
            Configuration conf = new Configuration();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            FileSystem local = FileSystem.getLocal(conf);
            Path inputDir = new Path("D:/testfile/");
            // every file under this local directory will be uploaded
            Path hdfsFile = new Path("/user/wordcount");
            hdfs.mkdirs(hdfsFile);
            FileStatus[] inputFiles = local.listStatus(inputDir);
            for (int i = 0; i < inputFiles.length; i++) {
                System.out.println(inputFiles[i].getPath().getName());
                in = local.open(inputFiles[i].getPath());
                out = hdfs.create(new Path("/user/wordcount/" + inputFiles[i].getPath().getName()));
                org.apache.hadoop.io.IOUtils.copyBytes(in, out, 4096, true);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } finally {
            org.apache.hadoop.io.IOUtils.closeStream(in);
            org.apache.hadoop.io.IOUtils.closeStream(out);
        }
    }
}
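For uploading a whole local directory, the per-file open/create loop can also be replaced by FileSystem.copyFromLocalFile, which copies directories recursively. A minimal sketch only, assuming the same cluster URI and paths as writer1():

Configuration conf = new Configuration();
FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
// delSrc = false keeps the local files, overwrite = true replaces existing HDFS copies
hdfs.copyFromLocalFile(false, true, new Path("D:/testfile/"), new Path("/user/wordcount"));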
3. Delete
package com.sdnware.start01.hadoop;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSDelete {

    public static void main(String[] args) {
        delete1();
    }

    // Delete a single file; the second argument false means non-recursive.
    public static void delete0() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path hdfsFile = new Path("/user/wordcount/My Father.txt");
            hdfs.delete(hdfsFile, false);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    // Delete a directory and everything under it; true enables recursive delete.
    public static void delete1() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path hdfsFile = new Path("/usr");
            hdfs.delete(hdfsFile, true);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
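Note that delete1() removes /usr and everything under it because the recursive flag is true. A sketch of a more defensive variant (not from the original example), assuming the same hdfs handle as above:

// Sketch only: check the target before a recursive delete so a mistyped path
// does not wipe an unintended directory tree.
Path hdfsFile = new Path("/usr");
if (hdfs.exists(hdfsFile) && hdfs.isDirectory(hdfsFile)) {
    boolean removed = hdfs.delete(hdfsFile, true); // true: also delete children
    System.out.println("deleted: " + removed);
}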