-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathHDFSWriter.java
More file actions
113 lines (94 loc) · 2.68 KB
/
HDFSWriter.java
File metadata and controls
113 lines (94 loc) · 2.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.huawei.common.exception.ParameterException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Writes or appends the contents of an {@link InputStream} to a single file in HDFS.
 *
 * <p>Not thread-safe: each instance holds per-operation stream state, so callers must
 * not invoke {@link #doWrite} / {@link #doAppend} concurrently on the same instance.
 */
public class HdfsWriter {
    /** Raw HDFS output stream for the current operation; null when no operation is active. */
    private FSDataOutputStream hdfsOutStream;
    /** Buffered wrapper around {@link #hdfsOutStream}; null when no operation is active. */
    private BufferedOutputStream bufferOutStream;
    private final FileSystem fSystem;
    private final String fileFullName;

    /**
     * Creates a writer bound to one target file.
     *
     * @param fSystem      handle to the HDFS file system
     * @param fileFullName full path of the target file in HDFS
     * @throws ParameterException if either argument is null
     */
    public HdfsWriter(FileSystem fSystem, String fileFullName) throws ParameterException {
        if ((null == fSystem) || (null == fileFullName)) {
            throw new ParameterException("some of input parameters are null.");
        }
        this.fSystem = fSystem;
        this.fileFullName = fileFullName;
    }

    /**
     * Writes the inputStream to the file in HDFS, creating it (or overwriting an
     * existing file) via {@link FileSystem#create}.
     *
     * @param inputStream source of the bytes to write; not closed by this method
     * @throws IOException        if creating or writing the HDFS file fails
     * @throws ParameterException if inputStream is null
     */
    public void doWrite(InputStream inputStream) throws IOException, ParameterException {
        if (null == inputStream) {
            throw new ParameterException("some of input parameters are null.");
        }
        setWriteResource();
        try {
            outputToHDFS(inputStream);
        } finally {
            closeResource();
        }
    }

    /**
     * Appends the inputStream to the existing file in HDFS via {@link FileSystem#append}.
     *
     * @param inputStream source of the bytes to append; not closed by this method
     * @throws IOException        if opening the file for append or writing fails
     * @throws ParameterException if inputStream is null
     */
    public void doAppend(InputStream inputStream) throws IOException, ParameterException {
        if (null == inputStream) {
            throw new ParameterException("some of input parameters are null.");
        }
        setAppendResource();
        try {
            outputToHDFS(inputStream);
        } finally {
            closeResource();
        }
    }

    /** Copies all bytes from inputStream into the open streams, then flushes to HDFS. */
    private void outputToHDFS(InputStream inputStream) throws IOException {
        final int countForOneRead = 10240; // 10240 Bytes each time
        final byte[] buff = new byte[countForOneRead];
        int count;
        while ((count = inputStream.read(buff, 0, countForOneRead)) > 0) {
            bufferOutStream.write(buff, 0, count);
        }
        bufferOutStream.flush();
        // hflush: push data to the datanodes so it is visible to new readers.
        hdfsOutStream.hflush();
    }

    /** Opens the target file for (over)write and wraps it in a buffered stream. */
    private void setWriteResource() throws IOException {
        Path filepath = new Path(fileFullName);
        hdfsOutStream = fSystem.create(filepath);
        bufferOutStream = new BufferedOutputStream(hdfsOutStream);
    }

    /** Opens the target file for append and wraps it in a buffered stream. */
    private void setAppendResource() throws IOException {
        Path filepath = new Path(fileFullName);
        hdfsOutStream = fSystem.append(filepath);
        bufferOutStream = new BufferedOutputStream(hdfsOutStream);
    }

    /**
     * Best-effort close of the per-operation streams. Never throws, matching the
     * original contract that cleanup failures must not mask a write error.
     */
    private void closeResource() {
        // Close the buffered wrapper FIRST: its close() flushes any pending bytes
        // into hdfsOutStream and then closes the underlying stream. (Closing
        // hdfsOutStream first — as the original code did — could lose buffered
        // data and make the wrapper's close fail.)
        if (bufferOutStream != null) {
            try {
                bufferOutStream.close();
            } catch (IOException e) {
                System.err.println("HdfsWriter: failed to close buffered stream for "
                        + fileFullName + ": " + e);
            } finally {
                bufferOutStream = null;
            }
        }
        // Normally already closed by the wrapper above; this guards the case where
        // setWriteResource/setAppendResource failed after opening hdfsOutStream but
        // before constructing the BufferedOutputStream.
        if (hdfsOutStream != null) {
            try {
                hdfsOutStream.close();
            } catch (IOException e) {
                System.err.println("HdfsWriter: failed to close HDFS stream for "
                        + fileFullName + ": " + e);
            } finally {
                hdfsOutStream = null;
            }
        }
    }
}