hadoop hdfs file write and read example

bigdata/hadoop

DBILITY 2016. 9. 7. 11:15
  1. pom.xml
    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
        <groupId>com.dbility.hadoop</groupId>
        <artifactId>example01</artifactId>
        <version>0.0.1</version>
        <build>
            <finalName>${project.artifactId}</finalName>
        </build>
        <properties>
            <hadoop.version>1.2.1</hadoop.version>
            <slf4j.version>1.4.3</slf4j.version>
        </properties>
        <dependencies>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-core</artifactId>
                <version>${hadoop.version}</version>
            </dependency>
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-api</artifactId>
                <version>${slf4j.version}</version>
            </dependency>  
        </dependencies>
    </project>
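
    A note on logging: slf4j-api is only the facade. Without a binding on the classpath, SLF4J falls back to its no-op logger and the LOG output of the class below is silently discarded. A minimal sketch of one way to bind it, assuming you route to the log4j that hadoop-core 1.2.1 already declares as a dependency (this extra dependency is my addition, not part of the original pom):

            <!-- assumed addition: SLF4J-to-log4j binding so LOG output actually appears -->
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
                <version>${slf4j.version}</version>
            </dependency>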
  2. SingleFileWriteRead.java
    package com.dbility.hadoop;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    
    public class SingleFileWriteRead {
    
    	private static final Logger LOG = LoggerFactory.getLogger(SingleFileWriteRead.class);
    
    	public static void main(String[] args) {
    
    		if (args.length != 2) {
    			LOG.info("Usage : <filename> <contents>");
    			System.exit(1);
    		}
    
    		try {
    			Configuration conf = new Configuration();
    			FileSystem hdfs = FileSystem.get(conf);
    
    			Path path = new Path(args[0]);
    
    			if ( hdfs.exists(path) ){
    				hdfs.delete(path, true);
    			}
    
    			FSDataOutputStream outputStream = hdfs.create(path);
    			outputStream.writeUTF(args[1]);
    			outputStream.close();
    
    			FSDataInputStream inputStream = hdfs.open(path);
    			String inputString = inputStream.readUTF();
    			inputStream.close();
    
    			LOG.info("contents : {}",inputString);
    
    		} catch (Exception e) {
    			LOG.info("Error : {}", e.getMessage());
    		}
    	}
    }
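
To try it, build the jar and launch it with the hadoop command. A usage sketch (the file name and contents are placeholder arguments; with an empty Configuration, Hadoop 1.x reads fs.default.name from the core-site.xml on its classpath and, if none is found, falls back to the local file system):

    mvn package
    hadoop jar target/example01.jar com.dbility.hadoop.SingleFileWriteRead sample.txt "hello hdfs"

Note that writeUTF()/readUTF() use DataOutputStream's modified UTF-8 encoding with a two-byte length prefix, so the stored file is not plain text; hadoop fs -cat will show two extra bytes before the contents.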

Reference: 시작하세요! 하둡 프로그래밍 개정2판 (Beginning Hadoop Programming, 2nd revised edition), Wikibooks, by 정재화

 
