Reading and Writing Data in Hadoop via the FileSystem API

Source: reposted

While reading the Hadoop guide (which has better sample code), I wrote a small example of my own and tested it successfully on a pseudo-distributed cluster. I'm still not very familiar with some of the Java APIs, so this is all part of getting acquainted.

 

This example is mainly for getting familiar with a few APIs and for understanding how the classes call into and convert between one another. Relearning a language mostly comes down to working with its APIs, so take it slowly, no rush, come on.


import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileReadAndWrite {
    public static void main(String[] args) throws Exception {
        String pathString = "hdfs://192.168.56.171:9000/testRead";
        URI pathURI = URI.create(pathString);
        Configuration conf = new Configuration();
        // get() returns the FileSystem for this URI's scheme and authority (HDFS here).
        FileSystem inputFileSystem = FileSystem.get(pathURI, conf);

/*      // First attempt, kept for reference: a fixed 1 KB buffer plus an ignored
        // read() return value truncates large files and prints garbage after short ones.
        InputStream in = inputFileSystem.open(new Path(pathString));
        byte[] fileContext = new byte[1024];
        in.read(fileContext);
        String str = new String(fileContext);
        System.out.println(str); */

        // Better: stream the whole file to stdout and close the stream even on error.
        InputStream in = null;
        try {
            in = inputFileSystem.open(new Path(pathString));
            IOUtils.copyBytes(in, System.out, conf);
        } finally {
            IOUtils.closeStream(in);
        }

        // Javadoc notes for the calls above (the newline escape is "\n", not "/n").
        String writeString = "" +
                " static FileSystem get(URI uri, Configuration conf)\n" +
                " --Returns the FileSystem for this URI's scheme and authority.\n" +
                " FSDataInputStream open(Path f)\n" +
                " --Opens an FSDataInputStream at the indicated Path.\n" +
                "static void copyBytes(InputStream in, OutputStream out, Configuration conf)\n" +
                "--Copies from one stream to another.\n";
        // getBytes() allocates the array itself; the original new byte[1024] was dead code.
        byte[] stringBuffer = writeString.getBytes();

        // Write the notes to a new HDFS file.
        Path outputPath = new Path("hdfs://192.168.56.171:9000/testWrite");
        OutputStream out = inputFileSystem.create(outputPath);
        out.write(stringBuffer);
        out.close();
    }
}
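One weakness remains in the write half: out.close() is skipped if write() throws. Below is a minimal sketch of a more defensive copy in the guide's style. The host/port and /testRead path come from the example above; the /testWrite2 target, the 4096-byte buffer size, and the dot-printing Progressable are my own assumptions, not from the original.

import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

public class FileCopyWithProgress {
    public static void main(String[] args) throws Exception {
        String src = "hdfs://192.168.56.171:9000/testRead";
        String dst = "hdfs://192.168.56.171:9000/testWrite2"; // hypothetical target path
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(src), conf);

        InputStream in = null;
        OutputStream out = null;
        try {
            in = fs.open(new Path(src));
            // The create(Path, Progressable) overload reports progress during the write.
            out = fs.create(new Path(dst), new Progressable() {
                public void progress() {
                    System.out.print("."); // invoked periodically as data reaches HDFS
                }
            });
            // Explicit 4096-byte buffer; close=false leaves cleanup to the finally block.
            IOUtils.copyBytes(in, out, 4096, false);
        } finally {
            IOUtils.closeStream(out);
            IOUtils.closeStream(in);
        }
    }
}

Passing false as copyBytes' close flag keeps both streams open, so the finally block is the single place they are released, even when the copy fails partway; the Progressable callback gives a crude progress indicator for large files.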
