hdfs - FileNotFoundException when reading data from a Hadoop URL


I am trying to follow Tom White's Hadoop: The Definitive Guide and am stuck at "Reading Data from a Hadoop URL". I have tried various tweaks, but I keep getting a "file not found" error for the file's path on HDFS.

Here is my Java class:

    package javainterfacepractice;

    import java.io.InputStream;
    import java.net.URI;
    import java.net.URL;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public class URLCat {

        static {
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        }

        public static void main(String[] args) throws Exception {
            String uri = args[0];
            InputStream in = null;
            Configuration conf = new Configuration();
            conf.addResource(new Path("/usr/local/hadoop/etc/hadoop/core-site.xml"));
            conf.addResource(new Path("/usr/local/hadoop/etc/hadoop/hdfs-site.xml"));

            FileSystem fs = FileSystem.get(URI.create(uri), conf);
            try {
                in = fs.open(new Path(uri));
                IOUtils.copyBytes(in, System.out, 4096, false);
            } finally {
                IOUtils.closeStream(in);
            }
        }
    }
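
As an aside, the static block that registers FsUrlStreamHandlerFactory only takes effect when the stream is opened through java.net.URL; the FileSystem-based code above never actually uses it. For reference, here is a sketch of the URL-based variant from the book (the hdfs://localhost:9000 authority is just an example; substitute your own NameNode address):

    // Sketch of the java.net.URL variant that the FsUrlStreamHandlerFactory
    // registration enables; expects a full URI such as
    // hdfs://localhost:9000/input/1901
    InputStream in = null;
    try {
        in = new URL(uri).openStream();
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }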

Here is the command I am using to run the code:

    hadoop jar /home/hduser/workspace/maxtemperature/target/maxtemperature-0.0.1-SNAPSHOT.jar javainterfacepractice.URLCat /input/1901

Here is the error I am getting:

    Exception in thread "main" java.io.FileNotFoundException: File does not exist: /input/1901
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
        at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1228)
        at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1213)
        at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1201)
        at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:306)
        at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:272)
        at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:264)
        at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1526)
        at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:303)
        at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:299)
        at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
        at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:299)
        at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:767)
        at javainterfacepractice.URLCat.main(URLCat.java:28)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
    Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /input/1901
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
        at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

        at org.apache.hadoop.ipc.Client.call(Client.java:1475)
        at org.apache.hadoop.ipc.Client.call(Client.java:1412)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
        at com.sun.proxy.$Proxy9.getBlockLocations(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:255)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
        at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
        at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1226)
        ... 18 more

And here is the HDFS structure:

    drwxr-xr-x   - hduser supergroup          0 2016-04-20 06:50 input
    -rw-r--r--   1 hduser supergroup     888190 2016-04-17 12:53 input/1901
    -rw-r--r--   1 hduser supergroup     888978 2016-04-20 06:50 input/1902
    drwxr-xr-x   - hduser supergroup          0 2016-04-19 22:20 output
    -rw-r--r--   1 hduser supergroup          0 2016-04-19 22:20 output/_SUCCESS
    -rw-r--r--   1 hduser supergroup          9 2016-04-19 22:20 output/part-r-00000
    drwxr-xr-x   - hduser supergroup          0 2016-04-20 06:52 output2
    -rw-r--r--   1 hduser supergroup          0 2016-04-20 06:52 output2/_SUCCESS
    -rw-r--r--   1 hduser supergroup         18 2016-04-20 06:52 output2/part-r-00000
    drwxr-xr-x   - hduser supergroup          0 2016-04-20 07:20 output3
    -rw-r--r--   1 hduser supergroup          0 2016-04-20 07:20 output3/_SUCCESS
    -rw-r--r--   1 hduser supergroup         18 2016-04-20 07:20 output3/part-r-00000

If the file is present in HDFS, why am I not able to run this? Any help is appreciated.

Assuming this hasn't already been fixed in the 10 months since it was submitted, try the full HDFS URI for the input:

    hdfs://server:port/input/1901
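
One more thing worth checking: the directory listing in the question shows relative paths (input/1901, not /input/1901), and HDFS resolves relative paths against the user's home directory, typically /user/<username>. So the file may actually live at /user/hduser/input/1901, in which case the absolute path /input/1901 passed on the command line would not exist, which is exactly what the NameNode reports. Here is a minimal diagnostic sketch using the FileSystem API (the class name and the /user/hduser location are assumptions, not part of the original question):

    package javainterfacepractice;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical diagnostic: print the HDFS working directory and
    // check both candidate locations of the 1901 file.
    public class WhereIsMyFile {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());

            // Relative Paths resolve against this directory, e.g. /user/hduser
            System.out.println("Working directory: " + fs.getWorkingDirectory());

            // The absolute path used in the failing command...
            System.out.println("/input/1901 exists: " + fs.exists(new Path("/input/1901")));
            // ...and the home-relative path suggested by the ls output
            System.out.println("input/1901 exists: " + fs.exists(new Path("input/1901")));
        }
    }

If the second check prints true, either pass /user/hduser/input/1901 (or the equivalent hdfs:// URI) to URLCat, or move the files to /input with hdfs dfs -mv.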

