Talend Hortonworks: Config error debug

Talend Hortonworks: Config error
Starting job migr at 14:58 15/03/2016.
[statistics] connecting to socket on port 3697
[statistics] connected
[WARN ]: org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[ERROR]: org.apache.hadoop.util.Shell - Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
 at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:355)
 at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:370)
 at org.apache.hadoop.util.Shell.<clinit>(Shell.java:363)
 at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
 at org.apache.hadoop.security.Groups.parseStaticMapping(Groups.java:104)
 at org.apache.hadoop.security.Groups.<init>(Groups.java:86)
 at org.apache.hadoop.security.Groups.<init>(Groups.java:66)
 at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:280)
 at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:271)
 at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:248)
 at org.apache.hadoop.security.UserGroupInformation.isAuthenticationMethodEnabled(UserGroupInformation.java:325)
 at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:319)
 at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:567)
 at org.apache.hadoop.hdfs.NameNodeProxies.createNNProxyWithClientProtocol(NameNodeProxies.java:420)
 at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:316)
 at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:178)
 at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:665)
 at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:601)
 at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:148)
 at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2596)
 at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:91)
 at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2630)
 at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2612)
 at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:370)
 at org.apache.hadoop.fs.FileSystem$1.run(FileSystem.java:159)
 at org.apache.hadoop.fs.FileSystem$1.run(FileSystem.java:156)
 at java.security.AccessController.doPrivileged(Native Method)
 at javax.security.auth.Subject.doAs(Unknown Source)
 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
 at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:156)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:475)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.BlockReaderFactory - I/O error constructing remote block reader.
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Failed to connect to sandbox.hortonworks.com/10.0.2.15:50010 for block, add to deadNodes and continue. java.net.ConnectException: Connection timed out: no further information
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - DFS chooseDataNode: got # 1 IOException, will wait for 763.5654367327579 msec.
[WARN ]: org.apache.hadoop.hdfs.BlockReaderFactory - I/O error constructing remote block reader.
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Failed to connect to sandbox.hortonworks.com/10.0.2.15:50010 for block, add to deadNodes and continue. java.net.ConnectException: Connection timed out: no further information
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - DFS chooseDataNode: got # 2 IOException, will wait for 6387.60824878827 msec.
[WARN ]: org.apache.hadoop.hdfs.BlockReaderFactory - I/O error constructing remote block reader.
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Failed to connect to sandbox.hortonworks.com/10.0.2.15:50010 for block, add to deadNodes and continue. java.net.ConnectException: Connection timed out: no further information
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - DFS chooseDataNode: got # 3 IOException, will wait for 9106.23384585922 msec.
[WARN ]: org.apache.hadoop.hdfs.BlockReaderFactory - I/O error constructing remote block reader.
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Failed to connect to sandbox.hortonworks.com/10.0.2.15:50010 for block, add to deadNodes and continue. java.net.ConnectException: Connection timed out: no further information
java.net.ConnectException: Connection timed out: no further information
 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
 at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source)
 at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
 at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
 at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3101)
 at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:755)
 at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:670)
 at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:337)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:576)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Could not obtain block: BP-1124468226-10.0.2.15-1429879726015:blk_1073742146_1327 file=/user/hue/amine No live nodes contain current block Block locations: 10.0.2.15:50010 Dead nodes: 10.0.2.15:50010. Throwing a BlockMissingException
[WARN ]: org.apache.hadoop.hdfs.DFSClient - Could not obtain block: BP-1124468226-10.0.2.15-1429879726015:blk_1073742146_1327 file=/user/hue/amine No live nodes contain current block Block locations: 10.0.2.15:50010 Dead nodes: 10.0.2.15:50010. Throwing a BlockMissingException
[WARN ]: org.apache.hadoop.hdfs.DFSClient - DFS Read
org.apache.hadoop.hdfs.BlockMissingException: Could not obtain block: BP-1124468226-10.0.2.15-1429879726015:blk_1073742146_1327 file=/user/hue/amine
 at org.apache.hadoop.hdfs.DFSInputStream.chooseDataNode(DFSInputStream.java:888)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:568)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
Exception in component tHDFSInput_2
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
[statistics] disconnected
org.apache.hadoop.hdfs.BlockMissingException: Could not obtain block: BP-1124468226-10.0.2.15-1429879726015:blk_1073742146_1327 file=/user/hue/amine
 at org.apache.hadoop.hdfs.DFSInputStream.chooseDataNode(DFSInputStream.java:888)
 at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:568)
 at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:800)
 at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:847)
 at java.io.DataInputStream.read(Unknown Source)
 at java.io.FilterInputStream.read(Unknown Source)
 at java.io.PushbackInputStream.read(Unknown Source)
 at org.talend.fileprocess.UnicodeReader.<init>(UnicodeReader.java:25)
 at org.talend.fileprocess.TOSDelimitedReader.<init>(TOSDelimitedReader.java:77)
 at org.talend.fileprocess.FileInputDelimited.<init>(FileInputDelimited.java:93)
 at zurich_poc.migr_0_1.migr.tHDFSInput_2Process(migr.java:526)
 at zurich_poc.migr_0_1.migr.runJobInTOS(migr.java:878)
 at zurich_poc.migr_0_1.migr.main(migr.java:735)
Job migr ended at 15:00 15/03/2016. [exit code=1]

 

 

Solution:

The log shows two separate issues. First, the `winutils.exe` error at the start occurs because the job runs on Windows without a Hadoop home configured — set the `HADOOP_HOME` environment variable (or the `hadoop.home.dir` JVM property) to a directory containing `bin\winutils.exe`. This warning alone does not stop the job. Second — the actual failure — the HDFS client connects to the NameNode but then times out reaching the DataNode at `sandbox.hortonworks.com/10.0.2.15:50010`: the sandbox VM advertises its internal NAT address (10.0.2.15), which is not reachable from the host. The usual fix is to add `dfs.client.use.datanode.hostname=true` to the Hadoop properties of the Talend HDFS connection, map `sandbox.hortonworks.com` to the VM's reachable IP in the local `hosts` file, and ensure port 50010 is forwarded/open on the VM. (Exact steps depend on your VM network setup — verify against your sandbox configuration.)

 

 

 

 

Leave a Reply