Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
- Thu Dec 17 18:37:51 CST 2009 Starting master on hadoop01.local
- ulimit -n 1024
- 2009-12-17 18:37:51,815 INFO org.apache.hadoop.hbase.master.HMaster: vmName=Java HotSpot(TM) 64-Bit Server VM, vmVendor=Sun Microsystems Inc., vmVersion=14.3-b01
- 2009-12-17 18:37:51,816 INFO org.apache.hadoop.hbase.master.HMaster: vmInputArguments=[-Xmx4000m, -XX:+HeapDumpOnOutOfMemoryError, -XX:+UseConcMarkSweepGC, -XX:+CMSIncrementalMode, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.password.file=/etc/hbase-0.20/conf/jmxremote.password, -Dcom.sun.management.jmxremote.access.file=/etc/hbase-0.20/conf/jmxremote.access, -Dcom.sun.management.jmxremote.port=10011, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.password.file=/etc/hbase-0.20/conf/jmxremote.password, -Dcom.sun.management.jmxremote.access.file=/etc/hbase-0.20/conf/jmxremote.access, -Dcom.sun.management.jmxremote.port=10011, -Dhbase.log.dir=/opt/hbase-trunk/bin/../logs, -Dhbase.log.file=hbase-root-master-hadoop01.local.log, -Dhbase.home.dir=/opt/hbase-trunk/bin/.., -Dhbase.id.str=root, -Dhbase.root.logger=INFO,DRFA]
- 2009-12-17 18:37:52,024 WARN org.apache.hadoop.conf.Configuration: mapred.task.id is deprecated. Instead, use mapreduce.task.attempt.id
- 2009-12-17 18:37:52,086 ERROR org.apache.hadoop.hbase.master.HMaster: Failed to start master
- java.lang.reflect.InvocationTargetException
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
- at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
- at org.apache.hadoop.hbase.master.HMaster.doMain(HMaster.java:1187)
- at org.apache.hadoop.hbase.master.HMaster.main(HMaster.java:1228)
- Caused by: org.apache.hadoop.ipc.RPC$VersionMismatch: Protocol org.apache.hadoop.hdfs.protocol.ClientProtocol version mismatch. (client = 50, server = 51)
- at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:371)
- at org.apache.hadoop.hdfs.DFSClient.createRPCNamenode(DFSClient.java:174)
- at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:282)
- at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:240)
- at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:83)
- at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:1752)
- at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:71)
- at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:1780)
- at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1768)
- at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:195)
- at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:103)
- at org.apache.hadoop.hbase.master.HMaster.<init>(HMaster.java:166)
- ... 6 more
Add Comment
Please sign in to add a comment