Cannot use Kerberos ticket to create a Hadoop file with Java code
Our Hadoop cluster uses Kerberos, so we have to run kinit first; only then does a command like "hadoop fs -ls /" work. I tried to log in from Java using JAAS and GSSAPI and then create a file in the cluster, but it failed.
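(For context, a sketch of the shell-side steps described above, assuming the keytab path and principal quoted later in this question; both will differ on other clusters:)

    # obtain a TGT from the keytab instead of typing a password
    kinit -kt /etc/hadoop/conf/hdfs.keytab hdfs/172.16.0.239@H236
    # confirm the ticket is now in the cache
    klist
    # with the ticket in place, this works against the Kerberized cluster
    hadoop fs -ls /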
Here is the code:

    import java.security.PrivilegedAction;
    import javax.security.auth.Subject;
    import javax.security.auth.login.LoginContext;
    import javax.security.auth.login.LoginException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.ietf.jgss.*;

    public class Client {
        public static void main(String[] args) throws LoginException {
            System.setProperty("sun.security.krb5.debug", "false");
            System.setProperty("java.security.krb5.realm", "H236");
            System.setProperty("java.security.krb5.kdc", "172.16.0.236");
            System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
            System.setProperty("java.security.auth.login.config", "/etc/hadoop/conf/jaas.conf");

            // JAAS login using the "Client" entry from jaas.conf
            LoginContext lc = new LoginContext("Client");
            lc.login();
            System.out.println("Authentication succeeded!");

            Subject subject = lc.getSubject();
            Subject.doAs(subject, new PrivilegedAction<byte[]>() {
                public byte[] run() {
                    Configuration conf = new Configuration();
                    try {
                        // build a GSS-API context for the Kerberos v5 mechanism
                        Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                        GSSManager manager = GSSManager.getInstance();
                        GSSName clientName = manager.createName(
                                "hdfs/172.16.0.239@H236", GSSName.NT_USER_NAME);
                        GSSCredential clientCreds = manager.createCredential(
                                clientName, GSSCredential.DEFAULT_LIFETIME,
                                krb5Mechanism, GSSCredential.INITIATE_ONLY);
                        GSSName serverName = manager.createName(
                                "hdfs@172.16.0.239", GSSName.NT_HOSTBASED_SERVICE);
                        GSSContext context = manager.createContext(
                                serverName, krb5Mechanism, clientCreds,
                                GSSContext.DEFAULT_LIFETIME);
                        context.requestMutualAuth(true);
                        context.requestConf(false);
                        context.requestInteg(true);
                        System.out.println(clientCreds.getName().toString());
                        System.out.println(clientCreds.getRemainingLifetime());
                        byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                        // create a file on the Hadoop cluster
                        FileSystem fs = FileSystem.get(conf);
                        Path f = new Path("hdfs:///hdfs/123");
                        FSDataOutputStream s = fs.create(f, true);
                        System.out.println("done\n");
                        for (int i = 0; i < 100; ++i)
                            s.writeChars("test");
                        s.close();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    return null;
                } // end of run
            });
        } // end of main
    }
My jaas.conf is below:
    Client {
        com.sun.security.auth.module.Krb5LoginModule required
        debug=true
        storeKey=true
        doNotPrompt=true
        useKeyTab=true
        keyTab="/etc/hadoop/conf/hdfs.keytab"
        principal="hdfs/172.16.0.239@H236";
    };
My login user name is root. Before running the code with "hadoop jar ./client.jar", I ran kdestroy to delete the Kerberos ticket cache; then I got the error below:
    Authentication succeeded!
    ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
    ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
    ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
    WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
    java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "xp236/172.16.0.236"; destination host is: "172.16.0.236":8020;
        at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)
I don't know how to make this work. Can anyone help me? Thanks a lot.
Please use the code below to access secure Hadoop; it goes through a proxy user, similar to how Oozie accesses the cluster. You also need to configure the proxy user in core-site.xml; a sketch of those entries comes first, then the Java code.
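(A minimal sketch of the core-site.xml proxy-user entries referred to above. hadoop.proxyuser.*.hosts and hadoop.proxyuser.*.groups are the standard property names; the superuser name "hdfs" is an assumption here, substitute the principal your client actually logs in as, and narrow the wildcards in production:)

    <!-- core-site.xml on the NameNode: let the Kerberos-authenticated
         superuser (assumed here to be "hdfs") impersonate other users -->
    <property>
      <name>hadoop.proxyuser.hdfs.hosts</name>
      <value>*</value>
    </property>
    <property>
      <name>hadoop.proxyuser.hdfs.groups</name>
      <value>*</value>
    </property>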
    import java.security.PrivilegedExceptionAction;
    import java.text.SimpleDateFormat;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    public class HdfsTest {
        public static void main(String args[]) {
            final Configuration conf = new Configuration();
            try {
                conf.set("fs.defaultFS", "hdfs://ibm-r1-node7.ibmbi-nextgen.com:8020");
                UserGroupInformation.setConfiguration(conf);
                // args[0] is the user to impersonate, args[1] the principal
                // whose credentials sit in the ticket cache
                UserGroupInformation ugi = UserGroupInformation.createProxyUser(
                        args[0],
                        UserGroupInformation.getUGIFromTicketCache("/tmp/krb5cc_0", args[1]));
                System.out.println("--------------status---:" + UserGroupInformation.isLoginKeytabBased());
                System.out.println("---------after login-----:");
                ugi.doAs(new PrivilegedExceptionAction<Void>() {
                    public Void run() throws Exception {
                        FileSystem fs = FileSystem.get(conf);
                        Path path = new Path("hdfs://10.132.100.224:8020/tmp/root");
                        FileStatus[] statusArray = fs.listStatus(path);
                        System.out.println("------------------------------" + fs.listStatus(path));
                        int count = 0;
                        SimpleDateFormat sdf = null;
                        for (FileStatus status : statusArray) {
                            long blockSize = status.getBlockSize();
                            String permission = status.getPermission() + "";
                            int replication = status.getReplication();
                            String owner = status.getOwner();
                            String paths = status.getPath() + "";
                            boolean file = status.isFile();
                            long length = status.getLen();
                            String group = status.getGroup();
                            System.out.println("BlockSize   :" + blockSize);
                            System.out.println("Group       :" + group);
                            System.out.println("Length      :" + length);
                            System.out.println("Owner       :" + owner);
                            System.out.println("Replication :" + replication);
                            System.out.println("File        :" + file);
                            System.out.println("Permission  :" + permission);
                            System.out.println("Path        :" + paths);
                            count++;
                            System.out.println("-------------without auth-----count---------------" + count);
                        }
                        return null;
                    }
                });
            } catch (Exception e) {
                System.out.println("--------Exception________________");
                e.printStackTrace();
            }
        }
    }
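If you do not need to impersonate another user, a simpler keytab-based variant often suffices. The following is a hedged sketch, not from the original post: it uses Hadoop's UserGroupInformation keytab login with the keytab path and principal quoted in the question (adjust both for your cluster). The point is that Hadoop's RPC layer takes its Kerberos credentials from UserGroupInformation, not from a hand-built GSSContext, which is why the question's code authenticates successfully yet still fails at the IPC layer:

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KeytabLoginTest {
        public static void main(String[] args) throws Exception {
            final Configuration conf = new Configuration();
            // tell the Hadoop client the cluster is Kerberized before logging in
            conf.set("hadoop.security.authentication", "kerberos");
            UserGroupInformation.setConfiguration(conf);

            // log in straight from the keytab; principal and path are the
            // ones quoted in the question and will differ on other clusters
            UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                    "hdfs/172.16.0.239@H236", "/etc/hadoop/conf/hdfs.keytab");

            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws IOException {
                    // all RPC issued here carries the Kerberos credentials above
                    FileSystem fs = FileSystem.get(conf);
                    FSDataOutputStream out = fs.create(new Path("/hdfs/123"), true);
                    for (int i = 0; i < 100; ++i)
                        out.writeChars("test");
                    out.close();
                    return null;
                }
            });
        }
    }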