In the previous three sections we set up the base data for Hive authorization control: the user permission configuration is now in place and can be maintained and managed through a web UI. The next, and most important, step is modifying the Hive source itself.
The changes mainly target two classes: org.apache.hadoop.hive.conf.HiveConf and org.apache.hadoop.hive.ql.Driver.
First, to carry our custom settings, extend org.apache.hadoop.hive.conf.HiveConf:
  public static enum ConfVars {
    KUXUNUSER("hive.kuxun.username", ""),                            // user name
    KUXUNPASSWORD("hive.kuxun.password", ""),                        // password
    KUXUN_HIVESERVER_URL("hive.kuxun.hiveserver.url", ""),           // JDBC URL of the authorization database
    KUXUN_HIVESERVER_USER("hive.kuxun.hiveserver.username", ""),     // authorization database user name
    KUXUN_HIVESERVER_PASSWORD("hive.kuxun.hiveserver.password", ""), // authorization database password
    KUXUN_RESERVE_A("hive.kuxun.resrver.a", ""),                     // reserved
    KUXUN_RESERVE_B("hive.kuxun.resrver.b", ""),                     // reserved
    KUXUN_RESERVE_C("hive.kuxun.resrver.c", ""),                     // reserved
    KUXUN_RESERVE_D("hive.kuxun.resrver.d", ""),                     // reserved
    // ... the existing ConfVars entries remain unchanged ...
  }
Next, in org.apache.hadoop.hive.ql.Driver, add two fields to hold the caller's identity and initialize them from the configuration:

  private String username = "";
  private String password = "";

  this.username = HiveConf.getVar(conf, HiveConf.ConfVars.KUXUNUSER);
  this.password = HiveConf.getVar(conf, HiveConf.ConfVars.KUXUNPASSWORD);
Then add the extended authorization check itself to Driver:

  private void doAuthorizationExtend(BaseSemanticAnalyzer sem)
      throws HiveException, AuthorizationException {
    // load the user's permission data
    UserAuthDataMode ua;
    try {
      ua = new UserAuthDataMode(this.username, this.password, this.conf);
      ua.run();
    } catch (Exception e) {
      throw new AuthorizationException(e.getMessage());
    }
    if (ua.isSuperUser()) {
      LOG.error("current user is super user, do not check authorization.");
      return;
    }
    LOG.warn("current user is [" + this.username + "]. start check authorization.......");
    LOG.warn("current user[" + this.username + "] execute command [" + this.userCommand + "].");
    HashSet<ReadEntity> inputs = sem.getInputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    if (op != null) {
      // not handled here: HiveServer does not provide write operations
    }
    LOG.debug("---------auth KUXUN--------------");
    if (inputs != null && inputs.size() > 0) {
      if (inputs.size() > ua.getMaxMapCount()) {
        String errorMsg = "The max number of partitions you can handle in one job is ["
            + ua.getMaxMapCount() + "], but current is [" + inputs.size() + "]. Permission denied.";
        Exception ex = new Exception(errorMsg);
        throw new AuthorizationException(errorMsg, ex);
      }
      for (ReadEntity read : inputs) {
        if (read.getPartition() != null) {
          Table tbl = read.getTable();
          String tblName = tbl.getTableName();
          LOG.debug("-----dbName.tableName---------" + tbl.getDbName() + "." + tblName);
          String tblFullName = tbl.getDbName() + "." + tblName;
          // if the table's database is not among the user's authorized databases and the
          // table is not among the user's authorized tables, throw an exception
          if (ua.getDbNameList().indexOf(tbl.getDbName()) == -1
              /* ... table-level check lost from the original post ... */) {
            throw new AuthorizationException("table [" + tblFullName + "] Permission denied.");
          }
          Partition part = read.getPartition();
          List<String> partValueList = part.getValues();
          List<FieldSchema> partList = tbl.getPartitionKeys();
          int partSize = partList.size();
          for (int i = 0; i < partSize; i++) {
            // ... per-partition permission checks; this part of the original post was lost ...
          }
        }
      }
    }
    // column-level checks on the query plan (the code leading into this block,
    // including the guard around the cast, was lost from the original post)
    SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
    ParseContext parseCtx = querySem.getParseContext();
    Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();
    for (Map.Entry<String, Operator<? extends Serializable>> topOpMap
        : querySem.getParseContext().getTopOps().entrySet()) {
      Operator<? extends Serializable> topOp = topOpMap.getValue();
      if (topOp instanceof TableScanOperator && tsoTopMap.containsKey(topOp)) {
        TableScanOperator tableScanOp = (TableScanOperator) topOp;
        Table tbl = tsoTopMap.get(tableScanOp);
        String dbName = tbl.getDbName();
        String tblName = tbl.getTableName();
        List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
        List<FieldSchema> columns = tbl.getCols();
        List<String> cols = new ArrayList<String>();
        if (neededColumnIds != null) {
          LOG.debug("-------neededColumnIds-----" + neededColumnIds.size());
        } else {
          LOG.debug("-------neededColumnIds-----null");
        }
        if (neededColumnIds != null && neededColumnIds.size() > 0) {
          for (int i = 0; i < neededColumnIds.size(); i++) {
            cols.add(columns.get(neededColumnIds.get(i)).getName());
          }
        } else {
          for (int i = 0; i < columns.size(); i++) {
            cols.add(columns.get(i).getName());
          }
        }
        // for non-partitioned tables, check whether the table is covered by the user's permissions
        String fullTableName = dbName + "." + tblName;
        if (ua.getDbNameList().indexOf(tbl.getDbName()) == -1
            /* ... table-level check lost from the original post ... */) {
          throw new AuthorizationException("table [" + fullTableName + "] Permission denied.");
        }
        // columns the user is explicitly forbidden to read
        if (ua.getExcludeColumnList().containsKey(fullTableName)) {
          List<String> authColList = ua.getExcludeColumnList().get(fullTableName);
          for (String col : cols) {
            if (authColList.indexOf(col) != -1) {
              throw new AuthorizationException("table [" + fullTableName + "] column [" + col
                  + "] Permission denied.");
            }
            LOG.debug("--------col------------" + dbName + "." + tblName + ":" + col);
          }
        }
        // columns that must be referenced by the query but were not used
        if (ua.getIncludeColumnList().containsKey(fullTableName)) {
          List<String> authColList = ua.getIncludeColumnList().get(fullTableName);
          for (String authCol : authColList) {
            if (cols.indexOf(authCol) == -1) {
              throw new AuthorizationException("table [" + fullTableName + "] must contain column ["
                  + authCol + "]. Permission denied.");
            }
          }
        }
      }
    }
  }
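The check above delegates every permission lookup to a UserAuthDataMode helper backed by the hiveserver database built in the earlier parts of this series. Its source is not shown in the original post; the following is only a minimal sketch of the interface the Driver code relies on. The method names are taken from the calls above, while the fields and the loading logic are assumptions:

  import java.util.List;
  import java.util.Map;
  import org.apache.hadoop.hive.conf.HiveConf;

  // Minimal sketch, not the original implementation.
  public class UserAuthDataMode {
    private final String username;
    private final String password;
    private final HiveConf conf;

    private boolean superUser;
    private int maxMapCount;                              // max partitions one job may read
    private List<String> dbNameList;                      // databases the user may read
    private Map<String, List<String>> excludeColumnList;  // table -> columns the user must NOT read
    private Map<String, List<String>> includeColumnList;  // table -> columns a query MUST reference

    public UserAuthDataMode(String username, String password, HiveConf conf) {
      this.username = username;
      this.password = password;
      this.conf = conf;
    }

    public void run() throws Exception {
      // connect to the database configured via the hive.kuxun.hiveserver.* properties
      // and populate the fields above for this user (assumed behavior)
    }

    public boolean isSuperUser() { return superUser; }
    public int getMaxMapCount() { return maxMapCount; }
    public List<String> getDbNameList() { return dbNameList; }
    public Map<String, List<String>> getExcludeColumnList() { return excludeColumnList; }
    public Map<String, List<String>> getIncludeColumnList() { return includeColumnList; }
  }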
Finally, hook the check into Driver.compile():

  public int compile(String command, boolean resetTaskIds) {
    // ... existing compile() logic: parse the command and run semantic analysis to obtain sem ...
    try {
      doAuthorizationExtend(sem);
    } catch (AuthorizationException authExp) {
      errorMessage = "FAILED:Kuxun Authorization failed:" + authExp.getMessage()
          + " Please contact anyoneking@163.com for your information.";
      console.printError("Kuxun Authorization failed:" + authExp.getMessage()
          + " Please contact anyoneking@163.com for your information.");
      return 403;
    }
    // ... rest of compile() ...
  }

Note: errorMessage must be assigned when the exception is caught. Otherwise, when the query is submitted through a Hive client and the check fails, the client gets no error message at all, only null.
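The reason the assignment matters: Driver.run() reports a failed compile to clients through a CommandProcessorResponse that carries errorMessage alongside the return code. The sketch below shows the relevant shape of that path in Hive 0.x; the exact call chain differs between versions:

  // Rough sketch of the relevant part of org.apache.hadoop.hive.ql.Driver.run(); version-dependent.
  public CommandProcessorResponse run(String command) {
    int ret = compile(command, true);
    if (ret != 0) {
      // this is what the CLI / HiveServer ultimately show to the user;
      // if errorMessage was never set, the client only sees "null"
      return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }
    // ... execute the plan, fetch results, etc. ...
    return new CommandProcessorResponse(ret);
  }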
Once the above is done, rebuild the Hive jar and drop it into Hive's lib directory.
Also remember to update hive-site.xml so the corresponding settings are passed in:
  <property>
    <name>hive.kuxun.username</name>
    <value>test</value>
  </property>
  <property>
    <name>hive.kuxun.password</name>
    <value>test</value>
  </property>
  <property>
    <name>hive.kuxun.hiveserver.url</name>
    <value>jdbc:mysql://localhost:3306/hiveserver</value>
    <description>hiveserver jdbc connection url</description>
  </property>
  <property>
    <name>hive.kuxun.hiveserver.username</name>
    <value>test</value>
    <description>username to use against hiveserver database</description>
  </property>
  <property>
    <name>hive.kuxun.hiveserver.password</name>
    <value>test</value>
    <description>password to use against hiveserver database</description>
  </property>
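The values in hive-site.xml act as defaults. Because the new settings are ordinary HiveConf properties, a client session can also supply them itself, for example with set commands after connecting. The sketch below is only an illustration under that assumption; it uses the stock HiveServer1 JDBC driver class and URL, and the host, port, and query are placeholders:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.ResultSet;
  import java.sql.Statement;

  public class KuxunAuthClientExample {
    public static void main(String[] args) throws Exception {
      Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
      Connection conn = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
      Statement stmt = conn.createStatement();
      // these SET commands end up in the session's HiveConf, where Driver reads them
      stmt.execute("set hive.kuxun.username=test");
      stmt.execute("set hive.kuxun.password=test");
      // if the query touches objects this user may not read, it fails with the
      // "Kuxun Authorization failed" message set in compile()
      ResultSet rs = stmt.executeQuery("select col_a from some_db.some_table");
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
      rs.close();
      stmt.close();
      conn.close();
    }
  }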