/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.action.hadoop;

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.oozie.action.ActionExecutor.Context;
import org.apache.oozie.util.XLog;

/**
 * HBase {@link Credentials} implementation that obtains an HBase authentication token and stores it
 * in the action's JobConf. The JobConf is then used to pass the credentials to the tasks at runtime.
 * The Oozie server must be configured to use this class by registering it via the
 * 'oozie.credentials.credentialclasses' property.
 *
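 * <p>
 * Illustrative example only: a typical oozie-site.xml registration might look like the following,
 * where the credential type name "hbase" is a site-specific choice:
 * <pre>{@code
 * <property>
 *     <name>oozie.credentials.credentialclasses</name>
 *     <value>hbase=org.apache.oozie.action.hadoop.HbaseCredentials</value>
 * </property>
 * }</pre>
 * A workflow action can then reference a credential of this type through its {@code cred} attribute,
 * for example {@code <credential name="hbase.cred" type="hbase"/>} declared in the workflow's
 * {@code <credentials>} section (the names here are illustrative).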
 */
public class HbaseCredentials extends Credentials {

    /* (non-Javadoc)
     * @see org.apache.oozie.action.hadoop.Credentials#addtoJobConf(org.apache.hadoop.mapred.JobConf, org.apache.oozie.action.hadoop.CredentialsProperties, org.apache.oozie.action.ActionExecutor.Context)
     */
    @Override
    public void addtoJobConf(JobConf jobConf, CredentialsProperties props, Context context) throws Exception {
        try {
            // Create a configuration seeded from hbase-site.xml/hbase-default.xml
            Configuration hbaseConf = HBaseConfiguration.create();
            // Copy the credential properties into hbaseConf, overriding any values that already exist
            addPropsConf(props, hbaseConf);
            // Copy hbaseConf into jobConf without overriding values already present in jobConf
            injectConf(hbaseConf, jobConf);
            // Obtain an HBase auth token for the workflow user (proxied via the Oozie login user)
            // and add it to the job's credentials
            String user = context.getWorkflow().getUser();
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
            User u = User.create(ugi);
            u.obtainAuthTokenForJob(jobConf);
        }
        catch (Exception e) {
            XLog.getLog(getClass()).warn("Exception while obtaining HBase credentials", e);
            throw e;
        }
    }

    // Copies all credential properties into the destination configuration, overriding existing values.
    private void addPropsConf(CredentialsProperties props, Configuration destConf) {
        for (Map.Entry<String, String> entry : props.getProperties().entrySet()) {
            destConf.set(entry.getKey(), entry.getValue());
        }
    }

    // Copies entries from the source configuration into the destination configuration,
    // but only for keys not already set in the destination.
    private void injectConf(Configuration srcConf, Configuration destConf) {
        for (Map.Entry<String, String> entry : srcConf) {
            String name = entry.getKey();
            if (destConf.get(name) == null) {
                String value = entry.getValue();
                destConf.set(name, value);
            }
        }
    }
}