This article collects typical usage examples of the Java class org.apache.hadoop.hbase.security.SaslUtil. If you are wondering what exactly SaslUtil does, how it is used, or what real-world code that uses it looks like, the curated class examples below may help.
The SaslUtil class belongs to the org.apache.hadoop.hbase.security package. Six code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
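Before the examples, here is a minimal sketch of the part of the API that all six rely on: SaslUtil.QualityOfProtection maps HBase's configured protection level to a javax.security.sasl QOP token via getSaslQop(). The demo class name is ours, and the printed mapping (AUTHENTICATION -> auth, INTEGRITY -> auth-int, PRIVACY -> auth-conf) follows the standard SASL QOP names; treat it as an assumption to verify against your HBase version.

import org.apache.hadoop.hbase.security.SaslUtil;

public class SaslUtilQopDemo {
  public static void main(String[] args) {
    // Each enum constant exposes the SASL-level QOP token it stands for.
    for (SaslUtil.QualityOfProtection qop : SaslUtil.QualityOfProtection.values()) {
      System.out.println(qop.name() + " -> " + qop.getSaslQop());
    }
  }
}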
Example 1: writeConnectionHeader
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
/**
 * Write the connection header.
 */
private void writeConnectionHeader() throws IOException {
  boolean isCryptoAesEnable = false;
  // check if Crypto AES is enabled
  if (saslRpcClient != null) {
    boolean saslEncryptionEnabled = SaslUtil.QualityOfProtection.PRIVACY.getSaslQop()
        .equalsIgnoreCase(saslRpcClient.getSaslQOP());
    isCryptoAesEnable = saslEncryptionEnabled
        && conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT);
  }
  // if Crypto AES is enabled, set transformation and negotiate with server
  if (isCryptoAesEnable) {
    waitingConnectionHeaderResponse = true;
  }
  this.out.write(connectionHeaderWithLength);
  this.out.flush();
}
Developer: apache, Project: hbase, Lines: 21, Source file: BlockingRpcConnection.java
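The decisive condition in Example 1 is the comparison of the negotiated SASL QOP against the PRIVACY level: Crypto AES wrapping is only considered once the connection is already running with privacy ("auth-conf") protection. Below is a standalone restatement of that guard; PrivacyQopCheck and negotiatedPrivacy are hypothetical names for illustration, not part of HBase.

import org.apache.hadoop.hbase.security.SaslUtil;

public class PrivacyQopCheck {
  // Returns true only when SASL negotiation settled on the privacy ("auth-conf") level,
  // mirroring the guard in Example 1.
  static boolean negotiatedPrivacy(String negotiatedQop) {
    return SaslUtil.QualityOfProtection.PRIVACY.getSaslQop().equalsIgnoreCase(negotiatedQop);
  }

  public static void main(String[] args) {
    System.out.println(negotiatedPrivacy("auth-conf")); // expected: true
    System.out.println(negotiatedPrivacy("auth"));      // expected: false
  }
}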
Example 2: getSaslHandler
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
/**
 * Get SASL handler
 * @param bootstrap to reconnect to
 * @return new SASL handler
 * @throws java.io.IOException if handler failed to create
 */
private SaslClientHandler getSaslHandler(final UserGroupInformation realTicket,
    final Bootstrap bootstrap) throws IOException {
  return new SaslClientHandler(realTicket, authMethod, token, serverPrincipal,
      client.fallbackAllowed,
      client.conf.get("hbase.rpc.protection",
          SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()),
      new SaslClientHandler.SaslExceptionHandler() {
        @Override
        public void handle(int retryCount, Random random, Throwable cause) {
          try {
            // Handle Sasl failure. Try to potentially get new credentials
            handleSaslConnectionFailure(retryCount, cause, realTicket);
            // Try to reconnect
            client.newTimeout(new TimerTask() {
              @Override
              public void run(Timeout timeout) throws Exception {
                connect(bootstrap);
              }
            }, random.nextInt(reloginMaxBackoff) + 1, TimeUnit.MILLISECONDS);
          } catch (IOException | InterruptedException e) {
            close(e);
          }
        }
      }, new SaslClientHandler.SaslSuccessfulConnectHandler() {
        @Override
        public void onSuccess(Channel channel) {
          startHBaseConnection(channel);
        }
      });
}
Developer: fengchen8086, Project: ditb, Lines: 37, Source file: AsyncRpcChannel.java
Example 3: getSaslHandler
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
/**
 * Get SASL handler
 *
 * @param bootstrap to reconnect to
 * @return new SASL handler
 * @throws java.io.IOException if handler failed to create
 */
private SaslClientHandler getSaslHandler(final Bootstrap bootstrap) throws IOException {
  return new SaslClientHandler(authMethod, token, serverPrincipal, client.fallbackAllowed,
      client.conf.get("hbase.rpc.protection",
          SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()),
      new SaslClientHandler.SaslExceptionHandler() {
        @Override public void handle(int retryCount, Random random, Throwable cause) {
          try {
            // Handle Sasl failure. Try to potentially get new credentials
            handleSaslConnectionFailure(retryCount, cause, ticket.getUGI());
            // Try to reconnect
            AsyncRpcClient.WHEEL_TIMER.newTimeout(new TimerTask() {
              @Override public void run(Timeout timeout) throws Exception {
                connect(bootstrap);
              }
            }, random.nextInt(reloginMaxBackoff) + 1, TimeUnit.MILLISECONDS);
          } catch (IOException | InterruptedException e) {
            close(e);
          }
        }
      }, new SaslClientHandler.SaslSuccessfulConnectHandler() {
        @Override public void onSuccess(Channel channel) {
          startHBaseConnection(channel);
        }
      });
}
Developer: jurmous, Project: async-hbase-client, Lines: 34, Source file: AsyncRpcChannel.java
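Examples 2 and 3 differ mainly in how the UserGroupInformation is supplied; both resolve the requested protection level the same way, falling back to "authentication" when hbase.rpc.protection is unset. A minimal sketch of just that lookup, assuming a default HBaseConfiguration is available on the classpath (the demo class name is ours):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.SaslUtil;

public class RpcProtectionDefaultDemo {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Same lookup as in Examples 2 and 3: default to "authentication" when unset.
    String protection = conf.get("hbase.rpc.protection",
        SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase());
    System.out.println("hbase.rpc.protection = " + protection);
  }
}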
Example 4: ThriftServerRunner
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
public ThriftServerRunner(Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(conf);
  // login the server principal (if using secure Hadoop)
  securityEnabled = userProvider.isHadoopSecurityEnabled()
      && userProvider.isHBaseSecurityEnabled();
  if (securityEnabled) {
    host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
        conf.get("hbase.thrift.dns.interface", "default"),
        conf.get("hbase.thrift.dns.nameserver", "default")));
    userProvider.login("hbase.thrift.keytab.file",
        "hbase.thrift.kerberos.principal", host);
  }
  this.conf = HBaseConfiguration.create(conf);
  this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
  this.metrics = new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
  this.pauseMonitor = new JvmPauseMonitor(conf, this.metrics.getSource());
  this.hbaseHandler = new HBaseHandler(conf, userProvider);
  this.hbaseHandler.initMetrics(metrics);
  this.handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, conf);
  this.realUser = userProvider.getCurrent().getUGI();
  String strQop = conf.get(THRIFT_QOP_KEY);
  if (strQop != null) {
    this.qop = SaslUtil.getQop(strQop);
  }
  doAsEnabled = conf.getBoolean(THRIFT_SUPPORT_PROXYUSER, false);
  if (doAsEnabled) {
    if (!conf.getBoolean(USE_HTTP_CONF_KEY, false)) {
      LOG.warn("Fail to enable the doAs feature. hbase.regionserver.thrift.http is not "
          + "configured");
    }
  }
  if (qop != null) {
    if (qop != QualityOfProtection.AUTHENTICATION &&
        qop != QualityOfProtection.INTEGRITY &&
        qop != QualityOfProtection.PRIVACY) {
      throw new IOException(String.format("Invalid %s: It must be one of %s, %s, or %s.",
          THRIFT_QOP_KEY,
          QualityOfProtection.AUTHENTICATION.name(),
          QualityOfProtection.INTEGRITY.name(),
          QualityOfProtection.PRIVACY.name()));
    }
    checkHttpSecurity(qop, conf);
    if (!securityEnabled) {
      throw new IOException("Thrift server must"
          + " run in secure mode to support authentication");
    }
  }
}
Developer: apache, Project: hbase, Lines: 50, Source file: ThriftServerRunner.java
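Example 4 converts the configured Thrift QOP string into a QualityOfProtection constant with SaslUtil.getQop before validating it. A minimal sketch of that parsing step follows; the input value "auth-conf" and the demo class name are assumptions based on how hbase.thrift.security.qop is commonly documented, so verify the accepted strings against your HBase version.

import org.apache.hadoop.hbase.security.SaslUtil;

public class ThriftQopParseDemo {
  public static void main(String[] args) {
    // Assumption: getQop accepts the SASL-style token names ("auth", "auth-int", "auth-conf").
    SaslUtil.QualityOfProtection qop = SaslUtil.getQop("auth-conf");
    if (qop != SaslUtil.QualityOfProtection.PRIVACY) {
      throw new IllegalStateException("Expected PRIVACY for auth-conf, got " + qop);
    }
    System.out.println("auth-conf -> " + qop.name());
  }
}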
Example 5: setupCryptoCipher
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
/**
 * Set up cipher for rpc encryption with Apache Commons Crypto
 *
 * @throws FatalConnectionException
 */
private void setupCryptoCipher(final ConnectionHeader header,
    RPCProtos.ConnectionHeaderResponse.Builder chrBuilder)
    throws FatalConnectionException {
  // If simple auth, return
  if (saslServer == null) return;
  // check if rpc encryption with Crypto AES
  String qop = saslServer.getNegotiatedQop();
  boolean isEncryption = SaslUtil.QualityOfProtection.PRIVACY
      .getSaslQop().equalsIgnoreCase(qop);
  boolean isCryptoAesEncryption = isEncryption && this.rpcServer.conf.getBoolean(
      "hbase.rpc.crypto.encryption.aes.enabled", false);
  if (!isCryptoAesEncryption) return;
  if (!header.hasRpcCryptoCipherTransformation()) return;
  String transformation = header.getRpcCryptoCipherTransformation();
  if (transformation == null || transformation.length() == 0) return;
  // Negotiates AES based on complete saslServer.
  // The Crypto metadata need to be encrypted and send to client.
  Properties properties = new Properties();
  // the property for SecureRandomFactory
  properties.setProperty(CryptoRandomFactory.CLASSES_KEY,
      this.rpcServer.conf.get("hbase.crypto.sasl.encryption.aes.crypto.random",
          "org.apache.commons.crypto.random.JavaCryptoRandom"));
  // the property for cipher class
  properties.setProperty(CryptoCipherFactory.CLASSES_KEY,
      this.rpcServer.conf.get("hbase.rpc.crypto.encryption.aes.cipher.class",
          "org.apache.commons.crypto.cipher.JceCipher"));
  int cipherKeyBits = this.rpcServer.conf.getInt(
      "hbase.rpc.crypto.encryption.aes.cipher.keySizeBits", 128);
  // generate key and iv
  if (cipherKeyBits % 8 != 0) {
    throw new IllegalArgumentException("The AES cipher key size in bits"
        + " should be a multiple of byte");
  }
  int len = cipherKeyBits / 8;
  byte[] inKey = new byte[len];
  byte[] outKey = new byte[len];
  byte[] inIv = new byte[len];
  byte[] outIv = new byte[len];
  try {
    // generate the cipher meta data with SecureRandom
    CryptoRandom secureRandom = CryptoRandomFactory.getCryptoRandom(properties);
    secureRandom.nextBytes(inKey);
    secureRandom.nextBytes(outKey);
    secureRandom.nextBytes(inIv);
    secureRandom.nextBytes(outIv);
    // create CryptoAES for server
    cryptoAES = new CryptoAES(transformation, properties, inKey, outKey, inIv, outIv);
    // create SaslCipherMeta and send to client,
    // for client, the [inKey, outKey], [inIv, outIv] should be reversed
    RPCProtos.CryptoCipherMeta.Builder ccmBuilder = RPCProtos.CryptoCipherMeta.newBuilder();
    ccmBuilder.setTransformation(transformation);
    ccmBuilder.setInIv(getByteString(outIv));
    ccmBuilder.setInKey(getByteString(outKey));
    ccmBuilder.setOutIv(getByteString(inIv));
    ccmBuilder.setOutKey(getByteString(inKey));
    chrBuilder.setCryptoCipherMeta(ccmBuilder);
    useCryptoAesWrap = true;
  } catch (GeneralSecurityException | IOException ex) {
    throw new UnsupportedCryptoException(ex.getMessage(), ex);
  }
}
Developer: apache, Project: hbase, Lines: 71, Source file: ServerRpcConnection.java
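The comment in Example 5 points out that the key/IV pairs are deliberately swapped in the CryptoCipherMeta sent to the client, so each side decrypts with the key the other side encrypts with. Below is a hedged fragment sketching the client-side counterpart; connectionHeaderResponse stands in for the parsed RPCProtos.ConnectionHeaderResponse, properties is assumed to be built with the same Commons Crypto settings as on the server, and the accessors mirror the builder setters shown above.

// Client-side counterpart (sketch, not verbatim HBase code): the meta already carries the
// swapped key/IV values, so the client can pass them to CryptoAES positionally,
// just as the server did for its own instance.
RPCProtos.CryptoCipherMeta meta = connectionHeaderResponse.getCryptoCipherMeta();
CryptoAES clientCryptoAES = new CryptoAES(meta.getTransformation(), properties,
    meta.getInKey().toByteArray(), meta.getOutKey().toByteArray(),
    meta.getInIv().toByteArray(), meta.getOutIv().toByteArray());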
Example 6: RpcServer
import org.apache.hadoop.hbase.security.SaslUtil; // import the required package/class
/**
 * Constructs a server listening on the named port and address.
 * @param server hosting instance of {@link Server}. We will do authentications if an
 *   instance else pass null for no authentication check.
 * @param name Used keying this rpc servers' metrics and for naming the Listener thread.
 * @param services A list of services.
 * @param bindAddress Where to listen
 * @param conf
 * @param scheduler
 * @param reservoirEnabled Enable ByteBufferPool or not.
 */
public RpcServer(final Server server, final String name,
    final List<BlockingServiceAndInterface> services,
    final InetSocketAddress bindAddress, Configuration conf,
    RpcScheduler scheduler, boolean reservoirEnabled) throws IOException {
  if (reservoirEnabled) {
    int poolBufSize = conf.getInt(ByteBufferPool.BUFFER_SIZE_KEY,
        ByteBufferPool.DEFAULT_BUFFER_SIZE);
    // The max number of buffers to be pooled in the ByteBufferPool. The default value been
    // selected based on the #handlers configured. When it is read request, 2 MB is the max size
    // at which we will send back one RPC request. Means max we need 2 MB for creating the
    // response cell block. (Well it might be much lesser than this because in 2 MB size calc, we
    // include the heap size overhead of each cells also.) Considering 2 MB, we will need
    // (2 * 1024 * 1024) / poolBufSize buffers to make the response cell block. Pool buffer size
    // is by default 64 KB.
    // In case of read request, at the end of the handler process, we will make the response
    // cellblock and add the Call to connection's response Q and a single Responder thread takes
    // connections and responses from that one by one and do the socket write. So there is chances
    // that by the time a handler originated response is actually done writing to socket and so
    // released the BBs it used, the handler might have processed one more read req. On an avg 2x
    // we consider and consider that also for the max buffers to pool
    int bufsForTwoMB = (2 * 1024 * 1024) / poolBufSize;
    int maxPoolSize = conf.getInt(ByteBufferPool.MAX_POOL_SIZE_KEY,
        conf.getInt(HConstants.REGION_SERVER_HANDLER_COUNT,
            HConstants.DEFAULT_REGION_SERVER_HANDLER_COUNT) * bufsForTwoMB * 2);
    this.reservoir = new ByteBufferPool(poolBufSize, maxPoolSize);
    this.minSizeForReservoirUse = getMinSizeForReservoirUse(this.reservoir);
  } else {
    reservoir = null;
    this.minSizeForReservoirUse = Integer.MAX_VALUE; // reservoir itself not in place.
  }
  this.server = server;
  this.services = services;
  this.bindAddress = bindAddress;
  this.conf = conf;
  // See declaration above for documentation on what this size is.
  this.maxQueueSizeInBytes =
      this.conf.getLong("hbase.ipc.server.max.callqueue.size", DEFAULT_MAX_CALLQUEUE_SIZE);
  this.warnResponseTime = conf.getInt(WARN_RESPONSE_TIME, DEFAULT_WARN_RESPONSE_TIME);
  this.warnResponseSize = conf.getInt(WARN_RESPONSE_SIZE, DEFAULT_WARN_RESPONSE_SIZE);
  this.minClientRequestTimeout = conf.getInt(MIN_CLIENT_REQUEST_TIMEOUT,
      DEFAULT_MIN_CLIENT_REQUEST_TIMEOUT);
  this.maxRequestSize = conf.getInt(MAX_REQUEST_SIZE, DEFAULT_MAX_REQUEST_SIZE);
  this.metrics = new MetricsHBaseServer(name, new MetricsHBaseServerWrapperImpl(this));
  this.tcpNoDelay = conf.getBoolean("hbase.ipc.server.tcpnodelay", true);
  this.tcpKeepAlive = conf.getBoolean("hbase.ipc.server.tcpkeepalive", true);
  this.cellBlockBuilder = new CellBlockBuilder(conf);
  this.authorize = conf.getBoolean(HADOOP_SECURITY_AUTHORIZATION, false);
  this.userProvider = UserProvider.instantiate(conf);
  this.isSecurityEnabled = userProvider.isHBaseSecurityEnabled();
  if (isSecurityEnabled) {
    saslProps = SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
        QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
  } else {
    saslProps = Collections.emptyMap();
  }
  this.scheduler = scheduler;
}
Developer: apache, Project: hbase, Lines: 74, Source file: RpcServer.java
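Example 6 hands the configured protection string to SaslUtil.initSaslProperties, which builds the property map later given to the SASL server factory. A minimal sketch of inspecting that map follows; the demo class name is ours, and the assumption (in line with javax.security.sasl conventions) is that the map carries at least a Sasl.QOP entry, comma-separated when several protection levels are configured.

import java.util.Map;
import javax.security.sasl.Sasl;
import org.apache.hadoop.hbase.security.SaslUtil;

public class InitSaslPropertiesDemo {
  public static void main(String[] args) {
    // "privacy,authentication" means: prefer encryption, but allow authentication-only as fallback.
    Map<String, String> saslProps = SaslUtil.initSaslProperties("privacy,authentication");
    // Assumed to print something like: javax.security.sasl.qop=auth-conf,auth
    System.out.println(Sasl.QOP + "=" + saslProps.get(Sasl.QOP));
  }
}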
Note: the org.apache.hadoop.hbase.security.SaslUtil class examples in this article were compiled from source-code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please refer to each project's License before distributing or using the code, and do not republish without permission.