This article collects typical usage examples of the Java class org.apache.hadoop.ipc.RPC.RpcKind. If you are wondering what the RpcKind class is for, how to use it, or what real-world usages look like, the hand-picked examples below should help.
RpcKind is an enum nested in the org.apache.hadoop.ipc.RPC class. Ten code examples that use RpcKind are shown below, sorted by popularity by default.
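For orientation, RPC.RpcKind tags each request with the RPC engine/serialization it belongs to. The following is a minimal sketch of the enum's shape as it appears in the Hadoop source; treat the exact numeric values and comments as an assumption taken from current branches, not a stable contract.

// Sketch of the enum nested in org.apache.hadoop.ipc.RPC (values assumed from the source)
public enum RpcKind {
  RPC_BUILTIN((short) 1),         // used for built-in calls by tests
  RPC_WRITABLE((short) 2),        // use WritableRpcEngine
  RPC_PROTOCOL_BUFFER((short) 3); // use ProtobufRpcEngine

  final short value;

  RpcKind(short val) {
    this.value = val;
  }
}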
Example 1: Call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
private Call(RPC.RpcKind rpcKind, Writable param) {
  this.rpcKind = rpcKind;
  this.rpcRequest = param;
  final Integer id = callId.get();
  if (id == null) {
    this.id = nextCallId();
  } else {
    callId.set(null);
    this.id = id;
  }
  final Integer rc = retryCount.get();
  if (rc == null) {
    this.retry = 0;
  } else {
    this.retry = rc;
  }
}
Developer: nucypher, Project: hadoop-oss, Lines: 20, Source: Client.java
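The constructor pulls the call id and retry count out of thread-locals so that a retry layer can pin a specific id onto the next Call; otherwise a fresh id is allocated. Below is a minimal, self-contained sketch of that hand-off pattern. The names callId, retryCount and nextCallId mirror the excerpt, but the driver class itself is hypothetical, and the exact signature of Hadoop's Client.setCallIdAndRetryCount should be treated as an assumption.

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical stand-alone illustration of the thread-local hand-off used above.
class CallIdDemo {
  private static final ThreadLocal<Integer> callId = new ThreadLocal<>();
  private static final ThreadLocal<Integer> retryCount = new ThreadLocal<>();
  private static final AtomicInteger counter = new AtomicInteger();

  static int nextCallId() {
    // non-negative, wrapping id, similar in spirit to Client.nextCallId()
    return counter.getAndIncrement() & 0x7FFFFFFF;
  }

  // A retry layer would call something like this before re-issuing a request,
  // so the new Call reuses the original id.
  static void setCallIdAndRetryCount(int cid, int rc) {
    callId.set(cid);
    retryCount.set(rc);
  }

  public static void main(String[] args) {
    setCallIdAndRetryCount(42, 1);
    Integer id = callId.get();   // 42, consumed exactly once
    callId.set(null);            // clear it, as the Call constructor does
    System.out.println("call id = " + id + ", retry = " + retryCount.get());
  }
}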
Example 2: writeConnectionContext
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
private void writeConnectionContext(ConnectionId remoteId,
                                    AuthMethod authMethod)
    throws IOException {
  // Write out the ConnectionHeader
  IpcConnectionContextProto message = ProtoUtil.makeIpcConnectionContext(
      RPC.getProtocolName(remoteId.getProtocol()),
      remoteId.getTicket(),
      authMethod);
  RpcRequestHeaderProto connectionContextHeader = ProtoUtil
      .makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
          OperationProto.RPC_FINAL_PACKET, CONNECTION_CONTEXT_CALL_ID,
          RpcConstants.INVALID_RETRY_COUNT, clientId);
  RpcRequestMessageWrapper request =
      new RpcRequestMessageWrapper(connectionContextHeader, message);
  // Write out the packet length
  out.writeInt(request.getLength());
  request.write(out);
}
Developer: nucypher, Project: hadoop-oss, Lines: 20, Source: Client.java
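The framing here is a plain length prefix followed by the serialized header and context. A hypothetical reader for one such frame (plain Java I/O, not the actual Hadoop server code) would look roughly like this:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical helper: read one length-prefixed frame as written by
// out.writeInt(request.getLength()); request.write(out); above.
static byte[] readFrame(InputStream in) throws IOException {
  DataInputStream din = new DataInputStream(in);
  int length = din.readInt();   // 4-byte big-endian length prefix
  byte[] payload = new byte[length];
  din.readFully(payload);       // the serialized header + connection context
  return payload;
}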
Example 3: Call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
private Call(RPC.RpcKind rpcKind, Writable param) {
  this.rpcKind = rpcKind;
  this.rpcRequest = param;
  final Integer id = callId.get();
  if (id == null) {
    this.id = nextCallId();
  } else {
    callId.set(null);
    this.id = id;
  }
  final Integer rc = retryCount.get();
  if (rc == null) {
    this.retry = 0;
  } else {
    this.retry = rc;
  }
  this.externalHandler = EXTERNAL_CALL_HANDLER.get();
}
Developer: hopshadoop, Project: hops, Lines: 22, Source: Client.java
Example 4: writeConnectionContext
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
private void writeConnectionContext(ConnectionId remoteId,
                                    AuthMethod authMethod)
    throws IOException {
  // Write out the ConnectionHeader
  IpcConnectionContextProto message = ProtoUtil.makeIpcConnectionContext(
      RPC.getProtocolName(remoteId.getProtocol()),
      remoteId.getTicket(),
      authMethod);
  RpcRequestHeaderProto connectionContextHeader = ProtoUtil
      .makeRpcRequestHeader(RpcKind.RPC_PROTOCOL_BUFFER,
          OperationProto.RPC_FINAL_PACKET, CONNECTION_CONTEXT_CALL_ID,
          RpcConstants.INVALID_RETRY_COUNT, clientId);
  // do not flush. the context and first ipc call request must be sent
  // together to avoid possibility of broken pipes upon authz failure.
  // see writeConnectionHeader
  final ResponseBuffer buf = new ResponseBuffer();
  connectionContextHeader.writeDelimitedTo(buf);
  message.writeDelimitedTo(buf);
  synchronized (ipcStreams.out) {
    ipcStreams.sendRequest(buf.toByteArray());
  }
}
Developer: hopshadoop, Project: hops, Lines: 23, Source: Client.java
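In this newer variant both messages are written with writeDelimitedTo, i.e. each is prefixed with its own varint length. A receiver could peel them back off a stream with the generated parseDelimitedFrom methods; the following is a rough sketch, with the protobuf package paths assumed from the Hadoop bindings.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;

// Hypothetical decoding of the two delimited messages written above.
static void decodeConnectionContext(byte[] payload) throws IOException {
  ByteArrayInputStream in = new ByteArrayInputStream(payload);
  RpcRequestHeaderProto header = RpcRequestHeaderProto.parseDelimitedFrom(in);
  IpcConnectionContextProto context = IpcConnectionContextProto.parseDelimitedFrom(in);
  System.out.println("kind=" + header.getRpcKind() + ", protocol=" + context.getProtocol());
}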
Example 5: testRpcClientId
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
@Test
public void testRpcClientId() {
  byte[] uuid = ClientId.getClientId();
  RpcRequestHeaderProto header = ProtoUtil.makeRpcRequestHeader(
      RpcKind.RPC_PROTOCOL_BUFFER, OperationProto.RPC_FINAL_PACKET, 0,
      RpcConstants.INVALID_RETRY_COUNT, uuid);
  assertTrue(Arrays.equals(uuid, header.getClientId().toByteArray()));
}
Developer: nucypher, Project: hadoop-oss, Lines: 9, Source: TestProtoUtil.java
Example 6: call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
static Writable call(Client client, InetSocketAddress addr,
    int serviceClass, Configuration conf) throws IOException {
  final LongWritable param = new LongWritable(RANDOM.nextLong());
  final ConnectionId remoteId = getConnectionId(addr, MIN_SLEEP_TIME, conf);
  return client.call(RPC.RpcKind.RPC_BUILTIN, param, remoteId, serviceClass,
      null);
}
Developer: nucypher, Project: hadoop-oss, Lines: 8, Source: TestIPC.java
Example 7: call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
static Writable call(Client client, Writable param,
    InetSocketAddress address) throws IOException {
  final ConnectionId remoteId = ConnectionId.getConnectionId(address, null,
      null, 0, null, conf);
  return client.call(RpcKind.RPC_BUILTIN, param, remoteId,
      RPC.RPC_SERVICE_CLASS_DEFAULT, null);
}
Developer: nucypher, Project: hadoop-oss, Lines: 8, Source: TestIPCServerResponder.java
Example 8: call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
/**
 * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress,
 * Class, UserGroupInformation, int, Configuration)}
 * except that rpcKind is writable.
 */
public Writable call(Writable param, InetSocketAddress addr,
    Class<?> protocol, UserGroupInformation ticket,
    int rpcTimeout, Configuration conf) throws IOException {
  ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
      ticket, rpcTimeout, conf);
  return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
}
Developer: naver, Project: hadoop, Lines: 13, Source: Client.java
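A hypothetical caller of this convenience overload might look as follows; TestProtocol, the address and the port are placeholders, and the Client constructor shown is the two-argument (value class, Configuration) form assumed from the Hadoop source.

// Hypothetical usage of the overload above.
static Writable pingServer(Configuration conf) throws IOException {
  Client client = new Client(LongWritable.class, conf);
  InetSocketAddress addr = new InetSocketAddress("localhost", 9000);
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  return client.call(new LongWritable(42L), addr,
      TestProtocol.class, ugi, /* rpcTimeout */ 0, conf);
}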
Example 9: call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
@Override
public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param,
    long receiveTime) throws IOException {
  firstCallLatch.countDown();
  try {
    callBlockLatch.await();
  } catch (InterruptedException e) {
    throw new IOException(e);
  }
  return param;
}
Developer: naver, Project: hadoop, Lines: 12, Source: TestIPC.java
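The two latches let the test pause the server mid-call: the handler signals firstCallLatch once a request has arrived, then parks on callBlockLatch until the test releases it. The sketch below is a hypothetical driver for such a handler; the latch names mirror the excerpt, everything else is illustrative.

import java.util.concurrent.CountDownLatch;

// Hypothetical test driver for the blocking handler above.
static void driveBlockedCall(CountDownLatch firstCallLatch,
    CountDownLatch callBlockLatch) throws InterruptedException {
  // ... a client.call(...) has already been issued on another thread ...
  firstCallLatch.await();      // wait until the server has entered call(...)
  // inspect or assert on server-side state while the call is parked
  callBlockLatch.countDown();  // release the handler so it can return param
}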
Example 10: call
import org.apache.hadoop.ipc.RPC.RpcKind; // import the required package/class
/**
 * Same as {@link #call(RPC.RpcKind, Writable, ConnectionId)}
 * for RPC_BUILTIN
 */
public Writable call(Writable param, InetSocketAddress address)
    throws IOException {
  ConnectionId remoteId = ConnectionId.getConnectionId(address, null, null, 0,
      conf);
  return call(RpcKind.RPC_BUILTIN, param, remoteId);
}
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 11, Source: Client.java
Note: the org.apache.hadoop.ipc.RPC.RpcKind examples in this article are collected from open-source projects hosted on GitHub, MSDocs, and similar code and documentation platforms; the snippets were selected from projects contributed by open-source developers. Copyright of the source code remains with the original authors; consult each project's license before redistributing or reusing the code. Do not reproduce without permission.