本文整理汇总了Java中org.apache.pig.backend.hadoop.executionengine.HExecutionEngine类的典型用法代码示例。如果您正苦于以下问题:Java HExecutionEngine类的具体用法?Java HExecutionEngine怎么用?Java HExecutionEngine使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
HExecutionEngine类属于org.apache.pig.backend.hadoop.executionengine包,在下文中一共展示了HExecutionEngine类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Java代码示例。
示例1: setParams
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Wires this parser to the given {@link PigServer}, caching the file
 * systems, configuration properties, FsShell, and backend job
 * configuration that later commands rely on.
 *
 * @param pigServer the server whose PigContext supplies dfs/lfs/properties
 */
public void setParams(PigServer pigServer)
{
    mPigServer = pigServer;
    mConf = mPigServer.getPigContext().getProperties();
    mDfs = mPigServer.getPigContext().getDfs();
    mLfs = mPigServer.getPigContext().getLfs();
    shell = new FsShell(ConfigurationUtil.toConfiguration(mConf));
    // TODO: reaching into the backend execution engine here violates the
    // decoupling between front end and back end and should be reworked.
    // The job configuration is currently needed so that processKill can
    // tell the back end which job id to kill (mJobClient is used only in
    // processKill).
    HExecutionEngine engine = mPigServer.getPigContext().getExecutionEngine();
    mJobConf = engine.getJobConf();
}
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:19,代码来源:GruntParser.java
示例2: ExampleGenerator
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Creates an example generator for the given logical plan, connecting the
 * supplied Pig context and preparing the local execution machinery.
 *
 * @param plan             the logical plan examples are generated for
 * @param hadoopPigContext the context used to connect and compile
 */
public ExampleGenerator(LogicalPlan plan, PigContext hadoopPigContext) {
    this.plan = plan;
    pigContext = hadoopPigContext;
    // Reset FileLocalizer so the (re)connected context starts clean.
    FileLocalizer.setInitialized(false);
    try {
        pigContext.connect();
    } catch (ExecException ex) {
        // Best-effort: log and continue so the generator can still be built.
        log.error("Error connecting to the cluster "
                + ex.getLocalizedMessage());
    }
    execEngine = new HExecutionEngine(pigContext);
    localMRRunner = new LocalMapReduceSimulator();
    poLoadToSchemaMap = new HashMap<POLoad, LogicalSchema>();
}
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:19,代码来源:ExampleGenerator.java
示例3: setParams
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Binds this parser to a {@link PigServer} and snapshots the pieces of its
 * context (file systems, properties, shell, job configuration) used by
 * subsequent grunt commands.
 *
 * @param pigServer the server providing the PigContext
 */
public void setParams(PigServer pigServer)
{
    mPigServer = pigServer;
    mDfs = mPigServer.getPigContext().getDfs();
    mLfs = mPigServer.getPigContext().getLfs();
    mConf = mPigServer.getPigContext().getProperties();
    shell = new FsShell(ConfigurationUtil.toConfiguration(mConf));
    // TODO: this breaks the front-end/back-end abstraction boundary and
    // needs to be changed. The job configuration is fetched here only so
    // that processKill can direct the back end to kill a given job
    // (mJobClient is used solely in processKill).
    HExecutionEngine backend = mPigServer.getPigContext().getExecutionEngine();
    mJobConf = backend.getJobConf();
}
开发者ID:PonIC,项目名称:PonIC,代码行数:19,代码来源:GruntParser.java
示例4: connect
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Initializes the backend execution engine and the distributed/local
 * file-system handles for the configured execution type.
 *
 * @throws ExecException if the execution type is not supported
 */
public void connect() throws ExecException {
    switch (execType) {
        case LOCAL:
        case MAPREDUCE:
        {
            executionEngine = new HExecutionEngine (this);
            executionEngine.init();
            dfs = executionEngine.getDataStorage();
            // The local file system is always reached through the
            // Hadoop "file:///" scheme.
            lfs = new HDataStorage(URI.create("file:///"),
                                    properties);
        }
        break;
        default:
        {
            int errCode = 2040;
            // Fixed typo in the error message: "Unkown" -> "Unknown".
            String msg = "Unknown exec type: " + execType;
            throw new ExecException(msg, errCode, PigException.BUG);
        }
    }
}
开发者ID:kaituo,项目名称:sedge,代码行数:27,代码来源:PigContext.java
示例5: ExampleGenerator
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Builds an example generator over {@code plan}, connecting the given Pig
 * context and setting up the local map-reduce simulator used to run it.
 *
 * @param plan             logical plan to generate examples for
 * @param hadoopPigContext context providing cluster connectivity
 */
public ExampleGenerator(LogicalPlan plan, PigContext hadoopPigContext) {
    this.plan = plan;
    pigContext = hadoopPigContext;
    // Clear any prior FileLocalizer state before (re)connecting.
    FileLocalizer.setInitialized(false);
    try {
        pigContext.connect();
    } catch (ExecException ee) {
        // Connection failure is logged but not fatal to construction.
        log.error("Error connecting to the cluster "
                + ee.getLocalizedMessage());
    }
    execEngine = new HExecutionEngine(pigContext);
    localMRRunner = new LocalMapReduceSimulator();
    poLoadToSchemaMap = new HashMap<POLoad, LogicalSchema>();
}
开发者ID:kaituo,项目名称:sedge,代码行数:21,代码来源:ExampleGenerator.java
示例6: cancel
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Cancels the jobs started by the given paragraph by reflectively pulling
 * the launcher out of the Hadoop execution engine and killing each job id
 * reported by the paragraph's script listener.
 */
@Override
public void cancel(InterpreterContext context) {
    LOGGER.info("Cancel paragraph:" + context.getParagraphId());
    PigScriptListener listener = listenerMap.get(context.getParagraphId());
    if (listener == null) {
        LOGGER.warn("No PigScriptListener found, can not cancel paragraph:"
                + context.getParagraphId());
        return;
    }
    Set<String> jobIds = listener.getJobIds();
    if (jobIds.isEmpty()) {
        LOGGER.info("No job is started, so can not cancel paragraph:" + context.getParagraphId());
    }
    for (String jobId : jobIds) {
        LOGGER.info("Kill jobId:" + jobId);
        HExecutionEngine engine =
                (HExecutionEngine) getPigServer().getPigContext().getExecutionEngine();
        try {
            // The launcher field is private; reflection is the only way in.
            Field launcherField = HExecutionEngine.class.getDeclaredField("launcher");
            launcherField.setAccessible(true);
            Launcher launcher = (Launcher) launcherField.get(engine);
            // It doesn't work for Tez Engine due to PIG-5035
            launcher.killJob(jobId, new Configuration());
        } catch (NoSuchFieldException | BackendException | IllegalAccessException e) {
            LOGGER.error("Fail to cancel paragraph:" + context.getParagraphId(), e);
        }
    }
}
开发者ID:apache,项目名称:zeppelin,代码行数:29,代码来源:BasePigInterpreter.java
示例7: runPlan
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Given a (modified) new logical plan, run the script.
 * @param newPlan plan to run
 * @param jobName name to give the MR jobs associated with this run
 * @return list of exec jobs describing the jobs that were run.
 * @throws FrontendException if plan translation fails.
 * @throws ExecException if running the job fails.
 */
public List<ExecJob> runPlan(LogicalPlan newPlan,
        String jobName) throws FrontendException, ExecException {
    HExecutionEngine backend = new HExecutionEngine(pigContext);
    // Translate logical -> physical, then launch and collect job stats.
    PhysicalPlan physicalPlan = backend.compile(newPlan, null);
    PigStats runStats = launchPlan(physicalPlan, jobName);
    return getJobs(runStats);
}
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:17,代码来源:ToolsPigServer.java
示例8: connect
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Initializes the backend execution engine and the distributed/local
 * file-system handles for the configured execution type (local, Spark,
 * Spark streaming, or MapReduce all share the Hadoop engine).
 *
 * @throws ExecException if the execution type is not supported
 */
public void connect() throws ExecException {
    switch (execType) {
        case LOCAL:
        case SPARK:
        case SPARKSTREAMING:
        case MAPREDUCE:
        {
            executionEngine = new HExecutionEngine (this);
            executionEngine.init();
            dfs = executionEngine.getDataStorage();
            // The local file system is always reached through the
            // Hadoop "file:///" scheme.
            lfs = new HDataStorage(URI.create("file:///"),
                                    properties);
        }
        break;
        default:
        {
            int errCode = 2040;
            // Fixed typo in the error message: "Unkown" -> "Unknown".
            String msg = "Unknown exec type: " + execType;
            throw new ExecException(msg, errCode, PigException.BUG);
        }
    }
}
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:29,代码来源:PigContext.java
示例9: buildPp
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Builds a query's physical plan: parses it into a logical plan, optimizes
 * it, and compiles it through the server's Hadoop execution engine.
 *
 * @param pigServer server supplying the PigContext
 * @param query     Pig Latin query text
 * @return the compiled physical plan
 * @throws Exception if parsing, optimization, or compilation fails
 */
public static PhysicalPlan buildPp(PigServer pigServer, String query)
        throws Exception {
    LogicalPlan logicalPlan = buildLp(pigServer, query);
    logicalPlan.optimize(pigServer.getPigContext());
    HExecutionEngine engine =
            (HExecutionEngine) pigServer.getPigContext().getExecutionEngine();
    return engine.compile(logicalPlan,
            pigServer.getPigContext().getProperties());
}
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:8,代码来源:Util.java
示例10: getExecutionEngine
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine; //导入依赖的package包/类
/**
 * Returns the Hadoop execution engine backing this context.
 *
 * @return the current {@link HExecutionEngine} instance
 */
public HExecutionEngine getExecutionEngine() {
    return this.executionEngine;
}
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:4,代码来源:PigContext.java
注:本文中的org.apache.pig.backend.hadoop.executionengine.HExecutionEngine类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论