This article collects typical usage examples of the Java class org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext. If you are wondering what PathDeletionContext is for or how to use it, the curated code examples below may help.
PathDeletionContext is a nested class of org.apache.hadoop.mapred.CleanupQueue. Eighteen code examples of the class are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
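Before the examples, here is a minimal, self-contained sketch of the pattern they all share: describe a directory with a PathDeletionContext and hand it to a CleanupQueue, which deletes the path asynchronously on a background thread. This sketch is an illustration only and is not taken from any of the examples below. It assumes the (Path, Configuration) constructor seen in Example 1 (other branches use a (FileSystem, String) constructor instead, as in Example 10), and it is placed in the org.apache.hadoop.mapred package because CleanupQueue is package-private in the MR1 code base; the class name, method name, and path are hypothetical.
package org.apache.hadoop.mapred;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext;

class PathDeletionSketch {
  static void queueStaleDir(Configuration conf) {
    // Hypothetical local directory that is no longer needed on this node.
    Path staleDir = new Path("/tmp/mapred/local/stale-job-dir");

    // The context only records *what* to delete; the CleanupQueue's
    // background thread performs the actual recursive delete later.
    PathDeletionContext context = new PathDeletionContext(staleDir, conf);

    // addToQueue is varargs, so several contexts can be queued in one call.
    new CleanupQueue().addToQueue(context);
  }
}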
Example 1: removeJobFiles
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * This job's files are no longer needed on this TT, remove them.
 *
 * @param user the job owner's username
 * @param jobId the job whose local files should be removed
 * @throws IOException
 */
void removeJobFiles(String user, JobID jobId) throws IOException {
  String userDir = getUserDir(user);
  String jobDir = getLocalJobDir(user, jobId.toString());
  PathDeletionContext jobCleanup =
    new TaskController.DeletionContext(getTaskController(), false, user,
                                       jobDir.substring(userDir.length()));
  directoryCleanupThread.addToQueue(jobCleanup);

  for (String str : localStorage.getDirs()) {
    Path ttPrivateJobDir = FileSystem.getLocal(fConf).makeQualified(
        new Path(str, TaskTracker.getPrivateDirForJob(user, jobId.toString())));
    PathDeletionContext ttPrivateJobCleanup =
      new CleanupQueue.PathDeletionContext(ttPrivateJobDir, fConf);
    directoryCleanupThread.addToQueue(ttPrivateJobCleanup);
  }
}
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 23, Source file: TaskTracker.java
Example 2: removeTaskFiles
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Some or all of the files from this task are no longer required. Remove
 * them via CleanupQueue.
 *
 * @param removeOutputs if true, remove the entire local task directory
 *                      (including outputs); otherwise remove only the
 *                      task's work directory
 * @throws IOException
 */
void removeTaskFiles(boolean removeOutputs) throws IOException {
  if (localJobConf.getNumTasksToExecutePerJvm() == 1) {
    String user = ugi.getShortUserName();
    int userDirLen = TaskTracker.getUserDir(user).length();
    String jobId = task.getJobID().toString();
    String taskId = task.getTaskID().toString();
    boolean cleanup = task.isTaskCleanupTask();
    String taskDir;
    if (!removeOutputs) {
      taskDir = TaskTracker.getTaskWorkDir(user, jobId, taskId, cleanup);
    } else {
      taskDir = TaskTracker.getLocalTaskDir(user, jobId, taskId, cleanup);
    }
    PathDeletionContext item =
      new TaskController.DeletionContext(taskController, false, user,
                                         taskDir.substring(userDirLen));
    directoryCleanupThread.addToQueue(item);
  }
}
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 28, Source file: TaskTracker.java
Example 3: addToQueue
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
@Override
public void addToQueue(PathDeletionContext... contexts) {
  // delete paths in-line
  for (PathDeletionContext context : contexts) {
    Exception exc = null;
    try {
      if (!deletePath(context)) {
        LOG.warn("Stale path " + context.fullPath);
        stalePaths.add(context.fullPath);
      }
    } catch (IOException e) {
      exc = e;
    } catch (InterruptedException ie) {
      exc = ie;
    }
    if (exc != null) {
      LOG.warn("Caught exception while deleting path " + context.fullPath);
      LOG.info(StringUtils.stringifyException(exc));
      stalePaths.add(context.fullPath);
    }
  }
}
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 24, Source file: UtilsForTests.java
Example 4: deletePathsInSecureCluster
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
private void deletePathsInSecureCluster(String newPathName, FileStatus status)
    throws FileNotFoundException, IOException {
  // In a secure tasktracker, the subdirectories belong to different users
  PathDeletionContext item = null;
  // iterate and queue subdirectories for cleanup
  for (FileStatus subDirStatus : localFileSystem.listStatus(status.getPath())) {
    String owner = subDirStatus.getOwner();
    String path = subDirStatus.getPath().getName();
    if (path.equals(owner)) {
      // add it to the cleanup queue
      item =
        new TaskController.DeletionContext(taskController, false, owner,
            newPathName + Path.SEPARATOR_CHAR + path);
      cleanupQueue.addToQueue(item);
    }
  }
  // queue the parent directory for cleanup
  item =
    new TaskController.DeletionContext(taskController, false,
        status.getOwner(), newPathName);
  cleanupQueue.addToQueue(item);
}
Developer ID: karahiyo, Project: hanoi-hadoop-2.0.0-cdh, Lines of code: 26, Source file: MRAsyncDiskService.java
Example 5: enableTaskForCleanup
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Enables the task for cleanup by changing permissions of the specified path
 * in the local filesystem.
 */
@Override
void enableTaskForCleanup(PathDeletionContext context)
    throws IOException {
  if (context instanceof TaskControllerTaskPathDeletionContext) {
    TaskControllerTaskPathDeletionContext tContext =
      (TaskControllerTaskPathDeletionContext) context;
    enablePathForCleanup(tContext,
        TaskControllerCommands.ENABLE_TASK_FOR_CLEANUP,
        buildTaskCleanupArgs(tContext));
  } else {
    throw new IllegalArgumentException("PathDeletionContext provided is not "
        + "TaskControllerTaskPathDeletionContext.");
  }
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 20, Source file: LinuxTaskController.java
Example 6: deleteLogPath
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Deletes the log path.
 *
 * This path will be removed through {@link CleanupQueue}.
 *
 * @param logPath the user log path to delete
 * @throws IOException
 */
private void deleteLogPath(String logPath) throws IOException {
  LOG.info("Deleting user log path " + logPath);
  String user = getLogUser(logPath);
  TaskController controller = userLogManager.getTaskController();
  PathDeletionContext item =
    new TaskController.DeletionContext(controller, true, user, logPath);
  cleanupQueue.addToQueue(item);
}
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 17, Source file: UserLogCleaner.java
Example 7: deletePath
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
static boolean deletePath(PathDeletionContext context)
    throws IOException, InterruptedException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Trying to delete " + context.fullPath);
  }
  // FileSystem fs = context.fullPath.getFileSystem(context.conf);
  // if (fs.exists(context.fullPath)) {
  //   return fs.delete(context.fullPath, true);
  // }
  context.deletePath();
  return true;
}
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 13, Source file: UtilsForTests.java
Example 8: enableTaskForCleanup
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Enables the task for cleanup by changing permissions of the specified path
 * in the local filesystem.
 */
@Override
void enableTaskForCleanup(PathDeletionContext context)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Going to do " + TaskCommands.ENABLE_TASK_FOR_CLEANUP.toString()
        + " for " + context.fullPath);
  }
  if (context instanceof TaskControllerPathDeletionContext) {
    TaskControllerPathDeletionContext tContext =
      (TaskControllerPathDeletionContext) context;
    if (tContext.task.getUser() != null &&
        tContext.fs instanceof LocalFileSystem) {
      try {
        runCommand(TaskCommands.ENABLE_TASK_FOR_CLEANUP,
            tContext.task.getUser(),
            buildTaskCleanupArgs(tContext), null, null);
      } catch (IOException e) {
        LOG.warn("Unable to change permissions for " + tContext.fullPath);
      }
    } else {
      throw new IllegalArgumentException("Either user is null or the " +
          "file system is not local file system.");
    }
  } else {
    throw new IllegalArgumentException("PathDeletionContext provided is not "
        + "TaskControllerPathDeletionContext.");
  }
}
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 37, Source file: LinuxTaskController.java
Example 9: enableTaskForCleanup
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Enables the task for cleanup by changing permissions of the specified path
 * in the local filesystem.
 */
@Override
void enableTaskForCleanup(PathDeletionContext context)
    throws IOException {
  try {
    FileUtil.chmod(context.fullPath, "a+rwx", true);
  } catch (InterruptedException e) {
    LOG.warn("Interrupted while setting permissions for " + context.fullPath +
        " for deletion.");
  } catch (IOException ioe) {
    LOG.warn("Unable to change permissions of " + context.fullPath);
  }
}
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 17, Source file: DefaultTaskController.java
Example 10: buildPathDeletionContexts
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
private static PathDeletionContext[] buildPathDeletionContexts(FileSystem fs,
    Path[] paths) {
  int i = 0;
  PathDeletionContext[] contexts = new PathDeletionContext[paths.length];
  for (Path p : paths) {
    contexts[i++] = new PathDeletionContext(fs, p.toUri().getPath());
  }
  return contexts;
}
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 11, Source file: TaskTracker.java
Example 11: buildTaskControllerPathDeletionContexts
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
static PathDeletionContext[] buildTaskControllerPathDeletionContexts(
    FileSystem fs, Path[] paths, Task task, boolean isWorkDir,
    TaskController taskController)
    throws IOException {
  int i = 0;
  PathDeletionContext[] contexts =
    new TaskControllerPathDeletionContext[paths.length];
  for (Path p : paths) {
    contexts[i++] = new TaskControllerPathDeletionContext(fs, p, task,
        isWorkDir, taskController);
  }
  return contexts;
}
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 15, Source file: TaskTracker.java
Example 12: garbageCollect
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * The job is dead. We're now GC'ing it, getting rid of the job
 * from all tables. Be sure to remove all of this job's tasks
 * from the various tables.
 */
void garbageCollect() {
  synchronized (this) {
    // Cancel task tracker reservation
    cancelReservedSlots();

    // Let the JobTracker know that a job is complete
    jobtracker.getInstrumentation().decWaitingMaps(getJobID(), pendingMaps());
    jobtracker.getInstrumentation().decWaitingReduces(getJobID(), pendingReduces());
    jobtracker.storeCompletedJob(this);
    jobtracker.finalizeJob(this);

    try {
      // Definitely remove the local-disk copy of the job file
      if (localJobFile != null) {
        localFs.delete(localJobFile, true);
        localJobFile = null;
      }
      Path tempDir = jobtracker.getSystemDirectoryForJob(getJobID());
      new CleanupQueue().addToQueue(new PathDeletionContext(
          jobtracker.getFileSystem(), tempDir.toUri().getPath()));
    } catch (IOException e) {
      LOG.warn("Error cleaning up " + profile.getJobID() + ": " + e);
    }

    // free up the memory used by the data structures
    this.nonRunningMapCache = null;
    this.runningMapCache = null;
    this.nonRunningReduces = null;
    this.runningReduces = null;
  }

  // remove the job's delegation tokens
  if (conf.getBoolean(MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN, true)) {
    DelegationTokenRenewal.removeDelegationTokenRenewalForJob(jobId);
  } // else don't remove them; they may be used by spawned tasks
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 44, Source file: JobInProgress.java
Example 13: enableJobForCleanup
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Enables the job for cleanup by changing permissions of the specified path
 * in the local filesystem.
 */
@Override
void enableJobForCleanup(PathDeletionContext context)
    throws IOException {
  if (context instanceof TaskControllerJobPathDeletionContext) {
    TaskControllerJobPathDeletionContext tContext =
      (TaskControllerJobPathDeletionContext) context;
    enablePathForCleanup(tContext,
        TaskControllerCommands.ENABLE_JOB_FOR_CLEANUP,
        buildJobCleanupArgs(tContext));
  } else {
    throw new IllegalArgumentException("PathDeletionContext provided is not "
        + "TaskControllerJobPathDeletionContext.");
  }
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 19, Source file: LinuxTaskController.java
Example 14: enablePathForCleanup
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Enables the path for cleanup by changing permissions of the specified path
 * in the local filesystem.
 */
private void enablePathForCleanup(PathDeletionContext context)
    throws IOException {
  try {
    FileUtil.chmod(context.fullPath, "u+rwx", true);
  } catch (InterruptedException e) {
    LOG.warn("Interrupted while setting permissions for " + context.fullPath +
        " for deletion.");
  } catch (IOException ioe) {
    LOG.warn("Unable to change permissions of " + context.fullPath);
  }
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 16, Source file: DefaultTaskController.java
Example 15: buildPathDeletionContexts
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Builds a list of PathDeletionContext objects for the given paths.
 */
private static PathDeletionContext[] buildPathDeletionContexts(FileSystem fs,
    Path[] paths) {
  int i = 0;
  PathDeletionContext[] contexts = new PathDeletionContext[paths.length];
  for (Path p : paths) {
    contexts[i++] = new PathDeletionContext(fs, p.toUri().getPath());
  }
  return contexts;
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 14, Source file: TaskTracker.java
Example 16: buildTaskControllerJobPathDeletionContexts
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Builds a list of {@link TaskControllerJobPathDeletionContext} objects for a
 * job, each pointing to the job's jobLocalDir.
 * @param fs : FileSystem in which the dirs are to be deleted
 * @param paths : mapred-local-dirs
 * @param id : {@link JobID} of the job for which the local-dir needs to
 *             be cleaned up.
 * @param user : Job owner's username
 * @param taskController : the task-controller to be used for deletion of
 *                         jobLocalDir
 */
static PathDeletionContext[] buildTaskControllerJobPathDeletionContexts(
    FileSystem fs, Path[] paths, JobID id, String user,
    TaskController taskController)
    throws IOException {
  int i = 0;
  PathDeletionContext[] contexts =
    new TaskControllerPathDeletionContext[paths.length];
  for (Path p : paths) {
    contexts[i++] = new TaskControllerJobPathDeletionContext(fs, p, id, user,
        taskController);
  }
  return contexts;
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 26, Source file: TaskTracker.java
Example 17: buildTaskControllerTaskPathDeletionContexts
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * Builds a list of TaskControllerTaskPathDeletionContext objects for a task.
 * @param fs : FileSystem in which the dirs are to be deleted
 * @param paths : mapred-local-dirs
 * @param task : the task whose taskDir or taskWorkDir is going to be deleted
 * @param isWorkDir : whether the dir to be deleted is the workDir or the taskDir
 * @param taskController : the task-controller to be used for deletion of
 *                         taskDir or taskWorkDir
 */
static PathDeletionContext[] buildTaskControllerTaskPathDeletionContexts(
    FileSystem fs, Path[] paths, Task task, boolean isWorkDir,
    TaskController taskController)
    throws IOException {
  int i = 0;
  PathDeletionContext[] contexts =
    new TaskControllerPathDeletionContext[paths.length];
  for (Path p : paths) {
    contexts[i++] = new TaskControllerTaskPathDeletionContext(fs, p, task,
        isWorkDir, taskController);
  }
  return contexts;
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 24, Source file: TaskTracker.java
Example 18: removeJobFiles
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext; // import the required package/class
/**
 * This job's files are no longer needed on this TT, remove them.
 *
 * @param user the job owner's username
 * @param jobId the job whose local files should be removed
 * @throws IOException
 */
void removeJobFiles(String user, JobID jobId)
    throws IOException {
  PathDeletionContext[] contexts =
    buildTaskControllerJobPathDeletionContexts(localFs,
        getLocalFiles(fConf, ""), jobId, user, taskController);
  directoryCleanupThread.addToQueue(contexts);
}
Developer ID: rekhajoshm, Project: mapreduce-fork, Lines of code: 14, Source file: TaskTracker.java
Note: The org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext examples in this article are collected from source code and documentation hosted on platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their respective authors; the copyright of the source code belongs to the original authors, and its distribution and use are subject to the corresponding projects' licenses. Please do not reproduce without permission.