/**
 * Copyright (c) 2010 Yahoo! Inc. All rights reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. See accompanying LICENSE file.
 */
package org.apache.oozie.command.wf;

import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;

/**
 * This command is expected to be called when a workflow moves to a terminal
 * state (such as SUCCEEDED, KILLED or FAILED). It primarily removes the
 * temporary directory created for the given workflow id.
 */
public class WfEndCommand extends WorkflowCommand<Void> {

    WorkflowJob job = null;
    private String id;
    private static XLog LOG = XLog.getLog(WfEndCommand.class);

    public WfEndCommand(String id) {
        super("end", "end", 1, XLog.STD);
        this.id = ParamChecker.notEmpty(id, "id");
    }

    @Override
    protected Void call(WorkflowStore store) throws StoreException, CommandException {
        job = store.getWorkflow(id, false);
        LOG.debug("STARTED WfEndCommand " + job.getId());
        deleteWFDir();
        LOG.debug("ENDED WfEndCommand " + job.getId());
        return null;
    }

    /**
     * Delete the workflow's temporary directory (systemId/workflowId under the
     * filesystem home directory), if it exists.
     */
    private void deleteWFDir() throws CommandException {
        FileSystem fs;
        try {
            fs = getAppFileSystem(job);
            String wfDir = Services.get().getSystemId() + "/" + job.getId();
            Path wfDirPath = new Path(fs.getHomeDirectory(), wfDir);
            LOG.debug("WF tmp dir :" + wfDirPath);
            if (fs.exists(wfDirPath)) {
                fs.delete(wfDirPath, true);
            }
            else {
                LOG.debug("Tmp dir doesn't exist :" + wfDirPath);
            }
        }
        catch (Exception e) {
            LOG.error("Unable to delete WF temp dir of wf id :" + job.getId(), e);
            throw new CommandException(ErrorCode.E0819);
        }
    }

    /**
     * Create a FileSystem handle for the workflow's application path, using the
     * workflow's own configuration, user and group.
     */
    protected FileSystem getAppFileSystem(WorkflowJob workflow) throws HadoopAccessorException, IOException,
            URISyntaxException {
        XConfiguration jobConf = new XConfiguration(new StringReader(workflow.getConf()));
        Configuration fsConf = new Configuration();
        XConfiguration.copy(jobConf, fsConf);
        return Services.get().get(HadoopAccessorService.class).createFileSystem(workflow.getUser(),
                workflow.getGroup(), new URI(workflow.getAppPath()), fsConf);
    }
}
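
/*
 * Illustrative usage sketch (not part of the original file): code that detects a
 * workflow reaching a terminal state would typically submit this command for
 * asynchronous execution, for example via the command queue:
 *
 *     Services.get().get(CallableQueueService.class).queue(new WfEndCommand(jobId));
 *
 * Here "jobId" is the id of the finished workflow, and routing through
 * CallableQueueService is an assumption about how commands are dispatched in the
 * surrounding code base; this file itself only defines the command.
 */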