java/code/src/com/redhat/rhn/taskomatic/InvalidJobLabelException.java | 4 java/code/src/com/redhat/rhn/taskomatic/TaskoBunch.java | 76 -- java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java | 65 + java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml | 50 + java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java | 365 ++++++++++ java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java | 140 ++- java/code/src/com/redhat/rhn/taskomatic/core/RhnJobFactory.java | 12 java/code/src/com/redhat/rhn/taskomatic/task/RhnJob.java | 30 schema/spacewalk/common/tables/rhnTaskoSchedule.sql | 38 + 9 files changed, 656 insertions(+), 124 deletions(-)
New commits: commit f0e92f8a609575e42211e089a2bba1e35fc89d5f Author: Tomas Lestach tlestach@redhat.com Date: Wed Jun 16 19:25:57 2010 +0200
job creation changed
diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java index 130dcc6..4b82d34 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java @@ -19,6 +19,7 @@ import org.hibernate.Hibernate; import org.quartz.Job; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; +import org.quartz.Trigger;
import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -55,14 +56,14 @@ public class TaskoSchedule implements Job { }
public TaskoSchedule(Integer orgIdIn, TaskoBunch bunchIn, - String jobLabelIn, Map dataIn, Date activeFromIn, Date activeTillIn) { + String jobLabelIn, Map dataIn, Trigger trigger) { setOrgId(orgIdIn); setBunch(bunchIn); setJobLabel(jobLabelIn); data = serializeMap(dataIn); setActive(TASKO_SCHEDULE_ACTIVE); - setActiveFrom(activeFromIn); - setActiveTill(activeTillIn); + setActiveFrom(trigger.getStartTime()); + setActiveTill(trigger.getEndTime()); }
static { diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java index 986f0c0..34a641f 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java @@ -16,6 +16,7 @@ package com.redhat.rhn.taskomatic;
import com.redhat.rhn.taskomatic.core.SchedulerKernel;
+import org.hibernate.HibernateException; import org.quartz.CronTrigger; import org.quartz.JobDetail; import org.quartz.SchedulerException; @@ -50,20 +51,32 @@ public class TaskoXmlRpcHandler { Date startTime, Date endTime, String cronExpression, Map params) throws InvalidJobLabelException, NoSuchBunchTaskException, ParseException { try { + TaskoBunch bunch = doBasicCheck(orgId, bunchName, jobLabel); // create trigger CronTrigger ct = new CronTrigger(jobLabel, orgId.toString(), cronExpression); - if (startTime != null) { + if (startTime == null) { ct.setStartTime(startTime); } + else { + ct.setStartTime(new Date()); + } if (endTime != null) { ct.setEndTime(endTime); } + // create schedule + TaskoSchedule schedule = null; + try { + schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, ct); + TaskoFactory.save(schedule); + TaskoFactory.commitTransaction(); + } + catch (HibernateException he) { + TaskoFactory.rollbackTransaction(); + return null; + } // create job - JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, - startTime, endTime); - // schedule job - return SchedulerKernel.getScheduler().scheduleJob(jobDetail, ct); + return createJob(schedule, ct); } catch (SchedulerException e) { return null; @@ -77,45 +90,58 @@ public class TaskoXmlRpcHandler { params); }
- public int unscheduleBunch(Integer orgId, String jobLabel) - throws NoSuchTaskoTriggerException { + private TaskoBunch doBasicCheck(Integer orgId, String bunchName, + String jobLabel) + throws NoSuchBunchTaskException, SchedulerException, + InvalidJobLabelException { + TaskoBunch bunch = checkBunchName(bunchName); + if (!checkUniqueName(jobLabel, orgId.toString())) { + throw new InvalidJobLabelException("jobLabel already in use"); + } + return bunch; + } + + public Integer unscheduleBunch(Integer orgId, String jobLabel) { + /* try { Trigger trigger = SchedulerKernel.getScheduler().getTrigger( jobLabel, orgId.toString()); - if (false) { - Trigger newTrigger = (Trigger) trigger.clone(); - newTrigger.setEndTime(new Date()); - SchedulerKernel.getScheduler().rescheduleJob(jobLabel, orgId.toString(), - newTrigger); - } - else { - SchedulerKernel.getScheduler().unscheduleJob(jobLabel, orgId.toString()); - TaskoSchedule schedule = - TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); - Map map = schedule.getDataMap(); - schedule.unschedule(); - - // TaskoFactory.save(schedule); - TaskoFactory.commitTransaction(); - } - return 1; + Trigger newTrigger = (Trigger) trigger.clone(); + newTrigger.setEndTime(new Date()); + SchedulerKernel.getScheduler().rescheduleJob(jobLabel, orgId.toString(), + newTrigger); } catch (SchedulerException e) { - throw new NoSuchTaskoTriggerException(); + throw new NoSuchTaskoTriggerException(); } + */ + TaskoSchedule schedule = + TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); + schedule.unschedule(); + TaskoFactory.commitTransaction(); + return destroyJob(schedule); }
public Date scheduleSingleBunchRun(Integer orgId, String bunchName, String jobLabel, Map params, Date start) throws InvalidJobLabelException, NoSuchBunchTaskException { try { + TaskoBunch bunch = doBasicCheck(orgId, bunchName, jobLabel); SimpleTrigger st = new SimpleTrigger(jobLabel, orgId.toString(), 1, 1); st.setEndTime(new Date()); - JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, - start, st.getEndTime()); - - // schedule job - return SchedulerKernel.getScheduler().scheduleJob(jobDetail, st); + // create schedule + TaskoSchedule schedule = null; + try { + schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, st); + TaskoFactory.save(schedule); + TaskoFactory.commitTransaction(); + } + catch (HibernateException he) { + TaskoFactory.rollbackTransaction(); + return null; + } + // create job + return createJob(schedule, st); } catch (SchedulerException e) { return null; @@ -128,29 +154,41 @@ public class TaskoXmlRpcHandler { return scheduleSingleBunchRun(orgId, bunchName, jobLabel, params, new Date()); }
- private JobDetail createJob(String bunchName, Integer orgId, - String jobLabel, Map params, Date start, Date end) - throws SchedulerException, InvalidJobLabelException, NoSuchBunchTaskException { - if (!checkUniqueName(jobLabel, orgId.toString())) { - throw new InvalidJobLabelException("jobLabel already in use"); - } - TaskoBunch bunch = TaskoFactory.lookupOrgBunchByName(bunchName); - if (bunch == null) { - throw new NoSuchBunchTaskException(bunchName); - } + private Date createJob(TaskoSchedule schedule, Trigger trigger) { // create job - JobDetail jobDetail = new JobDetail(jobLabel, orgId.toString(), + JobDetail jobDetail = new JobDetail(schedule.getJobLabel(), schedule.getOrgId().toString(), TaskoSchedule.class); - TaskoSchedule schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, - start, end); - TaskoFactory.save(schedule); - TaskoFactory.commitTransaction(); // set job params - jobDetail.getJobDataMap().putAll(params); - jobDetail.getJobDataMap().put("org_id", orgId); + jobDetail.getJobDataMap().putAll(schedule.getDataMap()); jobDetail.getJobDataMap().put("schedule_id", schedule.getId());
- return jobDetail; + // schedule job + try { + return SchedulerKernel.getScheduler().scheduleJob(jobDetail, trigger); + } + catch (SchedulerException e) { + return null; + } + } + + private Integer destroyJob(TaskoSchedule schedule) { + try { + SchedulerKernel.getScheduler().unscheduleJob(schedule.getJobLabel(), + schedule.getOrgId().toString()); + return 1; + } + catch (SchedulerException e) { + return null; + } + } + + private TaskoBunch checkBunchName(String bunchName) + throws NoSuchBunchTaskException { + TaskoBunch bunch = TaskoFactory.lookupOrgBunchByName(bunchName); + if (bunch == null) { + throw new NoSuchBunchTaskException(bunchName); + } + return bunch; }
public int listBunchRuns(Integer orgId, String triggerName)
commit ad1cf3d60bc02275919066a73ec207bd2379fb49 Author: Tomas Lestach tlestach@redhat.com Date: Wed Jun 16 18:32:42 2010 +0200
move main task execution to TaskoSchedule
diff --git a/java/code/src/com/redhat/rhn/taskomatic/InvalidJobLabelException.java b/java/code/src/com/redhat/rhn/taskomatic/InvalidJobLabelException.java index fdb21b9..b6dd32e 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/InvalidJobLabelException.java +++ b/java/code/src/com/redhat/rhn/taskomatic/InvalidJobLabelException.java @@ -17,6 +17,10 @@ package com.redhat.rhn.taskomatic;
public class InvalidJobLabelException extends Exception {
+ public InvalidJobLabelException(String message) { + super(message); + } + /** * Comment for <code>serialVersionUID</code> */ diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoBunch.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoBunch.java index 2615117..20872d9 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoBunch.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoBunch.java @@ -14,26 +14,16 @@ */ package com.redhat.rhn.taskomatic;
-import org.apache.log4j.Logger; -import org.quartz.Job; -import org.quartz.JobDataMap; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; - import java.util.ArrayList; import java.util.Date; -import java.util.HashMap; import java.util.List; -import java.util.Map;
/** * TaskoBunch * @version $Rev$ */ -public class TaskoBunch implements Job { +public class TaskoBunch {
- private static Logger log = Logger.getLogger(TaskoBunch.class); - private static Map<String, Integer> tasks = new HashMap(); private Long id; private String name; private String description; @@ -45,70 +35,6 @@ public class TaskoBunch implements Job { private Date created; private Date modified;
- static { - for (TaskoTask task : TaskoFactory.listTasks()) { - tasks.put(task.getName(), 0); - } - } - - /** - * {@inheritDoc} - */ - public void execute(JobExecutionContext context) - throws JobExecutionException { - JobDataMap dataMap = context.getJobDetail().getJobDataMap(); - Integer orgId = dataMap.getInt("org_id"); - String jobLabel = dataMap.getString("job_label"); - TaskoRun previousRun = null; - - log.info("Starting " + this.name + " (" + jobLabel + ") at " + new Date()); - - for (TaskoTemplate template : this.templates) { - if ((previousRun == null) || - (previousRun.getStatus() == template.getStartIf())) { - - while (isTaskRunning(template.getTask())) { - log.info("Task " + template.getTask().getName() + - " currently executing. Sleeping for 10 secs."); - TaskoFactory.sleep(10000); - } - markTaskRunning(template.getTask()); - TaskoRun taskRun = new TaskoRun(orgId, template, jobLabel); - taskRun.execute(context); - unmarkTaskRunning(template.getTask()); - log.debug(template.getTask().getName() + " ... " + taskRun.getStatus()); - previousRun = taskRun; - } - else { - log.info("Interrupting " + this.name + " (" + jobLabel + ")"); - break; - } - } - TaskoFactory.commitTransaction(); - - log.info("Finishing " + this.name + " (" + jobLabel + ") at " + new Date()); - } - - private boolean isTaskRunning(TaskoTask task) { - return tasks.get(task.getName()) > 0; - } - - private void markTaskRunning(TaskoTask task) { - synchronized (getClass()) { - int count = tasks.get(task.getName()); - count++; - tasks.put(task.getName(), count); - } - } - - private void unmarkTaskRunning(TaskoTask task) { - synchronized (getClass()) { - int count = tasks.get(task.getName()); - count--; - tasks.put(task.getName(), count); - } - } - /** * @return Returns the id. 
*/ diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java index 469621b..1758287 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java @@ -23,6 +23,8 @@ import org.quartz.SchedulerException; import org.quartz.Trigger;
import java.io.File; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -160,6 +162,56 @@ public class TaskoFactory extends HibernateFactory { params.put("org_id", orgId); params.put("job_label", jobLabel); return (TaskoSchedule) singleton.lookupObjectByNamedQuery( - "TaskoSchedule.findActiveScheduleByOrgAndLabel", params); + "TaskoSchedule.findActiveScheduleByOrgAndLabel", params); + } + + public static TaskoSchedule lookupScheduleById(Long scheduleId) { + Map params = new HashMap(); + params.put("schedule_id", scheduleId); + return (TaskoSchedule) singleton.lookupObjectByNamedQuery( + "TaskoSchedule.lookupById", params); + } + + public static TaskoSchedule listSchedulesByOrgAndBunch(Integer orgId, TaskoBunch bunch) { + Map params = new HashMap(); + params.put("org_id", orgId); + params.put("bunch_id", bunch.getId()); + return (TaskoSchedule) singleton.listObjectsByNamedQuery( + "TaskoSchedule.listByOrgAndBunch", params); + } + + public static TaskoSchedule listSchedulesByOrgAndLabel(Integer orgId, String jobLabel) { + Map params = new HashMap(); + params.put("org_id", orgId); + params.put("job_label", jobLabel); + return (TaskoSchedule) singleton.listObjectsByNamedQuery( + "TaskoSchedule.listByOrgAndLabel", params); + } + + public static Boolean isTaskParalelizable(TaskoTask task) { + Class taskClass; + try { + taskClass = Class.forName(task.getTaskClass()); + Method isParallelizableMethod = taskClass.getMethod("isParallelizable", null); + return (Boolean) isParallelizableMethod.invoke(null, null); + } + catch (ClassNotFoundException e) { + return false; + } + catch (SecurityException e) { + return false; + } + catch (NoSuchMethodException e) { + return false; + } + catch (IllegalArgumentException e) { + return false; + } + catch (IllegalAccessException e) { + return false; + } + catch (InvocationTargetException e) { + return false; + } } } diff --git 
a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml index 314bb1f..7ff114f 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml @@ -31,8 +31,16 @@ PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
</class>
- <query name="TaskoSchedule.listByBunch"> - <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE bunch.id = :bunch_id]]> + <query name="TaskoSchedule.lookupById"> + <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE id = :schedule_id]]> + </query> + + <query name="TaskoSchedule.listByOrgAndLabel"> + <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE org_id = :org_id AND jobLabel = :job_label]]> + </query> + + <query name="TaskoSchedule.listByOrgAndBunch"> + <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE orgId = :org_id AND bunch.id = :bunch_id]]> </query>
<query name="TaskoSchedule.findActiveScheduleByOrgAndLabel"> diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java index 51719de..130dcc6 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java @@ -1,5 +1,5 @@ /** - * Copyright (c) 2008 Red Hat, Inc. + * Copyright (c) 2010 Red Hat, Inc. * * This software is licensed to you under the GNU General Public License, * version 2 (GPLv2). There is NO WARRANTY for this software, express or @@ -14,7 +14,11 @@ */ package com.redhat.rhn.taskomatic;
+import org.apache.log4j.Logger; import org.hibernate.Hibernate; +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException;
import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -25,12 +29,15 @@ import java.sql.Blob; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; +import java.util.HashMap; import java.util.List; import java.util.Map;
-public class TaskoSchedule { - public static String TASKO_SCHEDULE_ACTIVE = "Y"; +public class TaskoSchedule implements Job { + private static final String TASKO_SCHEDULE_ACTIVE = "Y"; + private static Logger log = Logger.getLogger(TaskoBunch.class); + private static Map<String, Integer> tasks = new HashMap<String, Integer>();
private Long id; private String jobLabel; @@ -40,7 +47,7 @@ public class TaskoSchedule { private Date activeFrom; private Date activeTill; private byte[] data; - private List<TaskoRun> runs = new ArrayList(); + private List<TaskoRun> runs = new ArrayList<TaskoRun>(); private Date created; private Date modified;
@@ -58,6 +65,68 @@ public class TaskoSchedule { setActiveTill(activeTillIn); }
+ static { + for (TaskoTask task : TaskoFactory.listTasks()) { + tasks.put(task.getName(), 0); + } + } + + private boolean isTaskRunning(TaskoTask task) { + return tasks.get(task.getName()) > 0; + } + + private void markTaskRunning(TaskoTask task) { + synchronized (getClass()) { + int count = tasks.get(task.getName()); + count++; + tasks.put(task.getName(), count); + } + } + + private void unmarkTaskRunning(TaskoTask task) { + synchronized (getClass()) { + int count = tasks.get(task.getName()); + count--; + tasks.put(task.getName(), count); + } + } + + /** + * {@inheritDoc} + */ + public void execute(JobExecutionContext context) + throws JobExecutionException { + TaskoRun previousRun = null; + + log.info("Starting " + bunch.getName() + " (" + jobLabel + ") at " + new Date()); + + for (TaskoTemplate template : bunch.getTemplates()) { + if ((previousRun == null) || + (previousRun.getStatus() == template.getStartIf())) { + + if (!TaskoFactory.isTaskParalelizable(template.getTask())) { + while (isTaskRunning(template.getTask())) { + log.info("Task " + template.getTask().getName() + + " currently executing. Sleeping for 10 secs."); + TaskoFactory.sleep(10000); + } + } + markTaskRunning(template.getTask()); + TaskoRun taskRun = new TaskoRun(this.orgId, template, this.jobLabel); + taskRun.execute(context); + unmarkTaskRunning(template.getTask()); + log.debug(template.getTask().getName() + " ... " + taskRun.getStatus()); + previousRun = taskRun; + } + else { + log.info("Interrupting " + bunch.getName() + " (" + jobLabel + ")"); + break; + } + } + TaskoFactory.commitTransaction(); + + log.info("Finishing " + bunch.getName() + " (" + jobLabel + ") at " + new Date()); + }
public void unschedule() { setActiveTill(new Date()); @@ -104,23 +173,26 @@ public class TaskoSchedule { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { return toByteArrayImpl(fromImageBlob, baos); - } catch (Exception e) { + } + catch (Exception e) { + // return null } return null; }
private byte[] toByteArrayImpl(Blob fromImageBlob, ByteArrayOutputStream baos) throws SQLException, IOException { - byte buf[] = new byte[4000]; + byte[] buf = new byte[4000]; int dataSize; InputStream is = fromImageBlob.getBinaryStream();
try { - while((dataSize = is.read(buf)) != -1) { + while ((dataSize = is.read(buf)) != -1) { baos.write(buf, 0, dataSize); } - } finally { - if(is != null) { + } + finally { + if (is != null) { is.close(); } } diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java index 80c1205..986f0c0 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java @@ -60,7 +60,8 @@ public class TaskoXmlRpcHandler { ct.setEndTime(endTime); } // create job - JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, startTime, endTime); + JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, + startTime, endTime); // schedule job return SchedulerKernel.getScheduler().scheduleJob(jobDetail, ct); } @@ -84,11 +85,13 @@ public class TaskoXmlRpcHandler { if (false) { Trigger newTrigger = (Trigger) trigger.clone(); newTrigger.setEndTime(new Date()); - SchedulerKernel.getScheduler().rescheduleJob(jobLabel, orgId.toString(), newTrigger); + SchedulerKernel.getScheduler().rescheduleJob(jobLabel, orgId.toString(), + newTrigger); } else { SchedulerKernel.getScheduler().unscheduleJob(jobLabel, orgId.toString()); - TaskoSchedule schedule = TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); + TaskoSchedule schedule = + TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); Map map = schedule.getDataMap(); schedule.unschedule();
@@ -129,7 +132,7 @@ public class TaskoXmlRpcHandler { String jobLabel, Map params, Date start, Date end) throws SchedulerException, InvalidJobLabelException, NoSuchBunchTaskException { if (!checkUniqueName(jobLabel, orgId.toString())) { - throw new InvalidJobLabelException(); + throw new InvalidJobLabelException("jobLabel already in use"); } TaskoBunch bunch = TaskoFactory.lookupOrgBunchByName(bunchName); if (bunch == null) { @@ -137,28 +140,23 @@ public class TaskoXmlRpcHandler { } // create job JobDetail jobDetail = new JobDetail(jobLabel, orgId.toString(), - TaskoBunch.class); - // set job params - jobDetail.getJobDataMap().putAll(params); - jobDetail.getJobDataMap().put("org_id", orgId); - jobDetail.getJobDataMap().put("bunch_name", bunchName); - jobDetail.getJobDataMap().put("job_label", jobLabel); + TaskoSchedule.class); TaskoSchedule schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, start, end); TaskoFactory.save(schedule); TaskoFactory.commitTransaction(); + // set job params + jobDetail.getJobDataMap().putAll(params); + jobDetail.getJobDataMap().put("org_id", orgId); + jobDetail.getJobDataMap().put("schedule_id", schedule.getId()); + return jobDetail; }
public int listBunchRuns(Integer orgId, String triggerName) - throws NoSuchTaskoTriggerException { - try { - return SchedulerKernel.getScheduler().unscheduleJob(triggerName, - orgId.toString()) ? 1 : 0; - } - catch (SchedulerException e) { - throw new NoSuchTaskoTriggerException(); - } + throws SchedulerException { + return SchedulerKernel.getScheduler().unscheduleJob(triggerName, + orgId.toString()) ? 1 : 0; }
public int clearRunHistory(Integer orgId, Date limitTime) { diff --git a/java/code/src/com/redhat/rhn/taskomatic/core/RhnJobFactory.java b/java/code/src/com/redhat/rhn/taskomatic/core/RhnJobFactory.java index 4406c5d..4886aae 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/core/RhnJobFactory.java +++ b/java/code/src/com/redhat/rhn/taskomatic/core/RhnJobFactory.java @@ -15,8 +15,8 @@ package com.redhat.rhn.taskomatic.core;
import com.redhat.rhn.common.hibernate.HibernateRuntimeException; -import com.redhat.rhn.taskomatic.TaskoBunch; import com.redhat.rhn.taskomatic.TaskoFactory; +import com.redhat.rhn.taskomatic.TaskoSchedule;
import org.quartz.Job; import org.quartz.SchedulerException; @@ -35,16 +35,16 @@ public class RhnJobFactory implements JobFactory { * {@inheritDoc} */ public synchronized Job newJob(TriggerFiredBundle trigger) throws SchedulerException { - TaskoBunch bunch = null; - String bunchName = trigger.getJobDetail().getJobDataMap().getString("bunch_name"); + TaskoSchedule schedule = null; + Long scheduleId = trigger.getJobDetail().getJobDataMap().getLong("schedule_id");
try { - bunch = TaskoFactory.lookupOrgBunchByName(bunchName); + schedule = TaskoFactory.lookupScheduleById(scheduleId); } catch (HibernateRuntimeException re) { - throw new SchedulerException("No such bunch task " + bunchName); + throw new SchedulerException("No such schedule with id " + scheduleId); }
- return bunch; + return schedule; } } diff --git a/java/code/src/com/redhat/rhn/taskomatic/task/RhnJob.java b/java/code/src/com/redhat/rhn/taskomatic/task/RhnJob.java new file mode 100644 index 0000000..9d39292 --- /dev/null +++ b/java/code/src/com/redhat/rhn/taskomatic/task/RhnJob.java @@ -0,0 +1,30 @@ +/** + * Copyright (c) 2010 Red Hat, Inc. + * + * This software is licensed to you under the GNU General Public License, + * version 2 (GPLv2). There is NO WARRANTY for this software, express or + * implied, including the implied warranties of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 + * along with this software; if not, see + * http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. + * + * Red Hat trademarks are not licensed under GPLv2. No permission is + * granted to use or replicate Red Hat trademarks that are incorporated + * in this software or its documentation. + */ +package com.redhat.rhn.taskomatic.task; + +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; + + +public abstract class RhnJob implements Job { + + public static boolean isParallelizable() { + return false; + } + + public abstract void execute(JobExecutionContext context) + throws JobExecutionException; +}
commit 01d1dd677e3ebe1966053aae55d9f2b3860667b2 Author: Tomas Lestach tlestach@redhat.com Date: Wed Jun 16 15:52:30 2010 +0200
changing JobDataMap to a simple Map for storing to the DB
diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java index 62b2a05..51719de 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java @@ -15,7 +15,6 @@ package com.redhat.rhn.taskomatic;
import org.hibernate.Hibernate; -import org.quartz.JobDataMap;
import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -27,6 +26,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.Map;
public class TaskoSchedule { @@ -48,11 +48,11 @@ public class TaskoSchedule { }
public TaskoSchedule(Integer orgIdIn, TaskoBunch bunchIn, - String jobLabelIn, JobDataMap dataIn, Date activeFromIn, Date activeTillIn) { + String jobLabelIn, Map dataIn, Date activeFromIn, Date activeTillIn) { setOrgId(orgIdIn); setBunch(bunchIn); setJobLabel(jobLabelIn); - data = serializeDataMap(dataIn); + data = serializeMap(dataIn); setActive(TASKO_SCHEDULE_ACTIVE); setActiveFrom(activeFromIn); setActiveTill(activeTillIn); @@ -64,7 +64,7 @@ public class TaskoSchedule { setActive(null); }
- private byte[] serializeDataMap(JobDataMap dataMap) { + private byte[] serializeMap(Map dataMap) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); if (null != dataMap) { ObjectOutputStream out; @@ -80,7 +80,7 @@ public class TaskoSchedule { return baos.toByteArray(); }
- private JobDataMap getDataMapFromBlob(Blob blob) { + private Map getDataMapFromBlob(Blob blob) { Object obj = null;
try { @@ -97,7 +97,7 @@ public class TaskoSchedule { catch (Exception e) { // return null; } - return (JobDataMap) obj; + return (Map) obj; }
private byte[] toByteArray(Blob fromImageBlob) { @@ -127,11 +127,11 @@ public class TaskoSchedule { return baos.toByteArray(); }
- public void setDataMap(JobDataMap dataMap) { - data = serializeDataMap(dataMap); + public void setDataMap(Map dataMap) { + data = serializeMap(dataMap); }
- public JobDataMap getDataMap() { + public Map getDataMap() { return getDataMapFromBlob(getData()); }
diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java index b6b3d63..80c1205 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java @@ -17,7 +17,6 @@ package com.redhat.rhn.taskomatic; import com.redhat.rhn.taskomatic.core.SchedulerKernel;
import org.quartz.CronTrigger; -import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.SchedulerException; import org.quartz.SimpleTrigger; @@ -90,7 +89,7 @@ public class TaskoXmlRpcHandler { else { SchedulerKernel.getScheduler().unscheduleJob(jobLabel, orgId.toString()); TaskoSchedule schedule = TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); - JobDataMap map = schedule.getDataMap(); + Map map = schedule.getDataMap(); schedule.unschedule();
// TaskoFactory.save(schedule); @@ -144,7 +143,7 @@ public class TaskoXmlRpcHandler { jobDetail.getJobDataMap().put("org_id", orgId); jobDetail.getJobDataMap().put("bunch_name", bunchName); jobDetail.getJobDataMap().put("job_label", jobLabel); - TaskoSchedule schedule = new TaskoSchedule(orgId, bunch, jobLabel, jobDetail.getJobDataMap(), + TaskoSchedule schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, start, end); TaskoFactory.save(schedule); TaskoFactory.commitTransaction();
commit cb08970779b0db078b5633bae5250d245ac2f7a1 Author: Tomas Lestach tlestach@redhat.com Date: Wed Jun 16 14:47:44 2010 +0200
introducing TaskoSchedule for bunch history
diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java index 6a62706..469621b 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java @@ -77,6 +77,10 @@ public class TaskoFactory extends HibernateFactory { singleton.saveObject(taskoTemplate); }
+ public static void save(TaskoSchedule taskoSchedule) { + singleton.saveObject(taskoSchedule); + } + public static void sleep(long millis) { try { Thread.sleep(millis); @@ -149,4 +153,13 @@ public class TaskoFactory extends HibernateFactory { } return false; } + + public static TaskoSchedule lookupActiveScheduleByOrgAndLabel(Integer orgId, + String jobLabel) { + Map params = new HashMap(); + params.put("org_id", orgId); + params.put("job_label", jobLabel); + return (TaskoSchedule) singleton.lookupObjectByNamedQuery( + "TaskoSchedule.findActiveScheduleByOrgAndLabel", params); + } } diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml new file mode 100644 index 0000000..314bb1f --- /dev/null +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.hbm.xml @@ -0,0 +1,42 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE hibernate-mapping +PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" +"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd"> +<hibernate-mapping> + <class name="com.redhat.rhn.taskomatic.TaskoSchedule" + table="rhnTaskoSchedule"> + + <id name="id" type="long" column="id"> + <meta attribute="scope-set">protected</meta> + <generator class="sequence"> + <param name="sequence">RHN_TASKO_SCHEDULE_ID_SEQ</param> + </generator> + </id> + + <property name="jobLabel" type="string" column="job_label"/> + <many-to-one + name="bunch" + class="com.redhat.rhn.taskomatic.TaskoBunch" + column="bunch_id"/> + <property name="orgId" type="integer" column="org_id"/> + <property name="active" type="string" column="active"/> + <property name="activeFrom" type="date" column="active_from"/> + <property name="activeTill" type="date" column="active_till"/> + <property name="data" type="blob" column="data"/> + + <property name="created" column="created" type="timestamp" + insert="false" update="false"/> + <property name="modified" column="modified" type="timestamp" + 
insert="false" update="false"/> + + </class> + + <query name="TaskoSchedule.listByBunch"> + <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE bunch.id = :bunch_id]]> + </query> + + <query name="TaskoSchedule.findActiveScheduleByOrgAndLabel"> + <![CDATA[FROM com.redhat.rhn.taskomatic.TaskoSchedule WHERE orgId = :org_id AND active is not null AND jobLabel = :job_label]]> + </query> + +</hibernate-mapping> diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java new file mode 100644 index 0000000..62b2a05 --- /dev/null +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoSchedule.java @@ -0,0 +1,292 @@ +/** + * Copyright (c) 2008 Red Hat, Inc. + * + * This software is licensed to you under the GNU General Public License, + * version 2 (GPLv2). There is NO WARRANTY for this software, express or + * implied, including the implied warranties of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 + * along with this software; if not, see + * http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. + * + * Red Hat trademarks are not licensed under GPLv2. No permission is + * granted to use or replicate Red Hat trademarks that are incorporated + * in this software or its documentation. 
+ */ +package com.redhat.rhn.taskomatic; + +import org.hibernate.Hibernate; +import org.quartz.JobDataMap; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.sql.Blob; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + + +public class TaskoSchedule { + public static String TASKO_SCHEDULE_ACTIVE = "Y"; + + private Long id; + private String jobLabel; + private TaskoBunch bunch; + private Integer orgId; + private String active; + private Date activeFrom; + private Date activeTill; + private byte[] data; + private List<TaskoRun> runs = new ArrayList(); + private Date created; + private Date modified; + + public TaskoSchedule() { + } + + public TaskoSchedule(Integer orgIdIn, TaskoBunch bunchIn, + String jobLabelIn, JobDataMap dataIn, Date activeFromIn, Date activeTillIn) { + setOrgId(orgIdIn); + setBunch(bunchIn); + setJobLabel(jobLabelIn); + data = serializeDataMap(dataIn); + setActive(TASKO_SCHEDULE_ACTIVE); + setActiveFrom(activeFromIn); + setActiveTill(activeTillIn); + } + + + public void unschedule() { + setActiveTill(new Date()); + setActive(null); + } + + private byte[] serializeDataMap(JobDataMap dataMap) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + if (null != dataMap) { + ObjectOutputStream out; + try { + out = new ObjectOutputStream(baos); + out.writeObject(dataMap); + out.flush(); + } + catch (IOException e) { + return null; + } + } + return baos.toByteArray(); + } + + private JobDataMap getDataMapFromBlob(Blob blob) { + Object obj = null; + + try { + if (blob != null) { + InputStream binaryInput = blob.getBinaryStream(); + + if (null != binaryInput) { + ObjectInputStream in = new ObjectInputStream(binaryInput); + obj = in.readObject(); + in.close(); + } + } + } + catch (Exception e) { + // return null; + } + return (JobDataMap) obj; + } + + private 
byte[] toByteArray(Blob fromImageBlob) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + return toByteArrayImpl(fromImageBlob, baos); + } catch (Exception e) { + } + return null; + } + + private byte[] toByteArrayImpl(Blob fromImageBlob, + ByteArrayOutputStream baos) throws SQLException, IOException { + byte buf[] = new byte[4000]; + int dataSize; + InputStream is = fromImageBlob.getBinaryStream(); + + try { + while((dataSize = is.read(buf)) != -1) { + baos.write(buf, 0, dataSize); + } + } finally { + if(is != null) { + is.close(); + } + } + return baos.toByteArray(); + } + + public void setDataMap(JobDataMap dataMap) { + data = serializeDataMap(dataMap); + } + + public JobDataMap getDataMap() { + return getDataMapFromBlob(getData()); + } + + /** + * @return Returns the id. + */ + public Long getId() { + return id; + } + + /** + * @param idIn The id to set. + */ + public void setId(Long idIn) { + id = idIn; + } + + /** + * @return Returns the jobLabel. + */ + public String getJobLabel() { + return jobLabel; + } + + /** + * @param jobLabelIn The jobLabel to set. + */ + public void setJobLabel(String jobLabelIn) { + jobLabel = jobLabelIn; + } + + /** + * @return Returns the bunch. + */ + public TaskoBunch getBunch() { + return bunch; + } + + /** + * @param bunchIn The bunch to set. + */ + public void setBunch(TaskoBunch bunchIn) { + bunch = bunchIn; + } + + /** + * @return Returns the orgId. + */ + public Integer getOrgId() { + return orgId; + } + + /** + * @param orgIdIn The orgId to set. + */ + public void setOrgId(Integer orgIdIn) { + orgId = orgIdIn; + } + + /** + * @return Returns the activeFrom. + */ + public Date getActiveFrom() { + return activeFrom; + } + + /** + * @param activeFromIn The activeFrom to set. + */ + public void setActiveFrom(Date activeFromIn) { + activeFrom = activeFromIn; + } + + /** + * @return Returns the activeTill. 
+ */ + public Date getActiveTill() { + return activeTill; + } + + /** + * @param activeTillIn The activeTill to set. + */ + public void setActiveTill(Date activeTillIn) { + activeTill = activeTillIn; + } + + /** + * @return Returns the data. + */ + public Blob getData() { + return Hibernate.createBlob(data); + } + + /** + * @param dataBlobIn The params to set. + */ + public void setData(Blob dataBlobIn) { + // data = dataBlobIn.getBytes(0, (int) dataBlobIn.length() - 1); + data = toByteArray(dataBlobIn); + } + + /** + * @return Returns the runs. + */ + public List<TaskoRun> getRuns() { + return runs; + } + + /** + * @param runsIn The runs to set. + */ + public void setRuns(List<TaskoRun> runsIn) { + runs = runsIn; + } + + /** + * @return Returns the created. + */ + public Date getCreated() { + return created; + } + + /** + * @param createdIn The created to set. + */ + public void setCreated(Date createdIn) { + created = createdIn; + } + + /** + * @return Returns the modified. + */ + public Date getModified() { + return modified; + } + + /** + * @param modifiedIn The modified to set. + */ + public void setModified(Date modifiedIn) { + modified = modifiedIn; + } + + /** + * @return Returns the active. + */ + public String getActive() { + return active; + } + + /** + * @param activeIn The active to set. + */ + public void setActive(String activeIn) { + active = activeIn; + } +} diff --git a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java index ca6d88e..b6b3d63 100644 --- a/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java +++ b/java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java @@ -17,6 +17,7 @@ package com.redhat.rhn.taskomatic; import com.redhat.rhn.taskomatic.core.SchedulerKernel;
import org.quartz.CronTrigger; +import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.SchedulerException; import org.quartz.SimpleTrigger; @@ -50,7 +51,6 @@ public class TaskoXmlRpcHandler { Date startTime, Date endTime, String cronExpression, Map params) throws InvalidJobLabelException, NoSuchBunchTaskException, ParseException { try { - JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params); // create trigger CronTrigger ct = new CronTrigger(jobLabel, orgId.toString(), cronExpression); @@ -60,6 +60,8 @@ public class TaskoXmlRpcHandler { if (endTime != null) { ct.setEndTime(endTime); } + // create job + JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, startTime, endTime); // schedule job return SchedulerKernel.getScheduler().scheduleJob(jobDetail, ct); } @@ -71,7 +73,7 @@ public class TaskoXmlRpcHandler { public Date scheduleBunch(Integer orgId, String bunchName, String jobLabel, String cronExpression, Map params) throws ParseException, InvalidJobLabelException, NoSuchBunchTaskException { - return scheduleBunch(orgId, bunchName, jobLabel, null, null, cronExpression, + return scheduleBunch(orgId, bunchName, jobLabel, new Date(), null, cronExpression, params); }
@@ -87,6 +89,12 @@ public class TaskoXmlRpcHandler { } else { SchedulerKernel.getScheduler().unscheduleJob(jobLabel, orgId.toString()); + TaskoSchedule schedule = TaskoFactory.lookupActiveScheduleByOrgAndLabel(orgId, jobLabel); + JobDataMap map = schedule.getDataMap(); + schedule.unschedule(); + + // TaskoFactory.save(schedule); + TaskoFactory.commitTransaction(); } return 1; } @@ -96,12 +104,13 @@ public class TaskoXmlRpcHandler { }
public Date scheduleSingleBunchRun(Integer orgId, String bunchName, String jobLabel, - Map params) + Map params, Date start) throws InvalidJobLabelException, NoSuchBunchTaskException { try { - JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params); SimpleTrigger st = new SimpleTrigger(jobLabel, orgId.toString(), 1, 1); st.setEndTime(new Date()); + JobDetail jobDetail = createJob(bunchName, orgId, jobLabel, params, + start, st.getEndTime());
// schedule job return SchedulerKernel.getScheduler().scheduleJob(jobDetail, st); @@ -111,8 +120,14 @@ public class TaskoXmlRpcHandler { } }
+ public Date scheduleSingleBunchRun(Integer orgId, String bunchName, String jobLabel, + Map params) + throws InvalidJobLabelException, NoSuchBunchTaskException { + return scheduleSingleBunchRun(orgId, bunchName, jobLabel, params, new Date()); + } + private JobDetail createJob(String bunchName, Integer orgId, - String jobLabel, Map params) + String jobLabel, Map params, Date start, Date end) throws SchedulerException, InvalidJobLabelException, NoSuchBunchTaskException { if (!checkUniqueName(jobLabel, orgId.toString())) { throw new InvalidJobLabelException(); @@ -129,6 +144,10 @@ public class TaskoXmlRpcHandler { jobDetail.getJobDataMap().put("org_id", orgId); jobDetail.getJobDataMap().put("bunch_name", bunchName); jobDetail.getJobDataMap().put("job_label", jobLabel); + TaskoSchedule schedule = new TaskoSchedule(orgId, bunch, jobLabel, jobDetail.getJobDataMap(), + start, end); + TaskoFactory.save(schedule); + TaskoFactory.commitTransaction(); return jobDetail; }
diff --git a/schema/spacewalk/common/tables/rhnTaskoSchedule.sql b/schema/spacewalk/common/tables/rhnTaskoSchedule.sql new file mode 100644 index 0000000..fe69ab3 --- /dev/null +++ b/schema/spacewalk/common/tables/rhnTaskoSchedule.sql @@ -0,0 +1,38 @@ +-- +-- Copyright (c) 2010 Red Hat, Inc. +-- +-- This software is licensed to you under the GNU General Public License, +-- version 2 (GPLv2). There is NO WARRANTY for this software, express or +-- implied, including the implied warranties of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 +-- along with this software; if not, see +-- http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. +-- +-- Red Hat trademarks are not licensed under GPLv2. No permission is +-- granted to use or replicate Red Hat trademarks that are incorporated +-- in this software or its documentation. +-- + + +CREATE TABLE rhnTaskoSchedule +( + id NUMBER NOT NULL + CONSTRAINT rhn_tasko_schedule_id_pk PRIMARY KEY, + job_label VARCHAR2(50) NOT NULL, + bunch_id NUMBER NOT NULL + CONSTRAINT rhn_tasko_schedule_bunch_fk + REFERENCES rhnTaskoBunch (id), + org_id NUMBER, + active VARCHAR2(1), + active_from DATE, + active_till DATE, + data BLOB, + created DATE + DEFAULT (sysdate) NOT NULL, + modified DATE + DEFAULT (sysdate) NOT NULL +) +; + +CREATE SEQUENCE rhn_tasko_schedule_id_seq; +
spacewalk-commits@lists.fedorahosted.org