001/*
002 * #%L
003 * Netarchivesuite - harvester
004 * %%
005 * Copyright (C) 2005 - 2018 The Royal Danish Library, 
006 *             the National Library of France and the Austrian National Library.
007 * %%
008 * This program is free software: you can redistribute it and/or modify
009 * it under the terms of the GNU Lesser General Public License as
010 * published by the Free Software Foundation, either version 2.1 of the
011 * License, or (at your option) any later version.
012 * 
013 * This program is distributed in the hope that it will be useful,
014 * but WITHOUT ANY WARRANTY; without even the implied warranty of
015 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
016 * GNU General Lesser Public License for more details.
017 * 
018 * You should have received a copy of the GNU General Lesser Public
019 * License along with this program.  If not, see
020 * <http://www.gnu.org/licenses/lgpl-2.1.html>.
021 * #L%
022 */
023
024package dk.netarkivet.harvester.datamodel;
025
026import java.sql.Clob;
027import java.sql.Connection;
028import java.sql.PreparedStatement;
029import java.sql.ResultSet;
030import java.sql.SQLException;
031import java.util.ArrayList;
032import java.util.Calendar;
033import java.util.Date;
034import java.util.HashMap;
035import java.util.Iterator;
036import java.util.LinkedList;
037import java.util.List;
038import java.util.Map;
039
040import org.slf4j.Logger;
041import org.slf4j.LoggerFactory;
042
043import dk.netarkivet.common.CommonSettings;
044import dk.netarkivet.common.exceptions.ArgumentNotValid;
045import dk.netarkivet.common.exceptions.IOFailure;
046import dk.netarkivet.common.exceptions.IllegalState;
047import dk.netarkivet.common.exceptions.PermissionDenied;
048import dk.netarkivet.common.exceptions.UnknownID;
049import dk.netarkivet.common.utils.DBUtils;
050import dk.netarkivet.common.utils.ExceptionUtils;
051import dk.netarkivet.common.utils.Settings;
052import dk.netarkivet.common.utils.StringUtils;
053import dk.netarkivet.harvester.webinterface.HarvestStatus;
054import dk.netarkivet.harvester.webinterface.HarvestStatusQuery;
055import dk.netarkivet.harvester.webinterface.HarvestStatusQuery.SORT_ORDER;
056
057/**
058 * A database-based implementation of the JobDAO class. The statements to create the tables are now in
059 * scripts/sql/createfullhddb.sql
060 */
061public class JobDBDAO extends JobDAO {
062
063    /** The logger for this class. */
064    private static final Logger log = LoggerFactory.getLogger(JobDBDAO.class);
065
066    /**
067     * Create a new JobDAO implemented using database. This constructor also tries to upgrade the jobs and jobs_configs
068     * tables in the current database. throws and IllegalState exception, if it is impossible to make the necessary
069     * updates.
070     */
071    protected JobDBDAO() {
072        Connection connection = HarvestDBConnection.get();
073        try {
074            HarvesterDatabaseTables.checkVersion(connection, HarvesterDatabaseTables.JOBS);
075            HarvesterDatabaseTables.checkVersion(connection, HarvesterDatabaseTables.JOBCONFIGS);
076        } finally {
077            HarvestDBConnection.release(connection);
078        }
079    }
080
081    /**
082     * Creates an instance in persistent storage of the given job. 
083     * If the job doesn't have an ID (which it shouldn't at this point, one is generated for it.
084     * After that the harvestnamePrefix is set. Both existing harvestnameprefix factory-classes depends on
085     * the JobID being set before being called. 
086     *
087     * @param job a given job to add to persistent storage
088     * @throws PermissionDenied If a job already exists in persistent storage with the same id as the given job
089     * @throws IOFailure If some IOException occurs while writing the job to persistent storage
090     */
091    public synchronized void create(Job job) {
092        ArgumentNotValid.checkNotNull(job, "Job job");
093        // Check that job.getOrigHarvestDefinitionID() refers to existing harvestdefinition.
094        Long harvestId = job.getOrigHarvestDefinitionID();
095        if (!HarvestDefinitionDAO.getInstance().exists(harvestId)) {
096            throw new UnknownID("No harvestdefinition with ID=" + harvestId);
097        }
098
099        Connection connection = HarvestDBConnection.get();
100        if (job.getJobID() != null) {
101            log.warn("The jobId for the job is already set. This should probably never happen.");
102        } else {
103            job.setJobID(generateNextID(connection));
104        }
105        // Set the harvestNamePrefix. Every current implementation depends on the JobID being set before
106        // being initialized.
107        job.setDefaultHarvestNamePrefix();
108        
109        
110        if (job.getCreationDate() != null) {
111            log.warn("The creation time for the job is already set. This should probably never happen.");
112        } else {
113            job.setCreationDate(new Date());
114        }
115
116        log.debug("Creating " + job.toString());
117
118        PreparedStatement statement = null;
119        try {
120            connection.setAutoCommit(false);
121            statement = connection.prepareStatement("INSERT INTO jobs "
122                    + "(job_id, harvest_id, status, channel, forcemaxcount, "
123                    + "forcemaxbytes, forcemaxrunningtime, orderxml, " + "orderxmldoc, seedlist, "
124                    + "harvest_num, startdate, enddate, submitteddate, creationdate, "
125                    + "num_configs, edition, resubmitted_as_job, harvestname_prefix, snapshot) "
126                    + "VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," + "?, ?, ?, ?, ?, ?)");
127
128            statement.setLong(1, job.getJobID());
129            statement.setLong(2, job.getOrigHarvestDefinitionID());
130            statement.setInt(3, job.getStatus().ordinal());
131            statement.setString(4, job.getChannel());
132            statement.setLong(5, job.getForceMaxObjectsPerDomain());
133            statement.setLong(6, job.getMaxBytesPerDomain());
134            statement.setLong(7, job.getMaxJobRunningTime());
135            DBUtils.setStringMaxLength(statement, 8, job.getOrderXMLName(), Constants.MAX_NAME_SIZE, job,
136                    "order.xml name");
137            final String orderString = job.getOrderXMLdoc().getXML();
138            DBUtils.setClobMaxLength(statement, 9, orderString, Constants.MAX_ORDERXML_SIZE, job, "order.xml");
139            DBUtils.setClobMaxLength(statement, 10, job.getSeedListAsString(), Constants.MAX_COMBINED_SEED_LIST_SIZE,
140                    job, "seedlist");
141            statement.setInt(11, job.getHarvestNum());
142            DBUtils.setDateMaybeNull(statement, 12, job.getActualStart());
143            DBUtils.setDateMaybeNull(statement, 13, job.getActualStop());
144            DBUtils.setDateMaybeNull(statement, 14, job.getSubmittedDate());
145            DBUtils.setDateMaybeNull(statement, 15, job.getCreationDate());
146
147            // The size of the configuration map == number of configurations
148            statement.setInt(16, job.getDomainConfigurationMap().size());
149            long initialEdition = 1;
150            statement.setLong(17, initialEdition);
151            DBUtils.setLongMaybeNull(statement, 18, job.getResubmittedAsJob());
152            statement.setString(19, job.getHarvestFilenamePrefix());
153            statement.setBoolean(20, job.isSnapshot());
154            statement.executeUpdate();
155            createJobConfigsEntries(connection, job);
156            connection.commit();
157            job.setEdition(initialEdition);
158        } catch (SQLException e) {
159            String message = "SQL error creating job " + job + " in database" + "\n"
160                    + ExceptionUtils.getSQLExceptionCause(e);
161            log.warn(message, e);
162            throw new IOFailure(message, e);
163        } finally {
164            DBUtils.rollbackIfNeeded(connection, "create job", job);
165            HarvestDBConnection.release(connection);
166        }
167    }
168
169    /**
170     * Create the entries in the job_configs table for this job. Since some jobs have up to 10000 configs, this must be
171     * optimized. The entries are only created, if job.configsChanged is true.
172     *
173     * @param dbconnection A connection to work on
174     * @param job The job to store entries for
175     * @throws SQLException If any problems occur during creation of the new entries in the job_configs table.
176     */
177    private void createJobConfigsEntries(Connection dbconnection, Job job) throws SQLException {
178        if (job.configsChanged) {
179            PreparedStatement statement = null;
180            String tmpTable = null;
181            Long jobID = job.getJobID();
182            try {
183                statement = dbconnection.prepareStatement("DELETE FROM job_configs WHERE job_id = ?");
184                statement.setLong(1, jobID);
185                statement.executeUpdate();
186                statement.close();
187                tmpTable = DBSpecifics.getInstance().getJobConfigsTmpTable(dbconnection);
188                final Map<String, String> domainConfigurationMap = job.getDomainConfigurationMap();
189                statement = dbconnection.prepareStatement("INSERT INTO " + tmpTable
190                        + " ( domain_name, config_name ) VALUES ( ?, ?)");
191                for (Map.Entry<String, String> entry : domainConfigurationMap.entrySet()) {
192                    statement.setString(1, entry.getKey());
193                    statement.setString(2, entry.getValue());
194                    statement.executeUpdate();
195                    statement.clearParameters();
196                }
197                statement.close();
198                // Now we have a temp table with all the domains and configs
199                statement = dbconnection.prepareStatement("INSERT INTO job_configs " + "( job_id, config_id ) "
200                        + "SELECT ?, configurations.config_id " + "  FROM domains, configurations, " + tmpTable
201                        + " WHERE domains.name = " + tmpTable + ".domain_name"
202                        + "   AND domains.domain_id = configurations.domain_id"
203                        + "   AND configurations.name = " + tmpTable + ".config_name"
204                );
205                statement.setLong(1, jobID);
206                int rows = statement.executeUpdate();
207                if (rows != domainConfigurationMap.size()) {
208                    log.debug("Domain or configuration in table for {} missing: Should have {}, got {}", job,
209                            domainConfigurationMap.size(), rows);
210                }
211                dbconnection.commit();
212            } finally {
213                if (tmpTable != null) {
214                    DBSpecifics.getInstance().dropJobConfigsTmpTable(dbconnection, tmpTable);
215                }
216                job.configsChanged = false;
217            }
218        }
219    }
220
221    /**
222     * Check whether a particular job exists.
223     *
224     * @param jobID Id of the job.
225     * @return true if the job exists in any state.
226     */
227    @Override
228    public boolean exists(Long jobID) {
229        ArgumentNotValid.checkNotNull(jobID, "Long jobID");
230
231        Connection c = HarvestDBConnection.get();
232        try {
233            return exists(c, jobID);
234        } finally {
235            HarvestDBConnection.release(c);
236        }
237    }
238
239    /**
240     * Check whether a particular job exists.
241     *
242     * @param c an open connection to the harvestDatabase
243     * @param jobID Id of the job.
244     * @return true if the job exists in any state.
245     */
246    private boolean exists(Connection c, Long jobID) {
247        return 1 == DBUtils.selectLongValue(c, "SELECT COUNT(*) FROM jobs WHERE job_id = ?", jobID);
248    }
249
250    /**
251     * Generates the next id of job.
252     *
253     * @param c an open connection to the harvestDatabase
254     * @return id
255     */
256    private Long generateNextID(Connection c) {
257        // Set to zero original, can be set after admin machine breakdown,
258        // and the use this as the point of reference.
259        Long restoreId = Settings.getLong(Constants.NEXT_JOB_ID);
260
261        Long maxVal = DBUtils.selectLongValue(c, "SELECT MAX(job_id) FROM jobs");
262        if (maxVal == null) {
263            maxVal = 0L;
264        }
265        // return the largest number of the two numbers: the NEXT_JOB_ID
266        // declared in settings and max value of job_id used
267        // in the jobs table.
268        return ((restoreId > maxVal) ? restoreId : maxVal + 1L);
269    }
270
271    /**
272     * Update a Job in persistent storage.
273     *
274     * @param job The Job to update
275     * @throws ArgumentNotValid If the Job is null
276     * @throws UnknownID If the Job doesn't exist in the DAO
277     * @throws IOFailure If writing the job to persistent storage fails
278     * @throws PermissionDenied If the job has been updated behind our backs
279     */
280    @Override
281    public synchronized void update(Job job) {
282        ArgumentNotValid.checkNotNull(job, "job");
283
284        Connection connection = HarvestDBConnection.get();
285        // Not done as a transaction as it's awfully big.
286        // TODO Make sure that a failed job update does... what?
287        PreparedStatement statement = null;
288        try {
289            final Long jobID = job.getJobID();
290            if (!exists(connection, jobID)) {
291                throw new UnknownID("Job id " + jobID + " is not known in persistent storage");
292            }
293
294            connection.setAutoCommit(false);
295            statement = connection.prepareStatement("UPDATE jobs SET " + "harvest_id = ?, status = ?, channel = ?, "
296                    + "forcemaxcount = ?, forcemaxbytes = ?, " + "forcemaxrunningtime = ?," + "orderxml = ?, "
297                    + "orderxmldoc = ?, seedlist = ?, " + "harvest_num = ?, harvest_errors = ?, "
298                    + "harvest_error_details = ?, upload_errors = ?, " + "upload_error_details = ?, startdate = ?,"
299                    + "enddate = ?, num_configs = ?, edition = ?, " + "submitteddate = ?, creationdate = ?, "
300                    + "resubmitted_as_job = ?, harvestname_prefix = ?," + "snapshot = ?"
301                    + " WHERE job_id = ? AND edition = ?");
302            statement.setLong(1, job.getOrigHarvestDefinitionID());
303            statement.setInt(2, job.getStatus().ordinal());
304            statement.setString(3, job.getChannel());
305            statement.setLong(4, job.getForceMaxObjectsPerDomain());
306            statement.setLong(5, job.getMaxBytesPerDomain());
307            statement.setLong(6, job.getMaxJobRunningTime());
308            DBUtils.setStringMaxLength(statement, 7, job.getOrderXMLName(), Constants.MAX_NAME_SIZE, job,
309                    "order.xml name");
310            final String orderreader = job.getOrderXMLdoc().getXML();
311            DBUtils.setClobMaxLength(statement, 8, orderreader, Constants.MAX_ORDERXML_SIZE, job, "order.xml");
312            DBUtils.setClobMaxLength(statement, 9, job.getSeedListAsString(), Constants.MAX_COMBINED_SEED_LIST_SIZE,
313                    job, "seedlist");
314            statement.setInt(10, job.getHarvestNum()); // Not in job yet
315            DBUtils.setStringMaxLength(statement, 11, job.getHarvestErrors(), Constants.MAX_ERROR_SIZE, job,
316                    "harvest_error");
317            DBUtils.setStringMaxLength(statement, 12, job.getHarvestErrorDetails(), Constants.MAX_ERROR_DETAIL_SIZE,
318                    job, "harvest_error_details");
319            DBUtils.setStringMaxLength(statement, 13, job.getUploadErrors(), Constants.MAX_ERROR_SIZE, job,
320                    "upload_error");
321            DBUtils.setStringMaxLength(statement, 14, job.getUploadErrorDetails(), Constants.MAX_ERROR_DETAIL_SIZE,
322                    job, "upload_error_details");
323            long edition = job.getEdition() + 1;
324            DBUtils.setDateMaybeNull(statement, 15, job.getActualStart());
325            DBUtils.setDateMaybeNull(statement, 16, job.getActualStop());
326            statement.setInt(17, job.getDomainConfigurationMap().size());
327            statement.setLong(18, edition);
328            DBUtils.setDateMaybeNull(statement, 19, job.getSubmittedDate());
329            DBUtils.setDateMaybeNull(statement, 20, job.getCreationDate());
330            DBUtils.setLongMaybeNull(statement, 21, job.getResubmittedAsJob());
331            statement.setString(22, job.getHarvestFilenamePrefix());
332            statement.setBoolean(23, job.isSnapshot());
333            statement.setLong(24, job.getJobID());
334            statement.setLong(25, job.getEdition());
335            final int rows = statement.executeUpdate();
336            if (rows == 0) {
337                String message = "Edition " + job.getEdition() + " has expired, not updating";
338                log.debug(message);
339                throw new PermissionDenied(message);
340            }
341            createJobConfigsEntries(connection, job);
342            connection.commit();
343            job.setEdition(edition);
344        } catch (SQLException e) {
345            String message = "SQL error updating job " + job + " in database" + "\n"
346                    + ExceptionUtils.getSQLExceptionCause(e);
347            log.warn(message, e);
348            throw new IOFailure(message, e);
349        } finally {
350            DBUtils.rollbackIfNeeded(connection, "update job", job);
351            HarvestDBConnection.release(connection);
352        }
353    }
354
355    /**
356     * Read a single job from the job database.
357     *
358     * @param jobID ID of the job.
359     * @return A Job object
360     * @throws UnknownID if the job id does not exist.
361     * @throws IOFailure if there was some problem talking to the database.
362     */
363    @Override
364    public Job read(long jobID) {
365        Connection connection = HarvestDBConnection.get();
366        try {
367            return read(connection, jobID);
368        } finally {
369            HarvestDBConnection.release(connection);
370        }
371    }
372
    /**
     * SQL used by {@link #read(Connection, Long)} to fetch all stored columns of a single job row by job_id.
     * The column order here must match the positional result-set indexes used in that method
     * (1 = harvest_id ... 23 = snapshot); job_id itself is supplied as the single query parameter.
     */
    protected static final String GET_JOB_BY_ID_SQL = ""
                + "SELECT "
                        + "harvest_id,"
                        + "status,"
                        + "channel,"
                    + "forcemaxcount,"
                    + "forcemaxbytes,"
                    + "forcemaxrunningtime,"
                    + "orderxml,"
                    + "orderxmldoc,"
                    + "seedlist,"
                    + "harvest_num,"
                    + "harvest_errors,"
                    + "harvest_error_details,"
                    + "upload_errors,"
                    + "upload_error_details,"
                    + "startdate,"
                    + "enddate,"
                    + "submitteddate,"
                    + "creationdate,"
                    + "edition,"
                    + "resubmitted_as_job,"
                    + "continuationof,"
                    + "harvestname_prefix,"
                    + "snapshot "
            + "FROM jobs WHERE job_id = ?";
399
400    /**
401     * Read a single job from the job database.
402     *
403     * @param jobID ID of the job.
404     * @param connection an open connection to the harvestDatabase
405     * @return A Job object
406     * @throws UnknownID if the job id does not exist.
407     * @throws IOFailure if there was some problem talking to the database.
408     */
409    private synchronized Job read(Connection connection, Long jobID) {
410        if (!exists(connection, jobID)) {
411            throw new UnknownID("Job id " + jobID + " is not known in persistent storage");
412        }
413        PreparedStatement statement = null;
414        try {
415            statement = connection.prepareStatement(GET_JOB_BY_ID_SQL);
416            statement.setLong(1, jobID);
417            ResultSet result = statement.executeQuery();
418            result.next();
419            long harvestID = result.getLong(1);
420            JobStatus status = JobStatus.fromOrdinal(result.getInt(2));
421            String channel = result.getString(3);
422            long forceMaxCount = result.getLong(4);
423            long forceMaxBytes = result.getLong(5);
424            long forceMaxRunningTime = result.getLong(6);
425            String orderxml = result.getString(7);
426
427            HeritrixTemplate orderXMLdoc = null;
428
429            boolean useClobs = DBSpecifics.getInstance().supportsClob();
430            String tmpStr;
431            if (useClobs) {
432                Clob clob = result.getClob(8);
433                tmpStr = clob.getSubString(1L, (int)clob.length());
434            } else {
435                tmpStr = result.getString(8);
436            }
437            orderXMLdoc = HeritrixTemplate.getTemplateFromString(-1, tmpStr);
438            String seedlist = "";
439            if (useClobs) {
440                Clob clob = result.getClob(9);
441                seedlist = clob.getSubString(1, (int) clob.length());
442            } else {
443                seedlist = result.getString(9);
444            }
445
446            int harvestNum = result.getInt(10);
447            String harvestErrors = result.getString(11);
448            String harvestErrorDetails = result.getString(12);
449            String uploadErrors = result.getString(13);
450            String uploadErrorDetails = result.getString(14);
451            Date startdate = DBUtils.getDateMaybeNull(result, 15);
452            Date stopdate = DBUtils.getDateMaybeNull(result, 16);
453            Date submittedDate = DBUtils.getDateMaybeNull(result, 17);
454            Date creationDate = DBUtils.getDateMaybeNull(result, 18);
455            Long edition = result.getLong(19);
456            Long resubmittedAsJob = DBUtils.getLongMaybeNull(result, 20);
457            Long continuationOfJob = DBUtils.getLongMaybeNull(result, 21);
458            String harvestnamePrefix = result.getString(22);
459            boolean snapshot = result.getBoolean(23);
460            statement.close();
461            // IDs should match up in a natural join
462            // The following if-block is an attempt to fix Bug 1856, an
463            // unexplained derby deadlock, by making this statement a dirty
464            // read.
465            String domainStatement = "SELECT domains.name, configurations.name "
466                    + "FROM domains, configurations, job_configs " + "WHERE job_configs.job_id = ?"
467                    + "  AND job_configs.config_id = configurations.config_id"
468                    + "  AND domains.domain_id = configurations.domain_id";
469            if (Settings.get(CommonSettings.DB_SPECIFICS_CLASS).contains(CommonSettings.DB_IS_DERBY_IF_CONTAINS)) {
470                statement = connection.prepareStatement(domainStatement + " WITH UR");
471            } else {
472                statement = connection.prepareStatement(domainStatement);
473            }
474            statement.setLong(1, jobID);
475            result = statement.executeQuery();
476            Map<String, String> configurationMap = new HashMap<String, String>();
477            while (result.next()) {
478                String domainName = result.getString(1);
479                String configName = result.getString(2);
480                configurationMap.put(domainName, configName);
481            }
482            final Job job = new Job(harvestID, configurationMap, channel, snapshot, forceMaxCount, forceMaxBytes,
483                    forceMaxRunningTime, status, orderxml, orderXMLdoc, seedlist, harvestNum, continuationOfJob);
484            job.appendHarvestErrors(harvestErrors);
485            job.appendHarvestErrorDetails(harvestErrorDetails);
486            job.appendUploadErrors(uploadErrors);
487            job.appendUploadErrorDetails(uploadErrorDetails);
488            if (startdate != null) {
489                job.setActualStart(startdate);
490            }
491            if (stopdate != null) {
492                job.setActualStop(stopdate);
493            }
494
495            if (submittedDate != null) {
496                job.setSubmittedDate(submittedDate);
497            }
498
499            if (creationDate != null) {
500                job.setCreationDate(creationDate);
501            }
502
503            job.configsChanged = false;
504            job.setJobID(jobID);
505            job.setEdition(edition);
506
507            if (resubmittedAsJob != null) {
508                job.setResubmittedAsJob(resubmittedAsJob);
509            }
510            if (harvestnamePrefix == null) {
511                job.setDefaultHarvestNamePrefix();
512            } else {
513                job.setHarvestFilenamePrefix(harvestnamePrefix);
514            }
515            return job;
516        } catch (SQLException e) {
517            String message = "SQL error reading job " + jobID + " in database" + "\n"
518                    + ExceptionUtils.getSQLExceptionCause(e);
519            log.warn(message, e);
520            throw new IOFailure(message, e);
521        } finally  {
522                try {
523                                statement.close();
524                } catch (SQLException e) {
525                        log.warn("Exception thrown when trying to close statement", e);
526                        }
527        }
528    }
529
530    
531
532    /**
533     * Return a list of all jobs with the given status, ordered by id.
534     *
535     * @param status A given status.
536     * @return A list of all job with given status
537     */
538    @Override
539    public synchronized Iterator<Job> getAll(JobStatus status) {
540        ArgumentNotValid.checkNotNull(status, "JobStatus status");
541
542        Connection c = HarvestDBConnection.get();
543        try {
544            List<Long> idList = DBUtils.selectLongList(c, "SELECT job_id FROM jobs WHERE status = ? "
545                    + "ORDER BY job_id", status.ordinal());
546            List<Job> orderedJobs = new LinkedList<Job>();
547            for (Long jobId : idList) {
548                orderedJobs.add(read(c, jobId));
549            }
550            return orderedJobs.iterator();
551        } finally {
552            HarvestDBConnection.release(c);
553        }
554    }
555
556    /**
557     * Return a list of all job_id's representing jobs with the given status.
558     *
559     * @param status A given status.
560     * @return A list of all job_id's representing jobs with given status
561     * @throws ArgumentNotValid If the given status is not one of the five valid statuses specified in Job.
562     */
563    @Override
564    public Iterator<Long> getAllJobIds(JobStatus status) {
565        ArgumentNotValid.checkNotNull(status, "JobStatus status");
566
567        Connection c = HarvestDBConnection.get();
568        try {
569            List<Long> idList = DBUtils.selectLongList(c, "SELECT job_id FROM jobs WHERE status = ? ORDER BY job_id",
570                    status.ordinal());
571            return idList.iterator();
572        } finally {
573            HarvestDBConnection.release(c);
574        }
575    }
576
577    @Override
578    public Iterator<Long> getAllJobIds(JobStatus status, HarvestChannel channel) {
579        ArgumentNotValid.checkNotNull(status, "JobStatus status");
580        ArgumentNotValid.checkNotNull(channel, "Channel");
581
582        Connection c = HarvestDBConnection.get();
583        try {
584            List<Long> idList = DBUtils.selectLongList(c, "SELECT job_id FROM jobs WHERE status = ? AND channel = ? "
585                    + "ORDER BY job_id", status.ordinal(), channel.getName());
586            return idList.iterator();
587        } finally {
588            HarvestDBConnection.release(c);
589        }
590    }
591
592    /**
593     * Return a list of all jobs.
594     *
595     * @return A list of all jobs
596     */
597    @Override
598    public synchronized Iterator<Job> getAll() {
599        Connection c = HarvestDBConnection.get();
600        try {
601            List<Long> idList = DBUtils.selectLongList(c, "SELECT job_id FROM jobs ORDER BY job_id");
602            List<Job> orderedJobs = new LinkedList<Job>();
603            for (Long jobId : idList) {
604                orderedJobs.add(read(c, jobId));
605            }
606            return orderedJobs.iterator();
607        } finally {
608            HarvestDBConnection.release(c);
609        }
610    }
611
612    /**
613     * Return a list of all job_ids .
614     *
615     * @return A list of all job_ids
616     */
617    public Iterator<Long> getAllJobIds() {
618        Connection c = HarvestDBConnection.get();
619        try {
620            List<Long> idList = DBUtils.selectLongList(c, "SELECT job_id FROM jobs ORDER BY job_id");
621            return idList.iterator();
622        } finally {
623            HarvestDBConnection.release(c);
624        }
625    }
626
627    /**
628     * Get a list of small and immediately usable status information for given status and in given order. Is used by
629     * getStatusInfo functions in order to share code (and SQL)
630     *  TODO should also include given harvest run
631     *
632     * @param connection an open connection to the harvestDatabase
633     * @param jobStatusCode code for jobstatus, -1 if all
634     * @param asc true if it is to be sorted in ascending order, false if it is to be sorted in descending order
635     * @return List of JobStatusInfo objects for all jobs.
636     * @throws ArgumentNotValid for invalid jobStatusCode
637     * @throws IOFailure on trouble getting data from database
638     */
639    private List<JobStatusInfo> getStatusInfo(Connection connection, int jobStatusCode, boolean asc) {
640        // Validate jobStatusCode
641        // Throws ArgumentNotValid if it is an invalid job status
642        if (jobStatusCode != JobStatus.ALL_STATUS_CODE) {
643            JobStatus.fromOrdinal(jobStatusCode);
644        }
645
646        StringBuffer sqlBuffer = new StringBuffer("SELECT jobs.job_id, status, jobs.harvest_id, "
647                + "harvestdefinitions.name, harvest_num, harvest_errors,"
648                + " upload_errors, orderxml, num_configs, submitteddate, creationdate, startdate,"
649                + " enddate, resubmitted_as_job" + " FROM jobs, harvestdefinitions "
650                + " WHERE harvestdefinitions.harvest_id = jobs.harvest_id ");
651
652        if (jobStatusCode != JobStatus.ALL_STATUS_CODE) {
653            sqlBuffer.append(" AND status = ").append(jobStatusCode);
654        }
655        sqlBuffer.append(" ORDER BY jobs.job_id");
656        if (!asc) { // Assume default is ASCENDING
657            sqlBuffer.append(" " + HarvestStatusQuery.SORT_ORDER.DESC.name());
658        }
659
660        PreparedStatement statement = null;
661        try {
662            statement = connection.prepareStatement(sqlBuffer.toString());
663            ResultSet res = statement.executeQuery();
664            return makeJobStatusInfoListFromResultset(res);
665        } catch (SQLException e) {
666            String message = "SQL error asking for job status list in database" + "\n"
667                    + ExceptionUtils.getSQLExceptionCause(e);
668            log.warn(message, e);
669            throw new IOFailure(message, e);
670        }
671    }
672
673    /**
674     * Get a list of small and immediately usable status information for given job status.
675     *
676     * @param status The status asked for.
677     * @return List of JobStatusInfo objects for all jobs with given job status
678     * @throws ArgumentNotValid for invalid jobStatus
679     * @throws IOFailure on trouble getting data from database
680     */
681    @Override
682    public List<JobStatusInfo> getStatusInfo(JobStatus status) {
683        ArgumentNotValid.checkNotNull(status, "status");
684        Connection c = HarvestDBConnection.get();
685        try {
686            return getStatusInfo(c, status.ordinal(), true);
687        } finally {
688            HarvestDBConnection.release(c);
689        }
690    }
691
692    /**
693     * Get a list of small and immediately usable status information for given job status and in given job id order.
694     *
695     * @param query the user query
696     * @throws IOFailure on trouble getting data from database
697     */
698    @Override
699    public HarvestStatus getStatusInfo(HarvestStatusQuery query) {
700        log.debug("Constructing Harveststatus based on given query.");
701        PreparedStatement s = null;
702        Connection c = HarvestDBConnection.get();
703
704        try {
705            // Obtain total count without limit
706            // NB this will be a performance bottleneck if the table gets big
707            long totalRowsCount = 0;
708
709            final HarvestStatusQueryBuilder harvestStatusQueryBuilder = buildSqlQuery(query, true);
710            log.debug("Unpopulated query is {}.", harvestStatusQueryBuilder);
711            s = harvestStatusQueryBuilder.getPopulatedStatement(c);
712            log.debug("Query is {}.", s);
713            ResultSet res = s.executeQuery();
714            res.next();
715            totalRowsCount = res.getLong(1);
716
717            s = buildSqlQuery(query, false).getPopulatedStatement(c);
718            res = s.executeQuery();
719            List<JobStatusInfo> jobs = makeJobStatusInfoListFromResultset(res);
720
721            log.debug("Harveststatus constructed based on given query.");
722            return new HarvestStatus(totalRowsCount, jobs);
723        } catch (SQLException e) {
724            String message = "SQL error asking for job status list in database" + "\n"
725                    + ExceptionUtils.getSQLExceptionCause(e);
726            log.warn(message, e);
727            throw new IOFailure(message, e);
728        } finally {
729            HarvestDBConnection.release(c);
730        }
731    }
732
733    /**
734     * Calculate all jobIDs to use for duplication reduction.
735     * <p>
736     * More precisely, this method calculates the following: If the job ID corresponds to a partial harvest, all jobIDs
737     * from the previous scheduled harvest are returned, or the empty list if this harvest hasn't been scheduled before.
738     * <p>
739     * If the job ID corresponds to a full harvest, the entire chain of harvests this is based on is returned, and all
740     * jobIDs from the previous chain of full harvests is returned.
741     * <p>
742     * This method is synchronized to avoid DB locking.
743     *
744     * @param jobID The job ID to find duplicate reduction data for.
745     * @return A list of job IDs (possibly empty) of potential previous harvests of this job, to use for duplicate
746     * reduction.
747     * @throws UnknownID if job ID is unknown
748     * @throws IOFailure on trouble querying database
749     */
750    public synchronized List<Long> getJobIDsForDuplicateReduction(long jobID) throws UnknownID {
751
752        Connection connection = HarvestDBConnection.get();
753        List<Long> jobs;
754        // Select the previous harvest from the same harvestdefinition
755        try {
756            if (!exists(connection, jobID)) {
757                throw new UnknownID("Job ID '" + jobID + "' does not exist in database");
758            }
759
760            jobs = DBUtils.selectLongList(connection, "SELECT jobs.job_id FROM jobs, jobs AS original_jobs"
761                    + " WHERE original_jobs.job_id=?" + " AND jobs.harvest_id=original_jobs.harvest_id"
762                    + " AND jobs.harvest_num=original_jobs.harvest_num-1", jobID);
763            List<Long> harvestDefinitions = getPreviousFullHarvests(connection, jobID);
764            if (!harvestDefinitions.isEmpty()) {
765                // Select all jobs from a given list of harvest definitions
766                jobs.addAll(DBUtils.selectLongList(connection, "SELECT jobs.job_id FROM jobs"
767                        + " WHERE jobs.harvest_id IN (" + StringUtils.conjoin(",", harvestDefinitions) + ")"));
768            }
769            return jobs;
770        } finally {
771            HarvestDBConnection.release(connection);
772        }
773    }
774
775    /**
776     * Find the harvest definition ids from this chain of snapshot harvests and the previous chain of snapshot harvests.
777     *
778     * @param connection an open connection to the harvestDatabase
779     * @param jobID The ID of the job
780     * @return A (possibly empty) list of harvest definition ids
781     */
782    private List<Long> getPreviousFullHarvests(Connection connection, long jobID) {
783        List<Long> results = new ArrayList<Long>();
784        // Find the jobs' fullharvest id
785        Long thisHarvest = DBUtils.selectFirstLongValueIfAny(connection,
786                "SELECT jobs.harvest_id FROM jobs, fullharvests WHERE jobs.harvest_id=fullharvests.harvest_id"
787                        + " AND jobs.job_id=?", jobID);
788
789        if (thisHarvest == null) {
790            // Not a full harvest
791            return results;
792        }
793
794        // Follow the chain of orginating IDs back
795        for (Long originatingHarvest = thisHarvest; originatingHarvest != null; originatingHarvest = DBUtils
796                .selectFirstLongValueIfAny(connection, "SELECT previoushd FROM fullharvests"
797                        + " WHERE fullharvests.harvest_id=?", originatingHarvest)) {
798            if (!originatingHarvest.equals(thisHarvest)) {
799                results.add(originatingHarvest);
800            }
801        }
802
803        // Find the first harvest in the chain
804        Long firstHarvest = thisHarvest;
805        if (!results.isEmpty()) {
806            firstHarvest = results.get(results.size() - 1);
807        }
808
809        // Find the last harvest in the chain before
810        Long olderHarvest = DBUtils.selectFirstLongValueIfAny(connection, "SELECT fullharvests.harvest_id"
811                + " FROM fullharvests, harvestdefinitions," + "  harvestdefinitions AS currenthd"
812                + " WHERE currenthd.harvest_id=?" + " AND fullharvests.harvest_id" + "=harvestdefinitions.harvest_id"
813                + " AND harvestdefinitions.submitted<currenthd.submitted" + " ORDER BY harvestdefinitions.submitted "
814                + HarvestStatusQuery.SORT_ORDER.DESC.name(), firstHarvest);
815        // Follow the chain of originating IDs back
816        // FIXME Rewrite this loop!
817        for (Long originatingHarvest = olderHarvest; originatingHarvest != null; originatingHarvest = DBUtils
818                .selectFirstLongValueIfAny(connection,
819                        "SELECT previoushd FROM fullharvests WHERE fullharvests.harvest_id=?", originatingHarvest)) {
820            results.add(originatingHarvest);
821        }
822        return results;
823    }
824
825    /**
826     * Returns the number of existing jobs.
827     *
828     * @return Number of jobs in 'jobs' table
829     */
830    @Override
831    public int getCountJobs() {
832        Connection c = HarvestDBConnection.get();
833        try {
834            return DBUtils.selectIntValue(c, "SELECT COUNT(*) FROM jobs");
835        } finally {
836            HarvestDBConnection.release(c);
837        }
838    }
839
840    @Override
841    public synchronized long rescheduleJob(long oldJobID) {
842        Connection connection = HarvestDBConnection.get();
843        long newJobID = generateNextID(connection);
844        PreparedStatement statement = null;
845        try {
846            statement = connection.prepareStatement("SELECT status FROM jobs WHERE job_id = ?");
847            statement.setLong(1, oldJobID);
848            ResultSet res = statement.executeQuery();
849            if (!res.next()) {
850                throw new UnknownID("No job with ID " + oldJobID + " to resubmit");
851            }
852            final JobStatus currentJobStatus = JobStatus.fromOrdinal(res.getInt(1));
853            if (currentJobStatus != JobStatus.SUBMITTED && currentJobStatus != JobStatus.FAILED) {
854                throw new IllegalState("Job " + oldJobID + " is not ready to be copied.");
855            }
856
857            // Now do the actual copying.
858            // Note that startdate, and enddate is not copied.
859            // They must be null in JobStatus NEW.
860            statement.close();
861            connection.setAutoCommit(false);
862
863            statement = connection.prepareStatement("INSERT INTO jobs "
864                    + " (job_id, harvest_id, channel, snapshot, status," + "  forcemaxcount, forcemaxbytes, orderxml,"
865                    + "  orderxmldoc, seedlist, harvest_num," + "  num_configs, edition, continuationof) "
866                    + " SELECT ?, harvest_id, channel, snapshot, ?," + "  forcemaxcount, forcemaxbytes, orderxml,"
867                    + "  orderxmldoc, seedlist, harvest_num," + " num_configs, ?, ?" + " FROM jobs WHERE job_id = ?");
868            statement.setLong(1, newJobID);
869            statement.setLong(2, JobStatus.NEW.ordinal());
870            long initialEdition = 1;
871            statement.setLong(3, initialEdition);
872            Long continuationOf = null;
873            // In case we want to try to continue using the Heritrix recover log
874            if (currentJobStatus == JobStatus.FAILED) {
875                continuationOf = oldJobID;
876            }
877            DBUtils.setLongMaybeNull(statement, 4, continuationOf);
878
879            statement.setLong(5, oldJobID);
880
881            statement.executeUpdate();
882            statement.close();
883            statement = connection.prepareStatement("INSERT INTO job_configs "
884                    + "( job_id, config_id ) SELECT ?, config_id FROM job_configs WHERE job_id = ?");
885            statement.setLong(1, newJobID);
886            statement.setLong(2, oldJobID);
887            statement.executeUpdate();
888            statement.close();
889            statement = connection.prepareStatement("UPDATE jobs SET status = ?, resubmitted_as_job = ? "
890                    + " WHERE job_id = ?");
891            statement.setInt(1, JobStatus.RESUBMITTED.ordinal());
892            statement.setLong(2, newJobID);
893            statement.setLong(3, oldJobID);
894            statement.executeUpdate();
895            connection.commit();
896        } catch (SQLException e) {
897            String message = "SQL error rescheduling job #" + oldJobID + " in database" + "\n"
898                    + ExceptionUtils.getSQLExceptionCause(e);
899            log.warn(message, e);
900            throw new IOFailure(message, e);
901        } finally {
902            DBUtils.closeStatementIfOpen(statement);
903            DBUtils.rollbackIfNeeded(connection, "resubmit job", oldJobID);
904            HarvestDBConnection.release(connection);
905        }
906        log.info("Job #{} successfully as job #{}", oldJobID, newJobID);
907        return newJobID;
908    }
909
910    /**
911     * Helper-method that constructs a list of JobStatusInfo objects from the given resultset.
912     *
913     * @param res a given resultset
914     * @return a list of JobStatusInfo objects
915     * @throws SQLException If any problem with accessing the data in the ResultSet
916     */
917    private List<JobStatusInfo> makeJobStatusInfoListFromResultset(ResultSet res) throws SQLException {
918        List<JobStatusInfo> joblist = new ArrayList<JobStatusInfo>();
919        while (res.next()) {
920            final long jobId = res.getLong(1);
921            joblist.add(new JobStatusInfo(jobId, JobStatus.fromOrdinal(res.getInt(2)), res.getLong(3),
922                    res.getString(4), res.getInt(5), res.getString(6), res.getString(7), res.getString(8), res
923                            .getInt(9), DBUtils.getDateMaybeNull(res, 10), DBUtils.getDateMaybeNull(res, 11), DBUtils
924                            .getDateMaybeNull(res, 12), DBUtils.getDateMaybeNull(res, 13), DBUtils.getLongMaybeNull(
925                            res, 14)));
926        }
927        return joblist;
928    }
929
930    /**
931     * Internal utility class to build a SQL query using a prepared statement.
932     */
933    private class HarvestStatusQueryBuilder {
934        /** The sql string. */
935        private String sqlString;
936        // from java.sql.Types
937        /** list of parameter classes. */
938        private LinkedList<Class<?>> paramClasses = new LinkedList<Class<?>>();
939        /** list of parameter values. */
940        private LinkedList<Object> paramValues = new LinkedList<Object>();
941
942        /**
943         * Constructor.
944         */
945        HarvestStatusQueryBuilder() {
946            super();
947        }
948
949        @Override public String toString() {
950            return sqlString;
951        }
952
953        /**
954         * @param sqlString the sqlString to set
955         */
956        void setSqlString(String sqlString) {
957            this.sqlString = sqlString;
958        }
959
960        /**
961         * Add the given class and given value to the list of paramClasses and paramValues respectively.
962         *
963         * @param clazz a given class.
964         * @param value a given value
965         */
966        void addParameter(Class<?> clazz, Object value) {
967            paramClasses.addLast(clazz);
968            paramValues.addLast(value);
969        }
970
971        /**
972         * Prepare a statement for the database that uses the sqlString, and the paramClasses, and paramValues. Only
973         * Integer, Long, String, and Date values accepted.
974         *
975         * @param c an Open connection to the harvestDatabase
976         * @return the prepared statement
977         * @throws SQLException If unable to prepare the statement
978         * @throws UnknownID If one of the parameter classes is unexpected
979         */
980        PreparedStatement getPopulatedStatement(Connection c) throws SQLException {
981            PreparedStatement stm = c.prepareStatement(sqlString);
982
983            Iterator<Class<?>> pClasses = paramClasses.iterator();
984            Iterator<Object> pValues = paramValues.iterator();
985            int pIndex = 0;
986            while (pClasses.hasNext()) {
987                pIndex++;
988                Class<?> pClass = pClasses.next();
989                Object pVal = pValues.next();
990
991                if (Integer.class.equals(pClass)) {
992                    stm.setInt(pIndex, (Integer) pVal);
993                } else if (Long.class.equals(pClass)) {
994                    stm.setLong(pIndex, (Long) pVal);
995                } else if (String.class.equals(pClass)) {
996                    stm.setString(pIndex, (String) pVal);
997                } else if (java.sql.Date.class.equals(pClass)) {
998                    stm.setDate(pIndex, (java.sql.Date) pVal);
999                } else {
1000                    throw new UnknownID("Unexpected parameter class " + pClass);
1001                }
1002            }
1003            return stm;
1004        }
1005
1006
1007    }
1008
1009    /**
1010     * Builds a query to fetch jobs according to selection criteria.
1011     *
1012     * @param query the selection criteria.
1013     * @param count build a count query instead of selecting columns.
1014     * @return the proper SQL query.
1015     */
1016    private HarvestStatusQueryBuilder buildSqlQuery(HarvestStatusQuery query, boolean count) {
1017        HarvestStatusQueryBuilder sq = new HarvestStatusQueryBuilder();
1018        StringBuffer sql = new StringBuffer("SELECT");
1019        if (count) {
1020            sql.append(" count(*)");
1021        } else {
1022            sql.append(" jobs.job_id, status, jobs.harvest_id,");
1023            sql.append(" harvestdefinitions.name, harvest_num,");
1024            sql.append(" harvest_errors, upload_errors, orderxml,");
1025            sql.append(" num_configs, submitteddate, creationdate, startdate, enddate,");
1026            sql.append(" resubmitted_as_job");
1027        }
1028        sql.append(" FROM jobs, harvestdefinitions ");
1029        sql.append(" WHERE harvestdefinitions.harvest_id = jobs.harvest_id ");
1030
1031        JobStatus[] jobStatuses = query.getSelectedJobStatuses();
1032        if (jobStatuses.length > 0) {
1033            if (jobStatuses.length == 1) {
1034                int statusOrdinal = jobStatuses[0].ordinal();
1035                sql.append(" AND status = ?");
1036                sq.addParameter(Integer.class, statusOrdinal);
1037            } else {
1038                sql.append("AND (status = ");
1039                sql.append(jobStatuses[0].ordinal());
1040                for (int i = 1; i < jobStatuses.length; i++) {
1041                    sql.append(" OR status = ?");
1042                    sq.addParameter(Integer.class, jobStatuses[i].ordinal());
1043                }
1044                sql.append(")");
1045            }
1046        }
1047
1048        String harvestName = query.getHarvestName();
1049        boolean caseSensitiveHarvestName = query.getCaseSensitiveHarvestName();
1050        if (!harvestName.isEmpty()) {
1051            if (caseSensitiveHarvestName) {
1052                if (harvestName.indexOf(HarvestStatusQuery.HARVEST_NAME_WILDCARD) == -1) {
1053                    // No wildcard, exact match
1054                    sql.append(" AND harvestdefinitions.name = ?");
1055                    sq.addParameter(String.class, harvestName);
1056                } else {
1057                    String harvestNamePattern = harvestName.replaceAll("\\*", "%");
1058                    sql.append(" AND harvestdefinitions.name LIKE ?");
1059                    sq.addParameter(String.class, harvestNamePattern);
1060                }
1061            } else {
1062                harvestName = harvestName.toUpperCase();
1063                if (harvestName.indexOf(HarvestStatusQuery.HARVEST_NAME_WILDCARD) == -1) {
1064                    // No wildcard, exact match
1065                    sql.append(" AND UPPER(harvestdefinitions.name) = ?");
1066                    sq.addParameter(String.class, harvestName);
1067                } else {
1068                    String harvestNamePattern = harvestName.replaceAll("\\*", "%");
1069                    sql.append(" AND UPPER(harvestdefinitions.name)  LIKE ?");
1070                    sq.addParameter(String.class, harvestNamePattern);
1071                }
1072            }
1073        }
1074
1075        Long harvestRun = query.getHarvestRunNumber();
1076        if (harvestRun != null) {
1077            sql.append(" AND jobs.harvest_num = ?");
1078            log.debug("Added harvest run number param {}.", harvestRun);
1079            sq.addParameter(Long.class, harvestRun);
1080        }
1081
1082        Long harvestId = query.getHarvestId();
1083        if (harvestId != null) {
1084            sql.append(" AND harvestdefinitions.harvest_id = ?");
1085            log.debug("Added harvest_id param {}.", harvestId);
1086            sq.addParameter(Long.class, harvestId);
1087        }
1088
1089        long startDate = query.getStartDate();
1090        if (startDate != HarvestStatusQuery.DATE_NONE) {
1091            sql.append(" AND startdate >= ?");
1092            sq.addParameter(java.sql.Date.class, new java.sql.Date(startDate));
1093        }
1094
1095        long endDate = query.getEndDate();
1096        if (endDate != HarvestStatusQuery.DATE_NONE) {
1097            sql.append(" AND enddate < ?");
1098            // end date must be set +1 day at midnight
1099            Calendar cal = Calendar.getInstance();
1100            cal.setTimeInMillis(endDate);
1101            cal.roll(Calendar.DAY_OF_YEAR, 1);
1102            sq.addParameter(java.sql.Date.class, new java.sql.Date(cal.getTimeInMillis()));
1103        }
1104        
1105        List<String> jobIdRangeIds = query.getPartialJobIdRangeAsList(false);
1106        List<String> jobIdRanges = query.getPartialJobIdRangeAsList(true);
1107        if (!jobIdRangeIds.isEmpty()) {
1108                String comma = "";
1109                sql.append(" AND (jobs.job_id IN (");
1110                for(String id : jobIdRangeIds) {
1111                        //id
1112                        sql.append(comma);
1113                        comma = ",";
1114                        sql.append("?");
1115                sq.addParameter(Long.class, Long.parseLong(id));
1116                }
1117                sql.append(") ");
1118
1119                
1120        }
1121        if(!jobIdRanges.isEmpty()) {
1122                String andOr = "AND";
1123                if (!jobIdRangeIds.isEmpty()) {
1124                        andOr = "OR";
1125                }
1126                
1127                for(String range : jobIdRanges) {
1128                        String[] r = range.split("-");
1129                        sql.append(" "+andOr+" jobs.job_id BETWEEN ? AND ? ");
1130                sq.addParameter(Long.class, Long.parseLong(r[0]));
1131                sq.addParameter(Long.class, Long.parseLong(r[1]));
1132                }
1133        }
1134        if (!jobIdRangeIds.isEmpty()) {
1135                sql.append(")");
1136        }
1137
1138        if (!count) {
1139            sql.append(" ORDER BY jobs.job_id");
1140            if (!query.isSortAscending()) {
1141                sql.append(" " + SORT_ORDER.DESC.name());
1142            } else {
1143                sql.append(" " + SORT_ORDER.ASC.name());
1144            }
1145
1146            long pagesize = query.getPageSize();
1147            if (pagesize != HarvestStatusQuery.PAGE_SIZE_NONE) {
1148                sql.append(" "
1149                        + DBSpecifics.getInstance().getOrderByLimitAndOffsetSubClause(pagesize,
1150                                (query.getStartPageIndex() - 1) * pagesize));
1151            }
1152        }
1153
1154        sq.setSqlString(sql.toString());
1155        return sq;
1156    }
1157
1158    /**
1159     * Get Jobstatus for the job with the given id.
1160     *
1161     * @param jobID A given Jobid
1162     * @return the Jobstatus for the job with the given id.
1163     * @throws UnknownID if no job exists with id jobID
1164     */
1165    public JobStatus getJobStatus(Long jobID) {
1166        ArgumentNotValid.checkNotNull(jobID, "Long jobID");
1167
1168        Connection c = HarvestDBConnection.get();
1169        try {
1170            Integer statusAsInteger = DBUtils.selectIntValue(c, "SELECT status FROM jobs WHERE job_id = ?", jobID);
1171            if (statusAsInteger == null) {
1172                throw new UnknownID("No known job with id=" + jobID);
1173            }
1174            return JobStatus.fromOrdinal(statusAsInteger);
1175        } finally {
1176            HarvestDBConnection.release(c);
1177        }
1178    }
1179
1180    /**
1181     * Get a list of AliasInfo objects for all the domains included in the job.
1182     *
1183     * @return a list of AliasInfo objects for all the domains included in the job.
1184     */
1185    public List<AliasInfo> getJobAliasInfo(Job job) {
1186        List<AliasInfo> aliases = new ArrayList<AliasInfo>();
1187        DomainDAO dao = DomainDAO.getInstance();
1188        for (String domain : job.getDomainConfigurationMap().keySet()) {
1189            aliases.addAll(dao.getAliases(domain));
1190        }
1191        return aliases;
1192    }
1193}