/*
 * #%L
 * Netarchivesuite - harvester
 * %%
 * Copyright (C) 2005 - 2014 The Royal Danish Library, the Danish State and University Library,
 * the National Library of France and the Austrian National Library.
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation, either version 2.1 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Lesser Public License for more details.
 *
 * You should have received a copy of the GNU General Lesser Public
 * License along with this program. If not, see
 * <http://www.gnu.org/licenses/lgpl-2.1.html>.
 * #L%
 */
package dk.netarkivet.harvester.heritrix3.controller;

import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.netarchivesuite.heritrix3wrapper.EngineResult;
import org.netarchivesuite.heritrix3wrapper.Heritrix3Wrapper;
import org.netarchivesuite.heritrix3wrapper.Heritrix3Wrapper.CrawlControllerState;
import org.netarchivesuite.heritrix3wrapper.JobResult;
import org.netarchivesuite.heritrix3wrapper.ResultStatus;
import org.netarchivesuite.heritrix3wrapper.ScriptResult;
import org.netarchivesuite.heritrix3wrapper.jaxb.JobShort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dk.netarkivet.common.exceptions.HeritrixLaunchException;
import dk.netarkivet.common.exceptions.IOFailure;
import dk.netarkivet.common.exceptions.IllegalState;
import dk.netarkivet.common.exceptions.NotImplementedException;
import dk.netarkivet.common.utils.SystemUtils;
import dk.netarkivet.harvester.harvesting.distribute.CrawlProgressMessage;
import dk.netarkivet.harvester.harvesting.distribute.CrawlProgressMessage.CrawlServiceInfo;
import dk.netarkivet.harvester.harvesting.distribute.CrawlProgressMessage.CrawlServiceJobInfo;
import dk.netarkivet.harvester.harvesting.distribute.CrawlProgressMessage.CrawlStatus;
import dk.netarkivet.harvester.harvesting.frontier.FullFrontierReport;
import dk.netarkivet.harvester.heritrix3.Heritrix3Files;

/**
 * This implementation of the HeritrixController interface starts Heritrix3 as a separate process and communicates
 * with it over its REST interface. Each instance executes exactly one process that runs exactly one crawl job.
 */
public class HeritrixController extends AbstractRestHeritrixController {

    /** The logger for this class. */
    private static final Logger log = LoggerFactory.getLogger(HeritrixController.class);

    /**
     * The name that Heritrix3 gives to the job we ask it to create.
     */
    private String jobName;

    /** The header line (legend) for the statistics report. */
    private String progressStatisticsLegend;

    /** Number of times we will poll the H3 engine before giving up. */
    private int heritrix3EngineRetries;
    /** Pause between two engine polls, in milliseconds. */
    private int heritrix3EngineIntervalBetweenRetriesInMillis;

    /** Base URL of the H3 REST API, set by {@link #initialize()}. */
    private String baseUrl;

    /**
     * Create a HeritrixController object.
     *
     * @param files Files that are used to set up Heritrix3.
     * @param jobName The name Heritrix3 will use for the job.
     */
    public HeritrixController(Heritrix3Files files, String jobName) {
        super(files);
        this.jobName = jobName;
    }

    /**
     * Initialize the REST connection to the Heritrix3 engine, waiting until the engine responds.
     *
     * @throws IOFailure If Heritrix3 dies before initialisation, or we encounter any problems during the
     *             initialisation.
     * @see IHeritrixController#initialize()
     */
    @Override
    public void initialize() {

        /////////////////////////////////////////////////////
        // Initialize H3 wrapper
        /////////////////////////////////////////////////////

        // TODO these numbers could be settings
        this.heritrix3EngineRetries = 60;
        this.heritrix3EngineIntervalBetweenRetriesInMillis = 1000; // 1 second

        h3wrapper = Heritrix3Wrapper.getInstance(getHostName(), getGuiPort(),
                null, null, getHeritrixAdminName(), getHeritrixAdminPassword());

        EngineResult engineResult;
        try {
            engineResult = h3wrapper.waitForEngineReady(heritrix3EngineRetries,
                    heritrix3EngineIntervalBetweenRetriesInMillis);
        } catch (Throwable e) {
            // Preserve the cause instead of only appending its toString() and printing the stacktrace to stderr.
            throw new IOFailure("Heritrix3 engine not started: " + e, e);
        }

        if (engineResult != null) {
            if (engineResult.status != ResultStatus.OK) {
                String errMsg = "Heritrix3 wrapper could not connect to Heritrix3. Resultstate = " + engineResult.status;
                log.error(errMsg, engineResult.t);
                throw new IOFailure(errMsg, engineResult.t);
            }
        } else {
            throw new IOFailure("Unexpected error: Heritrix3 wrapper returned null engine result.");
        }

        baseUrl = "https://" + getHostName() + ":" + Integer.toString(getGuiPort()) + "/engine/";

        // POST: Heritrix3 is up and running and responds nicely
        log.info("Heritrix3 REST interface up and running");
    }

    /**
     * Create, build, launch and unpause the crawl job in the running Heritrix3 engine.
     *
     * @throws IOFailure if the job files cannot be copied to the H3 job dir, or communication with H3 fails.
     * @throws HeritrixLaunchException if the job cannot be built or launched.
     * @throws IllegalState if H3 reports a state this code does not know how to handle.
     */
    @Override
    public void requestCrawlStart() {
        // Create a new job
        File cxmlFile = getHeritrixFiles().getOrderFile();
        File seedsFile = getHeritrixFiles().getSeedsFile();
        JobResult jobResult;

        File jobDir = files.getHeritrixJobDir();
        if (!jobDir.exists()) {
            jobDir.mkdirs();
        }

        try {
            log.info("Copying the crawler-beans.cxml file and seeds.txt to the heritrix3 jobdir '{}'", jobDir);
            Heritrix3Wrapper.copyFile(cxmlFile, jobDir);
            Heritrix3Wrapper.copyFileAs(seedsFile, jobDir, "seeds.txt");
        } catch (IOException e) {
            throw new IOFailure("Problem occurred during the copying of files to our heritrix job", e);
        }

        // PRE: h3 is running, and the job files copied to their final location
        EngineResult engineResult = null;
        try {
            engineResult = h3wrapper.rescanJobDirectory();
            log.info("H3 jobs available for building: {}", knownJobsToString(engineResult));

            log.trace("Result of rescanJobDirectory() operation: " + new String(engineResult.response, "UTF-8"));

            jobResult = h3wrapper.buildJobConfiguration(jobName);
            log.trace("Result of buildJobConfiguration() operation: " + new String(jobResult.response, "UTF-8"));
            if (jobResult.status == ResultStatus.OK) {
                if (jobResult.job.statusDescription.equalsIgnoreCase("Unbuilt")) {
                    throw new HeritrixLaunchException("The job '" + jobName + "' could not be built. Last loglines are "
                            + StringUtils.join(jobResult.job.jobLogTail, "\n"));
                } else if (jobResult.job.statusDescription.equalsIgnoreCase("Ready")) {
                    log.info("Job {} built successfully", jobName);
                } else if (jobResult.job.statusDescription.startsWith("Finished")) { // Created but not launchable
                    // FIX: the original message had only one '{}' placeholder for two arguments, so the
                    // log tail was silently dropped by SLF4J.
                    log.warn("The job {} seems unlaunchable. Tearing down the job. Last loglines are {}", jobName,
                            StringUtils.join(jobResult.job.jobLogTail, "\n"));
                    jobResult = h3wrapper.teardownJob(jobName);
                    log.trace("Result of teardown() operation: " + new String(jobResult.response, "UTF-8"));
                    throw new HeritrixLaunchException("Job '" + jobName + "' failed to launch: "
                            + StringUtils.join(jobResult.job.jobLogTail, "\n"));
                } else {
                    throw new IllegalState("Unknown job.statusdescription returned from h3: "
                            + jobResult.job.statusDescription);
                }
            } else {
                throw new IllegalState("Unknown ResultStatus returned from h3wrapper: "
                        + ResultStatus.toString(jobResult.status));
            }

            // Wait up to 60 x 1000 ms for the job to reach NASCENT before launching it.
            jobResult = h3wrapper.waitForJobState(jobName, CrawlControllerState.NASCENT, 60, 1000);
            if (jobResult.job.crawlControllerState.equalsIgnoreCase(CrawlControllerState.NASCENT.toString())) {
                log.info("The H3 job {} in now in state CrawlControllerState.NASCENT", jobName);
            } else {
                log.warn("The job state is now {}. Should have been CrawlControllerState.NASCENT",
                        jobResult.job.crawlControllerState);
            }
            jobResult = h3wrapper.launchJob(jobName);

            log.trace("Result of launchJob() operation: " + new String(jobResult.response, "UTF-8"));
            // A launched job starts PAUSED; wait for that state before unpausing.
            jobResult = h3wrapper.waitForJobState(jobName, CrawlControllerState.PAUSED, 60, 1000);
            if (jobResult.job.crawlControllerState.equalsIgnoreCase(CrawlControllerState.PAUSED.toString())) {
                log.info("The H3 job {} in now in state CrawlControllerState.PAUSED", jobName);
            } else {
                log.warn("The job state is now {}. Should have been CrawlControllerState.PAUSED",
                        jobResult.job.crawlControllerState);
            }

            jobResult = h3wrapper.unpauseJob(jobName);
            log.info("The job {} is now in state {}", jobName, jobResult.job.crawlControllerState);

            // POST: h3 is running, and the job with name 'jobName' is running
            log.trace("h3-State after unpausing job '{}': {}", jobName, new String(jobResult.response, "UTF-8"));

        } catch (UnsupportedEncodingException e) {
            throw new IOFailure("Unexpected error during communication with heritrix3", e);
        }
    }

    /**
     * Ask Heritrix3 to terminate the running job, if it is still running.
     *
     * @param reason A human-readable reason for the termination, used for logging only.
     */
    @Override
    public void requestCrawlStop(String reason) {
        log.info("Terminating job {}. Reason: {}", this.jobName, reason);
        JobResult jobResult = h3wrapper.job(jobName);
        if (jobResult != null) {
            if (jobResult.job.isRunning) {
                JobResult result = h3wrapper.terminateJob(this.jobName);
                if (!result.job.isRunning) {
                    log.warn("Job '{}' terminated", this.jobName);
                } else {
                    log.warn("Job '{}' not terminated correctly", this.jobName);
                }
            } else {
                log.warn("Job '{}' not terminated, as it was not running", this.jobName);
            }
        } else {
            log.warn("Job '{}' has maybe already been terminated and/or heritrix3 is no longer running", this.jobName);
        }
    }

    /**
     * Stop the Heritrix3 process belonging to this job: first ask it politely to exit over REST, then
     * fall back to pkill'ing the process if it is still alive.
     */
    @Override
    public void stopHeritrix() {
        log.debug("Stopping Heritrix3");
        try {
            // Check if a heritrix3 process still exists for this jobName
            ProcessBuilder processBuilder = new ProcessBuilder("pgrep", "-f", jobName);
            log.info("Looking up heritrix3 process with. " + processBuilder.command());
            if (processBuilder.start().waitFor() == 0) { // Yes, ask heritrix3 to shutdown, ignoring any jobs named jobName
                log.info("Heritrix running, requesting heritrix to stop and ignoring running job '{}'", jobName);
                h3wrapper.exitJavaProcess(Arrays.asList(new String[] {jobName}));
            } else {
                log.info("Heritrix3 process not running for job '{}'", jobName);
            }
            // Check again
            if (processBuilder.start().waitFor() == 0) { // The process is still alive, kill it
                log.info("Heritrix3 process still running, pkill'ing heritrix3 ");
                ProcessBuilder killerProcessBuilder = new ProcessBuilder("pkill", "-f", jobName);
                // FIX: the original called Process.exitValue() immediately after start(); exitValue() throws
                // IllegalThreadStateException if the process has not yet terminated. waitFor() blocks until
                // pkill finishes and returns its exit code.
                int pkillExitValue = killerProcessBuilder.start().waitFor();
                if (pkillExitValue != 0) {
                    log.warn("Non zero exit value ({}) when trying to pkill Heritrix3.", pkillExitValue);
                } else {
                    log.info("Heritrix process terminated successfully with the pkill command {}",
                            killerProcessBuilder.command());
                }
            } else {
                log.info("Heritrix3 stopped successfully.");
            }
        } catch (IOException e) {
            log.warn("Exception while trying to shutdown heritrix", e);
        } catch (InterruptedException e) {
            log.debug("stopHeritrix call interupted", e);
            // Restore the interrupt flag so callers further up the stack can react to it.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Return the URL for monitoring this instance.
     *
     * @return the URL for monitoring this instance.
     */
    public String getHeritrixConsoleURL() {
        return "https://" + SystemUtils.getLocalHostName() + ":" + getGuiPort() + "/engine";
    }

    /**
     * Cleanup after an Heritrix3 process. This entails sending the shutdown command to the Heritrix3 process, and
     * killing it forcefully, if it is still alive after waiting the period of time specified by the
     * CommonSettings.PROCESS_TIMEOUT setting.
     *
     * @param crawlDir the crawldir to cleanup (argument is currently not used)
     * @throws IOFailure if the H3 job cannot be torn down or the engine cannot be shut down.
     * @see IHeritrixController#cleanup()
     */
    public void cleanup(File crawlDir) {
        JobResult jobResult;
        try {
            // Check engine status
            EngineResult engineResult = h3wrapper.rescanJobDirectory();
            if (engineResult != null) {
                List<JobShort> knownJobs = engineResult.engine.jobs;
                if (knownJobs.size() != 1) {
                    log.warn("Should be one job but there is {} jobs: {}", knownJobs.size(),
                            knownJobsToString(engineResult));
                }
            } else {
                log.warn("Unresponsive Heritrix3 engine. Let's try continuing the cleanup anyway");
            }

            // Check that job jobName still exists in H3 engine
            jobResult = h3wrapper.job(jobName);
            if (jobResult != null) {
                if (jobResult.status == ResultStatus.OK && jobResult.job.crawlControllerState != null) {
                    String TEARDOWN = "teardown";
                    if (jobResult.job.availableActions.contains(TEARDOWN)) {
                        log.info("Tearing down h3 job {}", jobName);
                        jobResult = h3wrapper.teardownJob(jobName);
                    } else {
                        String errMsg = "Tearing down h3 job '" + jobName + "' not possible. Not one of the actions available: "
                                + StringUtils.join(jobResult.job.availableActions, ",");
                        log.warn(errMsg);
                        throw new IOFailure(errMsg);
                    }
                }
            } else {
                throw new IOFailure("Unexpected error during communication with heritrix3 during cleanup");
            }
            // Wait for the state: jobResult.job.crawlControllerState == null (but we only try ten times with
            // 1 second interval)
            jobResult = h3wrapper.waitForJobState(jobName, null, 10, heritrix3EngineIntervalBetweenRetriesInMillis);
            // Did we get the expected state?
            if (jobResult.job.crawlControllerState != null) {
                log.warn("The job {} is still lurking about. Shutdown heritrix3 and ignore the job", jobName);
                List<String> jobsToIgnore = new ArrayList<String>();
                jobsToIgnore.add(jobName);
                EngineResult result = h3wrapper.exitJavaProcess(jobsToIgnore);
                // A successful shutdown manifests as the engine going offline or dropping the connection.
                if (result == null || (result.status != ResultStatus.RESPONSE_EXCEPTION
                        && result.status != ResultStatus.OFFLINE)) {
                    throw new IOFailure("Heritrix3 could not be shut down");
                }
            } else {
                EngineResult result = h3wrapper.exitJavaProcess(null);
                if (result == null || (result.status != ResultStatus.RESPONSE_EXCEPTION
                        && result.status != ResultStatus.OFFLINE)) {
                    throw new IOFailure("Heritrix3 could not be shut down");
                }
            }
        } catch (Throwable e) {
            throw new IOFailure("Unknown error during communication with heritrix3", e);
        }
    }

    /**
     * Render the names of the jobs known to the H3 engine as a space-separated string.
     *
     * @param engineResult result of an engine call; may be null
     * @return the job names, or null if the engine result carries no job list
     */
    private String knownJobsToString(EngineResult engineResult) {
        if (engineResult == null || engineResult.engine == null || engineResult.engine.jobs == null) {
            return null;
        }
        // Use a StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder result = new StringBuilder();
        for (JobShort js : engineResult.engine.jobs) {
            result.append(js.shortName).append(' ');
        }
        return result.toString();
    }

    /**
     * Return the URL for monitoring this instance.
     *
     * @return the URL for monitoring this instance.
     */
    public String getAdminInterfaceUrl() {
        return "https://" + SystemUtils.getLocalHostName() + ":" + getGuiPort() + "/engine";
    }

    /**
     * Gets a message that stores the information summarizing the crawl progress.
     *
     * @return a message that stores the information summarizing the crawl progress.
     */
    public CrawlProgressMessage getCrawlProgress() {
        Heritrix3Files files = getHeritrixFiles();
        CrawlProgressMessage cpm = new CrawlProgressMessage(files.getHarvestID(), files.getJobID(),
                progressStatisticsLegend);
        cpm.setHostUrl(getHeritrixConsoleURL());
        JobResult jobResult = h3wrapper.job(jobName);
        if (jobResult != null) {
            getCrawlServiceAttributes(cpm, jobResult);
        } else {
            log.warn("Unable to get Heritrix3 status for job '{}'", jobName);
        }
        if (cpm.crawlIsFinished()) {
            cpm.setStatus(CrawlStatus.CRAWLING_FINISHED);
            // No need to go further, CrawlService.Job bean does not exist
            return cpm;
        }
        if (jobResult != null) {
            fetchCrawlServiceJobAttributes(cpm, jobResult);
        } else {
            log.warn("Unable to get JobAttributes for job '{}'", jobName);
        }
        return cpm;
    }

    /**
     * Retrieve the values of the crawl service attributes and add them to the CrawlProgressMessage being put together.
     *
     * @param cpm the crawlProgress message being prepared
     * @param job the job status as returned by the H3 REST API
     */
    private void getCrawlServiceAttributes(CrawlProgressMessage cpm, JobResult job) {
        // TODO check job state??
        CrawlServiceInfo hStatus = cpm.getHeritrixStatus();
        hStatus.setAlertCount(job.job.alertCount); // info taken from job information
        hStatus.setCurrentJob(this.jobName); // Note:Information not taken from H3
        hStatus.setCrawling(job.job.isRunning); // info taken from job information
    }

    /**
     * Retrieve the values of the crawl service job attributes and add them to the CrawlProgressMessage being put
     * together.
     *
     * @param cpm the crawlProgress message being prepared
     * @param job the job status as returned by the H3 REST API
     */
    private void fetchCrawlServiceJobAttributes(CrawlProgressMessage cpm, JobResult job) {
        CrawlServiceJobInfo jStatus = cpm.getJobStatus();

        /* Sample H3 progress-statistics line for reference:
         * timestamp discovered queued downloaded doc/s(avg) KB/s(avg) dl-failures busy-thread mem-use-KB
         * heap-size-KB congestion max-depth avg-depth
         * 2015-04-29T12:42:54Z 774 573 185 0.9(2.31) 49(41) 16 2 61249 270848 1 456 114
         */

        long totalUriCount = job.job.uriTotalsReport.totalUriCount;
        long downloadedUriCount = job.job.uriTotalsReport.downloadedUriCount;
        Double progress;
        if (totalUriCount == 0) {
            progress = 0.0;
        } else {
            progress = downloadedUriCount * 100.0 / totalUriCount;
        }
        jStatus.setProgressStatistics(progress + "%");

        // NOTE: the report field is in milliseconds; it is converted to seconds below.
        Long elapsedSeconds = job.job.elapsedReport.elapsedMilliseconds;
        if (elapsedSeconds == null) {
            elapsedSeconds = -1L;
        } else {
            elapsedSeconds = elapsedSeconds / 1000L;
        }
        jStatus.setElapsedSeconds(elapsedSeconds);

        // -1 is used as sentinel for "value not reported by H3" throughout the following.
        Double currentProcessedDocsPerSec = job.job.rateReport.currentDocsPerSecond;
        if (currentProcessedDocsPerSec == null) {
            currentProcessedDocsPerSec = -1.0; // autobox instead of the deprecated new Double(...)
        }
        jStatus.setCurrentProcessedDocsPerSec(currentProcessedDocsPerSec);

        Double processedDocsPerSec = job.job.rateReport.averageDocsPerSecond;
        if (processedDocsPerSec == null) {
            processedDocsPerSec = -1.0;
        }
        jStatus.setProcessedDocsPerSec(processedDocsPerSec);

        Integer kbRate = job.job.rateReport.currentKiBPerSec;
        if (kbRate == null) {
            kbRate = -1;
        }
        jStatus.setCurrentProcessedKBPerSec(kbRate);

        Integer processedKBPerSec = job.job.rateReport.averageKiBPerSec;
        if (processedKBPerSec == null) {
            processedKBPerSec = -1;
        }
        jStatus.setProcessedKBPerSec(processedKBPerSec);

        Long discoveredFilesCount = job.job.uriTotalsReport.totalUriCount;
        if (discoveredFilesCount == null) {
            discoveredFilesCount = -1L;
        }
        jStatus.setDiscoveredFilesCount(discoveredFilesCount);

        Long downloadedCount = job.job.uriTotalsReport.downloadedUriCount;
        if (downloadedCount == null) {
            downloadedCount = -1L;
        }
        jStatus.setDownloadedFilesCount(downloadedCount);

        /* Sample frontier short report for reference:
         * 27 queues: 5 active (1 in-process; 0 ready; 4 snoozed); 0 inactive; 0 retired; 22 exhausted
         */
        String frontierShortReport = String.format(
                "%d queues: %d active (%d in-process; %d ready; %d snoozed); %d inactive; %d retired; %d exhausted",
                job.job.frontierReport.totalQueues,
                job.job.frontierReport.activeQueues,
                job.job.frontierReport.inProcessQueues,
                job.job.frontierReport.readyQueues,
                job.job.frontierReport.snoozedQueues,
                job.job.frontierReport.inactiveQueues,
                job.job.frontierReport.retiredQueues,
                job.job.frontierReport.exhaustedQueues);
        jStatus.setFrontierShortReport(frontierShortReport);

        // FIX: the original dereferenced crawlControllerState unconditionally after guarding it for null,
        // risking a NullPointerException. Use the null-safe fallback value for the PAUSE check too.
        String newStatus = "?";
        String controllerState = job.job.crawlControllerState;
        if (controllerState != null) {
            newStatus = controllerState;
        }
        jStatus.setStatus(newStatus);
        if (newStatus.contains("PAUSE")) { // FIXME this is not correct
            cpm.setStatus(CrawlStatus.CRAWLER_PAUSED);
        } else {
            cpm.setStatus(CrawlStatus.CRAWLER_ACTIVE);
        }

        Integer currentActiveToecount = job.job.loadReport.busyThreads;
        if (currentActiveToecount == null) {
            currentActiveToecount = -1;
        }
        jStatus.setActiveToeCount(currentActiveToecount);
    }

    /**
     * Generates a full frontier report from H3 using an REST call (Groovy script)
     *
     * @return a Full frontier report.
     * @throws IOFailure if the script request entity cannot be built.
     */
    public FullFrontierReport getFullFrontierReport() {
        // construct script request to send
        HttpPost postRequest = new HttpPost(baseUrl + "job/" + jobName + "/script");
        StringEntity postEntity;
        try {
            postEntity = new StringEntity("engine=beanshell&script="
                    + dk.netarkivet.harvester.heritrix3.Constants.FRONTIER_REPORT_GROOVY_SCRIPT);
        } catch (UnsupportedEncodingException e) {
            // FIX: the original only printed the stacktrace and then dereferenced the null entity (NPE).
            // Fail fast with the cause attached instead. (Cannot happen for the default charset.)
            throw new IOFailure("Unable to build the script request entity", e);
        }
        postEntity.setContentType("application/x-www-form-urlencoded");
        postRequest.addHeader("Accept", "application/xml");
        postRequest.setEntity(postEntity);
        ScriptResult result = h3wrapper.scriptResult(postRequest);
        return FullFrontierReport.parseContentsAsXML(
                jobName, result.response, dk.netarkivet.harvester.heritrix3.Constants.XML_RAWOUT_TAG);
    }

    @Override
    public boolean atFinish() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public void beginCrawlStop() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public void cleanup() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public boolean crawlIsEnded() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public int getActiveToeCount() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public int getCurrentProcessedKBPerSec() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public String getHarvestInformation() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public String getProgressStats() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public long getQueuedUriCount() {
        throw new NotImplementedException("Not implemented");
    }

    @Override
    public boolean isPaused() {
        throw new NotImplementedException("Not implemented");
    }

}