/*
 * #%L
 * Netarchivesuite - harvester
 * %%
 * Copyright (C) 2005 - 2014 The Royal Danish Library, the Danish State and University Library,
 * the National Library of France and the Austrian National Library.
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation, either version 2.1 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Lesser Public License for more details.
 *
 * You should have received a copy of the GNU General Lesser Public
 * License along with this program. If not, see
 * <http://www.gnu.org/licenses/lgpl-2.1.html>.
 * #L%
 */

package dk.netarkivet.harvester.webinterface;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.PageContext;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.exceptions.ForwardedToErrorPage;
import dk.netarkivet.common.utils.I18n;
import dk.netarkivet.common.webinterface.HTMLUtils;
import dk.netarkivet.harvester.datamodel.GlobalCrawlerTrapList;
import dk.netarkivet.harvester.datamodel.GlobalCrawlerTrapListDAO;
import dk.netarkivet.harvester.datamodel.GlobalCrawlerTrapListDBDAO;

/**
 * This action processes multipart uploads to either create or update a global crawler trap list. The choice of which
 * action to carry out is determined by whether the TRAP_ID is specified in the request.
 */
@SuppressWarnings({"unchecked"})
public class TrapCreateOrUpdateAction extends TrapAction {

    /**
     * The logger for this class.
     */
    protected static final Logger log = LoggerFactory.getLogger(TrapCreateOrUpdateAction.class);

    /**
     * Parses the multipart request and either updates an existing global crawler trap list (when the
     * {@link Constants#TRAP_ID} form field is present) or creates a new one from the uploaded file.
     * <p>
     * On any parse, upload, or validation failure the user is forwarded to an error page and a
     * {@link ForwardedToErrorPage} is thrown.
     *
     * @param context the page context of the originating request.
     * @param i18n the internationalization bundle used for error messages.
     * @throws ForwardedToErrorPage if the multipart post cannot be parsed, the uploaded file cannot be read,
     *         the trap regular expressions are invalid, or a trap list with the given name already exists.
     */
    @Override
    protected void doAction(PageContext context, I18n i18n) {
        String name = null;
        boolean isActive = true;
        String description = null;
        InputStream is = null;
        String id = null;
        String fileName = null;
        HttpServletRequest request = (HttpServletRequest) context.getRequest();
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        List<FileItem> items = null;
        try {
            items = upload.parseRequest(request);
        } catch (FileUploadException e) {
            HTMLUtils.forwardWithErrorMessage(context, i18n, e, "errormsg;crawlertrap.upload.error");
            throw new ForwardedToErrorPage("Error on multipart post", e);
        }
        // Collect the simple form fields and the (single) uploaded file from the multipart items.
        for (FileItem item : items) {
            if (item.isFormField()) {
                if (item.getFieldName().equals(Constants.TRAP_NAME)) {
                    name = item.getString();
                } else if (item.getFieldName().equals(Constants.TRAP_IS_ACTIVE)) {
                    isActive = Boolean.parseBoolean(item.getString());
                } else if (item.getFieldName().equals(Constants.TRAP_DESCRIPTION)) {
                    description = item.getString();
                } else if (item.getFieldName().equals(Constants.TRAP_ID)) {
                    id = item.getString();
                }
            } else {
                try {
                    fileName = item.getName();
                    is = item.getInputStream();
                } catch (IOException e) {
                    HTMLUtils.forwardWithErrorMessage(context, i18n, e, "errormsg;crawlertrap.upload.error");
                    throw new ForwardedToErrorPage("Error on multipart post", e);
                }
            }
        }
        try {
            GlobalCrawlerTrapListDAO dao = GlobalCrawlerTrapListDBDAO.getInstance();
            if (id != null) { // update existing trap list
                // NOTE(review): a non-numeric TRAP_ID will surface as an unhandled NumberFormatException;
                // preserved as-is to avoid changing the exception contract — confirm upstream handling.
                int trapId = Integer.parseInt(id);
                GlobalCrawlerTrapList trap = dao.read(trapId);
                trap.setActive(isActive);
                trap.setDescription(description);
                trap.setName(name);
                // Only replace the trap expressions if a replacement file was actually uploaded.
                if (fileName != null && !fileName.isEmpty()) {
                    log.debug("Reading global crawler trap list from '{}'", fileName);
                    try {
                        trap.setTrapsFromInputStream(is, name);
                    } catch (ArgumentNotValid argumentNotValid) {
                        HTMLUtils.forwardWithErrorMessage(context, i18n, "errormsg;crawlertrap.regexp.error");
                        // Preserve the cause so the stack trace of the invalid-regexp failure is not lost.
                        throw new ForwardedToErrorPage(argumentNotValid.getMessage(), argumentNotValid);
                    }
                }
                dao.update(trap);
            } else { // create new trap list
                log.debug("Reading global crawler trap list from '{}'", fileName);
                GlobalCrawlerTrapList trap = new GlobalCrawlerTrapList(is, name, description, isActive);
                if (!dao.exists(name)) {
                    dao.create(trap);
                } else {
                    // crawlertrap named like this already exists.
                    HTMLUtils.forwardWithErrorMessage(context, i18n, "errormsg;crawlertrap.0.exists.error", name);
                    throw new ForwardedToErrorPage("Crawlertrap with name '" + name + "' exists already");
                }
            }
        } finally {
            // The uploaded file's stream was never closed in the original code; close it here on every
            // exit path (including the ForwardedToErrorPage throws) to avoid leaking the resource.
            if (is != null) {
                try {
                    is.close();
                } catch (IOException e) {
                    log.debug("Failed to close uploaded trap list stream", e);
                }
            }
        }
    }
}