diff --git a/doc/release-notes/4813-allow-duplicate-files.md b/doc/release-notes/4813-allow-duplicate-files.md new file mode 100644 index 00000000000..a11af77c72b --- /dev/null +++ b/doc/release-notes/4813-allow-duplicate-files.md @@ -0,0 +1 @@ +Duplicate files (files with identical checksums) are now allowed within a dataset. Installations may want to notify their users that this is available and point them to the rules described in the Dataset + File Management section of the User Guide. \ No newline at end of file diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index f9ce457f5c0..e377d3d9855 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -73,7 +73,6 @@ You can upload files to a dataset while first creating that dataset. You can als Certain file types in Dataverse are supported by additional functionality, which can include downloading in different formats, previews, file-level metadata preservation, file-level data citation with UNFs, and exploration through data visualization and analysis. See the :ref:`File Handling ` section of this page for more information. - HTTP Upload ----------- @@ -147,6 +146,20 @@ File Handling Certain file types in Dataverse are supported by additional functionality, which can include downloading in different formats, previews, file-level metadata preservation, file-level data citation; and exploration through data visualization and analysis. See the sections below for information about special functionality for specific file types. +.. _duplicate-files: + +Duplicate Files +=============== + +Beginning with Dataverse 5.0, the way Dataverse handles duplicate files (filenames and checksums) is changing to be more flexible. Specifically: + +- Files with the same checksum can be included in a dataset, even if the files are in the same directory. +- Files with the same filename can be included in a dataset as long as the files are in different directories. 
+- If a user uploads a file to a directory where a file already exists with that directory/filename combination, Dataverse will adjust the file path and name by adding "-1" or "-2" as applicable. This change will be visible in the list of files being uploaded. +- If the directory or name of an existing or newly uploaded file is edited in such a way that would create a directory/filename combination that already exists, Dataverse will display an error. +- If a user attempts to replace a file with another file that has the same checksum, an error message will be displayed and the file cannot be replaced. +- If a user attempts to replace a file with a file that has the same checksum as a different file in the dataset, a warning will be displayed. + File Previews ------------- @@ -268,7 +281,7 @@ Variable Metadata can be edited directly through an API call (:ref:`API Guide: E File Path --------- -The File Path metadata field is Dataverse's way of representing a file's location in a folder structure. When a user uploads a .zip file containing a folder structure, Dataverse automatically fills in the File Path information for each file contained in the .zip. If a user downloads the full dataset or a selection of files from it, they will receive a folder structure with each file positioned according to its File Path. +The File Path metadata field is Dataverse's way of representing a file's location in a folder structure. When a user uploads a .zip file containing a folder structure, Dataverse automatically fills in the File Path information for each file contained in the .zip. If a user downloads the full dataset or a selection of files from it, they will receive a folder structure with each file positioned according to its File Path. Only one file with a given path and name may exist in a dataset. Editing a file to give it the same path and name as another file already existing in the dataset will cause an error. 
A file's File Path can be manually added or edited on the Edit Files page. Changing a file's File Path will change its location in the folder structure that is created when a user downloads the full dataset or a selection of files from it. diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 6218629549c..560048db9ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -254,6 +254,37 @@ public boolean isDeleted() { public void setDeleted(boolean deleted) { this.deleted = deleted; } + + /* + For use during file upload so that the user may delete + files that have already been uploaded to the current dataset version + */ + + @Transient + private boolean markedAsDuplicate; + + public boolean isMarkedAsDuplicate() { + return markedAsDuplicate; + } + + public void setMarkedAsDuplicate(boolean markedAsDuplicate) { + this.markedAsDuplicate = markedAsDuplicate; + } + + @Transient + private String duplicateFilename; + + public String getDuplicateFilename() { + return duplicateFilename; + } + + public void setDuplicateFilename(String duplicateFilename) { + this.duplicateFilename = duplicateFilename; + } + + + + /** * All constructors should use this method diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index f8663087219..5ca7e4df502 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -84,7 +84,6 @@ public class EditDatafilesPage implements java.io.Serializable { private static final Logger logger = Logger.getLogger(EditDatafilesPage.class.getCanonicalName()); - private boolean uploadWarningMessageIsNotAnError; public enum FileEditMode { @@ -142,6 +141,8 @@ public enum FileEditMode { private Long versionId; private List newFiles = new 
ArrayList<>(); private List uploadedFiles = new ArrayList<>(); + private List uploadedInThisProcess = new ArrayList<>(); + private DatasetVersion workingVersion; private DatasetVersion clone; private String dropBoxSelection = ""; @@ -869,10 +870,25 @@ private String getBundleString(String msgName){ public void deleteFilesCompleted(){ } - - public void deleteFiles() { + + public void deleteFiles(){ + deleteFiles(this.selectedFiles); + } + + public void deleteDuplicateFiles(){ + List filesForDelete = new ArrayList(); + for(DataFile df : newFiles ){ + if (df.isMarkedAsDuplicate()){ + filesForDelete.add(df.getFileMetadata()); + } + } + deleteFiles(filesForDelete); + } + + + private void deleteFiles(List filesForDelete) { logger.fine("entering bulk file delete (EditDataFilesPage)"); - if (isFileReplaceOperation()){ + if (isFileReplaceOperation()) { try { deleteReplacementFile(); } catch (FileReplaceException ex) { @@ -880,10 +896,17 @@ public void deleteFiles() { } return; } - + + /* + If selected files are empty it means that we are dealing + with a duplicate files delete situation + so we are adding the marked as dup files as selected + and moving on accordingly. + */ + String fileNames = null; - for (FileMetadata fmd : this.getSelectedFiles()) { - // collect the names of the files, + for (FileMetadata fmd : filesForDelete) { + // collect the names of the files, // to show in the success message: if (fileNames == null) { fileNames = fmd.getLabel(); @@ -892,29 +915,29 @@ public void deleteFiles() { } } - for (FileMetadata markedForDelete : this.getSelectedFiles()) { - logger.fine("delete requested on file "+markedForDelete.getLabel()); - logger.fine("file metadata id: "+markedForDelete.getId()); - logger.fine("datafile id: "+markedForDelete.getDataFile().getId()); - logger.fine("page is in edit mode "+mode.name()); - - // has this filemetadata been saved already? 
(or is it a brand new - // filemetadata, created as part of a brand new version, created when - // the user clicked 'delete', that hasn't been saved in the db yet?) - if (markedForDelete.getId() != null) { - logger.fine("this is a filemetadata from an existing draft version"); + for (FileMetadata markedForDelete : filesForDelete) { + logger.fine("delete requested on file " + markedForDelete.getLabel()); + logger.fine("file metadata id: " + markedForDelete.getId()); + logger.fine("datafile id: " + markedForDelete.getDataFile().getId()); + logger.fine("page is in edit mode " + mode.name()); + + // has this filemetadata been saved already? (or is it a brand new + // filemetadata, created as part of a brand new version, created when + // the user clicked 'delete', that hasn't been saved in the db yet?) + if (markedForDelete.getId() != null) { + logger.fine("this is a filemetadata from an existing draft version"); // so all we remove is the file from the fileMetadatas (from the // file metadatas attached to the editVersion, and from the // display list of file metadatas that are being edited) // and let the delete be handled in the command (by adding it to the // filesToBeDeleted list): - dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); - fileMetadatas.remove(markedForDelete); - filesToBeDeleted.add(markedForDelete); - } else { - logger.fine("this is a brand-new (unsaved) filemetadata"); - // ok, this is a brand-new DRAFT version. + dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); + fileMetadatas.remove(markedForDelete); + filesToBeDeleted.add(markedForDelete); + } else { + logger.fine("this is a brand-new (unsaved) filemetadata"); + // ok, this is a brand-new DRAFT version. // if (mode != FileEditMode.CREATE) { // If the bean is in the 'CREATE' mode, the page is using @@ -922,38 +945,48 @@ public void deleteFiles() { // so there's no need to delete this meta from the local // fileMetadatas list. 
(but doing both just adds a no-op and won't cause an // error) - - // 1. delete the filemetadata from the local display list: + // 1. delete the filemetadata from the local display list: removeFileMetadataFromList(fileMetadatas, markedForDelete); - // 2. delete the filemetadata from the version: + // 2. delete the filemetadata from the version: removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); - } - + } if (markedForDelete.getDataFile().getId() == null) { logger.fine("this is a brand new file."); // the file was just added during this step, so in addition to // removing it from the fileMetadatas lists (above), we also remove it from // the newFiles list and the dataset's files, so it never gets saved. - + removeDataFileFromList(dataset.getFiles(), markedForDelete.getDataFile()); removeDataFileFromList(newFiles, markedForDelete.getDataFile()); FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService); // Also remove checksum from the list of newly uploaded checksums (perhaps odd // to delete and then try uploading the same file again, but it seems like it // should be allowed/the checksum list is part of the state to clean-up - checksumMapNew.remove(markedForDelete.getDataFile().getChecksumValue()); - - } - } + if(checksumMapNew != null && markedForDelete.getDataFile().getChecksumValue() != null) + checksumMapNew.remove(markedForDelete.getDataFile().getChecksumValue()); + + } + } + if (fileNames != null) { - String successMessage = getBundleString("file.deleted.success"); + String successMessage; + if (mode == FileEditMode.UPLOAD) { + if (fileNames.contains(", ")) { + successMessage = getBundleString("file.deleted.upload.success.multiple"); + } else { + successMessage = getBundleString("file.deleted.upload.success.single"); + } + } else { + successMessage = getBundleString("file.deleted.success"); + successMessage = successMessage.replace("{0}", fileNames); + } logger.fine(successMessage); - successMessage 
= successMessage.replace("{0}", fileNames); JsfHelper.addFlashMessage(successMessage); - } - } - + } + } + + private void removeFileMetadataFromList(List fmds, FileMetadata fmToDelete) { Iterator fmit = fmds.iterator(); while (fmit.hasNext()) { @@ -1032,7 +1065,8 @@ public String saveReplacementFile() throws FileReplaceException{ } public String save() { - Collection duplicates = IngestUtil.findDuplicateFilenames(workingVersion); + + Collection duplicates = IngestUtil.findDuplicateFilenames(workingVersion, newFiles); if (!duplicates.isEmpty()) { JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.message.filesFailure"), BundleUtil.getStringFromBundle("dataset.message.editMetadata.duplicateFilenames", new ArrayList<>(duplicates))); return null; @@ -1737,6 +1771,7 @@ public void uploadFinished() { newFiles.add(dataFile); } + if(uploadInProgress.isTrue()) { uploadedFiles.clear(); @@ -1745,44 +1780,104 @@ public void uploadFinished() { // refresh the warning message below the upload component, if exists: if (uploadComponentId != null) { if (uploadWarningMessage != null) { - if (uploadWarningMessageIsNotAnError) { - FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), uploadWarningMessage)); - } else { - FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), uploadWarningMessage)); + if (existingFilesWithDupeContent != null || newlyUploadedFilesWithDupeContent != null) { + setWarningMessageForAlreadyExistsPopUp(uploadWarningMessage); + setHeaderForAlreadyExistsPopUp(); + setLabelForDeleteFilesPopup(); + PrimeFaces.current().ajax().update("datasetForm:fileAlreadyExistsPopup"); + PrimeFaces.current().executeScript("PF('fileAlreadyExistsPopup').show();"); } + + + //taking this out for now based on design 
feedback 7/8/2020 + // FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), uploadWarningMessage)); + } else if (uploadSuccessMessage != null) { FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.file.uploadWorked"), uploadSuccessMessage)); } } - if(isFileReplaceOperation() && fileReplacePageHelper.hasContentTypeWarning()){ + if(isFileReplaceOperation() && fileReplacePageHelper.wasPhase1Successful() && fileReplacePageHelper.hasContentTypeWarning()){ //RequestContext context = RequestContext.getCurrentInstance(); //RequestContext.getCurrentInstance().update("datasetForm:fileTypeDifferentPopup"); PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); //context.execute("PF('fileTypeDifferentPopup').show();"); PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); } - + + if(isFileReplaceOperation() && fileReplacePageHelper.getAddReplaceFileHelper().isDuplicateFileErrorFound() ) { + FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, fileReplacePageHelper.getAddReplaceFileHelper().getDuplicateFileErrorString(), fileReplacePageHelper.getAddReplaceFileHelper().getDuplicateFileErrorString())); + } + + if (isFileReplaceOperation() && !fileReplacePageHelper.getAddReplaceFileHelper().isDuplicateFileErrorFound() && fileReplacePageHelper.getAddReplaceFileHelper().isDuplicateFileWarningFound()) { + setWarningMessageForAlreadyExistsPopUp(fileReplacePageHelper.getAddReplaceFileHelper().getDuplicateFileWarningString()); + setHeaderForAlreadyExistsPopUp(); + setLabelForDeleteFilesPopup(); + PrimeFaces.current().ajax().update("datasetForm:fileAlreadyExistsPopup"); + PrimeFaces.current().executeScript("PF('fileAlreadyExistsPopup').show();"); + } // We clear the following 
duplicate warning labels, because we want to // only inform the user of the duplicates dropped in the current upload // attempt - for ex., one batch of drag-and-dropped files, or a single // file uploaded through the file chooser. - dupeFileNamesExisting = null; - dupeFileNamesNew = null; + newlyUploadedFilesWithDupeContent = null; + existingFilesWithDupeContent = null; multipleDupesExisting = false; multipleDupesNew = false; uploadWarningMessage = null; uploadSuccessMessage = null; } - private String warningMessageForPopUp; + private String warningMessageForFileTypeDifferentPopUp; - public String getWarningMessageForPopUp() { - return warningMessageForPopUp; + public String getWarningMessageForFileTypeDifferentPopUp() { + return warningMessageForFileTypeDifferentPopUp; } - public void setWarningMessageForPopUp(String warningMessageForPopUp) { - this.warningMessageForPopUp = warningMessageForPopUp; + public void setWarningMessageForFileTypeDifferentPopUp(String warningMessageForPopUp) { + this.warningMessageForFileTypeDifferentPopUp = warningMessageForPopUp; + } + + private String warningMessageForAlreadyExistsPopUp; + + public String getWarningMessageForAlreadyExistsPopUp() { + return warningMessageForAlreadyExistsPopUp; + } + + public void setWarningMessageForAlreadyExistsPopUp(String warningMessageForAlreadyExistsPopUp) { + this.warningMessageForAlreadyExistsPopUp = warningMessageForAlreadyExistsPopUp; + } + + private String headerForAlreadyExistsPopUp; + + public String getHeaderForAlreadyExistsPopUp() { + return headerForAlreadyExistsPopUp; + } + + public void setHeaderForAlreadyExistsPopUp(String headerForAlreadyExistsPopUp) { + this.headerForAlreadyExistsPopUp = headerForAlreadyExistsPopUp; + } + + private String labelForDeleteFilesPopup; + + public String getLabelForDeleteFilesPopup() { + return labelForDeleteFilesPopup; + } + + public void setLabelForDeleteFilesPopup(String labelForDeleteFilesPopup) { + this.labelForDeleteFilesPopup = 
labelForDeleteFilesPopup; + } + + public void setLabelForDeleteFilesPopup() { + this.labelForDeleteFilesPopup = ((multipleDupesExisting|| multipleDupesNew) ? BundleUtil.getStringFromBundle("file.delete.duplicate.multiple") : + BundleUtil.getStringFromBundle("file.delete.duplicate.single")); + } + + //((multipleDupesExisting|| multipleDupesNew) ? BundleUtil.getStringFromBundle("file.addreplace.already_exists.header.multiple"): BundleUtil.getStringFromBundle("file.addreplace.already_exists.header")); + + public void setHeaderForAlreadyExistsPopUp() { + + this.headerForAlreadyExistsPopUp = ((multipleDupesExisting|| multipleDupesNew) ? BundleUtil.getStringFromBundle("file.addreplace.already_exists.header.multiple"): BundleUtil.getStringFromBundle("file.addreplace.already_exists.header")); } private void handleReplaceFileUpload(FacesEvent event, InputStream inputStream, @@ -1810,7 +1905,7 @@ private void handleReplaceFileUpload(FacesEvent event, InputStream inputStream, */ if (fileReplacePageHelper.hasContentTypeWarning()){ //Add warning to popup instead of page for Content Type Difference - setWarningMessageForPopUp(fileReplacePageHelper.getContentTypeWarning()); + setWarningMessageForFileTypeDifferentPopUp(fileReplacePageHelper.getContentTypeWarning()); /* Note on the info messages - upload errors, warnings and success messages: Instead of trying to display the message here (commented out code below), @@ -1874,7 +1969,7 @@ private void handleReplaceFileUpload(String fullStorageLocation, */ if (fileReplacePageHelper.hasContentTypeWarning()){ //Add warning to popup instead of page for Content Type Difference - setWarningMessageForPopUp(fileReplacePageHelper.getContentTypeWarning()); + setWarningMessageForFileTypeDifferentPopUp(fileReplacePageHelper.getContentTypeWarning()); } } else { uploadWarningMessage = fileReplacePageHelper.getErrorMessages(); @@ -1895,6 +1990,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { if 
(uploadInProgress.isFalse()) { uploadInProgress.setValue(true); } + + //resetting marked as dup in case there are multiple uploads + //we only want to delete as dupes those that we uploaded in this + //session + + newFiles.forEach((df) -> { + df.setMarkedAsDuplicate(false); + }); if (event == null){ throw new NullPointerException("event cannot be null"); @@ -1916,13 +2019,24 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { uFile.getContentType(), event, null); - if(fileReplacePageHelper.hasContentTypeWarning()){ + if( fileReplacePageHelper.wasPhase1Successful() && fileReplacePageHelper.hasContentTypeWarning()){ + //RequestContext context = RequestContext.getCurrentInstance(); + //RequestContext.getCurrentInstance().update("datasetForm:fileTypeDifferentPopup"); + //context.execute("PF('fileTypeDifferentPopup').show();"); + PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); + PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); + } + /* + + + if(fileReplacePageHelper.){ //RequestContext context = RequestContext.getCurrentInstance(); //RequestContext.getCurrentInstance().update("datasetForm:fileTypeDifferentPopup"); //context.execute("PF('fileTypeDifferentPopup').show();"); PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); } + */ return; } @@ -2079,15 +2193,69 @@ public void handleExternalUpload() { * @param dFileList */ - private String dupeFileNamesExisting = null; - private String dupeFileNamesNew = null; + private String existingFilesWithDupeContent = null; + private String uploadedFilesWithDupeContentToExisting = null; + private String uploadedFilesWithDupeContentToNewlyUploaded = null; + private String newlyUploadedFilesWithDupeContent = null; + private boolean multipleDupesExisting = false; private boolean multipleDupesNew = false; + public String getExistingFilesWithDupeContent() { + return 
existingFilesWithDupeContent; + } + + public void setExistingFilesWithDupeContent(String existingFilesWithDupeContent) { + this.existingFilesWithDupeContent = existingFilesWithDupeContent; + } + + public String getUploadedFilesWithDupeContentToExisting() { + return uploadedFilesWithDupeContentToExisting; + } + + public void setUploadedFilesWithDupeContentToExisting(String uploadedFilesWithDupeContentToExisting) { + this.uploadedFilesWithDupeContentToExisting = uploadedFilesWithDupeContentToExisting; + } + + public String getUploadedFilesWithDupeContentToNewlyUploaded() { + return uploadedFilesWithDupeContentToNewlyUploaded; + } + + public void setUploadedFilesWithDupeContentToNewlyUploaded(String uploadedFilesWithDupeContentToNewlyUploaded) { + this.uploadedFilesWithDupeContentToNewlyUploaded = uploadedFilesWithDupeContentToNewlyUploaded; + } + + public String getNewlyUploadedFilesWithDupeContent() { + return newlyUploadedFilesWithDupeContent; + } + + public void setNewlyUploadedFilesWithDupeContent(String newlyUploadedFilesWithDupeContent) { + this.newlyUploadedFilesWithDupeContent = newlyUploadedFilesWithDupeContent; + } + + + public boolean isMultipleDupesExisting() { + return multipleDupesExisting; + } + + public void setMultipleDupesExisting(boolean multipleDupesExisting) { + this.multipleDupesExisting = multipleDupesExisting; + } + + public boolean isMultipleDupesNew() { + return multipleDupesNew; + } + + public void setMultipleDupesNew(boolean multipleDupesNew) { + this.multipleDupesNew = multipleDupesNew; + } + private String processUploadedFileList(List dFileList) { if (dFileList == null) { return null; } + + uploadedInThisProcess = new ArrayList(); DataFile dataFile; String warningMessage = null; @@ -2119,39 +2287,71 @@ private String processUploadedFileList(List dFileList) { // or if another file with the same checksum has already been // uploaded. 
// ----------------------------------------------------------- + if (isFileAlreadyInDataset(dataFile)) { - if (dupeFileNamesExisting == null) { - dupeFileNamesExisting = dataFile.getFileMetadata().getLabel(); + DataFile existingFile = fileAlreadyExists.get(dataFile); + + // String alreadyExists = dataFile.getFileMetadata().getLabel() + " at " + existingFile.getDirectoryLabel() != null ? existingFile.getDirectoryLabel() + "/" + existingFile.getDisplayName() : existingFile.getDisplayName(); + String uploadedDuplicateFileName = dataFile.getFileMetadata().getLabel(); + String existingFileName = existingFile.getDisplayName(); + List args = Arrays.asList(existingFileName); + String inLineMessage = BundleUtil.getStringFromBundle("dataset.file.inline.message", args); + + if (existingFilesWithDupeContent == null) { + existingFilesWithDupeContent = existingFileName; + uploadedFilesWithDupeContentToExisting = uploadedDuplicateFileName; } else { - dupeFileNamesExisting = dupeFileNamesExisting.concat(", " + dataFile.getFileMetadata().getLabel()); + existingFilesWithDupeContent = existingFilesWithDupeContent.concat(", " + existingFileName); + uploadedFilesWithDupeContentToExisting = uploadedFilesWithDupeContentToExisting.concat(", " + uploadedDuplicateFileName); multipleDupesExisting = true; } - // remove temp file - FileUtil.deleteTempFile(dataFile, dataset, ingestService); + //now we are marking as duplicate and + //allowing the user to decide whether to delete + // deleteTempFile(dataFile); + dataFile.setMarkedAsDuplicate(true); + dataFile.setDuplicateFilename(inLineMessage); + } else if (isFileAlreadyUploaded(dataFile)) { - if (dupeFileNamesNew == null) { - dupeFileNamesNew = dataFile.getFileMetadata().getLabel(); + DataFile existingFile = checksumMapNew.get(dataFile.getChecksumValue()); + String alreadyUploadedWithSame = existingFile.getDisplayName(); + String newlyUploadedDupe = dataFile.getFileMetadata().getLabel(); + if (newlyUploadedFilesWithDupeContent == null) { + 
newlyUploadedFilesWithDupeContent = newlyUploadedDupe; + uploadedFilesWithDupeContentToNewlyUploaded = alreadyUploadedWithSame; } else { - dupeFileNamesNew = dupeFileNamesNew.concat(", " + dataFile.getFileMetadata().getLabel()); + newlyUploadedFilesWithDupeContent = newlyUploadedFilesWithDupeContent.concat(", " + newlyUploadedDupe); + uploadedFilesWithDupeContentToNewlyUploaded = uploadedFilesWithDupeContentToNewlyUploaded.concat(", " + alreadyUploadedWithSame); multipleDupesNew = true; } - // remove temp file - FileUtil.deleteTempFile(dataFile, dataset, ingestService); + //now we are marking as duplicate and + //allowing the user to decide whether to delete + dataFile.setMarkedAsDuplicate(true); + List args = Arrays.asList(existingFile.getDisplayName()); + String inLineMessage = BundleUtil.getStringFromBundle("dataset.file.inline.message", args); + dataFile.setDuplicateFilename(inLineMessage); } else { // OK, this one is not a duplicate, we want it. // But let's check if its filename is a duplicate of another // file already uploaded, or already in the dataset: + /* dataFile.getFileMetadata().setLabel(duplicateFilenameCheck(dataFile.getFileMetadata())); if (isTemporaryPreviewAvailable(dataFile.getStorageIdentifier(), dataFile.getContentType())) { dataFile.setPreviewImageAvailable(true); } uploadedFiles.add(dataFile); + */ // We are NOT adding the fileMetadata to the list that is being used // to render the page; we'll do that once we know that all the individual uploads // in this batch (as in, a bunch of drag-and-dropped files) have finished. 
//fileMetadatas.add(dataFile.getFileMetadata()); } - + + dataFile.getFileMetadata().setLabel(duplicateFilenameCheck(dataFile.getFileMetadata())); + if (isTemporaryPreviewAvailable(dataFile.getStorageIdentifier(), dataFile.getContentType())) { + dataFile.setPreviewImageAvailable(true); + } + uploadedFiles.add(dataFile); + uploadedInThisProcess.add(dataFile); /* preserved old, pre 4.6 code - mainly as an illustration of how we used to do this. @@ -2194,32 +2394,35 @@ private String processUploadedFileList(List dFileList) { // (note the separate messages for the files already in the dataset, // and the newly uploaded ones) // ----------------------------------------------------------- - if (dupeFileNamesExisting != null) { + if (existingFilesWithDupeContent != null) { String duplicateFilesErrorMessage = null; - if (multipleDupesExisting) { - duplicateFilesErrorMessage = getBundleString("dataset.files.exist") + dupeFileNamesExisting + getBundleString("dataset.file.skip"); + List args = Arrays.asList(uploadedFilesWithDupeContentToExisting, existingFilesWithDupeContent); + + if (multipleDupesExisting) { + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.files.exist", args); } else { - duplicateFilesErrorMessage = getBundleString("dataset.file.exist") + dupeFileNamesExisting; + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.exist", args); } if (warningMessage == null) { warningMessage = duplicateFilesErrorMessage; } else { - warningMessage = warningMessage.concat("; " + duplicateFilesErrorMessage); + warningMessage = warningMessage.concat(" " + duplicateFilesErrorMessage); } } - if (dupeFileNamesNew != null) { + if (newlyUploadedFilesWithDupeContent != null) { String duplicateFilesErrorMessage = null; - if (multipleDupesNew) { - duplicateFilesErrorMessage = getBundleString("dataset.files.duplicate") + dupeFileNamesNew + getBundleString("dataset.file.skip"); + List args = Arrays.asList(newlyUploadedFilesWithDupeContent, 
uploadedFilesWithDupeContentToNewlyUploaded); + + if (multipleDupesNew) { + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.files.duplicate", args); } else { - duplicateFilesErrorMessage = getBundleString("dataset.file.duplicate") + dupeFileNamesNew + getBundleString("dataset.file.skip"); + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.duplicate", args); } - if (warningMessage == null) { warningMessage = duplicateFilesErrorMessage; } else { - warningMessage = warningMessage.concat("; " + duplicateFilesErrorMessage); + warningMessage = warningMessage.concat(" " + duplicateFilesErrorMessage); } } @@ -2289,8 +2492,9 @@ private String duplicateFilenameCheck(FileMetadata fileMetadata) { return IngestUtil.duplicateFilenameCheck(fileMetadata, fileLabelsExisting); } - private Map checksumMapOld = null; // checksums of the files already in the dataset - private Map checksumMapNew = null; // checksums of the new files already uploaded + private Map checksumMapOld = null; // checksums of the files already in the dataset + private Map checksumMapNew = null; // checksums of the new files already uploaded + private Map fileAlreadyExists = null; private void initChecksumMap() { checksumMapOld = new HashMap<>(); @@ -2302,7 +2506,7 @@ private void initChecksumMap() { if (fm.getDataFile() != null && fm.getDataFile().getId() != null) { String chksum = fm.getDataFile().getChecksumValue(); if (chksum != null) { - checksumMapOld.put(chksum, 1); + checksumMapOld.put(chksum, fm.getDataFile()); } } @@ -2315,28 +2519,28 @@ private boolean isFileAlreadyInDataset(DataFile dataFile) { initChecksumMap(); } + if (fileAlreadyExists == null) { + fileAlreadyExists = new HashMap<>(); + } + + String chksum = dataFile.getChecksumValue(); + if(checksumMapOld.get(chksum) != null){ + fileAlreadyExists.put(dataFile, checksumMapOld.get(chksum)); + } + return chksum == null ? 
false : checksumMapOld.get(chksum) != null; } private boolean isFileAlreadyUploaded(DataFile dataFile) { + if (checksumMapNew == null) { checksumMapNew = new HashMap<>(); } + + return FileUtil.isFileAlreadyUploaded(dataFile, checksumMapNew, fileAlreadyExists); - String chksum = dataFile.getChecksumValue(); - - if (chksum == null) { - return false; - } - - if (checksumMapNew.get(chksum) != null) { - return true; - } - - checksumMapNew.put(chksum, 1); - return false; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java index 90b5562e8fd..99730a3a024 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java @@ -472,6 +472,7 @@ public void setSelected(boolean selected) { this.selected = selected; } + @Transient private boolean restrictedUI; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 94613100b16..2a82acc7622 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -670,6 +670,15 @@ protected Response ok( String msg ) { .type(MediaType.APPLICATION_JSON) .build(); } + + protected Response ok( String msg, JsonObjectBuilder bld ) { + return Response.ok().entity(Json.createObjectBuilder() + .add("status", STATUS_OK) + .add("message", Json.createObjectBuilder().add("message",msg)) + .add("data", bld).build()) + .type(MediaType.APPLICATION_JSON) + .build(); + } protected Response ok( boolean value ) { return Response.ok().entity(Json.createObjectBuilder() diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 61844ab6b9c..0c25fc4403f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1659,7 +1659,13 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, * user. Human readable. */ logger.fine("successMsg: " + successMsg); - return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + } else { + return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); + } + //"Look at that! You added a file! (hey hey, it may have worked)"); } catch (NoFilesException ex) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 7759761f35e..81db7f9dec1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -372,15 +372,9 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, List fmdList = editVersion.getFileMetadatas(); for(FileMetadata testFmd : fmdList) { DataFile daf = testFmd.getDataFile(); - // Not sure I understand why we are comparing the checksum values here, - // and not the DataFile ids. (probably because this code was - // copy-and-pasted from somewhere where it was potentially operating - // on *new* datafiles, that haven't been saved in the database yet; - // but it should never be the case in the context of this API) - // -- L.A. Mar. 
2020 - if(daf.getChecksumType().equals(df.getChecksumType()) - && daf.getChecksumValue().equals(df.getChecksumValue())) { - upFmd = testFmd; + if(daf.equals(df)){ + upFmd = testFmd; + break; } } @@ -413,7 +407,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, } catch (Exception e) { logger.log(Level.WARNING, "Dataset publication finalization: exception while exporting:{0}", e); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Error adding metadata to DataFile" + e); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error adding metadata to DataFile: " + e); } } catch (WrappedResponse wr) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 18bf172f5d3..4928100dfff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -153,6 +153,55 @@ public class AddReplaceFileHelper{ private boolean contentTypeWarningFound; private String contentTypeWarningString; + private boolean duplicateFileErrorFound; + + private String duplicateFileErrorString; + + private boolean duplicateFileWarningFound; + private String duplicateFileWarningString; + + private String duplicateFileComponentMessage; + + public String getDuplicateFileComponentMessage() { + return duplicateFileComponentMessage; + } + + public void setDuplicateFileComponentMessage(String duplicateFileComponentMessage) { + this.duplicateFileComponentMessage = duplicateFileComponentMessage; + } + + public boolean isDuplicateFileErrorFound() { + return duplicateFileErrorFound; + } + + public void setDuplicateFileErrorFound(boolean duplicateFileErrorFound) { + this.duplicateFileErrorFound = duplicateFileErrorFound; + } + + public String getDuplicateFileErrorString() { + return duplicateFileErrorString; + } + + public void 
setDuplicateFileErrorString(String duplicateFileErrorString) { + this.duplicateFileErrorString = duplicateFileErrorString; + } + + public boolean isDuplicateFileWarningFound() { + return duplicateFileWarningFound; + } + + public void setDuplicateFileWarningFound(boolean duplicateFileWarningFound) { + this.duplicateFileWarningFound = duplicateFileWarningFound; + } + + public String getDuplicateFileWarningString() { + return duplicateFileWarningString; + } + + public void setDuplicateFileWarningString(String duplicateFileWarningString) { + this.duplicateFileWarningString = duplicateFileWarningString; + } + public void resetFileHelper(){ initErrorHandling(); @@ -761,6 +810,17 @@ private void addError(Response.Status badHttpResponse, String errMsg){ } + private void addErrorWarning(String errMsg){ + if (errMsg == null){ + throw new NullPointerException("errMsg cannot be null"); + } + + logger.severe(errMsg); + this.setDuplicateFileWarning(errMsg); + this.errorMessages.add(errMsg); + + } + private void addErrorSevere(String errMsg){ @@ -1134,6 +1194,8 @@ private boolean step_030_createNewFilesViaIngest(){ * @return */ private boolean step_040_auto_checkForDuplicates(){ + this.duplicateFileErrorString = ""; + this.duplicateFileErrorFound = false; msgt("step_040_auto_checkForDuplicates"); if (this.hasError()){ @@ -1179,20 +1241,24 @@ private boolean step_040_auto_checkForDuplicates(){ // ----------------------------------------------------------- // (2) Check for duplicates + // Only a warning now // ----------------------------------------------------------- if (isFileReplaceOperation() && Objects.equals(df.getChecksumValue(), fileToReplace.getChecksumValue())){ - this.addErrorSevere(getBundleErr("replace.new_file_same_as_replacement")); + this.addError(getBundleErr("replace.new_file_same_as_replacement")); + this.duplicateFileErrorFound = true; + this.duplicateFileErrorString = getBundleErr("replace.new_file_same_as_replacement"); break; - } else if 
(DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){ + } + + if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){ String dupeName = df.getFileMetadata().getLabel(); - //removeUnSavedFilesFromWorkingVersion(); - //removeLinkedFileFromDataset(dataset, df); - //abandonOperationRemoveAllNewFilesFromDataset(); - this.addErrorSevere(getBundleErr("duplicate_file") + " " + dupeName); - //return false; - } else { - finalFileList.add(df); - } + this.duplicateFileWarningFound = true; + this.duplicateFileWarningString = BundleUtil.getStringFromBundle("file.addreplace.warning.duplicate_file", + Arrays.asList(dupeName)); + this.addErrorWarning(this.duplicateFileWarningString); + + } + finalFileList.add(df); } if (this.hasError()){ @@ -1913,6 +1979,16 @@ public String getContentTypeWarningString(){ return contentTypeWarningString; } + private String duplicateFileWarning; + + public String getDuplicateFileWarning() { + return duplicateFileWarning; + } + + public void setDuplicateFileWarning(String duplicateFileWarning) { + this.duplicateFileWarning = duplicateFileWarning; + } + } // end class /* DatasetPage sequence: diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java index a709f07b5b7..dd55ec72213 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java @@ -9,7 +9,9 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -150,18 +152,27 @@ public static boolean 
isDuplicateOriginalWay(DatasetVersion workingVersion, File List wvCopy = new ArrayList<>(workingVersion.getFileMetadatas()); Iterator fmIt = wvCopy.iterator(); - while (fmIt.hasNext()) { + while (fmIt.hasNext()) { FileMetadata fm = fmIt.next(); - String currentCheckSum = fm.getDataFile().getChecksumValue(); + String currentCheckSum = fm.getDataFile().getChecksumValue(); if (currentCheckSum != null) { + if (currentCheckSum.equals(selectedCheckSum)) { + DataFile existingFile = fm.getDataFile(); + List args = Arrays.asList(existingFile.getDisplayName()); + String inLineMessage = BundleUtil.getStringFromBundle("dataset.file.inline.message", args); + fileMetadata.getDataFile().setDuplicateFilename(inLineMessage); + return true; + } + /* if (checkSumMap.get(currentCheckSum) != null) { checkSumMap.put(currentCheckSum, checkSumMap.get(currentCheckSum).intValue() + 1); } else { checkSumMap.put(currentCheckSum, 1); - } + }*/ } } - return checkSumMap.get(selectedCheckSum) != null; // && checkSumMap.get(selectedCheckSum).intValue() > 1; + return false; + // return checkSumMap.get(selectedCheckSum) != null; // && checkSumMap.get(selectedCheckSum).intValue() > 1; } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java index d79c4a48094..93bd903130c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java @@ -199,6 +199,10 @@ public List getNewFileMetadatasBeforeSave(){ } + public AddReplaceFileHelper getAddReplaceFileHelper(){ + return replaceFileHelper; + } + /** * * Show file upload component if Phase 1 hasn't happened yet diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java index 150d6cfb43c..7f01e217cfa 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java @@ -133,14 +133,23 @@ public static boolean conflictsWithExistingFilenames(String pathPlusFilename, Li } /** - * Given a DatasetVersion, iterate across all the files (including their + * Given a DatasetVersion, and the newFiles about to be added to the + * version, iterate across all the files (including their * paths) and return any duplicates. * * @param datasetVersion + * @param newFiles * @return A Collection of Strings in the form of path/to/file.txt */ - public static Collection findDuplicateFilenames(DatasetVersion datasetVersion) { - List allFileNamesWithPaths = getPathsAndFileNames(datasetVersion.getFileMetadatas()); + public static Collection findDuplicateFilenames(DatasetVersion datasetVersion, List newFiles) { + List toTest = new ArrayList(); + datasetVersion.getFileMetadatas().forEach((fm) -> { + toTest.add(fm); + }); + newFiles.forEach((df) -> { + toTest.add(df.getFileMetadata()); + }); + List allFileNamesWithPaths = getPathsAndFileNames(toTest); return findDuplicates(allFileNamesWithPaths); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 59dca5bb2da..608566bcae1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1836,5 +1836,29 @@ public static void deleteTempFile(DataFile dataFile, Dataset dataset, IngestServ dataFile.setOwner(null); } } + + public static boolean isFileAlreadyUploaded(DataFile dataFile, Map checksumMapNew, Map fileAlreadyExists) { + if (checksumMapNew == null) { + checksumMapNew = new HashMap<>(); + } + + if (fileAlreadyExists == null) { + fileAlreadyExists = new HashMap<>(); + } + + String chksum = dataFile.getChecksumValue(); + + if (chksum == null) { + return false; + } + + if (checksumMapNew.get(chksum) != null) { + 
fileAlreadyExists.put(dataFile, checksumMapNew.get(chksum)); + return true; + } + + checksumMapNew.put(chksum, dataFile); + return false; + } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index cb3765141e7..7ac21b97d51 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1509,9 +1509,13 @@ file.replaced.warning.draft.warningMessage=You can not replace a file that has b file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. file.alreadyDeleted.previous.warningMessage=This file has already been deleted in current version. It may not be edited. file.delete=Delete +file.delete.duplicate.multiple=Delete Duplicate Files +file.delete.duplicate.single=Delete Duplicate File file.metadata=Metadata file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. file.deleted.replacement.success=The replacement file has been deleted. +file.deleted.upload.success.single=File has been deleted and won\u2019t be included in this upload. +file.deleted.upload.success.multiple=Files have been deleted and won\u2019t be included in this upload. file.editAccess=Edit Access file.restrict=Restrict file.unrestrict=Unrestrict @@ -1915,15 +1919,18 @@ file.addreplace.error.no_edit_dataset_permission=You do not have permission to e file.addreplace.error.filename_undetermined=The file name cannot be determined. file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined. file.addreplace.error.file_upload_failed=The file upload failed. -file.addreplace.error.duplicate_file=This file already exists in the dataset. +file.addreplace.warning.duplicate_file=This file has the same content as {0} that is in the dataset. 
+file.addreplace.error.duplicate_file.continue=You may delete it if it was not intentional. file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided. file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0} file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null. file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset. file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.) file.addreplace.content_type.header=File Type Different +file.addreplace.already_exists.header=Duplicate File Uploaded +file.addreplace.already_exists.header.multiple=Duplicate Files Uploaded file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types. -file.addreplace.error.replace.new_file_same_as_replacement=You cannot replace a file with the exact same file. +file.addreplace.error.replace.new_file_same_as_replacement=Error! You may not replace a file with a file that has duplicate content. file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added. @@ -1938,6 +1945,8 @@ file.addreplace.success.replace=File successfully replaced! file.addreplace.error.auth=The API key is invalid. 
file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag: + + # 500.xhtml error.500.page.title=500 Internal Server Error error.500.message=Internal Server Error - An unexpected error was encountered, no more information is available. @@ -2106,16 +2115,19 @@ dataverse.alias.taken=This Alias is already taken. #editDatafilesPage.java dataset.save.fail=Dataset Save Failed -dataset.files.exist=The following files already exist in the dataset: -dataset.file.exist=The following file already exists in the dataset: -dataset.files.duplicate=The following files are duplicates of (an) already uploaded file(s): -dataset.file.duplicate=The following file is a duplicate of an already uploaded file: -dataset.file.skip=(skipping) + +dataset.files.exist=Files {0} have the same content as {1} that already exist in the dataset. +dataset.file.exist=File {0} has the same content as {1} that already exists in the dataset. +dataset.file.exist.test={0, choice, 1#File |2#Files |} {1} {0, choice, 1#has |2#have |} the same content as {2} that already {0, choice, 1#exist |2#exist |}in the dataset. +dataset.files.duplicate=Files {0} have the same content as {1} that have already been uploaded. +dataset.file.duplicate=File {0} has the same content as {1} that has already been uploaded. +dataset.file.inline.message= This file has the same content as {0}. dataset.file.upload=Succesful {0} is uploaded. dataset.file.uploadFailure=upload failure dataset.file.uploadFailure.detailmsg=the file {0} failed to upload! dataset.file.uploadWarning=upload warning dataset.file.uploadWorked=upload worked +dataset.file.upload.popup.explanation.tip=For more information, please refer to the Duplicate Files section of the User Guide. #EmailValidator.java email.invalid=is not a valid email address. 
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a1ec27859cf..9a516325b2f 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -283,8 +283,8 @@