diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e2860b9a5..3006bde48 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,10 +11,10 @@ on: env: GH_USER_NAME: github.actor - SCRIPTS_VERSION: 5.7.0 + SCRIPTS_VERSION: 5.8.0 BOM_VERSION: 5.7.5 - MIGRATIONS_VERSION: 5.8.0 - RELEASE_VERSION: 5.8.0 + MIGRATIONS_VERSION: 5.9.0 + RELEASE_VERSION: 5.9.0 jobs: release: diff --git a/build.gradle b/build.gradle index 581dfb5dd..61f2f4888 100644 --- a/build.gradle +++ b/build.gradle @@ -50,7 +50,7 @@ repositories { dependencyManagement { imports { - mavenBom(releaseMode ? 'com.epam.reportportal:commons-bom:' + getProperty('bom.version') : 'com.github.reportportal:commons-bom:2014aa5') + mavenBom(releaseMode ? 'com.epam.reportportal:commons-bom:' + getProperty('bom.version') : 'com.github.reportportal:commons-bom:80a17605') mavenBom('io.zonky.test.postgres:embedded-postgres-binaries-bom:12.9.0') } } diff --git a/gradle.properties b/gradle.properties index dfc09987a..24fc8c428 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1 @@ -version=5.8.1 \ No newline at end of file +version=5.9.0 \ No newline at end of file diff --git a/project-properties.gradle b/project-properties.gradle index 12d121162..fd8d4da6f 100755 --- a/project-properties.gradle +++ b/project-properties.gradle @@ -9,7 +9,7 @@ project.ext { dependencyRepos = ["commons", "commons-rules", "commons-model", "commons-bom"] releaseMode = project.hasProperty("releaseMode") scriptsUrl = commonScriptsUrl + (releaseMode ? getProperty('scripts.version') : 'master') - migrationsUrl = migrationsScriptsUrl + (releaseMode ? getProperty('migrations.version') : 'hotfix/5.7.5') + migrationsUrl = migrationsScriptsUrl + (releaseMode ? getProperty('migrations.version') : 'master') //TODO refactor with archive download testScriptsSrc = [ (migrationsUrl + '/migrations/0_extensions.up.sql') : 'V001__extensions.sql', @@ -67,7 +67,8 @@ project.ext { (migrationsUrl + '/migrations/59_stale_materialized_view.up.sql') : 'V059__stale_materialized_view.sql', (migrationsUrl + '/migrations/60_sender_case_operator.up.sql') : 'V060__sender_case_operator.sql', (migrationsUrl + '/migrations/61_remove_acl.up.sql') : 'V061__remove_acl.sql', - (migrationsUrl + '/migrations/62_remove_dashboard_cascade_drop.up.sql') : 'V062_remove_dashboard_cascade_drop.sql', + (migrationsUrl + '/migrations/62_remove_dashboard_cascade_drop.up.sql') : 'V062__remove_dashboard_cascade_drop.sql', + (migrationsUrl + '/migrations/67_api_keys.up.sql') : 'V067__api_keys.sql', ] excludeTests = [ diff --git a/src/main/java/com/epam/ta/reportportal/binary/impl/AttachmentBinaryDataServiceImpl.java b/src/main/java/com/epam/ta/reportportal/binary/impl/AttachmentBinaryDataServiceImpl.java index fcdeb0e3f..b93900258 100644 --- a/src/main/java/com/epam/ta/reportportal/binary/impl/AttachmentBinaryDataServiceImpl.java +++ b/src/main/java/com/epam/ta/reportportal/binary/impl/AttachmentBinaryDataServiceImpl.java @@ -26,8 +26,10 @@ import com.epam.ta.reportportal.entity.attachment.Attachment; import com.epam.ta.reportportal.entity.attachment.AttachmentMetaInfo; import com.epam.ta.reportportal.entity.attachment.BinaryData; +import com.epam.ta.reportportal.entity.enums.FeatureFlag; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.filesystem.FilePathGenerator; +import com.epam.ta.reportportal.util.FeatureFlagHandler; import com.epam.ta.reportportal.ws.model.ErrorType; import 
org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; @@ -53,125 +55,152 @@ @Service public class AttachmentBinaryDataServiceImpl implements AttachmentBinaryDataService { - private static final Logger LOGGER = LoggerFactory.getLogger(AttachmentBinaryDataServiceImpl.class); - - private final ContentTypeResolver contentTypeResolver; - - private final FilePathGenerator filePathGenerator; - - private final DataStoreService dataStoreService; - - private final AttachmentRepository attachmentRepository; - - private final CreateLogAttachmentService createLogAttachmentService; - - @Autowired - public AttachmentBinaryDataServiceImpl(ContentTypeResolver contentTypeResolver, FilePathGenerator filePathGenerator, - @Qualifier("attachmentDataStoreService") DataStoreService dataStoreService, AttachmentRepository attachmentRepository, - CreateLogAttachmentService createLogAttachmentService) { - this.contentTypeResolver = contentTypeResolver; - this.filePathGenerator = filePathGenerator; - this.dataStoreService = dataStoreService; - this.attachmentRepository = attachmentRepository; - this.createLogAttachmentService = createLogAttachmentService; - } - - @Override - public Optional saveAttachment(AttachmentMetaInfo metaInfo, MultipartFile file) { - Optional result = Optional.empty(); - try (InputStream inputStream = file.getInputStream(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { - inputStream.transferTo(outputStream); - String contentType = resolveContentType(file.getContentType(), outputStream); - String fileName = resolveFileName(metaInfo, file, contentType); - - String commonPath = filePathGenerator.generate(metaInfo); - String targetPath = Paths.get(commonPath, fileName).toString(); - - String fileId; - try (ByteArrayInputStream copy = new ByteArrayInputStream(outputStream.toByteArray())) { - fileId = dataStoreService.save(targetPath, copy); - } - - result = Optional.of(BinaryDataMetaInfo.BinaryDataMetaInfoBuilder.aBinaryDataMetaInfo() - .withFileId(fileId) - .withContentType(contentType) - .withFileSize(file.getSize()) - .build()); - } catch (IOException e) { - LOGGER.error("Unable to save binary data", e); - } finally { - if (file instanceof CommonsMultipartFile) { - ((CommonsMultipartFile) file).getFileItem().delete(); - } - } - return result; - } - - private String resolveFileName(AttachmentMetaInfo metaInfo, MultipartFile file, String contentType) { - String extension = resolveExtension(contentType).orElse(resolveExtension(true, file)); - return metaInfo.getLogUuid() + "-" + file.getName() + extension; - } - - @Override - public void saveFileAndAttachToLog(MultipartFile file, AttachmentMetaInfo attachmentMetaInfo) { - saveAttachment(attachmentMetaInfo, file).ifPresent(it -> attachToLog(it, attachmentMetaInfo)); - } - - @Override - public void attachToLog(BinaryDataMetaInfo binaryDataMetaInfo, AttachmentMetaInfo attachmentMetaInfo) { - try { - Attachment attachment = new Attachment(); - attachment.setFileId(binaryDataMetaInfo.getFileId()); - attachment.setThumbnailId(binaryDataMetaInfo.getThumbnailFileId()); - attachment.setContentType(binaryDataMetaInfo.getContentType()); - attachment.setFileSize(binaryDataMetaInfo.getFileSize()); - - attachment.setProjectId(attachmentMetaInfo.getProjectId()); - attachment.setLaunchId(attachmentMetaInfo.getLaunchId()); - attachment.setItemId(attachmentMetaInfo.getItemId()); - attachment.setCreationDate(attachmentMetaInfo.getCreationDate()); - - createLogAttachmentService.create(attachment, attachmentMetaInfo.getLogId()); - } catch 
(Exception exception) { - LOGGER.error("Cannot save log to database, remove files ", exception); - - dataStoreService.delete(binaryDataMetaInfo.getFileId()); - dataStoreService.delete(binaryDataMetaInfo.getThumbnailFileId()); - throw exception; - } - } - - @Override - public BinaryData load(Long fileId, ReportPortalUser.ProjectDetails projectDetails) { - try { - Attachment attachment = attachmentRepository.findById(fileId) - .orElseThrow(() -> new ReportPortalException(ErrorType.ATTACHMENT_NOT_FOUND, fileId)); - InputStream data = dataStoreService.load(attachment.getFileId()) - .orElseThrow(() -> new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, fileId)); - expect(attachment.getProjectId(), Predicate.isEqual(projectDetails.getProjectId())).verify(ErrorType.ACCESS_DENIED, - formattedSupplier("You are not assigned to project '{}'", projectDetails.getProjectName()) - ); - return new BinaryData(attachment.getContentType(), (long) data.available(), data); - } catch (IOException e) { - LOGGER.error("Unable to load binary data", e); - throw new ReportPortalException(ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR, "Unable to load binary data"); - } - } - - @Override - public void delete(String fileId) { - if (StringUtils.isNotEmpty(fileId)) { - dataStoreService.delete(fileId); - attachmentRepository.findByFileId(fileId).ifPresent(attachmentRepository::delete); - } - } - - private String resolveContentType(String contentType, ByteArrayOutputStream outputStream) throws IOException { - if (isContentTypePresent(contentType)) { - return contentType; - } - try (ByteArrayInputStream copy = new ByteArrayInputStream(outputStream.toByteArray())) { - return contentTypeResolver.detectContentType(copy); - } - } + private static final Logger LOGGER = + LoggerFactory.getLogger(AttachmentBinaryDataServiceImpl.class); + + private final ContentTypeResolver contentTypeResolver; + + private final FilePathGenerator filePathGenerator; + + private final DataStoreService dataStoreService; + + private final AttachmentRepository attachmentRepository; + + private final CreateLogAttachmentService createLogAttachmentService; + + private final FeatureFlagHandler featureFlagHandler; + + /** + * Creates {@link AttachmentBinaryDataService}. 
+ * + * @param contentTypeResolver {@link ContentTypeResolver} + * @param filePathGenerator {@link FilePathGenerator} + * @param dataStoreService {@link DataStoreService} + * @param attachmentRepository {@link AttachmentRepository} + * @param createLogAttachmentService {@link CreateLogAttachmentService} + * @param featureFlagHandler {@link FeatureFlagHandler} + */ + @Autowired + public AttachmentBinaryDataServiceImpl(ContentTypeResolver contentTypeResolver, + FilePathGenerator filePathGenerator, + @Qualifier("attachmentDataStoreService") DataStoreService dataStoreService, + AttachmentRepository attachmentRepository, + CreateLogAttachmentService createLogAttachmentService, + FeatureFlagHandler featureFlagHandler) { + this.contentTypeResolver = contentTypeResolver; + this.filePathGenerator = filePathGenerator; + this.dataStoreService = dataStoreService; + this.attachmentRepository = attachmentRepository; + this.createLogAttachmentService = createLogAttachmentService; + this.featureFlagHandler = featureFlagHandler; + } + + @Override + public Optional saveAttachment(AttachmentMetaInfo metaInfo, + MultipartFile file) { + Optional result = Optional.empty(); + try (InputStream inputStream = file.getInputStream(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + inputStream.transferTo(outputStream); + String contentType = resolveContentType(file.getContentType(), outputStream); + String fileName = resolveFileName(metaInfo, file, contentType); + + String commonPath; + if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { + commonPath = Paths.get(PROJECT_PATH, filePathGenerator.generate(metaInfo)).toString(); + } else { + commonPath = filePathGenerator.generate(metaInfo); + } + String targetPath = Paths.get(commonPath, fileName).toString(); + + String fileId; + try (ByteArrayInputStream copy = new ByteArrayInputStream(outputStream.toByteArray())) { + fileId = dataStoreService.save(targetPath, copy); + } + + result = Optional.of( + BinaryDataMetaInfo.BinaryDataMetaInfoBuilder.aBinaryDataMetaInfo().withFileId(fileId) + .withContentType(contentType).withFileSize(file.getSize()).build()); + } catch (IOException e) { + LOGGER.error("Unable to save binary data", e); + } finally { + if (file instanceof CommonsMultipartFile) { + ((CommonsMultipartFile) file).getFileItem().delete(); + } + } + return result; + } + + private String resolveFileName(AttachmentMetaInfo metaInfo, MultipartFile file, + String contentType) { + String extension = resolveExtension(contentType).orElse(resolveExtension(true, file)); + return metaInfo.getLogUuid() + "-" + file.getName() + extension; + } + + @Override + public void saveFileAndAttachToLog(MultipartFile file, AttachmentMetaInfo attachmentMetaInfo) { + saveAttachment(attachmentMetaInfo, file).ifPresent(it -> attachToLog(it, attachmentMetaInfo)); + } + + @Override + public void attachToLog(BinaryDataMetaInfo binaryDataMetaInfo, + AttachmentMetaInfo attachmentMetaInfo) { + try { + Attachment attachment = new Attachment(); + attachment.setFileId(binaryDataMetaInfo.getFileId()); + attachment.setThumbnailId(binaryDataMetaInfo.getThumbnailFileId()); + attachment.setContentType(binaryDataMetaInfo.getContentType()); + attachment.setFileSize(binaryDataMetaInfo.getFileSize()); + + attachment.setProjectId(attachmentMetaInfo.getProjectId()); + attachment.setLaunchId(attachmentMetaInfo.getLaunchId()); + attachment.setItemId(attachmentMetaInfo.getItemId()); + attachment.setCreationDate(attachmentMetaInfo.getCreationDate()); + + 
createLogAttachmentService.create(attachment, attachmentMetaInfo.getLogId()); + } catch (Exception exception) { + LOGGER.error("Cannot save log to database, remove files ", exception); + + dataStoreService.delete(binaryDataMetaInfo.getFileId()); + dataStoreService.delete(binaryDataMetaInfo.getThumbnailFileId()); + throw exception; + } + } + + @Override + public BinaryData load(Long fileId, ReportPortalUser.ProjectDetails projectDetails) { + try { + Attachment attachment = attachmentRepository.findById(fileId) + .orElseThrow(() -> new ReportPortalException(ErrorType.ATTACHMENT_NOT_FOUND, fileId)); + InputStream data = dataStoreService.load(attachment.getFileId()).orElseThrow( + () -> new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, fileId)); + expect(attachment.getProjectId(), Predicate.isEqual(projectDetails.getProjectId())).verify( + ErrorType.ACCESS_DENIED, + formattedSupplier("You are not assigned to project '{}'", projectDetails.getProjectName()) + ); + return new BinaryData(attachment.getContentType(), (long) data.available(), data); + } catch (IOException e) { + LOGGER.error("Unable to load binary data", e); + throw new ReportPortalException( + ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR, "Unable to load binary data"); + } + } + + @Override + public void delete(String fileId) { + if (StringUtils.isNotEmpty(fileId)) { + dataStoreService.delete(fileId); + attachmentRepository.findByFileId(fileId).ifPresent(attachmentRepository::delete); + } + } + + private String resolveContentType(String contentType, ByteArrayOutputStream outputStream) + throws IOException { + if (isContentTypePresent(contentType)) { + return contentType; + } + try (ByteArrayInputStream copy = new ByteArrayInputStream(outputStream.toByteArray())) { + return contentTypeResolver.detectContentType(copy); + } + } } diff --git a/src/main/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreService.java b/src/main/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreService.java index dc0ce8e6e..03c5ef21e 100644 --- a/src/main/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreService.java +++ b/src/main/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreService.java @@ -16,44 +16,43 @@ package com.epam.ta.reportportal.binary.impl; +import static java.util.Optional.ofNullable; + import com.epam.ta.reportportal.binary.DataStoreService; import com.epam.ta.reportportal.filesystem.DataEncoder; import com.epam.ta.reportportal.filesystem.DataStore; - import java.io.InputStream; import java.util.Optional; -import static java.util.Optional.ofNullable; - /** * @author Ihar Kahadouski */ public abstract class CommonDataStoreService implements DataStoreService { - protected DataStore dataStore; + protected DataStore dataStore; - protected DataEncoder dataEncoder; + protected DataEncoder dataEncoder; - CommonDataStoreService(DataStore dataStore, DataEncoder dataEncoder) { - this.dataStore = dataStore; - this.dataEncoder = dataEncoder; - } + CommonDataStoreService(DataStore dataStore, DataEncoder dataEncoder) { + this.dataStore = dataStore; + this.dataEncoder = dataEncoder; + } - @Override - public String save(String fileName, InputStream data) { - return dataEncoder.encode(dataStore.save(fileName, data)); - } + @Override + public String save(String fileName, InputStream data) { + return dataEncoder.encode(dataStore.save(fileName, data)); + } - @Override - public abstract String saveThumbnail(String fileName, InputStream data); + @Override + public abstract String saveThumbnail(String fileName, 
InputStream data); - @Override - public void delete(String fileId) { - dataStore.delete(dataEncoder.decode(fileId)); - } + @Override + public void delete(String fileId) { + dataStore.delete(dataEncoder.decode(fileId)); + } - @Override - public Optional<InputStream> load(String fileId) { - return ofNullable(dataStore.load(dataEncoder.decode(fileId))); - } + @Override + public Optional<InputStream> load(String fileId) { + return ofNullable(dataStore.load(dataEncoder.decode(fileId))); + } } diff --git a/src/main/java/com/epam/ta/reportportal/binary/impl/DataStoreUtils.java b/src/main/java/com/epam/ta/reportportal/binary/impl/DataStoreUtils.java index 325c13af2..e5866fd9e 100644 --- a/src/main/java/com/epam/ta/reportportal/binary/impl/DataStoreUtils.java +++ b/src/main/java/com/epam/ta/reportportal/binary/impl/DataStoreUtils.java @@ -34,45 +34,60 @@ */ public class DataStoreUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(DataStoreUtils.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DataStoreUtils.class); - private static final String THUMBNAIL_PREFIX = "thumbnail-"; + private static final String THUMBNAIL_PREFIX = "thumbnail-"; - private static final String DOT = "."; + private static final String DOT = "."; - static final String ROOT_USER_PHOTO_DIR = "users"; + static final String ROOT_USER_PHOTO_DIR = "users"; - static final String ATTACHMENT_CONTENT_TYPE = "attachmentContentType"; + static final String ATTACHMENT_CONTENT_TYPE = "attachmentContentType"; - private DataStoreUtils() { - //static only - } + static final String PROJECT_PATH = "project-data"; - public static Optional<String> resolveExtension(String contentType) { - Optional<String> result = Optional.empty(); - try { - result = Optional.of(MimeTypes.getDefaultMimeTypes().forName(contentType).getExtension()); - } catch (MimeTypeException e) { - LOGGER.warn("Cannot resolve file extension from content type '{}'", contentType, e); - } - return result; - } + static final String USER_DATA_PATH = "user-data"; - public static String resolveExtension(boolean prefixDot, MultipartFile file) { - final String extension = FilenameUtils.getExtension(file.getOriginalFilename()); - return prefixDot ? DOT + extension : extension; - } + static final String PHOTOS_PATH = "photos"; - public static String buildThumbnailFileName(String commonPath, String fileName) { - Path thumbnailTargetPath = Paths.get(commonPath, THUMBNAIL_PREFIX.concat(fileName)); - return thumbnailTargetPath.toString(); - } + public static final String INTEGRATION_SECRETS_PATH = "integration-secrets"; - public static boolean isImage(String contentType) { - return contentType != null && contentType.contains("image"); - } + private DataStoreUtils() { + //static only + } - public static boolean isContentTypePresent(String contentType) { - return !Strings.isNullOrEmpty(contentType) && !MediaType.APPLICATION_OCTET_STREAM_VALUE.equals(contentType); - } + /** + * Resolves the file extension from the given content type. 
+ * + * @param contentType Content type + * @return {@link Optional} of {@link String} + */ + public static Optional resolveExtension(String contentType) { + Optional result = Optional.empty(); + try { + result = Optional.of(MimeTypes.getDefaultMimeTypes().forName(contentType).getExtension()); + } catch (MimeTypeException e) { + LOGGER.warn("Cannot resolve file extension from content type '{}'", contentType, e); + } + return result; + } + + public static String resolveExtension(boolean prefixDot, MultipartFile file) { + final String extension = FilenameUtils.getExtension(file.getOriginalFilename()); + return prefixDot ? DOT + extension : extension; + } + + public static String buildThumbnailFileName(String commonPath, String fileName) { + Path thumbnailTargetPath = Paths.get(commonPath, THUMBNAIL_PREFIX.concat(fileName)); + return thumbnailTargetPath.toString(); + } + + public static boolean isImage(String contentType) { + return contentType != null && contentType.contains("image"); + } + + public static boolean isContentTypePresent(String contentType) { + return !Strings.isNullOrEmpty(contentType) && !MediaType.APPLICATION_OCTET_STREAM_VALUE.equals( + contentType); + } } diff --git a/src/main/java/com/epam/ta/reportportal/binary/impl/UserBinaryDataServiceImpl.java b/src/main/java/com/epam/ta/reportportal/binary/impl/UserBinaryDataServiceImpl.java index a60315fe4..6d56f963d 100644 --- a/src/main/java/com/epam/ta/reportportal/binary/impl/UserBinaryDataServiceImpl.java +++ b/src/main/java/com/epam/ta/reportportal/binary/impl/UserBinaryDataServiceImpl.java @@ -16,14 +16,28 @@ package com.epam.ta.reportportal.binary.impl; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.ATTACHMENT_CONTENT_TYPE; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.PHOTOS_PATH; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.ROOT_USER_PHOTO_DIR; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.USER_DATA_PATH; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.buildThumbnailFileName; +import static java.util.Optional.ofNullable; + import com.epam.ta.reportportal.binary.DataStoreService; import com.epam.ta.reportportal.binary.UserBinaryDataService; import com.epam.ta.reportportal.entity.Metadata; import com.epam.ta.reportportal.entity.attachment.BinaryData; +import com.epam.ta.reportportal.entity.enums.FeatureFlag; import com.epam.ta.reportportal.entity.user.User; import com.epam.ta.reportportal.exception.ReportPortalException; +import com.epam.ta.reportportal.util.FeatureFlagHandler; import com.epam.ta.reportportal.ws.model.ErrorType; import com.google.common.collect.Maps; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Paths; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -34,96 +48,106 @@ import org.springframework.util.StreamUtils; import org.springframework.web.multipart.MultipartFile; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Paths; -import java.util.Optional; - -import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.*; -import static java.util.Optional.ofNullable; - /** * @author Ihar Kahadouski */ @Service public class UserBinaryDataServiceImpl implements UserBinaryDataService { - private static final Logger LOGGER = 
LoggerFactory.getLogger(UserBinaryDataServiceImpl.class); + private static final Logger LOGGER = LoggerFactory.getLogger(UserBinaryDataServiceImpl.class); + + private DataStoreService dataStoreService; - private DataStoreService dataStoreService; + private FeatureFlagHandler featureFlagHandler; - private static final String DEFAULT_USER_PHOTO = "image/defaultAvatar.png"; + private static final String DEFAULT_USER_PHOTO = "image/defaultAvatar.png"; - @Autowired - public UserBinaryDataServiceImpl(@Qualifier("userDataStoreService") DataStoreService dataStoreService) { - this.dataStoreService = dataStoreService; - } + @Autowired + public UserBinaryDataServiceImpl( + @Qualifier("userDataStoreService") DataStoreService dataStoreService, + FeatureFlagHandler featureFlagHandler) { + this.dataStoreService = dataStoreService; + this.featureFlagHandler = featureFlagHandler; + } - @Override - public void saveUserPhoto(User user, MultipartFile file) { - try { - saveUserPhoto(user, file.getInputStream(), file.getContentType()); - } catch (IOException e) { - LOGGER.error("Unable to save user photo", e); - throw new ReportPortalException(ErrorType.BINARY_DATA_CANNOT_BE_SAVED, e); - } - } + @Override + public void saveUserPhoto(User user, MultipartFile file) { + try { + saveUserPhoto(user, file.getInputStream(), file.getContentType()); + } catch (IOException e) { + LOGGER.error("Unable to save user photo", e); + throw new ReportPortalException(ErrorType.BINARY_DATA_CANNOT_BE_SAVED, e); + } + } - @Override - public void saveUserPhoto(User user, BinaryData binaryData) { - saveUserPhoto(user, binaryData.getInputStream(), binaryData.getContentType()); - } + @Override + public void saveUserPhoto(User user, BinaryData binaryData) { + saveUserPhoto(user, binaryData.getInputStream(), binaryData.getContentType()); + } - @Override - public void saveUserPhoto(User user, InputStream inputStream, String contentType) { - try { - byte[] data = StreamUtils.copyToByteArray(inputStream); - try (InputStream userPhotoCopy = new ByteArrayInputStream(data); InputStream thumbnailCopy = new ByteArrayInputStream(data)) { - user.setAttachment(dataStoreService.save(Paths.get(ROOT_USER_PHOTO_DIR, user.getLogin()).toString(), userPhotoCopy)); - user.setAttachmentThumbnail(dataStoreService.saveThumbnail(buildThumbnailFileName(ROOT_USER_PHOTO_DIR, user.getLogin()), - thumbnailCopy - )); - } - ofNullable(user.getMetadata()).orElseGet(() -> new Metadata(Maps.newHashMap())) - .getMetadata() - .put(ATTACHMENT_CONTENT_TYPE, contentType); - } catch (IOException e) { - LOGGER.error("Unable to save user photo", e); - } - } + @Override + public void saveUserPhoto(User user, InputStream inputStream, String contentType) { + try { + byte[] data = StreamUtils.copyToByteArray(inputStream); + try (InputStream userPhotoCopy = new ByteArrayInputStream(data); + InputStream thumbnailCopy = new ByteArrayInputStream(data)) { + if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { + user.setAttachment(dataStoreService.save( + Paths.get(USER_DATA_PATH, PHOTOS_PATH, user.getLogin()).toString(), userPhotoCopy)); + user.setAttachmentThumbnail(dataStoreService.saveThumbnail( + buildThumbnailFileName(Paths.get(USER_DATA_PATH, PHOTOS_PATH).toString(), + user.getLogin() + ), thumbnailCopy)); + } else { + user.setAttachment( + dataStoreService.save(Paths.get(ROOT_USER_PHOTO_DIR, user.getLogin()).toString(), + userPhotoCopy + )); + user.setAttachmentThumbnail(dataStoreService.saveThumbnail( + buildThumbnailFileName(ROOT_USER_PHOTO_DIR, user.getLogin()), 
thumbnailCopy)); + } + } + ofNullable(user.getMetadata()).orElseGet(() -> new Metadata(Maps.newHashMap())).getMetadata() + .put(ATTACHMENT_CONTENT_TYPE, contentType); + } catch (IOException e) { + LOGGER.error("Unable to save user photo", e); + } + } - @Override - public BinaryData loadUserPhoto(User user, boolean loadThumbnail) { - Optional fileId = ofNullable(loadThumbnail ? user.getAttachmentThumbnail() : user.getAttachment()); - InputStream data; - String contentType; - try { - if (fileId.isPresent()) { - contentType = (String) user.getMetadata().getMetadata().get(ATTACHMENT_CONTENT_TYPE); - data = dataStoreService.load(fileId.get()) - .orElseThrow(() -> new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, fileId.get())); - } else { - data = new ClassPathResource(DEFAULT_USER_PHOTO).getInputStream(); - contentType = MimeTypeUtils.IMAGE_JPEG_VALUE; - } - return new BinaryData(contentType, (long) data.available(), data); - } catch (IOException e) { - LOGGER.error("Unable to load user photo", e); - throw new ReportPortalException(ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR, "Unable to load user photo"); - } - } + @Override + public BinaryData loadUserPhoto(User user, boolean loadThumbnail) { + Optional fileId = + ofNullable(loadThumbnail ? user.getAttachmentThumbnail() : user.getAttachment()); + InputStream data; + String contentType; + try { + if (fileId.isPresent()) { + contentType = (String) user.getMetadata().getMetadata().get(ATTACHMENT_CONTENT_TYPE); + data = dataStoreService.load(fileId.get()).orElseThrow( + () -> new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, fileId.get())); + } else { + data = new ClassPathResource(DEFAULT_USER_PHOTO).getInputStream(); + contentType = MimeTypeUtils.IMAGE_JPEG_VALUE; + } + return new BinaryData(contentType, (long) data.available(), data); + } catch (IOException e) { + LOGGER.error("Unable to load user photo", e); + throw new ReportPortalException( + ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR, "Unable to load user photo"); + } + } - @Override - public void deleteUserPhoto(User user) { - ofNullable(user.getAttachment()).ifPresent(fileId -> { - dataStoreService.delete(fileId); - user.setAttachment(null); - Optional.ofNullable(user.getAttachmentThumbnail()).ifPresent(thumbnailId -> { - dataStoreService.delete(thumbnailId); - user.setAttachmentThumbnail(null); - }); - ofNullable(user.getMetadata()).ifPresent(metadata -> metadata.getMetadata().remove(ATTACHMENT_CONTENT_TYPE)); - }); - } + @Override + public void deleteUserPhoto(User user) { + ofNullable(user.getAttachment()).ifPresent(fileId -> { + dataStoreService.delete(fileId); + user.setAttachment(null); + Optional.ofNullable(user.getAttachmentThumbnail()).ifPresent(thumbnailId -> { + dataStoreService.delete(thumbnailId); + user.setAttachmentThumbnail(null); + }); + ofNullable(user.getMetadata()).ifPresent( + metadata -> metadata.getMetadata().remove(ATTACHMENT_CONTENT_TYPE)); + }); + } } diff --git a/src/main/java/com/epam/ta/reportportal/config/DataStoreConfiguration.java b/src/main/java/com/epam/ta/reportportal/config/DataStoreConfiguration.java index e35f9a9eb..fd44e297e 100644 --- a/src/main/java/com/epam/ta/reportportal/config/DataStoreConfiguration.java +++ b/src/main/java/com/epam/ta/reportportal/config/DataStoreConfiguration.java @@ -23,12 +23,14 @@ import com.epam.ta.reportportal.filesystem.DataStore; import com.epam.ta.reportportal.filesystem.LocalDataStore; import com.epam.ta.reportportal.filesystem.distributed.s3.S3DataStore; +import 
com.epam.ta.reportportal.util.FeatureFlagHandler; import com.google.common.base.Optional; import com.google.common.base.Supplier; import com.google.common.cache.CacheLoader; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.inject.Module; +import java.util.Set; import org.jclouds.ContextBuilder; import org.jclouds.aws.s3.config.AWSS3HttpApiModule; import org.jclouds.blobstore.BlobStore; @@ -42,156 +44,191 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.Set; - /** * @author Dzianis_Shybeka */ @Configuration public class DataStoreConfiguration { - /** - * Amazon has a general work flow they publish that allows clients to always find the correct URL endpoint for a given bucket: - * 1) ask s3.amazonaws.com for the bucket location - * 2) use the url returned to make the container specific request (get/put, etc) - * Jclouds cache the results from the first getBucketLocation call and use that region-specific URL, as needed. - * In this custom implementation of {@link AWSS3HttpApiModule} we are providing location from environment variable, so that - * we don't need to make getBucketLocation call - */ - @ConfiguresHttpApi - private static class CustomBucketToRegionModule extends AWSS3HttpApiModule { - private final String region; - - public CustomBucketToRegionModule(String region) { - this.region = region; - } - - @Override - @SuppressWarnings("Guava") - protected CacheLoader> bucketToRegion(Supplier> regionSupplier, S3Client client) { - Set regions = regionSupplier.get(); - if (regions.isEmpty()) { - return new CacheLoader<>() { - - @Override - @SuppressWarnings({ "Guava", "NullableProblems" }) - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - return Optional.absent(); - } - - @Override - public String toString() { - return "noRegions()"; - } - }; - } else if (regions.size() == 1) { - final String onlyRegion = Iterables.getOnlyElement(regions); - return new CacheLoader<>() { - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") - final Optional onlyRegionOption = Optional.of(onlyRegion); - - @Override - @SuppressWarnings("NullableProblems") - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - return onlyRegionOption; - } - - @Override - public String toString() { - return "onlyRegion(" + onlyRegion + ")"; - } - }; - } else { - return new CacheLoader<>() { - @Override - @SuppressWarnings("NullableProblems") - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - try { - return Optional.fromNullable(client.getBucketLocation(bucket)); - } catch (ContainerNotFoundException e) { - return Optional.absent(); - } - } - - @Override - public String toString() { - return "bucketToRegion()"; - } - }; - } - } - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "filesystem") - public DataStore localDataStore(@Value("${datastore.default.path:/data/store}") String storagePath) { - return new LocalDataStore(storagePath); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") - public BlobStore minioBlobStore(@Value("${datastore.minio.accessKey}") String accessKey, - 
@Value("${datastore.minio.secretKey}") String secretKey, @Value("${datastore.minio.endpoint}") String endpoint) { - - BlobStoreContext blobStoreContext = ContextBuilder.newBuilder("s3") - .endpoint(endpoint) - .credentials(accessKey, secretKey) - .buildView(BlobStoreContext.class); - - return blobStoreContext.getBlobStore(); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") - public DataStore minioDataStore(@Autowired BlobStore blobStore, @Value("${datastore.minio.bucketPrefix}") String bucketPrefix, - @Value("${datastore.minio.defaultBucketName}") String defaultBucketName, @Value("${datastore.minio.region}") String region) { - return new S3DataStore(blobStore, bucketPrefix, defaultBucketName, region); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") - public BlobStore s3BlobStore(@Value("${datastore.s3.accessKey}") String accessKey, @Value("${datastore.s3.secretKey}") String secretKey, - @Value("${datastore.s3.region}") String region) { - Iterable modules = ImmutableSet.of(new CustomBucketToRegionModule(region)); - - BlobStoreContext blobStoreContext = ContextBuilder.newBuilder("aws-s3") - .modules(modules) - .credentials(accessKey, secretKey) - .buildView(BlobStoreContext.class); - - return blobStoreContext.getBlobStore(); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") - public DataStore s3DataStore(@Autowired BlobStore blobStore, @Value("${datastore.s3.bucketPrefix}") String bucketPrefix, - @Value("${datastore.s3.defaultBucketName}") String defaultBucketName, @Value("${datastore.s3.region}") String region) { - return new S3DataStore(blobStore, bucketPrefix, defaultBucketName, region); - } - - @Bean("attachmentThumbnailator") - public Thumbnailator attachmentThumbnailator(@Value("${datastore.thumbnail.attachment.width}") int width, - @Value("${datastore.thumbnail.attachment.height}") int height) { - return new ThumbnailatorImpl(width, height); - } - - @Bean("userPhotoThumbnailator") - public Thumbnailator userPhotoThumbnailator(@Value("${datastore.thumbnail.avatar.width}") int width, - @Value("${datastore.thumbnail.avatar.height}") int height) { - return new ThumbnailatorImpl(width, height); - } - - @Bean - public ContentTypeResolver contentTypeResolver() { - return new TikaContentTypeResolver(); - } + /** + * Amazon has a general work flow they publish that allows clients to always find the correct URL + * endpoint for a given bucket: + * 1) ask s3.amazonaws.com for the bucket location + * 2) use the url returned to make the container specific request (get/put, etc) + * Jclouds cache the results from the first getBucketLocation call and use that region-specific + * URL, as needed. 
+ * In this custom implementation of {@link AWSS3HttpApiModule} we are providing location + * from environment variable, so that + * we don't need to make getBucketLocation call + */ + @ConfiguresHttpApi + private static class CustomBucketToRegionModule extends AWSS3HttpApiModule { + private final String region; + + public CustomBucketToRegionModule(String region) { + this.region = region; + } + + @Override + @SuppressWarnings("Guava") + protected CacheLoader> bucketToRegion( + Supplier> regionSupplier, S3Client client) { + Set regions = regionSupplier.get(); + if (regions.isEmpty()) { + return new CacheLoader<>() { + + @Override + @SuppressWarnings({ "Guava", "NullableProblems" }) + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + return Optional.absent(); + } + + @Override + public String toString() { + return "noRegions()"; + } + }; + } else if (regions.size() == 1) { + final String onlyRegion = Iterables.getOnlyElement(regions); + return new CacheLoader<>() { + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + final Optional onlyRegionOption = Optional.of(onlyRegion); + + @Override + @SuppressWarnings("NullableProblems") + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + return onlyRegionOption; + } + + @Override + public String toString() { + return "onlyRegion(" + onlyRegion + ")"; + } + }; + } else { + return new CacheLoader<>() { + @Override + @SuppressWarnings("NullableProblems") + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + try { + return Optional.fromNullable(client.getBucketLocation(bucket)); + } catch (ContainerNotFoundException e) { + return Optional.absent(); + } + } + + @Override + public String toString() { + return "bucketToRegion()"; + } + }; + } + } + } + + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "filesystem") + public DataStore localDataStore( + @Value("${datastore.path:/data/store}") String storagePath) { + return new LocalDataStore(storagePath); + } + + /** + * Creates BlobStore bean, that works with MinIO. + * + * @param accessKey accessKey to use + * @param secretKey secretKey to use + * @param endpoint MinIO endpoint + * @return {@link BlobStore} + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") + public BlobStore minioBlobStore(@Value("${datastore.accessKey}") String accessKey, + @Value("${datastore.secretKey}") String secretKey, + @Value("${datastore.endpoint}") String endpoint) { + + BlobStoreContext blobStoreContext = + ContextBuilder.newBuilder("s3").endpoint(endpoint).credentials(accessKey, secretKey) + .buildView(BlobStoreContext.class); + + return blobStoreContext.getBlobStore(); + } + + /** + * Creates DataStore bean to work with MinIO. 
+ * + * @param blobStore {@link BlobStore} object + * @param bucketPrefix Prefix for bucket name + * @param defaultBucketName Name of default bucket to use + * @param region Region to use + * @param featureFlagHandler Instance of {@link FeatureFlagHandler} to check enabled features + * @return {@link DataStore} object + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") + public DataStore minioDataStore(@Autowired BlobStore blobStore, + @Value("${datastore.bucketPrefix}") String bucketPrefix, + @Value("${datastore.defaultBucketName}") String defaultBucketName, + @Value("${datastore.region}") String region, FeatureFlagHandler featureFlagHandler) { + return new S3DataStore(blobStore, bucketPrefix, defaultBucketName, region, featureFlagHandler); + } + + /** + * Creates a BlobStore bean that works with AWS S3. + * + * @param accessKey accessKey to use + * @param secretKey secretKey to use + * @param region AWS S3 region to use. + * @return {@link BlobStore} + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") + public BlobStore s3BlobStore(@Value("${datastore.accessKey}") String accessKey, + @Value("${datastore.secretKey}") String secretKey, + @Value("${datastore.region}") String region) { + Iterable<Module> modules = ImmutableSet.of(new CustomBucketToRegionModule(region)); + + BlobStoreContext blobStoreContext = + ContextBuilder.newBuilder("aws-s3").modules(modules).credentials(accessKey, secretKey) + .buildView(BlobStoreContext.class); + + return blobStoreContext.getBlobStore(); + } + + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") + public DataStore s3DataStore(@Autowired BlobStore blobStore, + @Value("${datastore.bucketPrefix}") String bucketPrefix, + @Value("${datastore.defaultBucketName}") String defaultBucketName, + @Value("${datastore.region}") String region, FeatureFlagHandler featureFlagHandler) { + return new S3DataStore(blobStore, bucketPrefix, defaultBucketName, region, featureFlagHandler); + } + + @Bean("attachmentThumbnailator") + public Thumbnailator attachmentThumbnailator( + @Value("${datastore.thumbnail.attachment.width}") int width, + @Value("${datastore.thumbnail.attachment.height}") int height) { + return new ThumbnailatorImpl(width, height); + } + + @Bean("userPhotoThumbnailator") + public Thumbnailator userPhotoThumbnailator( + @Value("${datastore.thumbnail.avatar.width}") int width, + @Value("${datastore.thumbnail.avatar.height}") int height) { + return new ThumbnailatorImpl(width, height); + } + + @Bean + public ContentTypeResolver contentTypeResolver() { + return new TikaContentTypeResolver(); + } } \ No newline at end of file diff --git a/src/main/java/com/epam/ta/reportportal/config/EncryptConfiguration.java b/src/main/java/com/epam/ta/reportportal/config/EncryptConfiguration.java index 261759fef..bddc864c7 100644 --- a/src/main/java/com/epam/ta/reportportal/config/EncryptConfiguration.java +++ b/src/main/java/com/epam/ta/reportportal/config/EncryptConfiguration.java @@ -16,8 +16,20 @@ package com.epam.ta.reportportal.config; +import static com.epam.ta.reportportal.binary.impl.DataStoreUtils.INTEGRATION_SECRETS_PATH; + +import com.epam.ta.reportportal.entity.enums.FeatureFlag; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.filesystem.DataStore; +import com.epam.ta.reportportal.util.FeatureFlagHandler; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import 
java.nio.charset.StandardCharsets; +import java.nio.file.Paths; +import java.security.SecureRandom; +import java.util.Base64; import org.apache.commons.io.IOUtils; import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; import org.jasypt.util.text.BasicTextEncryptor; @@ -29,14 +41,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.security.SecureRandom; -import java.util.Base64; - /** * Encrypt beans configuration for password values * @@ -45,62 +49,81 @@ @Configuration public class EncryptConfiguration implements InitializingBean { - private static final Logger LOGGER = LoggerFactory.getLogger(EncryptConfiguration.class); - - @Value("${rp.integration.salt.path:keystore}") - private String integrationSaltPath; - - @Value("${rp.integration.salt.file:secret-integration-salt}") - private String integrationSaltFile; - - @Value("${rp.integration.salt.migration:migration}") - private String migrationFile; - - private String secretFilePath; - private String migrationFilePath; - - private DataStore dataStore; - - @Autowired - public EncryptConfiguration(DataStore dataStore) { - this.dataStore = dataStore; - } - - @Bean(name = "basicEncryptor") - public BasicTextEncryptor getBasicEncrypt() throws IOException { - BasicTextEncryptor basic = new BasicTextEncryptor(); - basic.setPassword(IOUtils.toString(dataStore.load(secretFilePath), StandardCharsets.UTF_8)); - return basic; - } - - @Bean(name = "strongEncryptor") - public StandardPBEStringEncryptor getStrongEncryptor() throws IOException { - StandardPBEStringEncryptor strong = new StandardPBEStringEncryptor(); - strong.setPassword(IOUtils.toString(dataStore.load(secretFilePath), StandardCharsets.UTF_8)); - strong.setAlgorithm("PBEWithMD5AndTripleDES"); - return strong; - } - - @Override - public void afterPropertiesSet() throws Exception { - secretFilePath = integrationSaltPath + File.separator + integrationSaltFile; - migrationFilePath = integrationSaltPath + File.separator + migrationFile; - loadOrGenerateIntegrationSalt(dataStore); - } - - private void loadOrGenerateIntegrationSalt(DataStore dataStore) { - try { - dataStore.load(secretFilePath); - } catch (ReportPortalException ex) { - byte[] bytes = new byte[20]; - new SecureRandom().nextBytes(bytes); - try (InputStream secret = new ByteArrayInputStream(Base64.getUrlEncoder().withoutPadding().encode(bytes)); - InputStream empty = new ByteArrayInputStream(new byte[1])) { - dataStore.save(secretFilePath, secret); - dataStore.save(migrationFilePath, empty); - } catch (IOException ioEx) { - LOGGER.error("Unable to generate secret file", ioEx); - } - } - } + private static final Logger LOGGER = LoggerFactory.getLogger(EncryptConfiguration.class); + + @Value("${rp.integration.salt.path:keystore}") + private String integrationSaltPath; + + @Value("${rp.integration.salt.file:secret-integration-salt}") + private String integrationSaltFile; + + @Value("${rp.integration.salt.migration:migration}") + private String migrationFile; + + private String secretFilePath; + private String migrationFilePath; + + private final DataStore dataStore; + + private final FeatureFlagHandler featureFlagHandler; + + @Autowired + public EncryptConfiguration(DataStore dataStore, FeatureFlagHandler featureFlagHandler) { + this.dataStore = dataStore; + this.featureFlagHandler = 
featureFlagHandler; + } + + /** + * Creates bean of {@link BasicTextEncryptor} for encrypting purposes. + * + * @return {@link BasicTextEncryptor} instance + */ + @Bean(name = "basicEncryptor") + public BasicTextEncryptor getBasicEncrypt() throws IOException { + BasicTextEncryptor basic = new BasicTextEncryptor(); + basic.setPassword(IOUtils.toString(dataStore.load(secretFilePath), StandardCharsets.UTF_8)); + return basic; + } + + /** + * Creates bean of {@link StandardPBEStringEncryptor} for encrypting purposes. + * + * @return {@link StandardPBEStringEncryptor} instance + */ + @Bean(name = "strongEncryptor") + public StandardPBEStringEncryptor getStrongEncryptor() throws IOException { + StandardPBEStringEncryptor strong = new StandardPBEStringEncryptor(); + strong.setPassword(IOUtils.toString(dataStore.load(secretFilePath), StandardCharsets.UTF_8)); + strong.setAlgorithm("PBEWithMD5AndTripleDES"); + return strong; + } + + @Override + public void afterPropertiesSet() throws Exception { + if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { + secretFilePath = Paths.get(INTEGRATION_SECRETS_PATH, integrationSaltFile).toString(); + migrationFilePath = Paths.get(INTEGRATION_SECRETS_PATH, migrationFile).toString(); + } else { + secretFilePath = integrationSaltPath + File.separator + integrationSaltFile; + migrationFilePath = integrationSaltPath + File.separator + migrationFile; + } + loadOrGenerateIntegrationSalt(dataStore); + } + + private void loadOrGenerateIntegrationSalt(DataStore dataStore) { + try { + dataStore.load(secretFilePath); + } catch (ReportPortalException ex) { + byte[] bytes = new byte[20]; + new SecureRandom().nextBytes(bytes); + try (InputStream secret = new ByteArrayInputStream( + Base64.getUrlEncoder().withoutPadding().encode(bytes)); + InputStream empty = new ByteArrayInputStream(new byte[1])) { + dataStore.save(secretFilePath, secret); + dataStore.save(migrationFilePath, empty); + } catch (IOException ioEx) { + LOGGER.error("Unable to generate secret file", ioEx); + } + } + } } \ No newline at end of file diff --git a/src/main/java/com/epam/ta/reportportal/dao/ApiKeyRepository.java b/src/main/java/com/epam/ta/reportportal/dao/ApiKeyRepository.java new file mode 100644 index 000000000..a9e5961cd --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/dao/ApiKeyRepository.java @@ -0,0 +1,50 @@ +/* + * Copyright 2023 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.epam.ta.reportportal.dao; + +import com.epam.ta.reportportal.entity.user.ApiKey; +import java.util.List; + +/** + * Repository for {@link ApiKey} entities. + * + * @author Andrei Piankouski + */ +public interface ApiKeyRepository extends ReportPortalRepository<ApiKey, Long> { + + /** + * Finds an API key by its hash. + * @param hash hash of the API key + * @return {@link ApiKey} + */ + ApiKey findByHash(String hash); + + /** + * Checks whether an API key with the given name exists for the user. + * @param name name of the user's API key + * @param userId {@link com.epam.ta.reportportal.entity.user.User#id} + * @return 'true' if such a key exists, otherwise 'false' + */ + boolean existsByNameAndUserId(String name, Long userId); + + /** + * Finds all API keys that belong to the user. + * @param userId {@link com.epam.ta.reportportal.entity.user.User#id} + * @return list of the user's API keys + */ + List<ApiKey> findByUserId(Long userId); +} diff --git a/src/main/java/com/epam/ta/reportportal/entity/enums/FeatureFlag.java b/src/main/java/com/epam/ta/reportportal/entity/enums/FeatureFlag.java new file mode 100644 index 000000000..8feebfc48 --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/entity/enums/FeatureFlag.java @@ -0,0 +1,35 @@ +package com.epam.ta.reportportal.entity.enums; + +import java.util.Arrays; +import java.util.Optional; + +/** + * Enumeration of current feature flags. + * + * @author Ivan Kustau + */ +public enum FeatureFlag { + SINGLE_BUCKET("singleBucket"); + + private final String name; + + FeatureFlag(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + /** + * Returns {@link Optional} of {@link FeatureFlag} by string. + * + * @param name Name of feature flag + * @return {@link Optional} of {@link FeatureFlag} by string + */ + public static Optional<FeatureFlag> fromString(String name) { + return Optional.ofNullable(name).flatMap( + str -> Arrays.stream(values()).filter(it -> it.name.equalsIgnoreCase(str)).findAny()); + + } +} diff --git a/src/main/java/com/epam/ta/reportportal/entity/user/ApiKey.java b/src/main/java/com/epam/ta/reportportal/entity/user/ApiKey.java new file mode 100644 index 000000000..f47a6e5b6 --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/entity/user/ApiKey.java @@ -0,0 +1,95 @@ +/* + * Copyright 2022 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.epam.ta.reportportal.entity.user; + +import java.time.LocalDateTime; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +/** + * Representation of the api_keys table. 
+ * + * @author Andrei Piankouski + */ +@Entity +@Table(name = "api_keys", schema = "public") +public class ApiKey { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "id") + private Long id; + + @Column(name = "name") + private String name; + + @Column(name = "hash") + private String hash; + + @Column(name = "created_at") + private LocalDateTime createdAt; + + @Column(name = "user_id") + private Long userId; + + public ApiKey() { + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getHash() { + return hash; + } + + public void setHash(String hash) { + this.hash = hash; + } + + public LocalDateTime getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + + public Long getUserId() { + return userId; + } + + public void setUserId(Long userId) { + this.userId = userId; + } +} diff --git a/src/main/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStore.java b/src/main/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStore.java index 803ecf531..e2601b95b 100644 --- a/src/main/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStore.java +++ b/src/main/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStore.java @@ -16,9 +16,17 @@ package com.epam.ta.reportportal.filesystem.distributed.s3; +import com.epam.ta.reportportal.entity.enums.FeatureFlag; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.filesystem.DataStore; +import com.epam.ta.reportportal.util.FeatureFlagHandler; import com.epam.ta.reportportal.ws.model.ErrorType; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import org.jclouds.blobstore.BlobStore; import org.jclouds.blobstore.domain.Blob; import org.jclouds.domain.Location; @@ -27,107 +35,118 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - /** * @author Ivan Budayeu */ public class S3DataStore implements DataStore { - private static final Logger LOGGER = LoggerFactory.getLogger(S3DataStore.class); - private static final Lock CREATE_BUCKET_LOCK = new ReentrantLock(); + private static final Logger LOGGER = LoggerFactory.getLogger(S3DataStore.class); + private static final Lock CREATE_BUCKET_LOCK = new ReentrantLock(); + + private final BlobStore blobStore; + private final String bucketPrefix; + private final String defaultBucketName; + private final Location location; - private final BlobStore blobStore; - private final String bucketPrefix; - private final String defaultBucketName; - private final Location location; + private final FeatureFlagHandler featureFlagHandler; - public S3DataStore(BlobStore blobStore, String bucketPrefix, String defaultBucketName, String region) { - this.blobStore = blobStore; - this.bucketPrefix = bucketPrefix; - this.defaultBucketName = defaultBucketName; - this.location = getLocationFromString(region); - } + /** + * Initialises {@link S3DataStore}. 
+ * + * @param blobStore {@link BlobStore} + * @param bucketPrefix Prefix for bucket name + * @param defaultBucketName Name of default bucket to use + * @param region Region to use + * @param featureFlagHandler {@link FeatureFlagHandler} + */ + public S3DataStore(BlobStore blobStore, String bucketPrefix, String defaultBucketName, + String region, FeatureFlagHandler featureFlagHandler) { + this.blobStore = blobStore; + this.bucketPrefix = bucketPrefix; + this.defaultBucketName = defaultBucketName; + this.location = getLocationFromString(region); + this.featureFlagHandler = featureFlagHandler; + } - @Override - public String save(String filePath, InputStream inputStream) { - S3File s3File = getS3File(filePath); - try { - if (!blobStore.containerExists(s3File.getBucket())) { - CREATE_BUCKET_LOCK.lock(); - try { - if (!blobStore.containerExists(s3File.getBucket())) { - blobStore.createContainerInLocation(location, s3File.getBucket()); - } - } finally { - CREATE_BUCKET_LOCK.unlock(); - } - } + @Override + public String save(String filePath, InputStream inputStream) { + S3File s3File = getS3File(filePath); + try { + if (!blobStore.containerExists(s3File.getBucket())) { + CREATE_BUCKET_LOCK.lock(); + try { + if (!blobStore.containerExists(s3File.getBucket())) { + blobStore.createContainerInLocation(location, s3File.getBucket()); + } + } finally { + CREATE_BUCKET_LOCK.unlock(); + } + } - Blob objectBlob = blobStore.blobBuilder(s3File.getFilePath()) - .payload(inputStream) - .contentDisposition(s3File.getFilePath()) - .contentLength(inputStream.available()) - .build(); - blobStore.putBlob(s3File.getBucket(), objectBlob); - return Paths.get(filePath).toString(); - } catch (IOException e) { - LOGGER.error("Unable to save file '{}'", filePath, e); - throw new ReportPortalException(ErrorType.INCORRECT_REQUEST, "Unable to save file"); - } - } + Blob objectBlob = blobStore.blobBuilder(s3File.getFilePath()).payload(inputStream) + .contentDisposition(s3File.getFilePath()).contentLength(inputStream.available()).build(); + blobStore.putBlob(s3File.getBucket(), objectBlob); + return Paths.get(filePath).toString(); + } catch (IOException e) { + LOGGER.error("Unable to save file '{}'", filePath, e); + throw new ReportPortalException(ErrorType.INCORRECT_REQUEST, "Unable to save file"); + } + } - @Override - public InputStream load(String filePath) { - S3File s3File = getS3File(filePath); - try { - return blobStore.getBlob(s3File.getBucket(), s3File.getFilePath()).getPayload().openStream(); - } catch (Exception e) { - LOGGER.error("Unable to find file '{}'", filePath, e); - throw new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, "Unable to find file"); - } - } + @Override + public InputStream load(String filePath) { + S3File s3File = getS3File(filePath); + try { + Blob fileBlob = blobStore.getBlob(s3File.getBucket(), s3File.getFilePath()); + if (fileBlob != null) { + return fileBlob.getPayload().openStream(); + } else { + throw new Exception(); + } + } catch (Exception e) { + LOGGER.error("Unable to find file '{}'", filePath, e); + throw new ReportPortalException(ErrorType.UNABLE_TO_LOAD_BINARY_DATA, "Unable to find file"); + } + } - @Override - public void delete(String filePath) { - S3File s3File = getS3File(filePath); - try { - blobStore.removeBlob(s3File.getBucket(), s3File.getFilePath()); - } catch (Exception e) { - LOGGER.error("Unable to delete file '{}'", filePath, e); - throw new ReportPortalException(ErrorType.INCORRECT_REQUEST, "Unable to delete file"); - } - } + @Override + public void 
delete(String filePath) { + S3File s3File = getS3File(filePath); + try { + blobStore.removeBlob(s3File.getBucket(), s3File.getFilePath()); + } catch (Exception e) { + LOGGER.error("Unable to delete file '{}'", filePath, e); + throw new ReportPortalException(ErrorType.INCORRECT_REQUEST, "Unable to delete file"); + } + } - private S3File getS3File(String filePath) { - Path targetPath = Paths.get(filePath); - int nameCount = targetPath.getNameCount(); - if (nameCount > 1) { - return new S3File(bucketPrefix + retrievePath(targetPath, 0, 1), retrievePath(targetPath, 1, nameCount)); - } else { - return new S3File(defaultBucketName, retrievePath(targetPath, 0, 1)); - } + private S3File getS3File(String filePath) { + if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { + return new S3File(defaultBucketName, filePath); + } + Path targetPath = Paths.get(filePath); + int nameCount = targetPath.getNameCount(); + if (nameCount > 1) { + return new S3File(bucketPrefix + retrievePath(targetPath, 0, 1), + retrievePath(targetPath, 1, nameCount) + ); + } else { + return new S3File(defaultBucketName, retrievePath(targetPath, 0, 1)); + } - } + } - private Location getLocationFromString(String locationString) { - Location location = null; - if (locationString != null) { - location = new LocationBuilder() - .scope(LocationScope.REGION) - .id(locationString) - .description("region") - .build(); - } - return location; - } + private Location getLocationFromString(String locationString) { + Location location = null; + if (locationString != null) { + location = + new LocationBuilder().scope(LocationScope.REGION).id(locationString).description("region") + .build(); + } + return location; + } - private String retrievePath(Path path, int beginIndex, int endIndex) { - return String.valueOf(path.subpath(beginIndex, endIndex)); - } -} + private String retrievePath(Path path, int beginIndex, int endIndex) { + return String.valueOf(path.subpath(beginIndex, endIndex)); + } +} \ No newline at end of file diff --git a/src/main/java/com/epam/ta/reportportal/jooq/Indexes.java b/src/main/java/com/epam/ta/reportportal/jooq/Indexes.java index c953ff267..dd9e5779c 100755 --- a/src/main/java/com/epam/ta/reportportal/jooq/Indexes.java +++ b/src/main/java/com/epam/ta/reportportal/jooq/Indexes.java @@ -5,6 +5,7 @@ import com.epam.ta.reportportal.jooq.tables.JActivity; +import com.epam.ta.reportportal.jooq.tables.JApiKeys; import com.epam.ta.reportportal.jooq.tables.JAttachment; import com.epam.ta.reportportal.jooq.tables.JAttachmentDeletion; import com.epam.ta.reportportal.jooq.tables.JAttribute; @@ -86,6 +87,9 @@ public class Indexes { public static final Index ACTIVITY_OBJECT_IDX = Indexes0.ACTIVITY_OBJECT_IDX; public static final Index ACTIVITY_PK = Indexes0.ACTIVITY_PK; public static final Index ACTIVITY_PROJECT_IDX = Indexes0.ACTIVITY_PROJECT_IDX; + public static final Index API_KEYS_PKEY = Indexes0.API_KEYS_PKEY; + public static final Index HASH_API_KEYS_IDX = Indexes0.HASH_API_KEYS_IDX; + public static final Index USERS_API_KEYS_UNIQUE = Indexes0.USERS_API_KEYS_UNIQUE; public static final Index ATT_ITEM_IDX = Indexes0.ATT_ITEM_IDX; public static final Index ATT_LAUNCH_IDX = Indexes0.ATT_LAUNCH_IDX; public static final Index ATT_PROJECT_IDX = Indexes0.ATT_PROJECT_IDX; @@ -219,6 +223,9 @@ private static class Indexes0 { public static Index ACTIVITY_OBJECT_IDX = Internal.createIndex("activity_object_idx", JActivity.ACTIVITY, new OrderField[] { JActivity.ACTIVITY.OBJECT_ID }, false); public static Index ACTIVITY_PK = 
Internal.createIndex("activity_pk", JActivity.ACTIVITY, new OrderField[] { JActivity.ACTIVITY.ID }, true); public static Index ACTIVITY_PROJECT_IDX = Internal.createIndex("activity_project_idx", JActivity.ACTIVITY, new OrderField[] { JActivity.ACTIVITY.PROJECT_ID }, false); + public static Index API_KEYS_PKEY = Internal.createIndex("api_keys_pkey", JApiKeys.API_KEYS, new OrderField[] { JApiKeys.API_KEYS.ID }, true); + public static Index HASH_API_KEYS_IDX = Internal.createIndex("hash_api_keys_idx", JApiKeys.API_KEYS, new OrderField[] { JApiKeys.API_KEYS.HASH }, false); + public static Index USERS_API_KEYS_UNIQUE = Internal.createIndex("users_api_keys_unique", JApiKeys.API_KEYS, new OrderField[] { JApiKeys.API_KEYS.NAME, JApiKeys.API_KEYS.USER_ID }, true); public static Index ATT_ITEM_IDX = Internal.createIndex("att_item_idx", JAttachment.ATTACHMENT, new OrderField[] { JAttachment.ATTACHMENT.ITEM_ID }, false); public static Index ATT_LAUNCH_IDX = Internal.createIndex("att_launch_idx", JAttachment.ATTACHMENT, new OrderField[] { JAttachment.ATTACHMENT.LAUNCH_ID }, false); public static Index ATT_PROJECT_IDX = Internal.createIndex("att_project_idx", JAttachment.ATTACHMENT, new OrderField[] { JAttachment.ATTACHMENT.PROJECT_ID }, false); diff --git a/src/main/java/com/epam/ta/reportportal/jooq/JPublic.java b/src/main/java/com/epam/ta/reportportal/jooq/JPublic.java index be82a5183..077841828 100644 --- a/src/main/java/com/epam/ta/reportportal/jooq/JPublic.java +++ b/src/main/java/com/epam/ta/reportportal/jooq/JPublic.java @@ -5,6 +5,7 @@ import com.epam.ta.reportportal.jooq.tables.JActivity; +import com.epam.ta.reportportal.jooq.tables.JApiKeys; import com.epam.ta.reportportal.jooq.tables.JAttachment; import com.epam.ta.reportportal.jooq.tables.JAttachmentDeletion; import com.epam.ta.reportportal.jooq.tables.JAttribute; @@ -88,7 +89,7 @@ @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class JPublic extends SchemaImpl { - private static final long serialVersionUID = -1224964026; + private static final long serialVersionUID = -962528853; /** * The reference instance of public @@ -100,6 +101,11 @@ public class JPublic extends SchemaImpl { */ public final JActivity ACTIVITY = com.epam.ta.reportportal.jooq.tables.JActivity.ACTIVITY; + /** + * The table public.api_keys. + */ + public final JApiKeys API_KEYS = com.epam.ta.reportportal.jooq.tables.JApiKeys.API_KEYS; + /** * The table public.attachment. 
*/ @@ -409,6 +415,7 @@ public final List> getSequences() { private final List> getSequences0() { return Arrays.>asList( Sequences.ACTIVITY_ID_SEQ, + Sequences.API_KEYS_ID_SEQ, Sequences.ATTACHMENT_ID_SEQ, Sequences.ATTRIBUTE_ID_SEQ, Sequences.CLUSTERS_ID_SEQ, @@ -454,6 +461,7 @@ public final List> getTables() { private final List> getTables0() { return Arrays.>asList( JActivity.ACTIVITY, + JApiKeys.API_KEYS, JAttachment.ATTACHMENT, JAttachmentDeletion.ATTACHMENT_DELETION, JAttribute.ATTRIBUTE, diff --git a/src/main/java/com/epam/ta/reportportal/jooq/Keys.java b/src/main/java/com/epam/ta/reportportal/jooq/Keys.java index 40213342f..954cf3faf 100755 --- a/src/main/java/com/epam/ta/reportportal/jooq/Keys.java +++ b/src/main/java/com/epam/ta/reportportal/jooq/Keys.java @@ -5,6 +5,7 @@ import com.epam.ta.reportportal.jooq.tables.JActivity; +import com.epam.ta.reportportal.jooq.tables.JApiKeys; import com.epam.ta.reportportal.jooq.tables.JAttachment; import com.epam.ta.reportportal.jooq.tables.JAttachmentDeletion; import com.epam.ta.reportportal.jooq.tables.JAttribute; @@ -58,6 +59,7 @@ import com.epam.ta.reportportal.jooq.tables.JWidget; import com.epam.ta.reportportal.jooq.tables.JWidgetFilter; import com.epam.ta.reportportal.jooq.tables.records.JActivityRecord; +import com.epam.ta.reportportal.jooq.tables.records.JApiKeysRecord; import com.epam.ta.reportportal.jooq.tables.records.JAttachmentDeletionRecord; import com.epam.ta.reportportal.jooq.tables.records.JAttachmentRecord; import com.epam.ta.reportportal.jooq.tables.records.JAttributeRecord; @@ -138,6 +140,7 @@ public class Keys { // ------------------------------------------------------------------------- public static final Identity IDENTITY_ACTIVITY = Identities0.IDENTITY_ACTIVITY; + public static final Identity IDENTITY_API_KEYS = Identities0.IDENTITY_API_KEYS; public static final Identity IDENTITY_ATTACHMENT = Identities0.IDENTITY_ATTACHMENT; public static final Identity IDENTITY_ATTRIBUTE = Identities0.IDENTITY_ATTRIBUTE; public static final Identity IDENTITY_CLUSTERS = Identities0.IDENTITY_CLUSTERS; @@ -175,6 +178,8 @@ public class Keys { // ------------------------------------------------------------------------- public static final UniqueKey ACTIVITY_PK = UniqueKeys0.ACTIVITY_PK; + public static final UniqueKey API_KEYS_PKEY = UniqueKeys0.API_KEYS_PKEY; + public static final UniqueKey USERS_API_KEYS_UNIQUE = UniqueKeys0.USERS_API_KEYS_UNIQUE; public static final UniqueKey ATTACHMENT_PK = UniqueKeys0.ATTACHMENT_PK; public static final UniqueKey ATTACHMENT_DELETION_PKEY = UniqueKeys0.ATTACHMENT_DELETION_PKEY; public static final UniqueKey ATTRIBUTE_PK = UniqueKeys0.ATTRIBUTE_PK; @@ -253,6 +258,7 @@ public class Keys { public static final ForeignKey ACTIVITY__ACTIVITY_USER_ID_FKEY = ForeignKeys0.ACTIVITY__ACTIVITY_USER_ID_FKEY; public static final ForeignKey ACTIVITY__ACTIVITY_PROJECT_ID_FKEY = ForeignKeys0.ACTIVITY__ACTIVITY_PROJECT_ID_FKEY; + public static final ForeignKey API_KEYS__API_KEYS_USER_ID_FKEY = ForeignKeys0.API_KEYS__API_KEYS_USER_ID_FKEY; public static final ForeignKey CONTENT_FIELD__CONTENT_FIELD_ID_FKEY = ForeignKeys0.CONTENT_FIELD__CONTENT_FIELD_ID_FKEY; public static final ForeignKey DASHBOARD__DASHBOARD_ID_FK = ForeignKeys0.DASHBOARD__DASHBOARD_ID_FK; public static final ForeignKey DASHBOARD_WIDGET__DASHBOARD_WIDGET_DASHBOARD_ID_FKEY = ForeignKeys0.DASHBOARD_WIDGET__DASHBOARD_WIDGET_DASHBOARD_ID_FKEY; @@ -315,6 +321,7 @@ public class Keys { private static class Identities0 { public static Identity 
IDENTITY_ACTIVITY = Internal.createIdentity(JActivity.ACTIVITY, JActivity.ACTIVITY.ID); + public static Identity IDENTITY_API_KEYS = Internal.createIdentity(JApiKeys.API_KEYS, JApiKeys.API_KEYS.ID); public static Identity IDENTITY_ATTACHMENT = Internal.createIdentity(JAttachment.ATTACHMENT, JAttachment.ATTACHMENT.ID); public static Identity IDENTITY_ATTRIBUTE = Internal.createIdentity(JAttribute.ATTRIBUTE, JAttribute.ATTRIBUTE.ID); public static Identity IDENTITY_CLUSTERS = Internal.createIdentity(JClusters.CLUSTERS, JClusters.CLUSTERS.ID); @@ -350,6 +357,8 @@ private static class Identities0 { private static class UniqueKeys0 { public static final UniqueKey ACTIVITY_PK = Internal.createUniqueKey(JActivity.ACTIVITY, "activity_pk", JActivity.ACTIVITY.ID); + public static final UniqueKey API_KEYS_PKEY = Internal.createUniqueKey(JApiKeys.API_KEYS, "api_keys_pkey", JApiKeys.API_KEYS.ID); + public static final UniqueKey USERS_API_KEYS_UNIQUE = Internal.createUniqueKey(JApiKeys.API_KEYS, "users_api_keys_unique", JApiKeys.API_KEYS.NAME, JApiKeys.API_KEYS.USER_ID); public static final UniqueKey ATTACHMENT_PK = Internal.createUniqueKey(JAttachment.ATTACHMENT, "attachment_pk", JAttachment.ATTACHMENT.ID); public static final UniqueKey ATTACHMENT_DELETION_PKEY = Internal.createUniqueKey(JAttachmentDeletion.ATTACHMENT_DELETION, "attachment_deletion_pkey", JAttachmentDeletion.ATTACHMENT_DELETION.ID); public static final UniqueKey ATTRIBUTE_PK = Internal.createUniqueKey(JAttribute.ATTRIBUTE, "attribute_pk", JAttribute.ATTRIBUTE.ID); @@ -426,6 +435,7 @@ private static class UniqueKeys0 { private static class ForeignKeys0 { public static final ForeignKey ACTIVITY__ACTIVITY_USER_ID_FKEY = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.USERS_PK, JActivity.ACTIVITY, "activity__activity_user_id_fkey", JActivity.ACTIVITY.USER_ID); public static final ForeignKey ACTIVITY__ACTIVITY_PROJECT_ID_FKEY = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.PROJECT_PK, JActivity.ACTIVITY, "activity__activity_project_id_fkey", JActivity.ACTIVITY.PROJECT_ID); + public static final ForeignKey API_KEYS__API_KEYS_USER_ID_FKEY = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.USERS_PK, JApiKeys.API_KEYS, "api_keys__api_keys_user_id_fkey", JApiKeys.API_KEYS.USER_ID); public static final ForeignKey CONTENT_FIELD__CONTENT_FIELD_ID_FKEY = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.WIDGET_PKEY, JContentField.CONTENT_FIELD, "content_field__content_field_id_fkey", JContentField.CONTENT_FIELD.ID); public static final ForeignKey DASHBOARD__DASHBOARD_ID_FK = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.SHAREABLE_PK, JDashboard.DASHBOARD, "dashboard__dashboard_id_fk", JDashboard.DASHBOARD.ID); public static final ForeignKey DASHBOARD_WIDGET__DASHBOARD_WIDGET_DASHBOARD_ID_FKEY = Internal.createForeignKey(com.epam.ta.reportportal.jooq.Keys.DASHBOARD_PKEY, JDashboardWidget.DASHBOARD_WIDGET, "dashboard_widget__dashboard_widget_dashboard_id_fkey", JDashboardWidget.DASHBOARD_WIDGET.DASHBOARD_ID); diff --git a/src/main/java/com/epam/ta/reportportal/jooq/Sequences.java b/src/main/java/com/epam/ta/reportportal/jooq/Sequences.java index 5f75df1e3..9633442ab 100644 --- a/src/main/java/com/epam/ta/reportportal/jooq/Sequences.java +++ b/src/main/java/com/epam/ta/reportportal/jooq/Sequences.java @@ -28,6 +28,11 @@ public class Sequences { */ public static final Sequence ACTIVITY_ID_SEQ = new SequenceImpl("activity_id_seq", JPublic.PUBLIC, 
org.jooq.impl.SQLDataType.BIGINT.nullable(false)); + /** + * The sequence public.api_keys_id_seq + */ + public static final Sequence API_KEYS_ID_SEQ = new SequenceImpl("api_keys_id_seq", JPublic.PUBLIC, org.jooq.impl.SQLDataType.BIGINT.nullable(false)); + /** * The sequence public.attachment_id_seq */ diff --git a/src/main/java/com/epam/ta/reportportal/jooq/Tables.java b/src/main/java/com/epam/ta/reportportal/jooq/Tables.java index af7657bc3..c334ebfbd 100644 --- a/src/main/java/com/epam/ta/reportportal/jooq/Tables.java +++ b/src/main/java/com/epam/ta/reportportal/jooq/Tables.java @@ -5,6 +5,7 @@ import com.epam.ta.reportportal.jooq.tables.JActivity; +import com.epam.ta.reportportal.jooq.tables.JApiKeys; import com.epam.ta.reportportal.jooq.tables.JAttachment; import com.epam.ta.reportportal.jooq.tables.JAttachmentDeletion; import com.epam.ta.reportportal.jooq.tables.JAttribute; @@ -85,6 +86,11 @@ public class Tables { */ public static final JActivity ACTIVITY = JActivity.ACTIVITY; + /** + * The table public.api_keys. + */ + public static final JApiKeys API_KEYS = JApiKeys.API_KEYS; + /** * The table public.attachment. */ diff --git a/src/main/java/com/epam/ta/reportportal/jooq/tables/JApiKeys.java b/src/main/java/com/epam/ta/reportportal/jooq/tables/JApiKeys.java new file mode 100644 index 000000000..adbca4ae9 --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/jooq/tables/JApiKeys.java @@ -0,0 +1,187 @@ +/* + * This file is generated by jOOQ. + */ +package com.epam.ta.reportportal.jooq.tables; + + +import com.epam.ta.reportportal.jooq.Indexes; +import com.epam.ta.reportportal.jooq.JPublic; +import com.epam.ta.reportportal.jooq.Keys; +import com.epam.ta.reportportal.jooq.tables.records.JApiKeysRecord; + +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.List; + +import javax.annotation.processing.Generated; + +import org.jooq.Field; +import org.jooq.ForeignKey; +import org.jooq.Identity; +import org.jooq.Index; +import org.jooq.Name; +import org.jooq.Record; +import org.jooq.Row5; +import org.jooq.Schema; +import org.jooq.Table; +import org.jooq.TableField; +import org.jooq.UniqueKey; +import org.jooq.impl.DSL; +import org.jooq.impl.TableImpl; + + +/** + * This class is generated by jOOQ. + */ +@Generated( + value = { + "http://www.jooq.org", + "jOOQ version:3.12.4" + }, + comments = "This class is generated by jOOQ" +) +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class JApiKeys extends TableImpl { + + private static final long serialVersionUID = 1459363165; + + /** + * The reference instance of public.api_keys + */ + public static final JApiKeys API_KEYS = new JApiKeys(); + + /** + * The class holding records for this type + */ + @Override + public Class getRecordType() { + return JApiKeysRecord.class; + } + + /** + * The column public.api_keys.id. + */ + public final TableField ID = createField(DSL.name("id"), org.jooq.impl.SQLDataType.BIGINT.nullable(false).defaultValue(org.jooq.impl.DSL.field("nextval('api_keys_id_seq'::regclass)", org.jooq.impl.SQLDataType.BIGINT)), this, ""); + + /** + * The column public.api_keys.name. + */ + public final TableField NAME = createField(DSL.name("name"), org.jooq.impl.SQLDataType.VARCHAR(255), this, ""); + + /** + * The column public.api_keys.hash. + */ + public final TableField HASH = createField(DSL.name("hash"), org.jooq.impl.SQLDataType.VARCHAR(255), this, ""); + + /** + * The column public.api_keys.created_at. 
+ */ + public final TableField CREATED_AT = createField(DSL.name("created_at"), org.jooq.impl.SQLDataType.TIMESTAMP.nullable(false), this, ""); + + /** + * The column public.api_keys.user_id. + */ + public final TableField USER_ID = createField(DSL.name("user_id"), org.jooq.impl.SQLDataType.BIGINT, this, ""); + + /** + * Create a public.api_keys table reference + */ + public JApiKeys() { + this(DSL.name("api_keys"), null); + } + + /** + * Create an aliased public.api_keys table reference + */ + public JApiKeys(String alias) { + this(DSL.name(alias), API_KEYS); + } + + /** + * Create an aliased public.api_keys table reference + */ + public JApiKeys(Name alias) { + this(alias, API_KEYS); + } + + private JApiKeys(Name alias, Table aliased) { + this(alias, aliased, null); + } + + private JApiKeys(Name alias, Table aliased, Field[] parameters) { + super(alias, null, aliased, parameters, DSL.comment("")); + } + + public JApiKeys(Table child, ForeignKey key) { + super(child, key, API_KEYS); + } + + @Override + public Schema getSchema() { + return JPublic.PUBLIC; + } + + @Override + public List getIndexes() { + return Arrays.asList(Indexes.API_KEYS_PKEY, Indexes.HASH_API_KEYS_IDX, Indexes.USERS_API_KEYS_UNIQUE); + } + + @Override + public Identity getIdentity() { + return Keys.IDENTITY_API_KEYS; + } + + @Override + public UniqueKey getPrimaryKey() { + return Keys.API_KEYS_PKEY; + } + + @Override + public List> getKeys() { + return Arrays.>asList(Keys.API_KEYS_PKEY, Keys.USERS_API_KEYS_UNIQUE); + } + + @Override + public List> getReferences() { + return Arrays.>asList(Keys.API_KEYS__API_KEYS_USER_ID_FKEY); + } + + public JUsers users() { + return new JUsers(this, Keys.API_KEYS__API_KEYS_USER_ID_FKEY); + } + + @Override + public JApiKeys as(String alias) { + return new JApiKeys(DSL.name(alias), this); + } + + @Override + public JApiKeys as(Name alias) { + return new JApiKeys(alias, this); + } + + /** + * Rename this table + */ + @Override + public JApiKeys rename(String name) { + return new JApiKeys(DSL.name(name), null); + } + + /** + * Rename this table + */ + @Override + public JApiKeys rename(Name name) { + return new JApiKeys(name, null); + } + + // ------------------------------------------------------------------------- + // Row5 type methods + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } +} diff --git a/src/main/java/com/epam/ta/reportportal/jooq/tables/records/JApiKeysRecord.java b/src/main/java/com/epam/ta/reportportal/jooq/tables/records/JApiKeysRecord.java new file mode 100644 index 000000000..78a6fb40e --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/jooq/tables/records/JApiKeysRecord.java @@ -0,0 +1,266 @@ +/* + * This file is generated by jOOQ. + */ +package com.epam.ta.reportportal.jooq.tables.records; + + +import com.epam.ta.reportportal.jooq.tables.JApiKeys; + +import java.sql.Timestamp; + +import javax.annotation.processing.Generated; + +import org.jooq.Field; +import org.jooq.Record1; +import org.jooq.Record5; +import org.jooq.Row5; +import org.jooq.impl.UpdatableRecordImpl; + + +/** + * This class is generated by jOOQ. 
+ */ +@Generated( + value = { + "http://www.jooq.org", + "jOOQ version:3.12.4" + }, + comments = "This class is generated by jOOQ" +) +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class JApiKeysRecord extends UpdatableRecordImpl implements Record5 { + + private static final long serialVersionUID = 186305862; + + /** + * Setter for public.api_keys.id. + */ + public void setId(Long value) { + set(0, value); + } + + /** + * Getter for public.api_keys.id. + */ + public Long getId() { + return (Long) get(0); + } + + /** + * Setter for public.api_keys.name. + */ + public void setName(String value) { + set(1, value); + } + + /** + * Getter for public.api_keys.name. + */ + public String getName() { + return (String) get(1); + } + + /** + * Setter for public.api_keys.hash. + */ + public void setHash(String value) { + set(2, value); + } + + /** + * Getter for public.api_keys.hash. + */ + public String getHash() { + return (String) get(2); + } + + /** + * Setter for public.api_keys.created_at. + */ + public void setCreatedAt(Timestamp value) { + set(3, value); + } + + /** + * Getter for public.api_keys.created_at. + */ + public Timestamp getCreatedAt() { + return (Timestamp) get(3); + } + + /** + * Setter for public.api_keys.user_id. + */ + public void setUserId(Long value) { + set(4, value); + } + + /** + * Getter for public.api_keys.user_id. + */ + public Long getUserId() { + return (Long) get(4); + } + + // ------------------------------------------------------------------------- + // Primary key information + // ------------------------------------------------------------------------- + + @Override + public Record1 key() { + return (Record1) super.key(); + } + + // ------------------------------------------------------------------------- + // Record5 type implementation + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } + + @Override + public Row5 valuesRow() { + return (Row5) super.valuesRow(); + } + + @Override + public Field field1() { + return JApiKeys.API_KEYS.ID; + } + + @Override + public Field field2() { + return JApiKeys.API_KEYS.NAME; + } + + @Override + public Field field3() { + return JApiKeys.API_KEYS.HASH; + } + + @Override + public Field field4() { + return JApiKeys.API_KEYS.CREATED_AT; + } + + @Override + public Field field5() { + return JApiKeys.API_KEYS.USER_ID; + } + + @Override + public Long component1() { + return getId(); + } + + @Override + public String component2() { + return getName(); + } + + @Override + public String component3() { + return getHash(); + } + + @Override + public Timestamp component4() { + return getCreatedAt(); + } + + @Override + public Long component5() { + return getUserId(); + } + + @Override + public Long value1() { + return getId(); + } + + @Override + public String value2() { + return getName(); + } + + @Override + public String value3() { + return getHash(); + } + + @Override + public Timestamp value4() { + return getCreatedAt(); + } + + @Override + public Long value5() { + return getUserId(); + } + + @Override + public JApiKeysRecord value1(Long value) { + setId(value); + return this; + } + + @Override + public JApiKeysRecord value2(String value) { + setName(value); + return this; + } + + @Override + public JApiKeysRecord value3(String value) { + setHash(value); + return this; + } + + @Override + public JApiKeysRecord value4(Timestamp value) { + setCreatedAt(value); + return this; + } + + @Override + public 
JApiKeysRecord value5(Long value) { + setUserId(value); + return this; + } + + @Override + public JApiKeysRecord values(Long value1, String value2, String value3, Timestamp value4, Long value5) { + value1(value1); + value2(value2); + value3(value3); + value4(value4); + value5(value5); + return this; + } + + // ------------------------------------------------------------------------- + // Constructors + // ------------------------------------------------------------------------- + + /** + * Create a detached JApiKeysRecord + */ + public JApiKeysRecord() { + super(JApiKeys.API_KEYS); + } + + /** + * Create a detached, initialised JApiKeysRecord + */ + public JApiKeysRecord(Long id, String name, String hash, Timestamp createdAt, Long userId) { + super(JApiKeys.API_KEYS); + + set(0, id); + set(1, name); + set(2, hash); + set(3, createdAt); + set(4, userId); + } +} diff --git a/src/main/java/com/epam/ta/reportportal/util/FeatureFlagHandler.java b/src/main/java/com/epam/ta/reportportal/util/FeatureFlagHandler.java new file mode 100644 index 000000000..111c55382 --- /dev/null +++ b/src/main/java/com/epam/ta/reportportal/util/FeatureFlagHandler.java @@ -0,0 +1,38 @@ +package com.epam.ta.reportportal.util; + +import com.epam.ta.reportportal.entity.enums.FeatureFlag; +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import org.apache.commons.collections.CollectionUtils; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +/** + * Component for checking enabled feature flags. + * + * @author Ivan Kustau + */ +@Component +public class FeatureFlagHandler { + + private final Set enabledFeatureFlagsSet = new HashSet<>(); + + /** + * Initialises {@link FeatureFlagHandler} by environment variable with enabled feature flags. 
+ * + * @param featureFlags Set of enabled feature flags + */ + public FeatureFlagHandler( + @Value("#{'${rp.feature.flags}'.split(',')}") Set featureFlags) { + + if (!CollectionUtils.isEmpty(featureFlags)) { + featureFlags.stream().map(FeatureFlag::fromString).filter(Optional::isPresent) + .map(Optional::get).forEach(enabledFeatureFlagsSet::add); + } + } + + public boolean isEnabled(FeatureFlag featureFlag) { + return enabledFeatureFlagsSet.contains(featureFlag); + } +} diff --git a/src/test/java/com/epam/ta/reportportal/binary/impl/AttachmentDataStoreServiceTest.java b/src/test/java/com/epam/ta/reportportal/binary/impl/AttachmentDataStoreServiceTest.java index 561d549ce..5f8c7922f 100644 --- a/src/test/java/com/epam/ta/reportportal/binary/impl/AttachmentDataStoreServiceTest.java +++ b/src/test/java/com/epam/ta/reportportal/binary/impl/AttachmentDataStoreServiceTest.java @@ -16,68 +16,78 @@ package com.epam.ta.reportportal.binary.impl; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.epam.ta.reportportal.BaseTest; import com.epam.ta.reportportal.exception.ReportPortalException; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.core.io.ClassPathResource; - import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Optional; import java.util.Random; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.io.ClassPathResource; /** * @author Ihar Kahadouski */ class AttachmentDataStoreServiceTest extends BaseTest { - @Autowired - private AttachmentDataStoreService attachmentDataStoreService; + @Autowired + private AttachmentDataStoreService attachmentDataStoreService; - @Value("${datastore.default.path:/data/store}") - private String storageRootPath; + @Value("${datastore.path:/data/store}") + private String storageRootPath; - private static Random random = new Random(); + private static Random random = new Random(); - @Test - void saveLoadAndDeleteTest() throws IOException { - InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); + @Test + void saveLoadAndDeleteTest() throws IOException { + InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); - String fileId = attachmentDataStoreService.save(random.nextLong() + "meh.jpg", inputStream); + String fileId = attachmentDataStoreService.save(random.nextLong() + "meh.jpg", inputStream); - Optional loadedData = attachmentDataStoreService.load(fileId); + Optional loadedData = attachmentDataStoreService.load(fileId); - assertTrue(loadedData.isPresent()); - assertTrue(Files.exists(Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(fileId)))); + assertTrue(loadedData.isPresent()); + assertTrue(Files.exists( + Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(fileId)))); - attachmentDataStoreService.delete(fileId); + attachmentDataStoreService.delete(fileId); - ReportPortalException exception = assertThrows(ReportPortalException.class, () -> 
attachmentDataStoreService.load(fileId)); - assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); - assertFalse(Files.exists(Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(fileId)))); - } + ReportPortalException exception = + assertThrows(ReportPortalException.class, () -> attachmentDataStoreService.load(fileId)); + assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); + assertFalse(Files.exists( + Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(fileId)))); + } - @Test - void saveLoadAndDeleteThumbnailTest() throws IOException { - InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); + @Test + void saveLoadAndDeleteThumbnailTest() throws IOException { + InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); - String thumbnailId = attachmentDataStoreService.saveThumbnail(random.nextLong() + "thumbnail.jpg", inputStream); + String thumbnailId = + attachmentDataStoreService.saveThumbnail(random.nextLong() + "thumbnail.jpg", inputStream); - Optional loadedData = attachmentDataStoreService.load(thumbnailId); + Optional loadedData = attachmentDataStoreService.load(thumbnailId); - assertTrue(loadedData.isPresent()); - assertTrue(Files.exists(Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(thumbnailId)))); + assertTrue(loadedData.isPresent()); + assertTrue(Files.exists( + Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(thumbnailId)))); - attachmentDataStoreService.delete(thumbnailId); + attachmentDataStoreService.delete(thumbnailId); - ReportPortalException exception = assertThrows(ReportPortalException.class, () -> attachmentDataStoreService.load(thumbnailId)); - assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); - assertFalse(Files.exists(Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(thumbnailId)))); - } + ReportPortalException exception = assertThrows(ReportPortalException.class, + () -> attachmentDataStoreService.load(thumbnailId) + ); + assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); + assertFalse(Files.exists( + Paths.get(storageRootPath, attachmentDataStoreService.dataEncoder.decode(thumbnailId)))); + } } \ No newline at end of file diff --git a/src/test/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreServiceTest.java b/src/test/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreServiceTest.java index 477db322d..cf840c7fd 100644 --- a/src/test/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreServiceTest.java +++ b/src/test/java/com/epam/ta/reportportal/binary/impl/CommonDataStoreServiceTest.java @@ -16,9 +16,21 @@ package com.epam.ta.reportportal.binary.impl; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.epam.ta.reportportal.BaseTest; import com.epam.ta.reportportal.binary.DataStoreService; import com.epam.ta.reportportal.filesystem.DataEncoder; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Optional; +import java.util.Random; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.disk.DiskFileItem; import 
org.apache.commons.io.IOUtils; @@ -29,84 +41,76 @@ import org.springframework.core.io.ClassPathResource; import org.springframework.web.multipart.commons.CommonsMultipartFile; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Optional; -import java.util.Random; - -import static org.junit.jupiter.api.Assertions.*; - /** * @author Ihar Kahadouski */ class CommonDataStoreServiceTest extends BaseTest { - @Autowired - @Qualifier("userDataStoreService") - private DataStoreService dataStoreService; - - @Autowired - private DataEncoder dataEncoder; - - @Value("${datastore.default.path:/data/store}") - private String storageRootPath; - - @Test - void saveTest() throws IOException { - CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); - String fileId = dataStoreService.save(multipartFile.getOriginalFilename(), multipartFile.getInputStream()); - assertNotNull(fileId); - assertTrue(Files.exists(Paths.get(storageRootPath, dataEncoder.decode(fileId)))); - dataStoreService.delete(fileId); - } - - @Test - void saveThumbnailTest() throws IOException { - CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); - String fileId = dataStoreService.saveThumbnail(multipartFile.getOriginalFilename(), multipartFile.getInputStream()); - assertNotNull(fileId); - assertTrue(Files.exists(Paths.get(storageRootPath, dataEncoder.decode(fileId)))); - dataStoreService.delete(fileId); - } - - @Test - void saveAndLoadTest() throws IOException { - CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); - String fileId = dataStoreService.saveThumbnail(multipartFile.getOriginalFilename(), multipartFile.getInputStream()); - - Optional content = dataStoreService.load(fileId); - - assertTrue(content.isPresent()); - dataStoreService.delete(fileId); - } - - @Test - void saveAndDeleteTest() throws IOException { - CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); - Random random = new Random(); - String fileId = dataStoreService.save(random.nextLong() + "/" + multipartFile.getOriginalFilename(), - multipartFile.getInputStream() - ); - - dataStoreService.delete(fileId); - - assertFalse(Files.exists(Paths.get(dataEncoder.decode(fileId)))); - } - - public static CommonsMultipartFile getMultipartFile(String path) throws IOException { - File file = new ClassPathResource(path).getFile(); - FileItem fileItem = new DiskFileItem("mainFile", - Files.probeContentType(file.toPath()), - false, - file.getName(), - (int) file.length(), - file.getParentFile() - ); - IOUtils.copy(new FileInputStream(file), fileItem.getOutputStream()); - return new CommonsMultipartFile(fileItem); - } + @Autowired + @Qualifier("userDataStoreService") + private DataStoreService dataStoreService; + + @Autowired + private DataEncoder dataEncoder; + + @Value("${datastore.path:/data/store}") + private String storageRootPath; + + @Test + void saveTest() throws IOException { + CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); + String fileId = + dataStoreService.save(multipartFile.getOriginalFilename(), multipartFile.getInputStream()); + assertNotNull(fileId); + assertTrue(Files.exists(Paths.get(storageRootPath, dataEncoder.decode(fileId)))); + dataStoreService.delete(fileId); + } + + @Test + void saveThumbnailTest() throws IOException { + CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); + String fileId = 
dataStoreService.saveThumbnail(multipartFile.getOriginalFilename(), + multipartFile.getInputStream() + ); + assertNotNull(fileId); + assertTrue(Files.exists(Paths.get(storageRootPath, dataEncoder.decode(fileId)))); + dataStoreService.delete(fileId); + } + + @Test + void saveAndLoadTest() throws IOException { + CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); + String fileId = dataStoreService.saveThumbnail(multipartFile.getOriginalFilename(), + multipartFile.getInputStream() + ); + + Optional content = dataStoreService.load(fileId); + + assertTrue(content.isPresent()); + dataStoreService.delete(fileId); + } + + @Test + void saveAndDeleteTest() throws IOException { + CommonsMultipartFile multipartFile = getMultipartFile("meh.jpg"); + Random random = new Random(); + String fileId = + dataStoreService.save(random.nextLong() + "/" + multipartFile.getOriginalFilename(), + multipartFile.getInputStream() + ); + + dataStoreService.delete(fileId); + + assertFalse(Files.exists(Paths.get(dataEncoder.decode(fileId)))); + } + + public static CommonsMultipartFile getMultipartFile(String path) throws IOException { + File file = new ClassPathResource(path).getFile(); + FileItem fileItem = + new DiskFileItem("mainFile", Files.probeContentType(file.toPath()), false, file.getName(), + (int) file.length(), file.getParentFile() + ); + IOUtils.copy(new FileInputStream(file), fileItem.getOutputStream()); + return new CommonsMultipartFile(fileItem); + } } \ No newline at end of file diff --git a/src/test/java/com/epam/ta/reportportal/binary/impl/UserDataStoreServiceTest.java b/src/test/java/com/epam/ta/reportportal/binary/impl/UserDataStoreServiceTest.java index f4876c243..a35198301 100644 --- a/src/test/java/com/epam/ta/reportportal/binary/impl/UserDataStoreServiceTest.java +++ b/src/test/java/com/epam/ta/reportportal/binary/impl/UserDataStoreServiceTest.java @@ -16,68 +16,77 @@ package com.epam.ta.reportportal.binary.impl; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.epam.ta.reportportal.BaseTest; import com.epam.ta.reportportal.exception.ReportPortalException; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.core.io.ClassPathResource; - import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Optional; import java.util.Random; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.io.ClassPathResource; /** * @author Ihar Kahadouski */ class UserDataStoreServiceTest extends BaseTest { - @Autowired - private UserDataStoreService userDataStoreService; + @Autowired + private UserDataStoreService userDataStoreService; - @Value("${datastore.default.path:/data/store}") - private String storageRootPath; + @Value("${datastore.path:/data/store}") + private String storageRootPath; - private static Random random = new Random(); + private static Random random = new Random(); - @Test - void saveLoadAndDeleteTest() throws IOException { - InputStream inputStream = new 
ClassPathResource("meh.jpg").getInputStream(); + @Test + void saveLoadAndDeleteTest() throws IOException { + InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); - String fileId = userDataStoreService.save(random.nextLong() + "meh.jpg", inputStream); + String fileId = userDataStoreService.save(random.nextLong() + "meh.jpg", inputStream); - Optional loadedData = userDataStoreService.load(fileId); + Optional loadedData = userDataStoreService.load(fileId); - assertTrue(loadedData.isPresent()); - assertTrue(Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(fileId)))); + assertTrue(loadedData.isPresent()); + assertTrue( + Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(fileId)))); - userDataStoreService.delete(fileId); + userDataStoreService.delete(fileId); - ReportPortalException exception = assertThrows(ReportPortalException.class, () -> userDataStoreService.load(fileId)); - assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); - assertFalse(Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(fileId)))); - } + ReportPortalException exception = + assertThrows(ReportPortalException.class, () -> userDataStoreService.load(fileId)); + assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); + assertFalse( + Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(fileId)))); + } - @Test - void saveLoadAndDeleteThumbnailTest() throws IOException { - InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); + @Test + void saveLoadAndDeleteThumbnailTest() throws IOException { + InputStream inputStream = new ClassPathResource("meh.jpg").getInputStream(); - String thumbnailId = userDataStoreService.saveThumbnail(random.nextLong() + "thmbnail.jpg", inputStream); + String thumbnailId = + userDataStoreService.saveThumbnail(random.nextLong() + "thmbnail.jpg", inputStream); - Optional loadedData = userDataStoreService.load(thumbnailId); + Optional loadedData = userDataStoreService.load(thumbnailId); - assertTrue(loadedData.isPresent()); - assertTrue(Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(thumbnailId)))); + assertTrue(loadedData.isPresent()); + assertTrue(Files.exists( + Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(thumbnailId)))); - userDataStoreService.delete(thumbnailId); + userDataStoreService.delete(thumbnailId); - ReportPortalException exception = assertThrows(ReportPortalException.class, () -> userDataStoreService.load(thumbnailId)); - assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); - assertFalse(Files.exists(Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(thumbnailId)))); - } + ReportPortalException exception = + assertThrows(ReportPortalException.class, () -> userDataStoreService.load(thumbnailId)); + assertEquals("Unable to load binary data by id 'Unable to find file'", exception.getMessage()); + assertFalse(Files.exists( + Paths.get(storageRootPath, userDataStoreService.dataEncoder.decode(thumbnailId)))); + } } \ No newline at end of file diff --git a/src/test/java/com/epam/ta/reportportal/config/TestConfiguration.java b/src/test/java/com/epam/ta/reportportal/config/TestConfiguration.java index ad9f33003..a3cc2652e 100644 --- a/src/test/java/com/epam/ta/reportportal/config/TestConfiguration.java +++ 
b/src/test/java/com/epam/ta/reportportal/config/TestConfiguration.java @@ -21,6 +21,8 @@ import com.epam.reportportal.commons.ThumbnailatorImpl; import com.epam.reportportal.commons.TikaContentTypeResolver; import com.epam.ta.reportportal.filesystem.DataEncoder; +import com.epam.ta.reportportal.util.FeatureFlagHandler; +import java.util.Set; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration; @@ -37,26 +39,34 @@ @PropertySource({ "classpath:test-application.properties" }) public class TestConfiguration { - @Bean("attachmentThumbnailator") - public Thumbnailator attachmentThumbnailator(@Value("${datastore.thumbnail.attachment.width}") int width, - @Value("${datastore.thumbnail.attachment.height}") int height) { - return new ThumbnailatorImpl(width, height); - } - - @Bean("userPhotoThumbnailator") - public Thumbnailator userPhotoThumbnailator(@Value("${datastore.thumbnail.avatar.width}") int width, - @Value("${datastore.thumbnail.avatar.height}") int height) { - return new ThumbnailatorImpl(width, height); - } - - @Bean - public ContentTypeResolver contentTypeResolver() { - return new TikaContentTypeResolver(); - } - - @Bean - public DataEncoder dataEncoder() { - return new DataEncoder(); - } + @Bean("attachmentThumbnailator") + public Thumbnailator attachmentThumbnailator( + @Value("${datastore.thumbnail.attachment.width}") int width, + @Value("${datastore.thumbnail.attachment.height}") int height) { + return new ThumbnailatorImpl(width, height); + } + + @Bean("userPhotoThumbnailator") + public Thumbnailator userPhotoThumbnailator( + @Value("${datastore.thumbnail.avatar.width}") int width, + @Value("${datastore.thumbnail.avatar.height}") int height) { + return new ThumbnailatorImpl(width, height); + } + + @Bean + public FeatureFlagHandler featureFlagHandler( + @Value("#{'${rp.feature.flags}'.split(',')}") Set featureFlagsSet) { + return new FeatureFlagHandler(featureFlagsSet); + } + + @Bean + public ContentTypeResolver contentTypeResolver() { + return new TikaContentTypeResolver(); + } + + @Bean + public DataEncoder dataEncoder() { + return new DataEncoder(); + } } diff --git a/src/test/java/com/epam/ta/reportportal/dao/ApiKeyRepositoryTest.java b/src/test/java/com/epam/ta/reportportal/dao/ApiKeyRepositoryTest.java new file mode 100644 index 000000000..f28d302c3 --- /dev/null +++ b/src/test/java/com/epam/ta/reportportal/dao/ApiKeyRepositoryTest.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.epam.ta.reportportal.dao; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.epam.ta.reportportal.BaseTest; +import com.epam.ta.reportportal.entity.user.ApiKey; +import java.time.LocalDateTime; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Andrei Piankouski + */ +public class ApiKeyRepositoryTest extends BaseTest { + + @Autowired + private ApiKeyRepository apiKeyRepository; + + @Test + void shouldInsertAndSetId() { + final ApiKey apiKey = new ApiKey(); + apiKey.setName("ApiKey"); + apiKey.setHash("8743b52063cd84097a65d1633f5c74f5"); + apiKey.setCreatedAt(LocalDateTime.now()); + apiKey.setUserId(1L); + + ApiKey saved = apiKeyRepository.save(apiKey); + + assertNotNull(saved.getId()); + } + +} diff --git a/src/test/java/com/epam/ta/reportportal/filesystem/FilePathGeneratorTest.java b/src/test/java/com/epam/ta/reportportal/filesystem/FilePathGeneratorTest.java index 2e41a4620..d5590e2bb 100644 --- a/src/test/java/com/epam/ta/reportportal/filesystem/FilePathGeneratorTest.java +++ b/src/test/java/com/epam/ta/reportportal/filesystem/FilePathGeneratorTest.java @@ -16,42 +16,41 @@ package com.epam.ta.reportportal.filesystem; +import static org.mockito.Mockito.when; + import com.epam.ta.reportportal.entity.attachment.AttachmentMetaInfo; import com.epam.ta.reportportal.util.DateTimeProvider; +import java.io.File; +import java.time.LocalDateTime; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; -import java.time.LocalDateTime; - -import static org.mockito.Mockito.when; - class FilePathGeneratorTest { - private DateTimeProvider dateTimeProvider; + private DateTimeProvider dateTimeProvider; - @BeforeEach - void setUp() { - dateTimeProvider = Mockito.mock(DateTimeProvider.class); - } + @BeforeEach + void setUp() { + dateTimeProvider = Mockito.mock(DateTimeProvider.class); + } - @Test - void generate_different_even_for_same_date() { + @Test + void generate_different_even_for_same_date() { - // given: - AttachmentMetaInfo metaInfo = AttachmentMetaInfo.builder() - .withProjectId(1L) - .withLaunchUuid("271b5881-9a62-4df4-b477-335a96acbe14") - .build(); + //given: + AttachmentMetaInfo metaInfo = AttachmentMetaInfo.builder().withProjectId(1L) + .withLaunchUuid("271b5881-9a62-4df4-b477-335a96acbe14").build(); - LocalDateTime date = LocalDateTime.of(2018, 5, 28, 3, 3); - when(dateTimeProvider.localDateTimeNow()).thenReturn(date); - // + LocalDateTime date = LocalDateTime.of(2018, 5, 28, 3, 3); + when(dateTimeProvider.localDateTimeNow()).thenReturn(date); + // - // when: - String pathOne = new FilePathGenerator(dateTimeProvider).generate(metaInfo); + //when: + String pathOne = new FilePathGenerator(dateTimeProvider).generate(metaInfo); - Assertions.assertThat(pathOne).isEqualTo("1/2018-5/271b5881-9a62-4df4-b477-335a96acbe14"); - } + Assertions.assertThat(pathOne).isEqualTo( + "1" + File.separator + "2018-5" + File.separator + "271b5881-9a62-4df4-b477-335a96acbe14"); + } } diff --git a/src/test/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStoreTest.java b/src/test/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStoreTest.java index ec731b244..4d8d62d0b 100644 --- a/src/test/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStoreTest.java +++ b/src/test/java/com/epam/ta/reportportal/filesystem/distributed/s3/S3DataStoreTest.java @@ -1,5 +1,5 @@ /* - * Copyright 
2019 EPAM Systems + * Copyright 2023 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,15 @@ package com.epam.ta.reportportal.filesystem.distributed.s3; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.epam.ta.reportportal.entity.enums.FeatureFlag; +import com.epam.ta.reportportal.util.FeatureFlagHandler; +import java.io.InputStream; import org.jclouds.blobstore.BlobStore; import org.jclouds.blobstore.domain.Blob; import org.jclouds.blobstore.domain.BlobBuilder; @@ -23,67 +32,69 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.io.InputStream; - -import static org.mockito.Mockito.*; - /** * @author Ivan Budayeu */ class S3DataStoreTest { - private static final String FILE_PATH = "someFile"; - private static final String BUCKET_PREFIX = "prj-"; - private static final String DEFAULT_BUCKET_NAME = "rp-bucket"; - private static final String REGION = "us-east-1"; - private static final int ZERO = 0; + private static final String FILE_PATH = "someFile"; + private static final String BUCKET_PREFIX = "prj-"; + private static final String DEFAULT_BUCKET_NAME = "rp-bucket"; + private static final String REGION = "us-east-1"; + private static final int ZERO = 0; + + private final BlobStore blobStore = mock(BlobStore.class); + private final InputStream inputStream = mock(InputStream.class); + + private final FeatureFlagHandler featureFlagHandler = mock(FeatureFlagHandler.class); - private final BlobStore blobStore = mock(BlobStore.class); - private final InputStream inputStream = mock(InputStream.class); + private final S3DataStore s3DataStore = + new S3DataStore(blobStore, BUCKET_PREFIX, DEFAULT_BUCKET_NAME, REGION, featureFlagHandler); - private final S3DataStore s3DataStore = new S3DataStore(blobStore, BUCKET_PREFIX, DEFAULT_BUCKET_NAME, REGION); + @Test + void save() throws Exception { - @Test - void save() throws Exception { + BlobBuilder blobBuilderMock = mock(BlobBuilder.class); + BlobBuilder.PayloadBlobBuilder payloadBlobBuilderMock = + mock(BlobBuilder.PayloadBlobBuilder.class); + Blob blobMock = mock(Blob.class); - BlobBuilder blobBuilderMock = mock(BlobBuilder.class); - BlobBuilder.PayloadBlobBuilder payloadBlobBuilderMock = mock(BlobBuilder.PayloadBlobBuilder.class); - Blob blobMock = mock(Blob.class); + when(inputStream.available()).thenReturn(ZERO); + when(payloadBlobBuilderMock.contentDisposition(FILE_PATH)).thenReturn(payloadBlobBuilderMock); + when(payloadBlobBuilderMock.contentLength(ZERO)).thenReturn(payloadBlobBuilderMock); + when(payloadBlobBuilderMock.build()).thenReturn(blobMock); + when(blobBuilderMock.payload(inputStream)).thenReturn(payloadBlobBuilderMock); - when(inputStream.available()).thenReturn(ZERO); - when(payloadBlobBuilderMock.contentDisposition(FILE_PATH)).thenReturn(payloadBlobBuilderMock); - when(payloadBlobBuilderMock.contentLength(ZERO)).thenReturn(payloadBlobBuilderMock); - when(payloadBlobBuilderMock.build()).thenReturn(blobMock); - when(blobBuilderMock.payload(inputStream)).thenReturn(payloadBlobBuilderMock); + when(blobStore.containerExists(any(String.class))).thenReturn(true); + when(blobStore.blobBuilder(FILE_PATH)).thenReturn(blobBuilderMock); - when(blobStore.containerExists(any(String.class))).thenReturn(true); - 
when(blobStore.blobBuilder(FILE_PATH)).thenReturn(blobBuilderMock); + when(featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)).thenReturn(false); - s3DataStore.save(FILE_PATH, inputStream); + s3DataStore.save(FILE_PATH, inputStream); - verify(blobStore, times(1)).putBlob(DEFAULT_BUCKET_NAME, blobMock); - } + verify(blobStore, times(1)).putBlob(DEFAULT_BUCKET_NAME, blobMock); + } - @Test - void load() throws Exception { + @Test + void load() throws Exception { - Blob mockBlob = mock(Blob.class); - Payload mockPayload = mock(Payload.class); + Blob mockBlob = mock(Blob.class); + Payload mockPayload = mock(Payload.class); - when(mockPayload.openStream()).thenReturn(inputStream); - when(mockBlob.getPayload()).thenReturn(mockPayload); + when(mockPayload.openStream()).thenReturn(inputStream); + when(mockBlob.getPayload()).thenReturn(mockPayload); - when(blobStore.getBlob(DEFAULT_BUCKET_NAME, FILE_PATH)).thenReturn(mockBlob); - InputStream loaded = s3DataStore.load(FILE_PATH); + when(blobStore.getBlob(DEFAULT_BUCKET_NAME, FILE_PATH)).thenReturn(mockBlob); + InputStream loaded = s3DataStore.load(FILE_PATH); - Assertions.assertEquals(inputStream, loaded); - } + Assertions.assertEquals(inputStream, loaded); + } - @Test - void delete() throws Exception { + @Test + void delete() throws Exception { - s3DataStore.delete(FILE_PATH); + s3DataStore.delete(FILE_PATH); - verify(blobStore, times(1)).removeBlob(DEFAULT_BUCKET_NAME, FILE_PATH); - } + verify(blobStore, times(1)).removeBlob(DEFAULT_BUCKET_NAME, FILE_PATH); + } } \ No newline at end of file diff --git a/src/test/resources/test-application.properties b/src/test/resources/test-application.properties index 2673e6adc..420981fa4 100644 --- a/src/test/resources/test-application.properties +++ b/src/test/resources/test-application.properties @@ -18,15 +18,16 @@ embedded.datasource.dir=${java.io.tmpdir}/reportportal/embedded-postgres embedded.datasource.clean=true embedded.datasource.port=0 rp.binarystore.path=${java.io.tmpdir}/reportportal/datastore +rp.feature.flags= -datastore.default.path=${rp.binarystore.path:/data/storage} +datastore.path=${rp.binarystore.path:/data/storage} datastore.seaweed.master.host=${rp.binarystore.master.host:localhost} datastore.seaweed.master.port=${rp.binarystore.master.port:9333} datastore.s3.endpoint=${rp.binarystore.s3.endpoint:https://play.min.io} datastore.s3.accessKey=${rp.binarystore.s3.accessKey:Q3AM3UQ867SPQQA43P2F} datastore.s3.secretKey=${rp.binarystore.s3.secretKey:zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG} # could be one of [seaweed, filesystem, s3] -datastore.type=${rp.binarystore.type:filesystem} +datastore.type=filesystem datastore.thumbnail.attachment.width=${rp.binarystore.thumbnail.attachment.width:100} datastore.thumbnail.attachment.height=${rp.binarystore.thumbnail.attachment.height:55} datastore.thumbnail.avatar.width=${rp.binarystore.thumbnail.avatar.width:40}
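Reviewer note (not part of the diff): a minimal, self-contained sketch of the bucket-resolution behaviour that the new FeatureFlagHandler wiring adds to S3DataStore.getS3File(). The class name BucketResolutionSketch, the sample path "3/launch/file.bin", and the prefix/bucket values are illustrative assumptions; only the branching mirrors the changed code above, where an enabled SINGLE_BUCKET flag keeps the whole path as the key inside the default bucket, while the legacy behaviour derives a per-project bucket from the first path segment.

import java.nio.file.Path;
import java.nio.file.Paths;

public class BucketResolutionSketch {

    // Mirrors S3DataStore.getS3File(): with the SINGLE_BUCKET feature flag
    // enabled the full path is kept as the blob key inside the default
    // bucket; otherwise the first path segment (plus the prefix) selects the
    // bucket and the remainder becomes the key.
    static String[] resolve(String filePath, boolean singleBucketEnabled,
            String bucketPrefix, String defaultBucketName) {
        if (singleBucketEnabled) {
            return new String[] { defaultBucketName, filePath };
        }
        Path targetPath = Paths.get(filePath);
        int nameCount = targetPath.getNameCount();
        if (nameCount > 1) {
            return new String[] { bucketPrefix + targetPath.subpath(0, 1),
                    targetPath.subpath(1, nameCount).toString() };
        }
        return new String[] { defaultBucketName, targetPath.subpath(0, 1).toString() };
    }

    public static void main(String[] args) {
        // Flag off: "3/launch/file.bin" -> bucket "prj-3", key "launch/file.bin"
        System.out.println(String.join(" : ", resolve("3/launch/file.bin", false, "prj-", "rp-bucket")));
        // Flag on: the path stays intact under the default bucket "rp-bucket"
        System.out.println(String.join(" : ", resolve("3/launch/file.bin", true, "prj-", "rp-bucket")));
    }
}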