diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 000000000..c2a939b96
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1,17 @@
+version = 3.8.3
+align = none
+align.openParenCallSite = true
+align.openParenDefnSite = true
+maxColumn = 120
+continuationIndent.defnSite = 2
+assumeStandardLibraryStripMargin = true
+danglingParentheses.preset = true
+rewrite.rules = [SortImports, RedundantBraces, RedundantParens, SortModifiers]
+docstrings.style = keep
+project.excludeFilters = [
+ Dependencies.scala,
+ Settings.scala,
+ build.sbt
+]
+runner.dialect = scala213
+project.git = true
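
Note: the combination of align = none with align.openParenCallSite = true, align.openParenDefnSite = true, maxColumn = 120 and dangling parentheses is what produces the wrapped, vertically aligned argument lists seen throughout the rest of this diff. A minimal sketch of the resulting style, using invented names purely for illustration:

    // hypothetical example; with this config, arguments that overflow maxColumn
    // wrap and align under the opening parenthesis, and the closing parenthesis dangles
    case class Endpoint(host: String, port: Int, useTls: Boolean, connectTimeoutMillis: Int)

    val primary = Endpoint("search-1.internal",
                           9300,
                           useTls = true,
                           connectTimeoutMillis = 5000
    )
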
diff --git a/project/plugins.sbt b/project/plugins.sbt
index b2c566a19..d26c78e72 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -4,4 +4,6 @@ addSbtPlugin("io.spray" % "sbt-revolver" % "0.10.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.2.1")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2")
+
addDependencyTreePlugin
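
With sbt-scalafmt on the build classpath and the .scalafmt.conf above at the repo root, formatting is normally driven from sbt. The commonly used plugin tasks are listed here only as a usage reminder, not as part of this change:

    sbt scalafmtAll        # rewrite main and test sources in place
    sbt scalafmtCheckAll   # fail if any source is not formatted (suitable for CI)
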
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/Application.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/Application.scala
index 692832be8..1af5e2ca4 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/Application.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/Application.scala
@@ -17,4 +17,5 @@ case class Application(agoraDAO: AgoraDAO,
shareLogDAO: ShareLogDAO,
shibbolethDAO: ShibbolethDAO,
cwdsDAO: CwdsDAO,
- ecmDAO: ExternalCredsDAO)
+ ecmDAO: ExternalCredsDAO
+)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/Boot.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/Boot.scala
index 7cc09796f..3b1002a2b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/Boot.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/Boot.scala
@@ -27,8 +27,10 @@ object Boot extends App with LazyLogging {
val app: Application = buildApplication
- val agoraPermissionServiceConstructor: (UserInfo) => AgoraPermissionService = AgoraPermissionService.constructor(app)
- val exportEntitiesByTypeActorConstructor: (ExportEntitiesByTypeArguments) => ExportEntitiesByTypeActor = ExportEntitiesByTypeActor.constructor(app, system)
+ val agoraPermissionServiceConstructor: (UserInfo) => AgoraPermissionService =
+ AgoraPermissionService.constructor(app)
+ val exportEntitiesByTypeActorConstructor: (ExportEntitiesByTypeArguments) => ExportEntitiesByTypeActor =
+ ExportEntitiesByTypeActor.constructor(app, system)
val entityServiceConstructor: (ModelSchema) => EntityService = EntityService.constructor(app)
val libraryServiceConstructor: (UserInfo) => LibraryService = LibraryService.constructor(app)
val ontologyServiceConstructor: () => OntologyService = OntologyService.constructor(app)
@@ -36,15 +38,17 @@ object Boot extends App with LazyLogging {
val nihServiceConstructor: () => NihService = NihService.constructor(app)
val registerServiceConstructor: () => RegisterService = RegisterService.constructor(app)
val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService = WorkspaceService.constructor(app)
- val permissionReportServiceConstructor: (UserInfo) => PermissionReportService = PermissionReportService.constructor(app)
+ val permissionReportServiceConstructor: (UserInfo) => PermissionReportService =
+ PermissionReportService.constructor(app)
val userServiceConstructor: (UserInfo) => UserService = UserService.constructor(app)
val shareLogServiceConstructor: () => ShareLogService = ShareLogService.constructor(app)
val managedGroupServiceConstructor: (WithAccessToken) => ManagedGroupService = ManagedGroupService.constructor(app)
- //Boot HealthMonitor actor
+ // Boot HealthMonitor actor
val healthChecks = new HealthChecks(app)
val healthMonitorChecks = healthChecks.healthMonitorChecks
- val healthMonitor = system.actorOf(HealthMonitor.props(healthMonitorChecks().keySet)( healthMonitorChecks ), "health-monitor")
+ val healthMonitor =
+ system.actorOf(HealthMonitor.props(healthMonitorChecks().keySet)(healthMonitorChecks), "health-monitor")
system.scheduler.scheduleWithFixedDelay(3.seconds, 1.minute, healthMonitor, HealthMonitor.CheckAll)
val statusServiceConstructor: () => StatusService = StatusService.constructor(healthMonitor)
@@ -77,16 +81,16 @@ object Boot extends App with LazyLogging {
)
}
- binding <- Http().newServerAt( "0.0.0.0", 8080).bind(service.route)
+ binding <- Http().newServerAt("0.0.0.0", 8080).bind(service.route)
_ <- binding.whenTerminated
- } yield {
+ } yield {}
- }
-
- runningService.recover {
- case t: Throwable => logger.error("FATAL - error starting Firecloud Orchestration", t)
- }.flatMap(_ => system.terminate())
+ runningService
+ .recover { case t: Throwable =>
+ logger.error("FATAL - error starting Firecloud Orchestration", t)
+ }
+ .flatMap(_ => system.terminate())
}
private def buildApplication(implicit system: ActorSystem) = {
@@ -94,33 +98,67 @@ object Boot extends App with LazyLogging {
val rawlsDAO: RawlsDAO = new HttpRawlsDAO
val samDAO: SamDAO = new HttpSamDAO
val thurloeDAO: ThurloeDAO = new HttpThurloeDAO
- val ecmDAO: ExternalCredsDAO = if (FireCloudConfig.ExternalCreds.enabled) new HttpExternalCredsDAO else new DisabledExternalCredsDAO
+ val ecmDAO: ExternalCredsDAO =
+ if (FireCloudConfig.ExternalCreds.enabled) new HttpExternalCredsDAO else new DisabledExternalCredsDAO
// can be disabled
- val agoraDAO: AgoraDAO = whenEnabled[AgoraDAO](FireCloudConfig.Agora.enabled, new HttpAgoraDAO(FireCloudConfig.Agora))
- val googleServicesDAO: GoogleServicesDAO = whenEnabled[GoogleServicesDAO](FireCloudConfig.GoogleCloud.enabled, new HttpGoogleServicesDAO(FireCloudConfig.GoogleCloud.priceListUrl, GooglePriceList(GooglePrices(FireCloudConfig.GoogleCloud.defaultStoragePriceList, UsTieredPriceItem(FireCloudConfig.GoogleCloud.defaultEgressPriceList)), "v1", "1")))
- val shibbolethDAO: ShibbolethDAO = whenEnabled[ShibbolethDAO](FireCloudConfig.Shibboleth.enabled, new HttpShibbolethDAO)
- val cwdsDAO: CwdsDAO = whenEnabled[CwdsDAO](FireCloudConfig.Cwds.enabled, new HttpCwdsDAO(FireCloudConfig.Cwds.enabled, FireCloudConfig.Cwds.supportedFormats))
+ val agoraDAO: AgoraDAO =
+ whenEnabled[AgoraDAO](FireCloudConfig.Agora.enabled, new HttpAgoraDAO(FireCloudConfig.Agora))
+ val googleServicesDAO: GoogleServicesDAO = whenEnabled[GoogleServicesDAO](
+ FireCloudConfig.GoogleCloud.enabled,
+ new HttpGoogleServicesDAO(
+ FireCloudConfig.GoogleCloud.priceListUrl,
+ GooglePriceList(GooglePrices(FireCloudConfig.GoogleCloud.defaultStoragePriceList,
+ UsTieredPriceItem(FireCloudConfig.GoogleCloud.defaultEgressPriceList)
+ ),
+ "v1",
+ "1"
+ )
+ )
+ )
+ val shibbolethDAO: ShibbolethDAO =
+ whenEnabled[ShibbolethDAO](FireCloudConfig.Shibboleth.enabled, new HttpShibbolethDAO)
+ val cwdsDAO: CwdsDAO = whenEnabled[CwdsDAO](
+ FireCloudConfig.Cwds.enabled,
+ new HttpCwdsDAO(FireCloudConfig.Cwds.enabled, FireCloudConfig.Cwds.supportedFormats)
+ )
val elasticSearchClient: Option[TransportClient] = Option.when(FireCloudConfig.ElasticSearch.enabled) {
ElasticUtils.buildClient(FireCloudConfig.ElasticSearch.servers, FireCloudConfig.ElasticSearch.clusterName)
}
- val ontologyDAO: OntologyDAO = elasticSearchClient.map(new ElasticSearchOntologyDAO(_, FireCloudConfig.ElasticSearch.ontologyIndexName)).getOrElse(DisabledServiceFactory.newDisabledService[OntologyDAO])
+ val ontologyDAO: OntologyDAO = elasticSearchClient
+ .map(new ElasticSearchOntologyDAO(_, FireCloudConfig.ElasticSearch.ontologyIndexName))
+ .getOrElse(DisabledServiceFactory.newDisabledService[OntologyDAO])
val researchPurposeSupport: ResearchPurposeSupport = new ESResearchPurposeSupport(ontologyDAO)
- val searchDAO: SearchDAO = elasticSearchClient.map(new ElasticSearchDAO(_, FireCloudConfig.ElasticSearch.indexName, researchPurposeSupport)).getOrElse(DisabledServiceFactory.newDisabledService[SearchDAO])
- val shareLogDAO: ShareLogDAO = elasticSearchClient.map(new ElasticSearchShareLogDAO(_, FireCloudConfig.ElasticSearch.shareLogIndexName)).getOrElse(DisabledServiceFactory.newDisabledService[ShareLogDAO])
-
- Application(agoraDAO, googleServicesDAO, ontologyDAO, rawlsDAO, samDAO, searchDAO, researchPurposeSupport, thurloeDAO, shareLogDAO, shibbolethDAO, cwdsDAO, ecmDAO)
+ val searchDAO: SearchDAO = elasticSearchClient
+ .map(new ElasticSearchDAO(_, FireCloudConfig.ElasticSearch.indexName, researchPurposeSupport))
+ .getOrElse(DisabledServiceFactory.newDisabledService[SearchDAO])
+ val shareLogDAO: ShareLogDAO = elasticSearchClient
+ .map(new ElasticSearchShareLogDAO(_, FireCloudConfig.ElasticSearch.shareLogIndexName))
+ .getOrElse(DisabledServiceFactory.newDisabledService[ShareLogDAO])
+
+ Application(agoraDAO,
+ googleServicesDAO,
+ ontologyDAO,
+ rawlsDAO,
+ samDAO,
+ searchDAO,
+ researchPurposeSupport,
+ thurloeDAO,
+ shareLogDAO,
+ shibbolethDAO,
+ cwdsDAO,
+ ecmDAO
+ )
}
- private def whenEnabled[T : ClassTag](enabled: Boolean, realService: => T): T = {
+ private def whenEnabled[T: ClassTag](enabled: Boolean, realService: => T): T =
if (enabled) {
realService
} else {
DisabledServiceFactory.newDisabledService
}
- }
startup()
}
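
The whenEnabled[T: ClassTag] helper above falls back to DisabledServiceFactory.newDisabledService[T] when a subsystem is switched off. That factory is not part of this diff; the following is only a sketch of one way such a factory can be written, assuming the DAO types are traits and that a disabled service should fail fast on any call (the object name and the error message are invented for illustration, not the project's actual implementation):

    import java.lang.reflect.{InvocationHandler, Method, Proxy}
    import scala.reflect.ClassTag

    object DisabledServiceFactorySketch {
      // Build a stub for trait T whose every method throws, so "disabled" subsystems fail loudly.
      def newDisabledService[T](implicit tag: ClassTag[T]): T = {
        val iface = tag.runtimeClass
        val handler = new InvocationHandler {
          override def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef =
            throw new UnsupportedOperationException(
              s"${iface.getSimpleName}.${method.getName} called, but this service is disabled by configuration"
            )
        }
        Proxy.newProxyInstance(iface.getClassLoader, Array(iface), handler).asInstanceOf[T]
      }
    }
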
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/EntityService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/EntityService.scala
index 8c53be5fc..b38486141 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/EntityService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/EntityService.scala
@@ -28,11 +28,15 @@ object EntityService {
def constructor(app: Application)(modelSchema: ModelSchema)(implicit executionContext: ExecutionContext) =
new EntityService(app.rawlsDAO, app.cwdsDAO, app.googleServicesDAO, modelSchema)
- def colNamesToAttributeNames(headers: Seq[String], requiredAttributes: Map[String, String]): Seq[(String, Option[String])] = {
- headers.tail map { colName => (colName, requiredAttributes.get(colName))}
- }
-
- def backwardsCompatStripIdSuffixes(tsvLoadFile: TSVLoadFile, entityType: String, modelSchema: ModelSchema): TSVLoadFile = {
+ def colNamesToAttributeNames(headers: Seq[String],
+ requiredAttributes: Map[String, String]
+ ): Seq[(String, Option[String])] =
+ headers.tail map { colName => (colName, requiredAttributes.get(colName)) }
+
+ def backwardsCompatStripIdSuffixes(tsvLoadFile: TSVLoadFile,
+ entityType: String,
+ modelSchema: ModelSchema
+ ): TSVLoadFile =
modelSchema.getTypeSchema(entityType) match {
case Failure(_) => tsvLoadFile // the failure will be handled during parsing
case Success(metaData) =>
@@ -46,12 +50,16 @@ object EntityService {
}
tsvLoadFile.copy(headers = newHeaders)
}
- }
}
-class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: GoogleServicesDAO, modelSchema: ModelSchema)(implicit val executionContext: ExecutionContext)
- extends TSVFileSupport with LazyLogging {
+class EntityService(rawlsDAO: RawlsDAO,
+ cwdsDAO: CwdsDAO,
+ googleServicesDAO: GoogleServicesDAO,
+ modelSchema: ModelSchema
+)(implicit val executionContext: ExecutionContext)
+ extends TSVFileSupport
+ with LazyLogging {
val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZZ")
@@ -59,98 +67,124 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
* Returns the plural form of the entity type.
* Bails with a 400 Bad Request if the entity type is unknown to the schema and we are using firecloud model
* If using flexible model, just appends an 's' */
- private def withPlural(entityType: String)(op: String => Future[PerRequestMessage]): Future[PerRequestMessage] = {
+ private def withPlural(entityType: String)(op: String => Future[PerRequestMessage]): Future[PerRequestMessage] =
modelSchema.getPlural(entityType) match {
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(plural) => op(plural)
}
- }
-
/**
* Verifies that the provided list of headers includes all attributes required by the schema for this entity type.
* Bails with a 400 Bad Request if the entity type is unknown or if some attributes are missing.
* Returns the list of required attributes if all is well. */
- private def withRequiredAttributes(entityType: String, headers: Seq[String])(op: Map[String, String] => Future[PerRequestMessage]):Future[PerRequestMessage] = {
+ private def withRequiredAttributes(entityType: String, headers: Seq[String])(
+ op: Map[String, String] => Future[PerRequestMessage]
+ ): Future[PerRequestMessage] =
modelSchema.getRequiredAttributes(entityType) match {
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(requiredAttributes) =>
- if( !requiredAttributes.keySet.subsetOf(headers.toSet) ) {
- Future( RequestCompleteWithErrorReport(BadRequest,
- "TSV is missing required attributes: " + (requiredAttributes.keySet -- headers).mkString(", ")) )
+ if (!requiredAttributes.keySet.subsetOf(headers.toSet)) {
+ Future(
+ RequestCompleteWithErrorReport(
+ BadRequest,
+ "TSV is missing required attributes: " + (requiredAttributes.keySet -- headers).mkString(", ")
+ )
+ )
} else {
op(requiredAttributes)
}
}
- }
/**
* Imports collection members into a collection type entity. */
- private def importMembershipTSV(
- workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean ): Future[PerRequestMessage] = {
+ private def importMembershipTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsv: TSVLoadFile,
+ entityType: String,
+ userInfo: UserInfo,
+ isAsync: Boolean
+ ): Future[PerRequestMessage] =
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
validateMembershipTSV(tsv, memberTypeOpt) {
withPlural(memberTypeOpt.get) { memberPlural =>
- val rawlsCalls = (tsv.tsvData groupBy(_.head) map { case (entityName, rows) =>
+ val rawlsCalls = (tsv.tsvData groupBy (_.head) map { case (entityName, rows) =>
val ops = rows map { row =>
- //row(1) is the entity to add as a member of the entity in row.head
- val attrRef = AttributeEntityReference(memberTypeOpt.get,row(1))
- Map(addListMemberOperation,"attributeListName"->AttributeString(memberPlural),"newMember"->attrRef)
+ // row(1) is the entity to add as a member of the entity in row.head
+ val attrRef = AttributeEntityReference(memberTypeOpt.get, row(1))
+ Map(addListMemberOperation, "attributeListName" -> AttributeString(memberPlural), "newMember" -> attrRef)
}
- EntityUpdateDefinition(entityName,entityType,ops)
+ EntityUpdateDefinition(entityName, entityType, ops)
}).toSeq
maybeAsyncBatchUpdate(isAsync, true, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
- }
/**
* Creates or updates entities from an entity TSV. Required attributes must exist in column headers. */
- private def importEntityTSV(
- workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean, deleteEmptyValues: Boolean ): Future[PerRequestMessage] = {
- //we're setting attributes on a bunch of entities
+ private def importEntityTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsv: TSVLoadFile,
+ entityType: String,
+ userInfo: UserInfo,
+ isAsync: Boolean,
+ deleteEmptyValues: Boolean
+ ): Future[PerRequestMessage] =
+ // we're setting attributes on a bunch of entities
checkFirstColumnDistinct(tsv) {
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
checkNoCollectionMemberAttribute(tsv, memberTypeOpt) {
withRequiredAttributes(entityType, tsv.headers) { requiredAttributes =>
val colInfo = colNamesToAttributeNames(tsv.headers, requiredAttributes)
- val rawlsCalls = tsv.tsvData.map(row => setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema, deleteEmptyValues))
+ val rawlsCalls = tsv.tsvData.map(row =>
+ setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema, deleteEmptyValues)
+ )
maybeAsyncBatchUpdate(isAsync, true, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
}
- }
/**
* Updates existing entities from TSV. All entities must already exist. */
- private def importUpdateTSV(
- workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean, deleteEmptyValues: Boolean ): Future[PerRequestMessage] = {
- //we're setting attributes on a bunch of entities
+ private def importUpdateTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsv: TSVLoadFile,
+ entityType: String,
+ userInfo: UserInfo,
+ isAsync: Boolean,
+ deleteEmptyValues: Boolean
+ ): Future[PerRequestMessage] =
+ // we're setting attributes on a bunch of entities
checkFirstColumnDistinct(tsv) {
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
checkNoCollectionMemberAttribute(tsv, memberTypeOpt) {
modelSchema.getRequiredAttributes(entityType) match {
- //Required attributes aren't required to be headers in update TSVs - they should already have been
- //defined when the entity was created. But we still need the type information if the headers do exist.
+ // Required attributes aren't required to be headers in update TSVs - they should already have been
+ // defined when the entity was created. But we still need the type information if the headers do exist.
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(requiredAttributes) =>
val colInfo = colNamesToAttributeNames(tsv.headers, requiredAttributes)
- val rawlsCalls = tsv.tsvData.map(row => setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema, deleteEmptyValues))
+ val rawlsCalls = tsv.tsvData.map(row =>
+ setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema, deleteEmptyValues)
+ )
maybeAsyncBatchUpdate(isAsync, false, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
}
- }
- private def maybeAsyncBatchUpdate(isAsync: Boolean, isUpsert: Boolean, workspaceNamespace: String, workspaceName: String,
- entityType: String, rawlsCalls: Seq[EntityUpdateDefinition], userInfo: UserInfo): Future[PerRequestMessage] = {
+ private def maybeAsyncBatchUpdate(isAsync: Boolean,
+ isUpsert: Boolean,
+ workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ rawlsCalls: Seq[EntityUpdateDefinition],
+ userInfo: UserInfo
+ ): Future[PerRequestMessage] =
if (isAsync) {
- asyncImport(workspaceNamespace, workspaceName, isUpsert, rawlsCalls, userInfo).recover {
- case e: Exception =>
- RequestCompleteWithErrorReport(InternalServerError, "Unexpected error during async TSV import", e)
+ asyncImport(workspaceNamespace, workspaceName, isUpsert, rawlsCalls, userInfo).recover { case e: Exception =>
+ RequestCompleteWithErrorReport(InternalServerError, "Unexpected error during async TSV import", e)
}
} else {
val rawlsResponse = if (isUpsert) {
@@ -160,10 +194,13 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
}
handleBatchRawlsResponse(entityType, rawlsResponse)
}
- }
- private def asyncImport(workspaceNamespace: String, workspaceName: String, isUpsert: Boolean,
- rawlsCalls: Seq[EntityUpdateDefinition], userInfo: UserInfo): Future[PerRequestMessage] = {
+ private def asyncImport(workspaceNamespace: String,
+ workspaceName: String,
+ isUpsert: Boolean,
+ rawlsCalls: Seq[EntityUpdateDefinition],
+ userInfo: UserInfo
+ ): Future[PerRequestMessage] = {
import spray.json._
val dataBytes = rawlsCalls.toJson.prettyPrint.getBytes(StandardCharsets.UTF_8)
getWorkspaceId(workspaceNamespace, workspaceName, userInfo) map { workspaceId =>
@@ -180,26 +217,31 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
s"gs://${insertedObject.bucketName.value}/${insertedObject.objectName.value}"
}
- private def getRawlsJsonImportRequest(gcsPath: String, isUpsert: Boolean): AsyncImportRequest = {
+ private def getRawlsJsonImportRequest(gcsPath: String, isUpsert: Boolean): AsyncImportRequest =
AsyncImportRequest(gcsPath, FILETYPE_RAWLS, Some(ImportOptions(None, Some(isUpsert))))
- }
- private def getWorkspaceId(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[String] = {
+ private def getWorkspaceId(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[String] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName)(userInfo).map(_.workspace.workspaceId)
- }
- private def importToCWDS(workspaceNamespace: String, workspaceName: String, workspaceId: String, userInfo: UserInfo, importRequest: AsyncImportRequest
- ): PerRequestMessage = {
+ private def importToCWDS(workspaceNamespace: String,
+ workspaceName: String,
+ workspaceId: String,
+ userInfo: UserInfo,
+ importRequest: AsyncImportRequest
+ ): PerRequestMessage = {
// create the job in cWDS
val cwdsJob = cwdsDAO.importV1(workspaceId, importRequest)(userInfo)
// massage the cWDS job into the response format Orch requires
- RequestComplete(Accepted, AsyncImportResponse(url = importRequest.url,
- jobId = cwdsJob.getJobId.toString,
- workspace = WorkspaceName(workspaceNamespace, workspaceName)))
+ RequestComplete(Accepted,
+ AsyncImportResponse(url = importRequest.url,
+ jobId = cwdsJob.getJobId.toString,
+ workspace = WorkspaceName(workspaceNamespace, workspaceName)
+ )
+ )
}
- private def handleBatchRawlsResponse(entityType: String, response: Future[HttpResponse]): Future[PerRequestMessage] = {
+ private def handleBatchRawlsResponse(entityType: String, response: Future[HttpResponse]): Future[PerRequestMessage] =
response map { response =>
response.status match {
case NoContent =>
@@ -210,28 +252,46 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
logger.warn("Unanticipated response: " + response.status.defaultMessage)
RequestComplete(response)
}
- } recover {
- case e: Throwable => RequestCompleteWithErrorReport(InternalServerError, "Service API call failed", e)
+ } recover { case e: Throwable =>
+ RequestCompleteWithErrorReport(InternalServerError, "Service API call failed", e)
}
- }
- private def importEntitiesFromTSVLoadFile(workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, tsvType: TsvType, entityType: String, userInfo: UserInfo, isAsync: Boolean, deleteEmptyValues: Boolean): Future[PerRequestMessage] = {
+ private def importEntitiesFromTSVLoadFile(workspaceNamespace: String,
+ workspaceName: String,
+ tsv: TSVLoadFile,
+ tsvType: TsvType,
+ entityType: String,
+ userInfo: UserInfo,
+ isAsync: Boolean,
+ deleteEmptyValues: Boolean
+ ): Future[PerRequestMessage] =
tsvType match {
- case TsvTypes.MEMBERSHIP => importMembershipTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync)
- case TsvTypes.ENTITY => importEntityTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync, deleteEmptyValues)
- case TsvTypes.UPDATE => importUpdateTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync, deleteEmptyValues)
- case _ => Future(RequestCompleteWithErrorReport(BadRequest, "Invalid TSV type.")) //We should never get to this case
+ case TsvTypes.MEMBERSHIP =>
+ importMembershipTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync)
+ case TsvTypes.ENTITY =>
+ importEntityTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync, deleteEmptyValues)
+ case TsvTypes.UPDATE =>
+ importUpdateTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync, deleteEmptyValues)
+ case _ =>
+ Future(RequestCompleteWithErrorReport(BadRequest, "Invalid TSV type.")) // We should never get to this case
}
- }
/**
* Determines the TSV type from the first column header and routes it to the correct import function. */
- def importEntitiesFromTSV(workspaceNamespace: String, workspaceName: String, tsvString: String, userInfo: UserInfo, isAsync: Boolean = false, deleteEmptyValues: Boolean = false): Future[PerRequestMessage] = {
+ def importEntitiesFromTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsvString: String,
+ userInfo: UserInfo,
+ isAsync: Boolean = false,
+ deleteEmptyValues: Boolean = false
+ ): Future[PerRequestMessage] = {
def stripEntityType(entityTypeString: String): String = {
val entityType = entityTypeString.stripSuffix("_id")
if (entityType == entityTypeString)
- throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, "Invalid first column header, entity type should end in _id"))
+ throw new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.BadRequest, "Invalid first column header, entity type should end in _id")
+ )
entityType
}
@@ -241,58 +301,82 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
case Array(tsvTypeString, entityTypeString) =>
val tsvType = Try(TsvTypes.withName(tsvTypeString)) match {
case Success(t) => t
- case Failure(err) => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, err.toString))
+ case Failure(err) =>
+ throw new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.BadRequest, err.toString)
+ )
}
(tsvType, stripEntityType(entityTypeString))
- case _ => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, "Invalid first column header, should look like tsvType:entity_type_id"))
+ case _ =>
+ throw new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.BadRequest, "Invalid first column header, should look like tsvType:entity_type_id")
+ )
}
val strippedTsv = if (modelSchema.supportsBackwardsCompatibleIds()) {
- backwardsCompatStripIdSuffixes(tsv, entityType, modelSchema)
- } else {
- tsv
- }
- importEntitiesFromTSVLoadFile(workspaceNamespace, workspaceName, strippedTsv, tsvType, entityType, userInfo, isAsync, deleteEmptyValues)
+ backwardsCompatStripIdSuffixes(tsv, entityType, modelSchema)
+ } else {
+ tsv
+ }
+ importEntitiesFromTSVLoadFile(workspaceNamespace,
+ workspaceName,
+ strippedTsv,
+ tsvType,
+ entityType,
+ userInfo,
+ isAsync,
+ deleteEmptyValues
+ )
}
}
- def importJob(workspaceNamespace: String, workspaceName: String, importRequest: AsyncImportRequest, userInfo: UserInfo): Future[PerRequestMessage] = {
+ def importJob(workspaceNamespace: String,
+ workspaceName: String,
+ importRequest: AsyncImportRequest,
+ userInfo: UserInfo
+ ): Future[PerRequestMessage] = {
// validate that filetype exists in the importRequest
if (importRequest.filetype.isEmpty)
throw new FireCloudExceptionWithErrorReport(ErrorReport(BadRequest, "filetype must be specified"))
getWorkspaceId(workspaceNamespace, workspaceName, userInfo) map { workspaceId =>
importToCWDS(workspaceNamespace, workspaceName, workspaceId, userInfo, importRequest)
- } recover {
- case apiEx:ApiException => throw wrapCwdsException(apiEx)
+ } recover { case apiEx: ApiException =>
+ throw wrapCwdsException(apiEx)
}
}
- def listJobs(workspaceNamespace: String, workspaceName: String, runningOnly: Boolean, userInfo: UserInfo): Future[List[CwdsListResponse]] = {
+ def listJobs(workspaceNamespace: String,
+ workspaceName: String,
+ runningOnly: Boolean,
+ userInfo: UserInfo
+ ): Future[List[CwdsListResponse]] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName)(userInfo) map { workspace =>
cwdsDAO.listJobsV1(workspace.workspace.workspaceId, runningOnly)(userInfo)
- } recover {
- case apiEx:ApiException => throw wrapCwdsException(apiEx)
+ } recover { case apiEx: ApiException =>
+ throw wrapCwdsException(apiEx)
}
- }
- def getJob(workspaceNamespace: String, workspaceName: String, jobId: String, userInfo: UserInfo): Future[CwdsListResponse] = {
+ def getJob(workspaceNamespace: String,
+ workspaceName: String,
+ jobId: String,
+ userInfo: UserInfo
+ ): Future[CwdsListResponse] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName)(userInfo) map { workspace =>
val cwdsResponse = cwdsDAO.getJobV1(workspace.workspace.workspaceId, jobId)(userInfo)
logger.info(s"Found job $jobId in cWDS")
cwdsResponse
- } recover {
- case apiEx:ApiException => throw wrapCwdsException(apiEx)
+ } recover { case apiEx: ApiException =>
+ throw wrapCwdsException(apiEx)
}
- }
- private def wrapCwdsException(apiEx:ApiException) = {
+ private def wrapCwdsException(apiEx: ApiException) = {
def extractMessage(responseBody: String) = {
// attempt to extract a human-readable message from the API response. The API response is itself a json object
import spray.json._
responseBody.parseJson.asJsObject.fields("message") match {
- case jss:JsString => jss.value
- case jsv:JsValue => jsv.prettyPrint
+ case jss: JsString => jss.value
+ case jsv: JsValue => jsv.prettyPrint
}
}
// if human-readable message extraction fails, just default to the ApiException message
@@ -300,7 +384,10 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
new FireCloudExceptionWithErrorReport(ErrorReport(apiEx.getCode, errMsg))
}
- def getEntitiesWithType(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[PerRequestMessage] = {
+ def getEntitiesWithType(workspaceNamespace: String,
+ workspaceName: String,
+ userInfo: UserInfo
+ ): Future[PerRequestMessage] =
rawlsDAO.getEntityTypes(workspaceNamespace, workspaceName)(userInfo).flatMap { entityTypeResponse =>
val entityTypes = entityTypeResponse.keys.toList
@@ -312,6 +399,5 @@ class EntityService(rawlsDAO: RawlsDAO, cwdsDAO: CwdsDAO, googleServicesDAO: Goo
RequestComplete(OK, result.flatten)
}
}
- }
}
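
For orientation, importEntitiesFromTSV keys off the first column header, which must look like tsvType:entity_type_id, where tsvType is one of the TsvTypes values (conventionally membership, entity or update) and the _id suffix is stripped to obtain the entity type. A hypothetical upsert TSV for a sample entity type, with invented attribute columns and tab-separated values, would therefore start like:

    entity:sample_id    participant_id    tumor_type
    sample_01           participant_01    primary
    sample_02           participant_02    recurrent
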
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudApiService.scala
index 037940ba3..b553708fb 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudApiService.scala
@@ -5,7 +5,7 @@ import akka.event.Logging.LogLevel
import akka.event.{Logging, LoggingAdapter}
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.headers.CacheDirectives.{`no-cache`, `no-store`}
-import akka.http.scaladsl.model.headers.{RawHeader, `Cache-Control`}
+import akka.http.scaladsl.model.headers.{`Cache-Control`, RawHeader}
import akka.http.scaladsl.model.{HttpEntity, HttpRequest, StatusCodes}
import akka.http.scaladsl.server
import akka.http.scaladsl.server.Directives._
@@ -31,7 +31,9 @@ object FireCloudApiService extends LazyLogging {
import org.broadinstitute.dsde.rawls.model.WorkspaceJsonSupport._
- implicit val errorReportSource: ErrorReportSource = ErrorReportSource("FireCloud") //TODO make sure this doesn't clobber source names globally
+ implicit val errorReportSource: ErrorReportSource = ErrorReportSource(
+ "FireCloud"
+ ) // TODO make sure this doesn't clobber source names globally
ExceptionHandler {
case withErrorReport: FireCloudExceptionWithErrorReport =>
@@ -53,36 +55,36 @@ object FireCloudApiService extends LazyLogging {
logger.error(e.toString)
}
// ErrorReport.apply with "message" kwarg. is specifically used to mute Stack Trace output in HTTP Error Responses
- complete(StatusCodes.InternalServerError -> ErrorReport(message=e.getMessage))
+ complete(StatusCodes.InternalServerError -> ErrorReport(message = e.getMessage))
}
}
}
-trait FireCloudApiService extends CookieAuthedApiService
- with EntityApiService
- with ExportEntitiesApiService
- with LibraryApiService
- with NamespaceApiService
- with NihApiService
- with OauthApiService
- with RegisterApiService
- with WorkspaceApiService
- with WorkspaceV2ApiService
- with NotificationsApiService
- with MethodConfigurationApiService
- with BillingApiService
- with SubmissionApiService
- with StatusApiService
- with MethodsApiService
- with Ga4ghApiService
- with UserApiService
- with ShareLogApiService
- with ManagedGroupApiService
- with CromIamApiService
- with HealthApiService
- with StaticNotebooksApiService
- with PerimeterApiService
-{
+trait FireCloudApiService
+ extends CookieAuthedApiService
+ with EntityApiService
+ with ExportEntitiesApiService
+ with LibraryApiService
+ with NamespaceApiService
+ with NihApiService
+ with OauthApiService
+ with RegisterApiService
+ with WorkspaceApiService
+ with WorkspaceV2ApiService
+ with NotificationsApiService
+ with MethodConfigurationApiService
+ with BillingApiService
+ with SubmissionApiService
+ with StatusApiService
+ with MethodsApiService
+ with Ga4ghApiService
+ with UserApiService
+ with ShareLogApiService
+ with ManagedGroupApiService
+ with CromIamApiService
+ with HealthApiService
+ with StaticNotebooksApiService
+ with PerimeterApiService {
override lazy val log = LoggerFactory.getLogger(getClass)
@@ -125,7 +127,7 @@ trait FireCloudApiService extends CookieAuthedApiService
None
}
} catch {
- case e:Exception =>
+ case e: Exception =>
// error when extracting the response, likely in decoding the raw bytes
None
}
@@ -168,16 +170,15 @@ trait FireCloudApiService extends CookieAuthedApiService
// Note that many Orch APIs are passthroughs, and if the underlying
// service (Rawls, Sam, etc) already returns these headers, Orch
// will not overwrite them.
- private val noCacheNoStore: Directive0 = respondWithDefaultHeaders(
- `Cache-Control`(`no-store`),
- RawHeader("Pragma", `no-cache`.value))
+ private val noCacheNoStore: Directive0 =
+ respondWithDefaultHeaders(`Cache-Control`(`no-store`), RawHeader("Pragma", `no-cache`.value))
// routes under /api
def apiRoutes: server.Route =
- options { complete(StatusCodes.OK) } ~
+ options(complete(StatusCodes.OK)) ~
withExecutionContext(ExecutionContext.global) {
v1RegisterRoutes ~
- methodsApiServiceRoutes ~
+ methodsApiServiceRoutes ~
profileRoutes ~
cromIamApiServiceRoutes ~
methodConfigurationRoutes ~
@@ -190,13 +191,13 @@ trait FireCloudApiService extends CookieAuthedApiService
}
val routeWrappers: Directive[Unit] =
- handleRejections(org.broadinstitute.dsde.firecloud.model.defaultErrorReportRejectionHandler) &
+ handleRejections(org.broadinstitute.dsde.firecloud.model.defaultErrorReportRejectionHandler) &
handleExceptions(FireCloudApiService.exceptionHandler) &
appendTimestampOnFailure &
logRequests &
noCacheNoStore
- def route: server.Route = (routeWrappers) {
+ def route: server.Route = routeWrappers {
cromIamEngineRoutes ~
tosRoutes ~
exportEntitiesRoutes ~
@@ -228,23 +229,26 @@ trait FireCloudApiService extends CookieAuthedApiService
}
-class FireCloudApiServiceImpl(val agoraPermissionService: (UserInfo) => AgoraPermissionService,
- val exportEntitiesByTypeConstructor: (ExportEntitiesByTypeArguments) => ExportEntitiesByTypeActor,
- val entityServiceConstructor: (ModelSchema) => EntityService,
- val libraryServiceConstructor: (UserInfo) => LibraryService,
- val ontologyServiceConstructor: () => OntologyService,
- val namespaceServiceConstructor: (UserInfo) => NamespaceService,
- val nihServiceConstructor: () => NihService,
- val registerServiceConstructor: () => RegisterService,
- val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService,
- val statusServiceConstructor: () => StatusService,
- val permissionReportServiceConstructor: (UserInfo) => PermissionReportService,
- val userServiceConstructor: (UserInfo) => UserService,
- val shareLogServiceConstructor: () => ShareLogService,
- val managedGroupServiceConstructor: (WithAccessToken) => ManagedGroupService,
- val oidcConfig: OpenIDConnectConfiguration)
- (implicit val actorRefFactory: ActorRefFactory,
- val executionContext: ExecutionContext,
- val materializer: Materializer,
- val system: ActorSystem
- ) extends FireCloudApiService with StandardUserInfoDirectives
+class FireCloudApiServiceImpl(
+ val agoraPermissionService: (UserInfo) => AgoraPermissionService,
+ val exportEntitiesByTypeConstructor: (ExportEntitiesByTypeArguments) => ExportEntitiesByTypeActor,
+ val entityServiceConstructor: (ModelSchema) => EntityService,
+ val libraryServiceConstructor: (UserInfo) => LibraryService,
+ val ontologyServiceConstructor: () => OntologyService,
+ val namespaceServiceConstructor: (UserInfo) => NamespaceService,
+ val nihServiceConstructor: () => NihService,
+ val registerServiceConstructor: () => RegisterService,
+ val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService,
+ val statusServiceConstructor: () => StatusService,
+ val permissionReportServiceConstructor: (UserInfo) => PermissionReportService,
+ val userServiceConstructor: (UserInfo) => UserService,
+ val shareLogServiceConstructor: () => ShareLogService,
+ val managedGroupServiceConstructor: (WithAccessToken) => ManagedGroupService,
+ val oidcConfig: OpenIDConnectConfiguration
+)(implicit
+ val actorRefFactory: ActorRefFactory,
+ val executionContext: ExecutionContext,
+ val materializer: Materializer,
+ val system: ActorSystem
+) extends FireCloudApiService
+ with StandardUserInfoDirectives
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudConfig.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudConfig.scala
index 9cd36896b..756546b03 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudConfig.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudConfig.scala
@@ -52,7 +52,8 @@ object FireCloudConfig {
val entitiesPath = workspace.getString("entitiesPath")
val entityQueryPath = workspace.getString("entityQueryPath")
val workspacesEntitiesCopyPath = workspace.getString("workspacesEntitiesCopyPath")
- def workspacesEntitiesCopyUrl(linkExistingEntities: Boolean) = authUrl + workspacesEntitiesCopyPath + "?linkExistingEntities=%s".format(linkExistingEntities)
+ def workspacesEntitiesCopyUrl(linkExistingEntities: Boolean) =
+ authUrl + workspacesEntitiesCopyPath + "?linkExistingEntities=%s".format(linkExistingEntities)
val submissionsCountPath = workspace.getString("submissionsCountPath")
val submissionsPath = workspace.getString("submissionsPath")
val submissionsIdPath = workspace.getString("submissionsIdPath")
@@ -68,20 +69,29 @@ object FireCloudConfig {
val defaultPageSize = rawls.getInt("defaultPageSize")
def entityPathFromWorkspace(namespace: String, name: String) = authUrl + entitiesPath.format(namespace, name)
- def entityQueryPathFromWorkspace(namespace: String, name: String) = authUrl + entityQueryPath.format(namespace, name)
+ def entityQueryPathFromWorkspace(namespace: String, name: String) =
+ authUrl + entityQueryPath.format(namespace, name)
def createGroup(groupName: String) = authUrl + createGroupPath.format(groupName)
- def entityQueryUriFromWorkspaceAndQuery(workspaceNamespace: String, workspaceName: String, entityType: String, query: Option[EntityQuery] = None): Uri = {
- val baseEntityQueryUri = Uri(FireCloudDirectiveUtils.encodeUri(s"${entityQueryPathFromWorkspace(workspaceNamespace, workspaceName)}/$entityType"))
+ def entityQueryUriFromWorkspaceAndQuery(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ query: Option[EntityQuery] = None
+ ): Uri = {
+ val baseEntityQueryUri = Uri(
+ FireCloudDirectiveUtils.encodeUri(
+ s"${entityQueryPathFromWorkspace(workspaceNamespace, workspaceName)}/$entityType"
+ )
+ )
query match {
case Some(q) =>
- val qMap: Map[String, String] = Map(
- ("page", q.page.toString),
- ("pageSize", q.pageSize.toString),
- ("sortField", q.sortField),
- ("sortDirection", SortDirections.toString(q.sortDirection)))
+ val qMap: Map[String, String] = Map(("page", q.page.toString),
+ ("pageSize", q.pageSize.toString),
+ ("sortField", q.sortField),
+ ("sortDirection", SortDirections.toString(q.sortDirection))
+ )
val filteredQMap = q.filterTerms match {
case Some(f) => qMap + ("filterTerms" -> f)
- case _ => qMap
+ case _ => qMap
}
baseEntityQueryUri.withQuery(Query(filteredQMap))
case _ => baseEntityQueryUri
@@ -159,7 +169,7 @@ object FireCloudConfig {
lazy val whitelists: Set[NihAllowlist] = {
val whitelistConfigs = nih.getConfig("whitelists")
- whitelistConfigs.root.asScala.collect { case (name, configObject:ConfigObject) =>
+ whitelistConfigs.root.asScala.collect { case (name, configObject: ConfigObject) =>
val config = configObject.toConfig
val rawlsGroup = config.getString("rawlsGroup")
val fileName = config.getString("fileName")
@@ -183,12 +193,11 @@ object FireCloudConfig {
val enabled = elasticsearch.optionalBoolean("enabled").getOrElse(true)
}
- def parseESServers(confString: String): Seq[Authority] = {
+ def parseESServers(confString: String): Seq[Authority] =
confString.split(',').toIndexedSeq map { hostport =>
val hp = hostport.split(':')
Authority(Host(hp(0)), hp(1).toInt)
}
- }
object GoogleCloud {
// lazy - only required when google is enabled
@@ -197,9 +206,19 @@ object FireCloudConfig {
lazy val priceListEgressKey = googlecloud.getString("priceListEgressKey")
lazy val priceListStorageKey = googlecloud.getString("priceListStorageKey")
lazy val defaultStoragePriceListConf = googlecloud.getConfig("defaultStoragePriceList")
- lazy val defaultStoragePriceList = defaultStoragePriceListConf.root().keySet().asScala.map(key => key -> BigDecimal(defaultStoragePriceListConf.getDouble(key))).toMap
+ lazy val defaultStoragePriceList = defaultStoragePriceListConf
+ .root()
+ .keySet()
+ .asScala
+ .map(key => key -> BigDecimal(defaultStoragePriceListConf.getDouble(key)))
+ .toMap
lazy val defaultEgressPriceListConf = googlecloud.getConfig("defaultEgressPriceList")
- lazy val defaultEgressPriceList = defaultEgressPriceListConf.root().keySet().asScala.map(key => key.toLong -> BigDecimal(defaultEgressPriceListConf.getDouble(key))).toMap
+ lazy val defaultEgressPriceList = defaultEgressPriceListConf
+ .root()
+ .keySet()
+ .asScala
+ .map(key => key.toLong -> BigDecimal(defaultEgressPriceListConf.getDouble(key)))
+ .toMap
val enabled = googlecloud.optionalBoolean("enabled").getOrElse(true)
}
@@ -224,13 +243,12 @@ object FireCloudConfig {
}
implicit class RichConfig(val config: Config) {
- private def getOptional[T](path: String, get: String => T): Option[T] = {
+ private def getOptional[T](path: String, get: String => T): Option[T] =
if (config.hasPath(path)) {
Some(get(path))
} else {
None
}
- }
def optionalString(path: String): Option[String] = getOptional(path, config.getString)
def optionalInt(path: String): Option[Int] = getOptional(path, config.getInt)
def optionalDouble(path: String): Option[Double] = getOptional(path, config.getDouble)
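
parseESServers above splits the configured server string on commas and then on colons, so a worked example (hostnames are placeholders) behaves as follows:

    // hostnames below are placeholders for illustration
    FireCloudConfig.parseESServers("es-1.internal:9300,es-2.internal:9300")
    // == Seq(Authority(Host("es-1.internal"), 9300), Authority(Host("es-2.internal"), 9300))
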
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/HealthChecks.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/HealthChecks.scala
index 597b4c3be..5e4b75662 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/HealthChecks.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/HealthChecks.scala
@@ -14,9 +14,8 @@ object HealthChecks {
val termsOfServiceUrl = "app.terra.bio/#terms-of-service"
}
-class HealthChecks(app: Application)
- (implicit val system: ActorSystem, implicit val executionContext: ExecutionContext)
- extends LazyLogging {
+class HealthChecks(app: Application)(implicit val system: ActorSystem, implicit val executionContext: ExecutionContext)
+ extends LazyLogging {
def healthMonitorChecks: () => Map[Subsystem, Future[SubsystemStatus]] = () => {
val servicesToMonitor = Seq(app.rawlsDAO, app.samDAO, app.thurloeDAO) ++
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/AgoraDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/AgoraDAO.scala
index 02fa36145..166a54226 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/AgoraDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/AgoraDAO.scala
@@ -1,6 +1,11 @@
package org.broadinstitute.dsde.firecloud.dataaccess
-import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{AgoraEntityType, AgoraPermission, EntityAccessControlAgora, Method}
+import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{
+ AgoraEntityType,
+ AgoraPermission,
+ EntityAccessControlAgora,
+ Method
+}
import org.broadinstitute.dsde.firecloud.model.UserInfo
import org.broadinstitute.dsde.rawls.model.ErrorReportSource
import org.broadinstitute.dsde.workbench.util.health.Subsystems
@@ -17,13 +22,21 @@ trait AgoraDAO extends ReportsSubsystemStatus {
implicit val errorReportSource: ErrorReportSource = ErrorReportSource(AgoraDAO.serviceName.value)
def getNamespacePermissions(ns: String, entity: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]]
- def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]]
+ def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]]
- def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit userInfo: UserInfo): Future[List[EntityAccessControlAgora]]
+ def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit
+ userInfo: UserInfo
+ ): Future[List[EntityAccessControlAgora]]
- def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit userInfo: UserInfo): Future[List[EntityAccessControlAgora]]
+ def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit
+ userInfo: UserInfo
+ ): Future[List[EntityAccessControlAgora]]
def getPermission(url: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]]
- def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]]
+ def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]]
override def serviceName: Subsystem = AgoraDAO.serviceName
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/CwdsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/CwdsDAO.scala
index 4f44b13e0..2f10d55e1 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/CwdsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/CwdsDAO.scala
@@ -16,18 +16,12 @@ trait CwdsDAO {
def getSupportedFormats: List[String]
@throws(classOf[ApiException])
- def listJobsV1(workspaceId: String,
- runningOnly: Boolean
- )(implicit userInfo: UserInfo): List[CwdsListResponse]
+ def listJobsV1(workspaceId: String, runningOnly: Boolean)(implicit userInfo: UserInfo): List[CwdsListResponse]
@throws(classOf[ApiException])
- def getJobV1(workspaceId: String,
- jobId: String
- )(implicit userInfo: UserInfo): CwdsListResponse
+ def getJobV1(workspaceId: String, jobId: String)(implicit userInfo: UserInfo): CwdsListResponse
@throws(classOf[ApiException])
- def importV1(workspaceId: String,
- asyncImportRequest: AsyncImportRequest
- )(implicit userInfo: UserInfo): GenericJob
+ def importV1(workspaceId: String, asyncImportRequest: AsyncImportRequest)(implicit userInfo: UserInfo): GenericJob
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/DisabledExternalCredsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/DisabledExternalCredsDAO.scala
index a5c032ed4..23903c2c8 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/DisabledExternalCredsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/DisabledExternalCredsDAO.scala
@@ -12,21 +12,27 @@ class DisabledExternalCredsDAO extends ExternalCredsDAO with LazyLogging {
None
}
- override def putLinkedEraAccount(linkedEraAccount: LinkedEraAccount)(implicit orchInfo: WithAccessToken): Future[Unit] = Future.successful {
+ override def putLinkedEraAccount(
+ linkedEraAccount: LinkedEraAccount
+ )(implicit orchInfo: WithAccessToken): Future[Unit] = Future.successful {
logger.warn("Putting Linked eRA Account to ECM, but ECM is disabled.")
}
- override def deleteLinkedEraAccount(userInfo: UserInfo)(implicit orchInfo: WithAccessToken): Future[Unit] = Future.successful {
- logger.warn("Deleting Linked eRA Account from ECM, but ECM is disabled.")
- }
+ override def deleteLinkedEraAccount(userInfo: UserInfo)(implicit orchInfo: WithAccessToken): Future[Unit] =
+ Future.successful {
+ logger.warn("Deleting Linked eRA Account from ECM, but ECM is disabled.")
+ }
- override def getLinkedEraAccountForUsername(username: String)(implicit orchInfo: WithAccessToken): Future[Option[LinkedEraAccount]] = Future.successful {
+ override def getLinkedEraAccountForUsername(
+ username: String
+ )(implicit orchInfo: WithAccessToken): Future[Option[LinkedEraAccount]] = Future.successful {
logger.warn("Getting Linked eRA Account for username from ECM, but ECM is disabled.")
None
}
- override def getActiveLinkedEraAccounts(implicit orchInfo: WithAccessToken): Future[Seq[LinkedEraAccount]] = Future.successful {
- logger.warn("Getting Active Linked eRA Accounts from ECM, but ECM is disabled.")
- Seq.empty
- }
+ override def getActiveLinkedEraAccounts(implicit orchInfo: WithAccessToken): Future[Seq[LinkedEraAccount]] =
+ Future.successful {
+ logger.warn("Getting Active Linked eRA Accounts from ECM, but ECM is disabled.")
+ Seq.empty
+ }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ESResearchPurposeSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ESResearchPurposeSupport.scala
index 9c89d5eff..ac35598b5 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ESResearchPurposeSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ESResearchPurposeSupport.scala
@@ -5,7 +5,11 @@ import org.elasticsearch.index.query.BoolQueryBuilder
import scala.concurrent.ExecutionContext.Implicits.global
-class ESResearchPurposeSupport(ontologyDAO: OntologyDAO) extends ResearchPurposeSupport with ElasticSearchDAOResearchPurposeSupport {
- override def researchPurposeFilters(researchPurpose: ResearchPurpose, makeAttributeName: String => String): BoolQueryBuilder =
+class ESResearchPurposeSupport(ontologyDAO: OntologyDAO)
+ extends ResearchPurposeSupport
+ with ElasticSearchDAOResearchPurposeSupport {
+ override def researchPurposeFilters(researchPurpose: ResearchPurpose,
+ makeAttributeName: String => String
+ ): BoolQueryBuilder =
researchPurposeFilters(researchPurpose, ontologyDAO, makeAttributeName)
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAO.scala
index 5b5b785fe..aae8e3361 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAO.scala
@@ -5,9 +5,21 @@ import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.LibraryService
import org.broadinstitute.dsde.firecloud.{FireCloudConfig, FireCloudException}
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
-import org.elasticsearch.action.admin.indices.create.{CreateIndexRequest, CreateIndexRequestBuilder, CreateIndexResponse}
-import org.elasticsearch.action.admin.indices.delete.{DeleteIndexRequest, DeleteIndexRequestBuilder, DeleteIndexResponse}
-import org.elasticsearch.action.admin.indices.exists.indices.{IndicesExistsRequest, IndicesExistsRequestBuilder, IndicesExistsResponse}
+import org.elasticsearch.action.admin.indices.create.{
+ CreateIndexRequest,
+ CreateIndexRequestBuilder,
+ CreateIndexResponse
+}
+import org.elasticsearch.action.admin.indices.delete.{
+ DeleteIndexRequest,
+ DeleteIndexRequestBuilder,
+ DeleteIndexResponse
+}
+import org.elasticsearch.action.admin.indices.exists.indices.{
+ IndicesExistsRequest,
+ IndicesExistsRequestBuilder,
+ IndicesExistsResponse
+}
import org.elasticsearch.action.bulk.{BulkRequest, BulkRequestBuilder, BulkResponse}
import org.elasticsearch.action.delete.{DeleteRequest, DeleteRequestBuilder, DeleteResponse}
import org.elasticsearch.action.index.{IndexRequest, IndexRequestBuilder, IndexResponse}
@@ -24,44 +36,44 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
-class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurposeSupport: ResearchPurposeSupport) extends SearchDAO with ElasticSearchDAOSupport with ElasticSearchDAOQuerySupport {
+class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurposeSupport: ResearchPurposeSupport)
+ extends SearchDAO
+ with ElasticSearchDAOSupport
+ with ElasticSearchDAOQuerySupport {
- private final val datatype = "dataset"
+ final private val datatype = "dataset"
initIndex()
// if the index does not exist, create it.
- override def initIndex() = {
+ override def initIndex() =
conditionalRecreateIndex(false)
- }
// delete an existing index, then re-create it.
- override def recreateIndex() = {
+ override def recreateIndex() =
conditionalRecreateIndex(true)
- }
- override def indexExists(): Boolean = {
+ override def indexExists(): Boolean =
executeESRequest[IndicesExistsRequest, IndicesExistsResponse, IndicesExistsRequestBuilder](
client.admin.indices.prepareExists(indexName)
).isExists
- }
override def createIndex() = {
val mapping = makeMapping(FileUtils.readAllTextFromResource(LibraryService.schemaLocation))
executeESRequest[CreateIndexRequest, CreateIndexResponse, CreateIndexRequestBuilder](
- client.admin.indices.prepareCreate(indexName)
+ client.admin.indices
+ .prepareCreate(indexName)
.setSettings(analysisSettings, XContentType.JSON)
.addMapping(datatype, mapping, XContentType.JSON)
- // TODO: set to one shard? https://www.elastic.co/guide/en/elasticsearch/guide/current/relevance-is-broken.html
+ // TODO: set to one shard? https://www.elastic.co/guide/en/elasticsearch/guide/current/relevance-is-broken.html
)
}
// will throw an error if index does not exist
- override def deleteIndex() = {
+ override def deleteIndex() =
executeESRequest[DeleteIndexRequest, DeleteIndexResponse, DeleteIndexRequestBuilder](
client.admin.indices.prepareDelete(indexName)
)
- }
override def bulkIndex(docs: Seq[Document], refresh: Boolean = false): LibraryBulkIndexResponse = {
val bulkRequest = client.prepareBulk
@@ -69,12 +81,14 @@ class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurpo
// this way, the ES client library can change its default for setRefreshPolicy, and we'll inherit the default.
if (refresh)
bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
- docs map {
- case (doc:Document) => bulkRequest.add(client.prepareIndex(indexName, datatype, doc.id).setSource(doc.content.compactPrint, XContentType.JSON))
+ docs map { case (doc: Document) =>
+ bulkRequest.add(
+ client.prepareIndex(indexName, datatype, doc.id).setSource(doc.content.compactPrint, XContentType.JSON)
+ )
}
val bulkResponse = executeESRequest[BulkRequest, BulkResponse, BulkRequestBuilder](bulkRequest)
- val msgs:Map[String,String] = if (bulkResponse.hasFailures) {
+ val msgs: Map[String, String] = if (bulkResponse.hasFailures) {
bulkResponse.getItems.filter(_.isFailed).map(f => f.getId -> f.getFailureMessage).toMap
} else {
Map.empty
@@ -82,19 +96,17 @@ class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurpo
LibraryBulkIndexResponse(bulkResponse.getItems.length, bulkResponse.hasFailures, msgs)
}
- override def indexDocument(doc: Document) = {
- executeESRequest[IndexRequest, IndexResponse, IndexRequestBuilder] (
+ override def indexDocument(doc: Document) =
+ executeESRequest[IndexRequest, IndexResponse, IndexRequestBuilder](
client.prepareIndex(indexName, datatype, doc.id).setSource(doc.content.compactPrint, XContentType.JSON)
)
- }
- override def deleteDocument(id: String) = {
- executeESRequest[DeleteRequest, DeleteResponse, DeleteRequestBuilder] (
+ override def deleteDocument(id: String) =
+ executeESRequest[DeleteRequest, DeleteResponse, DeleteRequestBuilder](
client.prepareDelete(indexName, datatype, id)
)
- }
- private def conditionalRecreateIndex(deleteFirst: Boolean = false) = {
+ private def conditionalRecreateIndex(deleteFirst: Boolean = false) =
try {
logger.info(s"Checking to see if ElasticSearch index '%s' exists ... ".format(indexName))
val exists = indexExists()
@@ -110,21 +122,24 @@ class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurpo
logger.info(s"... ES index '%s' created.".format(indexName))
}
} catch {
- case e: Exception => logger.warn(s"ES index '%s' could not be recreated and may be in an unstable state.".format(indexName), e)
+ case e: Exception =>
+ logger.warn(s"ES index '%s' could not be recreated and may be in an unstable state.".format(indexName), e)
}
- }
- override def findDocuments(criteria: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse] = {
+ override def findDocuments(criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse] =
findDocumentsWithAggregateInfo(client, indexName, criteria, groups, workspacePolicyMap, researchPurposeSupport)
- }
- override def suggestionsFromAll(criteria: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse] = {
+ override def suggestionsFromAll(criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse] =
autocompleteSuggestions(client, indexName, criteria, groups, workspacePolicyMap, researchPurposeSupport)
- }
- override def suggestionsForFieldPopulate(field: String, text: String): Future[Seq[String]] = {
+ override def suggestionsForFieldPopulate(field: String, text: String): Future[Seq[String]] =
populateSuggestions(client, indexName, field, text)
- }
/* see https://www.elastic.co/guide/en/elasticsearch/guide/current/_index_time_search_as_you_type.html
* and https://qbox.io/blog/multi-field-partial-word-autocomplete-in-elasticsearch-using-ngrams
@@ -132,12 +147,11 @@ class ElasticSearchDAO(client: TransportClient, indexName: String, researchPurpo
*
* our default analyzer is based off the english analyzer (https://www.elastic.co/guide/en/elasticsearch/reference/2.4/analysis-lang-analyzer.html#english-analyzer)
* but includes the word_delimiter filter for better searching on data containing underscores, e.g. "tcga_brca"
- *
+ *
* lazy is necessary here because we use it above
*/
- private final lazy val analysisSettings = FileUtils.readAllTextFromResource("library/es-settings.json")
+ final private lazy val analysisSettings = FileUtils.readAllTextFromResource("library/es-settings.json")
- override def status: Future[SubsystemStatus] = {
+ override def status: Future[SubsystemStatus] =
Future(SubsystemStatus(this.indexExists(), None))
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOQuerySupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOQuerySupport.scala
index ea006313d..f02ad4ece 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOQuerySupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOQuerySupport.scala
@@ -25,12 +25,11 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success, Try}
import scala.util.matching.Regex
-
trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
final val HL_START = "<strong class='es-highlight'>"
final val HL_END = "</strong>"
- final val HL_REGEX:Regex = s"$HL_START(.+?)$HL_END".r.unanchored
+ final val HL_REGEX: Regex = s"$HL_START(.+?)$HL_END".r.unanchored
final val AGG_MAX_SIZE = FireCloudConfig.ElasticSearch.maxAggregations
final val AGG_DEFAULT_SIZE = 5
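// AGG_MAX_SIZE is the server-side cap substituted when the UI asks for "unbounded" (0) aggregations;
// AGG_DEFAULT_SIZE is the bucket count used when no explicit size is supplied for an aggregation.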
@@ -73,11 +72,16 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
* Permission matching is done in a filter because there is not the same restriction on the number of terms allowed.
*/
-
- def createQuery(criteria: LibrarySearchParams, groups: Seq[String], workspaceIds: Seq[String], researchPurposeSupport: ResearchPurposeSupport, searchField: String = fieldAll, phrase: Boolean = false): QueryBuilder = {
+ def createQuery(criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspaceIds: Seq[String],
+ researchPurposeSupport: ResearchPurposeSupport,
+ searchField: String = fieldAll,
+ phrase: Boolean = false
+ ): QueryBuilder = {
val query: BoolQueryBuilder = boolQuery // outer query, all subqueries should be added to the must list
query.must(criteria.searchString match {
- case None => matchAllQuery
+ case None => matchAllQuery
case Some(searchTerm) if searchTerm.trim == "" => matchAllQuery
case Some(searchTerm) =>
val fieldSearch = if (phrase) {
@@ -87,15 +91,18 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
}
boolQuery
.should(fieldSearch)
- .should(nestedQuery("parents", matchQuery("parents.label", searchTerm).minimumShouldMatch("3<75%"), ScoreMode.Avg))
+ .should(
+ nestedQuery("parents", matchQuery("parents.label", searchTerm).minimumShouldMatch("3<75%"), ScoreMode.Avg)
+ )
})
- criteria.filters foreach { case (field:String, values:Seq[String]) =>
+ criteria.filters foreach { case (field: String, values: Seq[String]) =>
val fieldQuery = boolQuery // query for possible values of aggregation, added via should
- values foreach { value:String => fieldQuery.should(termQuery(field+".keyword", value))}
+ values foreach { value: String => fieldQuery.should(termQuery(field + ".keyword", value)) }
query.must(fieldQuery)
}
criteria.researchPurpose map {
- def toLibraryAttributeName(name: String): String = AttributeName.toDelimitedName(AttributeName.withLibraryNS(name))
+ def toLibraryAttributeName(name: String): String =
+ AttributeName.toDelimitedName(AttributeName.withLibraryNS(name))
rp => query.must(researchPurposeSupport.researchPurposeFilters(rp, toLibraryAttributeName))
}
@@ -116,22 +123,39 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
// The UI sends 0 to indicate unbounded size for an aggregate. However, we don't actually
// want unbounded/infinite; we impose a server-side limit here, instead of asking the UI to
// know what the limit should be.
- val aggregates = aggFields map { case (k:String,v:Int) => if (v == 0) (k,AGG_MAX_SIZE) else (k,v) }
+ val aggregates = aggFields map { case (k: String, v: Int) => if (v == 0) (k, AGG_MAX_SIZE) else (k, v) }
aggregates.keys foreach { property: String =>
// property here is specifying which attribute to collect aggregation info for
// we use field.keyword here because we want it to use the unanalyzed form of the data for the aggregations
- searchReq.addAggregation(AggregationBuilders.terms(property).field(property + ".keyword").size(aggregates.getOrElse(property, AGG_DEFAULT_SIZE)))
+ searchReq.addAggregation(
+ AggregationBuilders
+ .terms(property)
+ .field(property + ".keyword")
+ .size(aggregates.getOrElse(property, AGG_DEFAULT_SIZE))
+ )
}
searchReq
}
- def createESSearchRequest(client: TransportClient, indexname: String, qmseq: QueryBuilder, from: Int, size: Int): SearchRequestBuilder = {
+ def createESSearchRequest(client: TransportClient,
+ indexname: String,
+ qmseq: QueryBuilder,
+ from: Int,
+ size: Int
+ ): SearchRequestBuilder =
createESSearchRequest(client, indexname, qmseq, from, size, None, None)
- }
- def createESSearchRequest(client: TransportClient, indexname: String, qmseq: QueryBuilder, from: Int, size: Int, sortField: Option[String], sortDirection: Option[String]): SearchRequestBuilder = {
- val search = client.prepareSearch(indexname)
+ def createESSearchRequest(client: TransportClient,
+ indexname: String,
+ qmseq: QueryBuilder,
+ from: Int,
+ size: Int,
+ sortField: Option[String],
+ sortDirection: Option[String]
+ ): SearchRequestBuilder = {
+ val search = client
+ .prepareSearch(indexname)
.setQuery(qmseq)
.setFrom(from)
.setSize(size)
@@ -139,7 +163,7 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
if (sortField.isDefined && sortField.get.trim.nonEmpty) {
val direction = sortDirection match {
case Some("desc") => SortOrder.DESC
- case _ => SortOrder.ASC
+ case _ => SortOrder.ASC
}
search.addSort(sortField.get + ".sort", direction)
}
@@ -147,17 +171,39 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
search
}
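// Builds the search-as-you-type request: reuses createESSearchRequest, skips fetching document source,
// and highlights matches in the suggest field using the HL_START/HL_END markers.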
- def createESAutocompleteRequest(client: TransportClient, indexname: String, qmseq: QueryBuilder, from: Int, size: Int): SearchRequestBuilder = {
+ def createESAutocompleteRequest(client: TransportClient,
+ indexname: String,
+ qmseq: QueryBuilder,
+ from: Int,
+ size: Int
+ ): SearchRequestBuilder = {
val hb = new HighlightBuilder()
- .field(fieldSuggest).fragmentSize(50)
- .preTags(HL_START).postTags(HL_END)
+ .field(fieldSuggest)
+ .fragmentSize(50)
+ .preTags(HL_START)
+ .postTags(HL_END)
createESSearchRequest(client, indexname, qmseq, from, size)
- .setFetchSource(false).highlighter(hb)
+ .setFetchSource(false)
+ .highlighter(hb)
}
- def buildSearchQuery(client: TransportClient, indexname: String, criteria: LibrarySearchParams, groups: Seq[String], workspaceIds: Seq[String], researchPurposeSupport: ResearchPurposeSupport): SearchRequestBuilder = {
- val searchQuery = createESSearchRequest(client, indexname, createQuery(criteria, groups, workspaceIds, researchPurposeSupport), criteria.from, criteria.size, criteria.sortField, criteria.sortDirection)
+ def buildSearchQuery(client: TransportClient,
+ indexname: String,
+ criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspaceIds: Seq[String],
+ researchPurposeSupport: ResearchPurposeSupport
+ ): SearchRequestBuilder = {
+ val searchQuery = createESSearchRequest(
+ client,
+ indexname,
+ createQuery(criteria, groups, workspaceIds, researchPurposeSupport),
+ criteria.from,
+ criteria.size,
+ criteria.sortField,
+ criteria.sortDirection
+ )
// if we are not collecting aggregation data (in the case of pagination), we can skip adding aggregations
// if the search criteria contains elements from all of the aggregatable attributes, then we will be making
// separate queries for each of them. so we can skip adding them in the main search query
@@ -172,79 +218,112 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
searchQuery
}
- def buildAutocompleteQuery(client: TransportClient, indexname: String, criteria: LibrarySearchParams, groups: Seq[String], workspaceIds: Seq[String], researchPurposeSupport: ResearchPurposeSupport): SearchRequestBuilder = {
- createESAutocompleteRequest(client, indexname, createQuery(criteria, groups, workspaceIds, researchPurposeSupport, searchField=fieldSuggest, phrase=true), 0, criteria.size)
- }
+ def buildAutocompleteQuery(client: TransportClient,
+ indexname: String,
+ criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspaceIds: Seq[String],
+ researchPurposeSupport: ResearchPurposeSupport
+ ): SearchRequestBuilder =
+ createESAutocompleteRequest(
+ client,
+ indexname,
+ createQuery(criteria, groups, workspaceIds, researchPurposeSupport, searchField = fieldSuggest, phrase = true),
+ 0,
+ criteria.size
+ )
- def buildAggregateQueries(client: TransportClient, indexname: String, criteria: LibrarySearchParams, groups: Seq[String], workspaceIds: Seq[String], researchPurposeSupport: ResearchPurposeSupport): Seq[SearchRequestBuilder] = {
+ def buildAggregateQueries(client: TransportClient,
+ indexname: String,
+ criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspaceIds: Seq[String],
+ researchPurposeSupport: ResearchPurposeSupport
+ ): Seq[SearchRequestBuilder] =
// for aggregations fields that are part of the current search criteria, we need to do a separate
// aggregate request *without* that term in the search criteria
(criteria.fieldAggregations.keySet.toSeq intersect criteria.filters.keySet.toSeq) map { field: String =>
- val query = createQuery(criteria.copy(filters = criteria.filters - field), groups, workspaceIds, researchPurposeSupport)
+ val query =
+ createQuery(criteria.copy(filters = criteria.filters - field), groups, workspaceIds, researchPurposeSupport)
// setting size to 0, we will ignore the actual search results
addAggregationsToQuery(
createESSearchRequest(client, indexname, query, 0, 0),
// using filter instead of filterKeys which is not reliable
- criteria.fieldAggregations.filter({case (key, value) => key == field}))
+ criteria.fieldAggregations.filter { case (key, value) => key == field }
+ )
}
- }
- def getAggregationsFromResults(aggResults: Aggregations): Seq[LibraryAggregationResponse] = {
+ def getAggregationsFromResults(aggResults: Aggregations): Seq[LibraryAggregationResponse] =
if (aggResults == null)
Seq.empty
else {
aggResults.getAsMap.keySet().asScala.toSeq map { field: String =>
val terms: Terms = aggResults.get(field)
- LibraryAggregationResponse(terms.getName,
- AggregationFieldResults(terms.getSumOfOtherDocCounts.toInt,
+ LibraryAggregationResponse(
+ terms.getName,
+ AggregationFieldResults(
+ terms.getSumOfOtherDocCounts.toInt,
terms.getBuckets.asScala.toList map { bucket: Terms.Bucket =>
AggregationTermResult(bucket.getKey.toString, bucket.getDocCount.toInt)
- }))
+ }
+ )
+ )
}
}
- }
// TODO: we might want to keep a cache of workspaceIds that have been published so we can intersect with the workspaces
// the user has access to before adding them to the filter criteria
- def findDocumentsWithAggregateInfo(client: TransportClient, indexname: String, criteria: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy], researchPurposeSupport: ResearchPurposeSupport): Future[LibrarySearchResponse] = {
+ def findDocumentsWithAggregateInfo(client: TransportClient,
+ indexname: String,
+ criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy],
+ researchPurposeSupport: ResearchPurposeSupport
+ ): Future[LibrarySearchResponse] = {
val workspaceIds = workspacePolicyMap.keys.toSeq
val searchQuery = buildSearchQuery(client, indexname, criteria, groups, workspaceIds, researchPurposeSupport)
- val aggregateQueries = buildAggregateQueries(client, indexname, criteria, groups, workspaceIds, researchPurposeSupport)
+ val aggregateQueries =
+ buildAggregateQueries(client, indexname, criteria, groups, workspaceIds, researchPurposeSupport)
logger.debug(s"main search query: $searchQuery.toJson")
// search future will request aggregate data for aggregatable attributes that are not being searched on
- val searchFuture = Future[SearchResponse](executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchQuery))
+ val searchFuture =
+ Future[SearchResponse](executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchQuery))
logger.debug(s"additional queries for aggregations: $aggregateQueries.toJson")
- val aggFutures:Seq[Future[SearchResponse]] = aggregateQueries map {query: SearchRequestBuilder =>
+ val aggFutures: Seq[Future[SearchResponse]] = aggregateQueries map { query: SearchRequestBuilder =>
Future[SearchResponse](executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](query))
}
val allFutures = Future.sequence(aggFutures :+ searchFuture)
- val response = for (
- allResults <- allFutures
- ) yield LibrarySearchResponse(
- criteria,
- allResults.last.getHits.getTotalHits().toInt,
- allResults.last.getHits.getHits.toList map { hit: SearchHit =>
- addAccessLevel(hit.getSourceAsString.parseJson.asJsObject, workspacePolicyMap)
- },
- allResults flatMap { aggResp => getAggregationsFromResults(aggResp.getAggregations) }
- )
+ val response =
+ for (allResults <- allFutures)
+ yield LibrarySearchResponse(
+ criteria,
+ allResults.last.getHits.getTotalHits().toInt,
+ allResults.last.getHits.getHits.toList map { hit: SearchHit =>
+ addAccessLevel(hit.getSourceAsString.parseJson.asJsObject, workspacePolicyMap)
+ },
+ allResults flatMap { aggResp => getAggregationsFromResults(aggResp.getAggregations) }
+ )
response
}
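// Annotates a search-hit document with a "workspaceAccess" field, using the caller's policy for the
// document's workspaceId when present in the policy map and falling back to NoAccess otherwise.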
def addAccessLevel(doc: JsObject, workspacePolicyMap: Map[String, UserPolicy]): JsObject = {
val docId: Option[JsValue] = doc.fields.get("workspaceId")
val accessStr = (docId match {
- case Some(wsid:JsString) =>
+ case Some(wsid: JsString) =>
workspacePolicyMap.get(wsid.value) map (_.accessPolicyName.value)
case _ => None
}) getOrElse WorkspaceAccessLevels.NoAccess.toString
- JsObject(doc.fields + ("workspaceAccess" -> JsString(accessStr)))
+ JsObject(doc.fields + ("workspaceAccess" -> JsString(accessStr)))
}
- def populateSuggestions(client: TransportClient, indexName: String, field: String, text: String) : Future[Seq[String]] = {
+ def populateSuggestions(client: TransportClient,
+ indexName: String,
+ field: String,
+ text: String
+ ): Future[Seq[String]] = {
/*
goal:
generate suggestions for populating a single field in the catalog wizard,
@@ -259,17 +338,18 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
val keywordField = field + ".suggestKeyword" // non-analyzed variant of the field
val prefixFilter = boolQuery()
- .should(prefixQuery(keywordField, text))
- .should(matchPhrasePrefixQuery(field, text))
+ .should(prefixQuery(keywordField, text))
+ .should(matchPhrasePrefixQuery(field, text))
val aggregationName = "suggTerms"
val termsAgg = AggregationBuilders.terms(aggregationName).field(keywordField).size(10)
- val suggestQuery = client.prepareSearch(indexName)
- .setQuery(prefixFilter)
- .addAggregation(termsAgg)
- .setFetchSource(false)
- .setSize(0)
+ val suggestQuery = client
+ .prepareSearch(indexName)
+ .setQuery(prefixFilter)
+ .addAggregation(termsAgg)
+ .setFetchSource(false)
+ .setSize(0)
val suggestTry = Try(executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](suggestQuery))
@@ -278,7 +358,7 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
val allAggs = suggestResult.getAggregations.asMap().asScala
val termsAgg = allAggs.get(aggregationName)
val buckets = termsAgg match {
- case Some(st:StringTerms) =>
+ case Some(st: StringTerms) =>
st.getBuckets.asScala.map(_.getKey.toString)
case _ =>
logger.warn(s"failed to get populate suggestions for field [$field] and term [$text]")
@@ -292,19 +372,31 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
}
}
- def autocompleteSuggestions(client: TransportClient, indexname: String, criteria: LibrarySearchParams, groups: Seq[String], workspaceIdAccessMap: Map[String, UserPolicy], researchPurposeSupport: ResearchPurposeSupport): Future[LibrarySearchResponse] = {
-
- val searchQuery = buildAutocompleteQuery(client, indexname, criteria, groups, workspaceIdAccessMap.keys.toSeq, researchPurposeSupport)
+ def autocompleteSuggestions(client: TransportClient,
+ indexname: String,
+ criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspaceIdAccessMap: Map[String, UserPolicy],
+ researchPurposeSupport: ResearchPurposeSupport
+ ): Future[LibrarySearchResponse] = {
+
+ val searchQuery = buildAutocompleteQuery(client,
+ indexname,
+ criteria,
+ groups,
+ workspaceIdAccessMap.keys.toSeq,
+ researchPurposeSupport
+ )
logger.debug(s"autocomplete search query: $searchQuery.toJson")
- val searchFuture = Future[SearchResponse](executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchQuery))
+ val searchFuture =
+ Future[SearchResponse](executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchQuery))
- searchFuture map {searchResult =>
+ searchFuture map { searchResult =>
// autocomplete query can return duplicate suggestions. De-dupe them here.
- val suggestions:List[JsObject] = (searchResult.getHits.getHits.toList flatMap { hit =>
+ val suggestions: List[JsObject] = (searchResult.getHits.getHits.toList flatMap { hit =>
if (hit.getHighlightFields.containsKey(fieldSuggest)) {
- hit.getHighlightFields.get(fieldSuggest).fragments map {t =>
-
+ hit.getHighlightFields.get(fieldSuggest).fragments map { t =>
val normalized = t.toString.toLowerCase
val stripped = stripHighlight(normalized)
@@ -312,7 +404,7 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
"suggestion" -> JsString(stripped)
) ++ (findHighlight(normalized) match {
case Some(x) => Map("highlight" -> JsString(x))
- case _ => Map.empty
+ case _ => Map.empty
})
JsObject(resultFields)
@@ -322,25 +414,17 @@ trait ElasticSearchDAOQuerySupport extends ElasticSearchDAOSupport {
}
}).distinct
- LibrarySearchResponse(
- criteria,
- suggestions.size,
- suggestions,
- Seq.empty)
+ LibrarySearchResponse(criteria, suggestions.size, suggestions, Seq.empty)
}
}
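// Helpers for post-processing highlighter fragments: stripHighlight removes the HL markers,
// findHighlight extracts the first highlighted term, if any.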
- def stripHighlight(txt:String): String = {
- txt.replace(HL_START,"").replace(HL_END,"")
- }
+ def stripHighlight(txt: String): String =
+ txt.replace(HL_START, "").replace(HL_END, "")
- def findHighlight(txt:String): Option[String] = {
+ def findHighlight(txt: String): Option[String] =
txt match {
case HL_REGEX(hlt) => Some(hlt)
- case _ => None
+ case _ => None
}
- }
-
}
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOResearchPurposeSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOResearchPurposeSupport.scala
index 2b5cf0e17..89cff8714 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOResearchPurposeSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOResearchPurposeSupport.scala
@@ -11,11 +11,15 @@ import scala.concurrent.ExecutionContext
trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport with LazyLogging {
- def researchPurposeFilters(rp: ResearchPurpose, ontologyDAO: OntologyDAO, makeAttributeName: String => String)(implicit ec: ExecutionContext): BoolQueryBuilder = {
+ def researchPurposeFilters(rp: ResearchPurpose, ontologyDAO: OntologyDAO, makeAttributeName: String => String)(
+ implicit ec: ExecutionContext
+ ): BoolQueryBuilder = {
val durRoot = makeAttributeName(structuredUseRestrictionName)
- def generateDiseaseMatchLogic(rp: ResearchPurpose, ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext): Option[BoolQueryBuilder] = {
+ def generateDiseaseMatchLogic(rp: ResearchPurpose, ontologyDAO: OntologyDAO)(implicit
+ ec: ExecutionContext
+ ): Option[BoolQueryBuilder] =
/*
purpose: DS: Disease focused research
dul:
@@ -32,9 +36,10 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
} else {
None
}
- }
- def generateDiseaseQuery(nodeids: Seq[DiseaseOntologyNodeId], ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext): BoolQueryBuilder = {
+ def generateDiseaseQuery(nodeids: Seq[DiseaseOntologyNodeId], ontologyDAO: OntologyDAO)(implicit
+ ec: ExecutionContext
+ ): BoolQueryBuilder = {
val allnodes = augmentWithDiseaseParents(nodeids, ontologyDAO)
val dsClause = boolQuery()
@@ -44,20 +49,21 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
dsClause
}
- def augmentWithDiseaseParents(nodeids: Seq[DiseaseOntologyNodeId], ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext): Seq[DiseaseOntologyNodeId] = {
+ def augmentWithDiseaseParents(nodeids: Seq[DiseaseOntologyNodeId], ontologyDAO: OntologyDAO)(implicit
+ ec: ExecutionContext
+ ): Seq[DiseaseOntologyNodeId] =
if (nodeids.isEmpty)
nodeids // return unchanged; no ontology nodes to augment
else {
// for all nodes in the research purpose's DS value, query ontology to get their parent nodes
nodeids map (node => ontologyDAO.search(node.uri.toString)) flatMap { allTermResults =>
- val parentsToAugment:Seq[DiseaseOntologyNodeId] = (allTermResults collect {
- case termWithParents => termWithParents.parents.getOrElse(List.empty[TermParent]).map(parent => DiseaseOntologyNodeId(parent.id))
+ val parentsToAugment: Seq[DiseaseOntologyNodeId] = (allTermResults collect { case termWithParents =>
+ termWithParents.parents.getOrElse(List.empty[TermParent]).map(parent => DiseaseOntologyNodeId(parent.id))
}).flatten
// append the parent node info to the original research purpose
nodeids ++ parentsToAugment
}
}
- }
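// builds a term query against a single consent code nested under the structured data-use restriction attribute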
def encode[T](code: String, value: T) = termQuery(s"$durRoot.$code", value)
@@ -69,9 +75,10 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
*/
if (rp.NAGR) {
bool.must(encode("NAGR", false))
- bool.must(boolQuery()
- .should(encode("GRU", true))
- .should(encode("HMB", true))
+ bool.must(
+ boolQuery()
+ .should(encode("GRU", true))
+ .should(encode("HMB", true))
)
}
@@ -87,11 +94,10 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
/*
purpose: POA: Study population origins or ancestry
dul: Any dataset tagged with GRU
- */
+ */
if (rp.POA)
bool.must(encode("GRU", true))
-
/*
purpose: DS: Disease focused research
dul:
@@ -115,9 +121,10 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
nmdsClause.should(encode("NMDS", false))
if (rp.DS.nonEmpty) {
generateDiseaseMatchLogic(rp, ontologyDAO) map { dsClause =>
- nmdsClause.should(boolQuery()
- .must(encode("NMDS", true))
- .must(dsClause)
+ nmdsClause.should(
+ boolQuery()
+ .must(encode("NMDS", true))
+ .must(dsClause)
)
}
}
@@ -132,12 +139,14 @@ trait ElasticSearchDAOResearchPurposeSupport extends DataUseRestrictionSupport w
*/
if (rp.NCTRL) {
val nctrlClause = boolQuery()
- nctrlClause.should(boolQuery()
- .must(encode("NCTRL", false))
- .must(boolQuery()
- .should(encode("GRU", true))
- .should(encode("HMB", true))
- )
+ nctrlClause.should(
+ boolQuery()
+ .must(encode("NCTRL", false))
+ .must(
+ boolQuery()
+ .should(encode("GRU", true))
+ .should(encode("HMB", true))
+ )
)
if (rp.DS.nonEmpty)
nctrlClause.should(generateDiseaseQuery(rp.DS, ontologyDAO))
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOSupport.scala
index 175e1b769..4cfe97915 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchDAOSupport.scala
@@ -16,8 +16,6 @@ import scala.util.{Failure, Success, Try}
trait ElasticSearchDAOSupport extends LazyLogging with PerformanceLogging {
-
-
def executeESRequest[T <: ActionRequest, U <: ActionResponse, V <: ActionRequestBuilder[T, U, V]](req: V): U = {
val tick = Instant.now()
val responseTry = Try(req.get())
@@ -28,7 +26,12 @@ trait ElasticSearchDAOSupport extends LazyLogging with PerformanceLogging {
s
case Failure(f) =>
perfLogger.info(perfmsg(req.getClass.getSimpleName, "failure", tick, tock))
- logger.warn(s"ElasticSearch %s request failed in %s ms: %s".format(req.getClass.getName, tock.toEpochMilli-tick.toEpochMilli, f.getMessage))
+ logger.warn(
+ s"ElasticSearch %s request failed in %s ms: %s".format(req.getClass.getName,
+ tock.toEpochMilli - tick.toEpochMilli,
+ f.getMessage
+ )
+ )
throw new FireCloudException("ElasticSearch request failed", f)
}
}
@@ -36,7 +39,7 @@ trait ElasticSearchDAOSupport extends LazyLogging with PerformanceLogging {
def makeMapping(attributeJson: String): String = {
// generate mappings from the Library schema file
val definition = attributeJson.parseJson.convertTo[AttributeDefinition]
- val attributeDetailMap = definition.properties filter(_._2.indexable.getOrElse(true)) map {
+ val attributeDetailMap = definition.properties filter (_._2.indexable.getOrElse(true)) map {
case (label: String, detail: AttributeDetail) => createType(label, detail)
}
/* add the additional mappings that aren't tracked in the schema file:
@@ -44,13 +47,18 @@ trait ElasticSearchDAOSupport extends LazyLogging with PerformanceLogging {
* - _discoverableByGroups property to hold discover-mode permissions
* - parents.order and parents.label for ontology-aware search
*/
- val addlMappings:Map[String, ESPropertyFields] = Map(
+ val addlMappings: Map[String, ESPropertyFields] = Map(
fieldSuggest -> ESType.suggestField("string"),
fieldDiscoverableByGroups -> ESInternalType("string"),
- fieldOntologyParents -> ESNestedType(Map(
- fieldOntologyParentsLabel -> ESInnerField("string", include_in_all=Some(false), copy_to=Some(ElasticSearch.fieldSuggest)),
- fieldOntologyParentsOrder -> ESInnerField("integer", include_in_all=Some(false))
- ))
+ fieldOntologyParents -> ESNestedType(
+ Map(
+ fieldOntologyParentsLabel -> ESInnerField("string",
+ include_in_all = Some(false),
+ copy_to = Some(ElasticSearch.fieldSuggest)
+ ),
+ fieldOntologyParentsOrder -> ESInnerField("integer", include_in_all = Some(false))
+ )
+ )
)
val props = attributeDetailMap ++ addlMappings
ESDatasetProperty(props).toJson.prettyPrint
@@ -59,13 +67,13 @@ trait ElasticSearchDAOSupport extends LazyLogging with PerformanceLogging {
def createType(label: String, detail: AttributeDetail): (String, ESPropertyFields) = {
val itemType = detail match {
case x if x.`type` == "array" && x.items.isDefined => x.items.get.`type`
- case _ => detail.`type`
+ case _ => detail.`type`
}
val searchSuggest = itemType == "string"
val createSuggest = detail.typeahead.contains("populate")
val isAggregate = detail match {
case x if x.aggregate.isDefined => true
- case _ => false
+ case _ => false
}
label -> ESType(itemType, createSuggest, searchSuggest, isAggregate)
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchOntologyDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchOntologyDAO.scala
index 06d44d13f..50f6507ae 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchOntologyDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchOntologyDAO.scala
@@ -3,7 +3,11 @@ package org.broadinstitute.dsde.firecloud.dataaccess
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.impOntologyTermResource
import org.broadinstitute.dsde.firecloud.model.Ontology.TermResource
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
-import org.elasticsearch.action.admin.indices.exists.indices.{IndicesExistsRequest, IndicesExistsRequestBuilder, IndicesExistsResponse}
+import org.elasticsearch.action.admin.indices.exists.indices.{
+ IndicesExistsRequest,
+ IndicesExistsRequestBuilder,
+ IndicesExistsResponse
+}
import org.elasticsearch.action.get.{GetRequest, GetRequestBuilder, GetResponse}
import org.elasticsearch.action.search.{SearchRequest, SearchRequestBuilder, SearchResponse}
import org.elasticsearch.client.transport.TransportClient
@@ -13,10 +17,11 @@ import spray.json._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
-class ElasticSearchOntologyDAO(client: TransportClient, indexName: String) extends OntologyDAO with ElasticSearchDAOSupport {
-
- private final val datatype = "ontology_term"
+class ElasticSearchOntologyDAO(client: TransportClient, indexName: String)
+ extends OntologyDAO
+ with ElasticSearchDAOSupport {
+ final private val datatype = "ontology_term"
override def search(term: String): List[TermResource] = {
val getRequest = client.prepareGet(indexName, datatype, term)
@@ -32,19 +37,23 @@ class ElasticSearchOntologyDAO(client: TransportClient, indexName: String) exten
val prefix = term.toLowerCase
// user's term must be a prefix in either label or synonyms
val query = boolQuery()
- .must(termQuery("ontology.keyword", "Disease"))
- .must(termQuery("usable", true))
- .must(boolQuery()
+ .must(termQuery("ontology.keyword", "Disease"))
+ .must(termQuery("usable", true))
+ .must(
+ boolQuery()
.should(termQuery("label.keyword", prefix).boost(10)) // exact match on label gets pushed to top
- .should(matchPhrasePrefixQuery("label", prefix).boost(5)) // prefix matches on label are more relevant than ...
+ .should(
+ matchPhrasePrefixQuery("label", prefix).boost(5)
+ ) // prefix matches on label are more relevant than ...
.should(matchPhrasePrefixQuery("synonyms", prefix)) // prefix matches on synonyms
.minimumShouldMatch(1) // match at least one of the above cases
- )
+ )
- val searchRequest = client.prepareSearch(indexName)
+ val searchRequest = client
+ .prepareSearch(indexName)
.setQuery(query)
- .setSize(20)
- .setFetchSource(List("id","ontology","usable","label","synonyms","definition").toArray, null)
+ .setSize(20)
+ .setFetchSource(List("id", "ontology", "usable", "label", "synonyms", "definition").toArray, null)
val autocompleteResults = executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchRequest)
@@ -56,13 +65,11 @@ class ElasticSearchOntologyDAO(client: TransportClient, indexName: String) exten
termResources
}
- private def indexExists: Boolean = {
+ private def indexExists: Boolean =
executeESRequest[IndicesExistsRequest, IndicesExistsResponse, IndicesExistsRequestBuilder](
client.admin.indices.prepareExists(indexName)
).isExists
- }
- override def status: Future[SubsystemStatus] = {
+ override def status: Future[SubsystemStatus] =
Future(SubsystemStatus(indexExists, None))
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchShareLogDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchShareLogDAO.scala
index 9de9e6285..a3b9ab127 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchShareLogDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ElasticSearchShareLogDAO.scala
@@ -6,8 +6,16 @@ import org.broadinstitute.dsde.firecloud.FireCloudException
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.impShareFormat
import org.broadinstitute.dsde.firecloud.model.ShareLog.{Share, ShareType}
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
-import org.elasticsearch.action.admin.indices.create.{CreateIndexRequest, CreateIndexRequestBuilder, CreateIndexResponse}
-import org.elasticsearch.action.admin.indices.exists.indices.{IndicesExistsRequest, IndicesExistsRequestBuilder, IndicesExistsResponse}
+import org.elasticsearch.action.admin.indices.create.{
+ CreateIndexRequest,
+ CreateIndexRequestBuilder,
+ CreateIndexResponse
+}
+import org.elasticsearch.action.admin.indices.exists.indices.{
+ IndicesExistsRequest,
+ IndicesExistsRequestBuilder,
+ IndicesExistsResponse
+}
import org.elasticsearch.action.get.{GetRequest, GetRequestBuilder, GetResponse}
import org.elasticsearch.action.index.{IndexRequest, IndexRequestBuilder, IndexResponse}
import org.elasticsearch.action.search.{SearchRequest, SearchRequestBuilder, SearchResponse}
@@ -25,6 +33,7 @@ import scala.util.hashing.MurmurHash3
import scala.util.{Failure, Success, Try}
trait ShareQueries {
+
/**
* Makes an ElasticSearch query builder to get user shares
* @param userId ID of user whose shares to get
@@ -47,10 +56,14 @@ trait ShareQueries {
* @param client The ElasticSearch client
* @param indexName The name of the target share log index in ElasticSearch
*/
-class ElasticSearchShareLogDAO(client: TransportClient, indexName: String, refreshMode: RefreshPolicy = RefreshPolicy.NONE)
- extends ShareLogDAO with ElasticSearchDAOSupport with ShareQueries {
+class ElasticSearchShareLogDAO(client: TransportClient,
+ indexName: String,
+ refreshMode: RefreshPolicy = RefreshPolicy.NONE
+) extends ShareLogDAO
+ with ElasticSearchDAOSupport
+ with ShareQueries {
- lazy private final val datatype = "sharelog"
+ final private lazy val datatype = "sharelog"
init // checks for the presence of the index
@@ -83,9 +96,8 @@ class ElasticSearchShareLogDAO(client: TransportClient, indexName: String, refre
* @param shareType The type (workspace, group, or method) see `ShareLog`
* @return The records of the shares - see `ShareLog.Share`
*/
- override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] = {
+ override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] =
sharees map { sharee => logShare(userId, sharee, shareType) }
- }
/**
* Gets a share by the ID, a `MurmurHash3` of `userId` + `sharee` + `shareType`
@@ -98,7 +110,7 @@ class ElasticSearchShareLogDAO(client: TransportClient, indexName: String, refre
val getSharesQuery = client.prepareGet(indexName, datatype, id)
Try(executeESRequest[GetRequest, GetResponse, GetRequestBuilder](getSharesQuery)) match {
case Success(get) if get.isExists => get.getSourceAsString.parseJson.convertTo[Share]
- case Success(_) => throw new FireCloudException(s"share not found")
+ case Success(_) => throw new FireCloudException(s"share not found")
case Failure(f) => throw new FireCloudException(s"error getting share for $share: ${f.getMessage}")
}
}
@@ -123,9 +135,8 @@ class ElasticSearchShareLogDAO(client: TransportClient, indexName: String, refre
case hits =>
if (hits.totalHits == 0)
Seq.empty[Share]
- else
- if (hits.totalHits >= 100) logger.warn(s"Number of shares for user $userId has reached or exceeded 100.")
- getSharesResponse.getHits.getHits.toList map (_.getSourceAsString.parseJson.convertTo[Share])
+ else if (hits.totalHits >= 100) logger.warn(s"Number of shares for user $userId has reached or exceeded 100.")
+ getSharesResponse.getHits.getHits.toList map (_.getSourceAsString.parseJson.convertTo[Share])
}
}
@@ -137,21 +148,20 @@ class ElasticSearchShareLogDAO(client: TransportClient, indexName: String, refre
//
// override def autocomplete(userId: String, term: String): List[String] = ???
- private def indexExists: Boolean = {
+ private def indexExists: Boolean =
executeESRequest[IndicesExistsRequest, IndicesExistsResponse, IndicesExistsRequestBuilder](
- client.admin.indices.prepareExists(indexName)
+ client.admin.indices.prepareExists(indexName)
).isExists
- }
- private def init: Unit = {
+ private def init: Unit =
if (!indexExists) {
executeESRequest[CreateIndexRequest, CreateIndexResponse, CreateIndexRequestBuilder](
- client.admin.indices.prepareCreate(indexName))
+ client.admin.indices.prepareCreate(indexName)
+ )
// Try one more time and fail if index creation fails
if (!indexExists)
- throw new FireCloudException(s"index $indexName does not exist!")
+ throw new FireCloudException(s"index $indexName does not exist!")
}
- }
/**
* Uses MurmurHash3 for quick hashing -
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ExternalCredsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ExternalCredsDAO.scala
index 401803d89..82f71d32b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ExternalCredsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ExternalCredsDAO.scala
@@ -17,7 +17,9 @@ trait ExternalCredsDAO {
def deleteLinkedEraAccount(userInfo: UserInfo)(implicit orchInfo: WithAccessToken): Future[Unit]
@throws(classOf[ApiException])
- def getLinkedEraAccountForUsername(username: String)(implicit orchInfo: WithAccessToken): Future[Option[LinkedEraAccount]]
+ def getLinkedEraAccountForUsername(username: String)(implicit
+ orchInfo: WithAccessToken
+ ): Future[Option[LinkedEraAccount]]
@throws(classOf[ApiException])
def getActiveLinkedEraAccounts(implicit orchInfo: WithAccessToken): Future[Seq[LinkedEraAccount]]
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/GoogleServicesDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/GoogleServicesDAO.scala
index 52b2061b1..ac8299417 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/GoogleServicesDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/GoogleServicesDAO.scala
@@ -23,15 +23,14 @@ trait GoogleServicesDAO extends ReportsSubsystemStatus {
def getAdminUserAccessToken: String
def getBucketObjectAsInputStream(bucketName: String, objectKey: String): InputStream
def getObjectResourceUrl(bucketName: String, objectKey: String): String
- def getUserProfile(accessToken: WithAccessToken)
- (implicit executionContext: ExecutionContext): Future[HttpResponse]
+ def getUserProfile(accessToken: WithAccessToken)(implicit executionContext: ExecutionContext): Future[HttpResponse]
val fetchPriceList: Future[GooglePriceList]
-
+
def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, objectContents: Array[Byte]): GcsPath
def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, tempFile: File): GcsPath
- def deleteGoogleGroup(groupEmail: String) : Unit
+ def deleteGoogleGroup(groupEmail: String): Unit
def createGoogleGroup(groupName: String): Option[String]
def addMemberToAnonymizedGoogleGroup(groupName: String, targetUserEmail: String): Option[String]
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpAgoraDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpAgoraDAO.scala
index 39c8d9b09..5c050b8e5 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpAgoraDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpAgoraDAO.scala
@@ -4,13 +4,21 @@ import akka.actor.ActorSystem
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.Uri
import akka.stream.Materializer
-import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{AgoraEntityType, AgoraPermission, EntityAccessControlAgora, Method}
+import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{
+ AgoraEntityType,
+ AgoraPermission,
+ EntityAccessControlAgora,
+ Method
+}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.UserInfo
import org.broadinstitute.dsde.firecloud.utils.RestJsonClient
import org.broadinstitute.dsde.firecloud.webservice.MethodsApiServiceUrls
import org.broadinstitute.dsde.firecloud.{FireCloudConfig, FireCloudExceptionWithErrorReport}
-import org.broadinstitute.dsde.workbench.util.health.StatusJsonSupport.{StatusCheckResponseFormat, SubsystemStatusFormat}
+import org.broadinstitute.dsde.workbench.util.health.StatusJsonSupport.{
+ StatusCheckResponseFormat,
+ SubsystemStatusFormat
+}
import org.broadinstitute.dsde.workbench.util.health.Subsystems.Subsystem
import org.broadinstitute.dsde.workbench.util.health.{StatusCheckResponse, SubsystemStatus}
import spray.json.DefaultJsonProtocol._
@@ -19,57 +27,70 @@ import spray.json._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
-class HttpAgoraDAO(config: FireCloudConfig.Agora.type)(implicit val system: ActorSystem, implicit val materializer: Materializer, implicit val executionContext: ExecutionContext)
- extends AgoraDAO with SprayJsonSupport with RestJsonClient with MethodsApiServiceUrls {
+class HttpAgoraDAO(config: FireCloudConfig.Agora.type)(implicit val system: ActorSystem,
+ implicit val materializer: Materializer,
+ implicit val executionContext: ExecutionContext
+) extends AgoraDAO
+ with SprayJsonSupport
+ with RestJsonClient
+ with MethodsApiServiceUrls {
- private def getNamespaceUrl(ns: String, entity: String): String = {
+ private def getNamespaceUrl(ns: String, entity: String): String =
s"${config.authUrl}/$entity/$ns/permissions"
- }
- private def getMultiEntityPermissionUrl(entityType: AgoraEntityType.Value) = {
+ private def getMultiEntityPermissionUrl(entityType: AgoraEntityType.Value) =
s"${config.authUrl}/${AgoraEntityType.toPath(entityType)}/permissions"
- }
- override def getNamespacePermissions(ns: String, entity: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]] =
- authedRequestToObject[List[AgoraPermission]]( Get(getNamespaceUrl(ns, entity)) )
+ override def getNamespacePermissions(ns: String, entity: String)(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] =
+ authedRequestToObject[List[AgoraPermission]](Get(getNamespaceUrl(ns, entity)))
- override def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]] =
- authedRequestToObject[List[AgoraPermission]]( Post(getNamespaceUrl(ns, entity), perms) )
+ override def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] =
+ authedRequestToObject[List[AgoraPermission]](Post(getNamespaceUrl(ns, entity), perms))
- override def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit userInfo: UserInfo): Future[List[EntityAccessControlAgora]] = {
- authedRequestToObject[List[EntityAccessControlAgora]]( Post(getMultiEntityPermissionUrl(entityType), entities) )
- }
+ override def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit
+ userInfo: UserInfo
+ ): Future[List[EntityAccessControlAgora]] =
+ authedRequestToObject[List[EntityAccessControlAgora]](Post(getMultiEntityPermissionUrl(entityType), entities))
- override def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit userInfo: UserInfo): Future[List[EntityAccessControlAgora]] = {
+ override def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit
+ userInfo: UserInfo
+ ): Future[List[EntityAccessControlAgora]] =
authedRequestToObject[List[EntityAccessControlAgora]](Put(remoteMultiPermissionsUrl, inputs))
- }
- override def getPermission(url: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
+ override def getPermission(url: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]] =
authedRequestToObject[List[AgoraPermission]](Get(url))
- }
- override def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
+ override def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] =
authedRequestToObject[List[AgoraPermission]](Post(url, agoraPermissions))
- }
override def status: Future[SubsystemStatus] = {
- val agoraStatusCheck = unAuthedRequestToObject[StatusCheckResponse](Get(Uri(config.baseUrl).withPath(Uri.Path("/status"))))
+ val agoraStatusCheck =
+ unAuthedRequestToObject[StatusCheckResponse](Get(Uri(config.baseUrl).withPath(Uri.Path("/status"))))
agoraStatusCheck map { agoraStatus =>
if (agoraStatus.ok)
SubsystemStatus(ok = true, None)
else
- SubsystemStatus(ok = false, Some(agoraStatus.systems.map{
- case (k:Subsystem, v:SubsystemStatus) => s"""$k : ${SubsystemStatusFormat.write(v).compactPrint}"""
- case x => x.toString
- }.toList))
+ SubsystemStatus(
+ ok = false,
+ Some(agoraStatus.systems.map {
+ case (k: Subsystem, v: SubsystemStatus) => s"""$k : ${SubsystemStatusFormat.write(v).compactPrint}"""
+ case x => x.toString
+ }.toList)
+ )
} recover {
- case fcee:FireCloudExceptionWithErrorReport =>
+ case fcee: FireCloudExceptionWithErrorReport =>
// attempt to make the underlying message prettier
val parseTry = Try(fcee.errorReport.message.parseJson.compactPrint.replace("\"", "'")).toOption
val msg = parseTry.getOrElse(fcee.errorReport.message)
SubsystemStatus(ok = false, Some(List(msg)))
- case e:Exception =>
+ case e: Exception =>
SubsystemStatus(ok = false, Some(List(e.getMessage)))
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAO.scala
index 491498ffc..a24fc342f 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAO.scala
@@ -22,9 +22,9 @@ object HttpCwdsDAO {
class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends CwdsDAO {
- private final val RUNNING_STATUSES: java.util.List[String] = List("CREATED", "QUEUED", "RUNNING").asJava
+ final private val RUNNING_STATUSES: java.util.List[String] = List("CREATED", "QUEUED", "RUNNING").asJava
- private final val STATUS_TRANSLATION: Map[GenericJob.StatusEnum,String] = Map(
+ final private val STATUS_TRANSLATION: Map[GenericJob.StatusEnum, String] = Map(
// there is no effective difference between Translating and ReadyForUpsert for our purposes
CREATED -> "Translating",
QUEUED -> "Translating",
@@ -35,7 +35,7 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
UNKNOWN -> "Error"
)
- private final val TYPE_TRANSLATION: Map[String, ImportRequest.TypeEnum] = Map(
+ final private val TYPE_TRANSLATION: Map[String, ImportRequest.TypeEnum] = Map(
"pfb" -> ImportRequest.TypeEnum.PFB,
"tdrexport" -> ImportRequest.TypeEnum.TDRMANIFEST,
"rawlsjson" -> ImportRequest.TypeEnum.RAWLSJSON
@@ -45,8 +45,9 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
override def getSupportedFormats: List[String] = supportedFormats
- override def listJobsV1(workspaceId: String, runningOnly: Boolean)(implicit userInfo: UserInfo)
- : scala.collection.immutable.List[CwdsListResponse] = {
+ override def listJobsV1(workspaceId: String, runningOnly: Boolean)(implicit
+ userInfo: UserInfo
+ ): scala.collection.immutable.List[CwdsListResponse] = {
// determine the proper cWDS statuses based on the runningOnly argument
// the Java API expects null when not specifying statuses
val statuses = if (runningOnly) RUNNING_STATUSES else null
@@ -55,7 +56,8 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
jobApi.setApiClient(getApiClient(userInfo.accessToken.token))
// query cWDS for its jobs, and translate the response to CwdsListResponse format
- jobApi.jobsInInstanceV1(UUID.fromString(workspaceId), statuses)
+ jobApi
+ .jobsInInstanceV1(UUID.fromString(workspaceId), statuses)
.asScala
.map(toCwdsListResponse)
.toList
@@ -68,9 +70,9 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
toCwdsListResponse(jobApi.jobStatusV1(UUID.fromString(jobId)))
}
- override def importV1(workspaceId: String,
- asyncImportRequest: AsyncImportRequest
- )(implicit userInfo: UserInfo): GenericJob = {
+ override def importV1(workspaceId: String, asyncImportRequest: AsyncImportRequest)(implicit
+ userInfo: UserInfo
+ ): GenericJob = {
val importApi: ImportApi = new ImportApi()
importApi.setApiClient(getApiClient(userInfo.accessToken.token))
@@ -85,7 +87,9 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
// as of this writing, the only available option is "tdrSyncPermissions"
asyncImportRequest.options.map { opts =>
opts.tdrSyncPermissions.map { tdrSyncPermissions =>
- importRequest.setOptions(Map[String, Object]("tdrSyncPermissions" -> tdrSyncPermissions.asInstanceOf[Object]).asJava)
+ importRequest.setOptions(
+ Map[String, Object]("tdrSyncPermissions" -> tdrSyncPermissions.asInstanceOf[Object]).asJava
+ )
}
opts.isUpsert.map { isUpsert =>
importRequest.setOptions(Map[String, Object]("isUpsert" -> isUpsert.asInstanceOf[Object]).asJava)
@@ -96,22 +100,22 @@ class HttpCwdsDAO(enabled: Boolean, supportedFormats: List[String]) extends Cwds
importRequest
}
- protected[dataaccess] def toCwdsImportType(input: String): ImportRequest.TypeEnum = {
- TYPE_TRANSLATION.getOrElse(input,
- throw new FireCloudException("Import type unknown; possible values are: " + TYPE_TRANSLATION.keys.mkString))
- }
+ protected[dataaccess] def toCwdsImportType(input: String): ImportRequest.TypeEnum =
+ TYPE_TRANSLATION.getOrElse(
+ input,
+ throw new FireCloudException("Import type unknown; possible values are: " + TYPE_TRANSLATION.keys.mkString)
+ )
- protected[dataaccess] def toCwdsListResponse(cwdsJob: GenericJob): CwdsListResponse = {
+ protected[dataaccess] def toCwdsListResponse(cwdsJob: GenericJob): CwdsListResponse =
CwdsListResponse(jobId = cwdsJob.getJobId.toString,
- status = toCwdsStatus(cwdsJob.getStatus),
- filetype = cwdsJob.getJobType.getValue,
- message = Option(cwdsJob.getErrorMessage))
- }
+ status = toCwdsStatus(cwdsJob.getStatus),
+ filetype = cwdsJob.getJobType.getValue,
+ message = Option(cwdsJob.getErrorMessage)
+ )
- protected[dataaccess] def toCwdsStatus(cwdsStatus: GenericJob.StatusEnum): String = {
+ protected[dataaccess] def toCwdsStatus(cwdsStatus: GenericJob.StatusEnum): String =
// don't fail status translation if status somehow could not be found
STATUS_TRANSLATION.getOrElse(cwdsStatus, "Unknown")
- }
private def getApiClient(accessToken: String): ApiClient = {
// prepare the cWDS client
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpExternalCredsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpExternalCredsDAO.scala
index 76151d0b7..020864fdf 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpExternalCredsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpExternalCredsDAO.scala
@@ -19,12 +19,11 @@ class HttpExternalCredsDAO(implicit val executionContext: ExecutionContext) exte
private lazy val restTemplate = new RestTemplate
- private def handleError[A](e: HttpClientErrorException, operation: String): Option[A] = {
+ private def handleError[A](e: HttpClientErrorException, operation: String): Option[A] =
e.getStatusCode.value() match {
case HttpStatusCodes.STATUS_CODE_NOT_FOUND => None
case _ => throw new WorkbenchException(s"Failed to $operation: ${e.getMessage}")
}
- }
override def getLinkedAccount(implicit userInfo: UserInfo): Future[Option[LinkedEraAccount]] = Future {
val oauthApi: OauthApi = getOauthApi(userInfo.accessToken.token)
@@ -36,21 +35,25 @@ class HttpExternalCredsDAO(implicit val executionContext: ExecutionContext) exte
}
}
- override def putLinkedEraAccount(linkedEraAccount: LinkedEraAccount)(implicit orchInfo: WithAccessToken): Future[Unit] = Future {
+ override def putLinkedEraAccount(
+ linkedEraAccount: LinkedEraAccount
+ )(implicit orchInfo: WithAccessToken): Future[Unit] = Future {
val adminApi = getAdminApi(orchInfo.accessToken.token)
adminApi.putLinkedAccountWithFakeToken(unapply(linkedEraAccount), Provider.ERA_COMMONS)
}
override def deleteLinkedEraAccount(userInfo: UserInfo)(implicit orchInfo: WithAccessToken): Future[Unit] = Future {
val adminApi = getAdminApi(orchInfo.accessToken.token)
- try {
+ try
adminApi.adminDeleteLinkedAccount(userInfo.id, Provider.ERA_COMMONS)
- } catch {
+ catch {
case e: HttpClientErrorException => handleError(e, "DELETE eRA Linked Account")
}
}
- override def getLinkedEraAccountForUsername(username: String)(implicit orchInfo: WithAccessToken): Future[Option[LinkedEraAccount]] = Future {
+ override def getLinkedEraAccountForUsername(
+ username: String
+ )(implicit orchInfo: WithAccessToken): Future[Option[LinkedEraAccount]] = Future {
val adminApi = getAdminApi(orchInfo.accessToken.token)
try {
val adminLinkInfo = adminApi.getLinkedAccountForExternalId(Provider.ERA_COMMONS, username)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAO.scala
index 97a92f527..630492c5b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAO.scala
@@ -47,8 +47,7 @@ import scala.util.{Failure, Success, Try}
case class GooglePriceList(prices: GooglePrices, version: String, updated: String)
/** Partial price list. Attributes can be added as needed to import prices for more products. */
-case class GooglePrices(cpBigstoreStorage: Map[String, BigDecimal],
- cpComputeengineInternetEgressNA: UsTieredPriceItem)
+case class GooglePrices(cpBigstoreStorage: Map[String, BigDecimal], cpComputeengineInternetEgressNA: UsTieredPriceItem)
/** Tiered price item containing only US currency.
*
@@ -62,12 +61,20 @@ object GooglePriceListJsonProtocol extends DefaultJsonProtocol with SprayJsonSup
implicit object UsTieredPriceItemFormat extends RootJsonFormat[UsTieredPriceItem] {
override def write(value: UsTieredPriceItem): JsValue = ???
override def read(json: JsValue): UsTieredPriceItem = json match {
- case JsObject(values) => UsTieredPriceItem(values("tiers").asJsObject.fields.map{ case (name, value) => name.toLong -> BigDecimal(value.toString)})
+ case JsObject(values) =>
+ UsTieredPriceItem(values("tiers").asJsObject.fields.map { case (name, value) =>
+ name.toLong -> BigDecimal(value.toString)
+ })
case x => throw new DeserializationException("invalid value: " + x)
}
}
- implicit val GooglePricesFormat: RootJsonFormat[GooglePrices] = jsonFormat(GooglePrices, FireCloudConfig.GoogleCloud.priceListStorageKey, FireCloudConfig.GoogleCloud.priceListEgressKey)
- implicit val GooglePriceListFormat: RootJsonFormat[GooglePriceList] = jsonFormat(GooglePriceList, "gcp_price_list", "version", "updated")
+ implicit val GooglePricesFormat: RootJsonFormat[GooglePrices] = jsonFormat(
+ GooglePrices,
+ FireCloudConfig.GoogleCloud.priceListStorageKey,
+ FireCloudConfig.GoogleCloud.priceListEgressKey
+ )
+ implicit val GooglePriceListFormat: RootJsonFormat[GooglePriceList] =
+ jsonFormat(GooglePriceList, "gcp_price_list", "version", "updated")
}
import org.broadinstitute.dsde.firecloud.dataaccess.GooglePriceListJsonProtocol._
@@ -81,31 +88,39 @@ object HttpGoogleServicesDAO {
// the scope we want is not defined in CloudbillingScopes, so we hardcode it here
val billingScope = Seq("https://www.googleapis.com/auth/cloud-billing")
- private def getScopedCredentials(baseCreds: GoogleCredentials, scopes: Seq[String]): GoogleCredentials = {
+ private def getScopedCredentials(baseCreds: GoogleCredentials, scopes: Seq[String]): GoogleCredentials =
baseCreds.createScoped(scopes.asJava)
- }
- private def getScopedServiceAccountCredentials(baseCreds: ServiceAccountCredentials, scopes: Seq[String]): ServiceAccountCredentials = {
+ private def getScopedServiceAccountCredentials(baseCreds: ServiceAccountCredentials,
+ scopes: Seq[String]
+ ): ServiceAccountCredentials =
getScopedCredentials(baseCreds, scopes) match {
- case sa:ServiceAccountCredentials => sa
+ case sa: ServiceAccountCredentials => sa
case ex => throw new Exception(s"Expected a ServiceAccountCredentials instance, got a ${ex.getClass.getName}")
}
- }
// credentials for orchestration's "firecloud" service account, used for admin duties
- lazy private val firecloudAdminSACreds = ServiceAccountCredentials
+ private lazy val firecloudAdminSACreds = ServiceAccountCredentials
.fromStream(new FileInputStream(FireCloudConfig.Auth.firecloudAdminSAJsonFile))
- def getAdminUserAccessToken = {
+ def getAdminUserAccessToken =
getScopedServiceAccountCredentials(firecloudAdminSACreds, authScopes)
- .refreshAccessToken().getTokenValue
- }
+ .refreshAccessToken()
+ .getTokenValue
}
-class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceList)(implicit val system: ActorSystem, implicit val materializer: Materializer, implicit val executionContext: ExecutionContext) extends GoogleServicesDAO with FireCloudRequestBuilding with LazyLogging with RestJsonClient with SprayJsonSupport {
+class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceList)(
+ implicit val system: ActorSystem,
+ implicit val materializer: Materializer,
+ implicit val executionContext: ExecutionContext
+) extends GoogleServicesDAO
+ with FireCloudRequestBuilding
+ with LazyLogging
+ with RestJsonClient
+ with SprayJsonSupport {
// application name to use within Google api libraries
- private final val appName = "firecloud:orchestration"
+ final private val appName = "firecloud:orchestration"
val httpTransport = GoogleNetHttpTransport.newTrustedTransport
val jsonFactory = GsonFactory.getDefaultInstance
@@ -116,30 +131,28 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
val anonymizedGroupRole = "MEMBER"
val anonymizedGroupDeliverySettings = "ALL_MAIL"
- private def getDelegatedCredentials(baseCreds: GoogleCredentials, user: String): GoogleCredentials= {
+ private def getDelegatedCredentials(baseCreds: GoogleCredentials, user: String): GoogleCredentials =
baseCreds.createDelegated(user)
- }
- def getDirectoryManager(credential: GoogleCredentials): Directory = {
- new Directory.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(credential.createScoped(directoryScope.asJava))).setApplicationName(appName).build()
- }
+ def getDirectoryManager(credential: GoogleCredentials): Directory =
+ new Directory.Builder(httpTransport,
+ jsonFactory,
+ new HttpCredentialsAdapter(credential.createScoped(directoryScope.asJava))
+ ).setApplicationName(appName).build()
- private lazy val pubSub = {
- new Pubsub.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(getPubSubServiceAccountCredential)).setApplicationName(appName).build()
- }
+ private lazy val pubSub =
+ new Pubsub.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(getPubSubServiceAccountCredential))
+ .setApplicationName(appName)
+ .build()
- private def getDelegatedCredentialForAdminUser: GoogleCredentials = {
+ private def getDelegatedCredentialForAdminUser: GoogleCredentials =
getDelegatedCredentials(firecloudAdminSACreds, userAdminAccount)
- }
- private def getBucketServiceAccountCredential = {
+ private def getBucketServiceAccountCredential =
getScopedServiceAccountCredentials(firecloudAdminSACreds, storageReadOnly)
- }
- private def getPubSubServiceAccountCredential = {
+ private def getPubSubServiceAccountCredential =
getScopedServiceAccountCredentials(firecloudAdminSACreds, Seq(PubsubScopes.PUBSUB))
- }
-
/**
* Uploads the supplied data to GCS, using the Rawls service account credentials
@@ -148,7 +161,10 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
* @param objectContents byte array of the data to upload
* @return path to the uploaded GCS object
*/
- override def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, objectContents: Array[Byte]): GcsPath = {
+ override def writeObjectAsRawlsSA(bucketName: GcsBucketName,
+ objectKey: GcsObjectName,
+ objectContents: Array[Byte]
+ ): GcsPath = {
// call the upload implementation
val dataStream: Stream[IO, Byte] = Stream.emits(objectContents).covary[IO]
streamUploadObject(getStorageResource, bucketName, objectKey, dataStream)
@@ -168,8 +184,11 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
}
// separate method to perform the upload, to ease unit testing
- protected[dataaccess] def streamUploadObject(storageResource: Resource[IO, GoogleStorageService[IO]], bucketName: GcsBucketName,
- objectKey: GcsObjectName, dataStream: Stream[IO, Byte]): GcsPath = {
+ protected[dataaccess] def streamUploadObject(storageResource: Resource[IO, GoogleStorageService[IO]],
+ bucketName: GcsBucketName,
+ objectKey: GcsObjectName,
+ dataStream: Stream[IO, Byte]
+ ): GcsPath = {
val uploadAttempt = storageResource.use { storageService =>
// create the destination pipe to which we will write the file
// N.B. workbench-libs' streamUploadBlob does not allow setting the Content-Type, so we don't set it
@@ -190,20 +209,28 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
// create the storage service, using the Rawls SA credentials
// the Rawls SA json creds do not contain a project, so also specify the project explicitly
- GoogleStorageService.resource(FireCloudConfig.Auth.rawlsSAJsonFile, Option.empty[Semaphore[IO]],
- project = Some(GoogleProject(FireCloudConfig.FireCloud.serviceProject)))
+ GoogleStorageService.resource(FireCloudConfig.Auth.rawlsSAJsonFile,
+ Option.empty[Semaphore[IO]],
+ project = Some(GoogleProject(FireCloudConfig.FireCloud.serviceProject))
+ )
}
def getBucketObjectAsInputStream(bucketName: String, objectKey: String) = {
- val storage = new Storage.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(getBucketServiceAccountCredential)).setApplicationName(appName).build()
+ val storage = new Storage.Builder(httpTransport,
+ jsonFactory,
+ new HttpCredentialsAdapter(getBucketServiceAccountCredential)
+ ).setApplicationName(appName).build()
storage.objects().get(bucketName, objectKey).executeMediaAsInputStream
}
def getBucket(bucketName: String, petKey: String): Option[Bucket] = {
val keyStream = new ByteArrayInputStream(petKey.getBytes)
- val credential = getScopedServiceAccountCredentials(ServiceAccountCredentials.fromStream(keyStream), storageReadOnly)
+ val credential =
+ getScopedServiceAccountCredentials(ServiceAccountCredentials.fromStream(keyStream), storageReadOnly)
- val storage = new Storage.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(credential)).setApplicationName(appName).build()
+ val storage = new Storage.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(credential))
+ .setApplicationName(appName)
+ .build()
Try(executeGoogleRequest[Bucket](storage.buckets().get(bucketName))) match {
case Failure(ex) =>
@@ -218,29 +245,31 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
def getObjectResourceUrl(bucketName: String, objectKey: String) = {
val gcsStatUrl = "https://www.googleapis.com/storage/v1/b/%s/o/%s"
- gcsStatUrl.format(bucketName, java.net.URLEncoder.encode(objectKey,"UTF-8"))
+ gcsStatUrl.format(bucketName, java.net.URLEncoder.encode(objectKey, "UTF-8"))
}
- def getUserProfile(accessToken: WithAccessToken)
- (implicit executionContext: ExecutionContext): Future[HttpResponse] = {
- val profileRequest = Get( "https://www.googleapis.com/oauth2/v3/userinfo" )
+ def getUserProfile(
+ accessToken: WithAccessToken
+ )(implicit executionContext: ExecutionContext): Future[HttpResponse] = {
+ val profileRequest = Get("https://www.googleapis.com/oauth2/v3/userinfo")
userAuthedRequest(profileRequest)(accessToken)
}
/** Fetch the latest price list from Google. Returns only the subset of prices that we find we have use for. */
- //Why is this a val? Because the price lists do not change very often. This prevents making an HTTP call to Google
- //every time we want to calculate a cost estimate (which happens extremely often in the Terra UI)
- //Because the price list is brittle and Google sometimes changes the names of keys in the JSON, there is a
- //default cached value in configuration to use as a backup. If we fallback to it, the error will be logged
- //but users will probably not notice a difference. They're cost *estimates*, after all.
+  // Why is this a val? Because the price lists do not change very often. This prevents making an HTTP call to Google
+  // every time we want to calculate a cost estimate (which happens extremely often in the Terra UI).
+  // Because the price list is brittle and Google sometimes changes the names of keys in the JSON, there is a
+  // default cached value in configuration to use as a backup. If we fall back to it, the error will be logged
+  // but users will probably not notice a difference. They're cost *estimates*, after all.
lazy val fetchPriceList: Future[GooglePriceList] = {
val httpReq = Get(priceListUrl)
- unAuthedRequestToObject[GooglePriceList](httpReq).recover {
- case t: Throwable =>
- logger.error(s"Unable to fetch/parse latest Google price list. A cached (possibly outdated) value will be used instead. Error: ${t.getMessage}")
- defaultPriceList
+ unAuthedRequestToObject[GooglePriceList](httpReq).recover { case t: Throwable =>
+ logger.error(
+ s"Unable to fetch/parse latest Google price list. A cached (possibly outdated) value will be used instead. Error: ${t.getMessage}"
+ )
+ defaultPriceList
}
}
@@ -265,19 +294,16 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
val insertRequest = directoryService.groups.insert(newGroup)
Try(executeGoogleRequest[Group](insertRequest)) match {
- case Failure(response: GoogleJsonResponseException) => {
+ case Failure(response: GoogleJsonResponseException) =>
val errorCode = response.getDetails.getCode
val message = response.getDetails.getMessage
logger.warn(s"Error $errorCode: Could not create new group $groupEmail; $message")
Option.empty
- }
- case Failure(f) => {
+ case Failure(f) =>
logger.warn(s"Error: Could not create new group $groupEmail: $f")
Option.empty
- }
- case Success(newGroupInfo) => {
+ case Success(newGroupInfo) =>
Option(newGroupInfo.getEmail())
- }
}
}
@@ -293,25 +319,25 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
val directoryService = getDirectoryManager(getDelegatedCredentialForAdminUser)
// add targetUserEmail as member of google group - modeled after `override def addMemberToGroup` in workbench-libs HttpGoogleDirectoryDAO.scala
- val member = new Member().setEmail(targetUserEmail).setRole(anonymizedGroupRole).setDeliverySettings(anonymizedGroupDeliverySettings)
+ val member = new Member()
+ .setEmail(targetUserEmail)
+ .setRole(anonymizedGroupRole)
+ .setDeliverySettings(anonymizedGroupDeliverySettings)
val memberInsertRequest = directoryService.members.insert(groupEmail, member)
Try(executeGoogleRequest(memberInsertRequest)) match {
- case Failure(response: GoogleJsonResponseException) => {
+ case Failure(response: GoogleJsonResponseException) =>
val errorCode = response.getDetails.getCode
val message = response.getDetails.getMessage
logger.warn(s"Error $errorCode: Could not add new member $targetUserEmail to group $groupEmail; $message")
deleteGoogleGroup(groupEmail) // try to clean up after yourself
Option.empty
- }
- case Failure(f) => {
+ case Failure(f) =>
logger.warn(s"Error: Could not add new member $targetUserEmail to group $groupEmail; $f")
deleteGoogleGroup(groupEmail)
Option.empty
- }
- case Success(_) => {
+ case Success(_) =>
Option(targetUserEmail) // return email address of added user (string)
- }
}
}
@@ -319,54 +345,59 @@ class HttpGoogleServicesDAO(priceListUrl: String, defaultPriceList: GooglePriceL
// following two methods borrowed from rawls. I'd much prefer to just import workbench-google
// from workbench-libs, but that has spray vs. akka-http conflicts. So this will do for now.
// ====================================================================================
- protected def executeGoogleRequest[T](request: AbstractGoogleClientRequest[T]): T = {
+ protected def executeGoogleRequest[T](request: AbstractGoogleClientRequest[T]): T =
executeGoogleCall(request) { response =>
response.parseAs(request.getResponseClass)
}
- }
- protected def executeGoogleCall[A,B](request: AbstractGoogleClientRequest[A])(processResponse: (com.google.api.client.http.HttpResponse) => B): B = {
+ protected def executeGoogleCall[A, B](
+ request: AbstractGoogleClientRequest[A]
+ )(processResponse: (com.google.api.client.http.HttpResponse) => B): B =
Try {
request.executeUnparsed()
} match {
case Success(response) =>
- try {
+ try
processResponse(response)
- } finally {
+ finally
response.disconnect()
- }
case Failure(httpRegrets: HttpResponseException) =>
throw httpRegrets
case Failure(regrets) =>
throw regrets
}
- }
// ====================================================================================
// END methods borrowed from rawls
// ====================================================================================
def status: Future[SubsystemStatus] = {
- val storage = new Storage.Builder(httpTransport, jsonFactory, new HttpCredentialsAdapter(getBucketServiceAccountCredential)).setApplicationName(appName).build()
+ val storage = new Storage.Builder(httpTransport,
+ jsonFactory,
+ new HttpCredentialsAdapter(getBucketServiceAccountCredential)
+ ).setApplicationName(appName).build()
val bucketResponseTry = Try(storage.buckets().list(FireCloudConfig.FireCloud.serviceProject).executeUsingHead())
bucketResponseTry match {
- case scala.util.Success(bucketResponse) => bucketResponse.getStatusCode match {
- case x if x == 200 => Future(SubsystemStatus(ok = true, messages = None))
- case _ => Future(SubsystemStatus(ok = false, messages = Some(List(bucketResponse.parseAsString()))))
- }
+ case scala.util.Success(bucketResponse) =>
+ bucketResponse.getStatusCode match {
+ case x if x == 200 => Future(SubsystemStatus(ok = true, messages = None))
+ case _ => Future(SubsystemStatus(ok = false, messages = Some(List(bucketResponse.parseAsString()))))
+ }
case Failure(ex) => Future(SubsystemStatus(ok = false, messages = Some(List(ex.getMessage))))
}
}
override def publishMessages(fullyQualifiedTopic: String, messages: Seq[String]): Future[Unit] = {
logger.debug(s"publishing to google pubsub topic $fullyQualifiedTopic, messages [${messages.mkString(", ")}]")
- Future.traverse(messages.grouped(1000)) { messageBatch =>
- val pubsubMessages = messageBatch.map(text => new PubsubMessage().encodeData(text.getBytes("UTF-8")))
- val pubsubRequest = new PublishRequest().setMessages(pubsubMessages.asJava)
- Future(executeGoogleRequest(pubSub.projects().topics().publish(fullyQualifiedTopic, pubsubRequest)))
- }.map(_ => ())
+ Future
+ .traverse(messages.grouped(1000)) { messageBatch =>
+ val pubsubMessages = messageBatch.map(text => new PubsubMessage().encodeData(text.getBytes("UTF-8")))
+ val pubsubRequest = new PublishRequest().setMessages(pubsubMessages.asJava)
+ Future(executeGoogleRequest(pubSub.projects().topics().publish(fullyQualifiedTopic, pubsubRequest)))
+ }
+ .map(_ => ())
}
- override def getAdminUserAccessToken = {
+ override def getAdminUserAccessToken =
getScopedServiceAccountCredentials(firecloudAdminSACreds, authScopes)
- .refreshAccessToken().getTokenValue
- }
+ .refreshAccessToken()
+ .getTokenValue
}
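
An aside on the fetchPriceList comment reformatted above: the recover-to-cached-default shape it describes can be sketched in isolation. Everything below is an illustrative stand-in (PriceList, fetchLatest, defaultPriceList are not the production types, HTTP client, or config keys); only the recover-to-default structure is taken from the hunk.

import scala.concurrent.{ExecutionContext, Future}

object PriceListFallbackSketch {
  final case class PriceList(version: String)

  // stand-in for the real HTTP fetch of the live price list
  def fetchLatest()(implicit ec: ExecutionContext): Future[PriceList] =
    Future.failed(new RuntimeException("Google renamed a key in the price list JSON"))

  // stand-in for the cached default kept in configuration
  val defaultPriceList: PriceList = PriceList("cached-fallback")

  // the real DAO memoizes this as a lazy val; a def keeps the sketch self-contained
  def priceList(implicit ec: ExecutionContext): Future[PriceList] =
    fetchLatest().recover { case t: Throwable =>
      // log and fall back, so cost estimates keep working with slightly stale prices
      println(s"Unable to fetch/parse latest Google price list, using cached value: ${t.getMessage}")
      defaultPriceList
    }
}
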
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpRawlsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpRawlsDAO.scala
index 92de4ed13..013bd1683 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpRawlsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpRawlsDAO.scala
@@ -20,7 +20,11 @@ import org.broadinstitute.dsde.firecloud.{FireCloudConfig, FireCloudExceptionWit
import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations._
import org.broadinstitute.dsde.rawls.model.StatusJsonSupport._
import org.broadinstitute.dsde.rawls.model.WorkspaceACLJsonSupport._
-import org.broadinstitute.dsde.rawls.model.{StatusCheckResponse => RawlsStatus, SubsystemStatus => RawlsSubsystemStatus, _}
+import org.broadinstitute.dsde.rawls.model.{
+ StatusCheckResponse => RawlsStatus,
+ SubsystemStatus => RawlsSubsystemStatus,
+ _
+}
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
import org.joda.time.DateTime
import spray.json.DefaultJsonProtocol._
@@ -32,118 +36,162 @@ import scala.util.control.NonFatal
/**
* Created by davidan on 9/23/16.
*/
-class HttpRawlsDAO(implicit val system: ActorSystem, implicit val materializer: Materializer, implicit val executionContext: ExecutionContext)
- extends RawlsDAO with RestJsonClient with SprayJsonSupport {
-
- override def isAdmin(userInfo: UserInfo): Future[Boolean] = {
+class HttpRawlsDAO(implicit val system: ActorSystem,
+ implicit val materializer: Materializer,
+ implicit val executionContext: ExecutionContext
+) extends RawlsDAO
+ with RestJsonClient
+ with SprayJsonSupport {
+
+ override def isAdmin(userInfo: UserInfo): Future[Boolean] =
userAuthedRequest(Get(rawlsAdminUrl))(userInfo) flatMap { response =>
response.status match {
- case OK => Future.successful(true)
+ case OK => Future.successful(true)
case NotFound => Future.successful(false)
- case _ => {
+ case _ =>
FCErrorReport(response).flatMap { errorReport =>
Future.failed(new FireCloudExceptionWithErrorReport(errorReport))
}
- }
}
}
- }
- override def isLibraryCurator(userInfo: UserInfo): Future[Boolean] = {
+ override def isLibraryCurator(userInfo: UserInfo): Future[Boolean] =
userAuthedRequest(Get(rawlsCuratorUrl))(userInfo) flatMap { response =>
response.status match {
- case OK => Future.successful(true)
+ case OK => Future.successful(true)
case NotFound => Future.successful(false)
- case _ => {
+ case _ =>
FCErrorReport(response).flatMap { errorReport =>
Future.failed(new FireCloudExceptionWithErrorReport(errorReport))
}
- }
}
}
- }
- override def getBucketUsage(ns: String, name: String)(implicit userInfo: WithAccessToken): Future[BucketUsageResponse] =
+ override def getBucketUsage(ns: String, name: String)(implicit
+ userInfo: WithAccessToken
+ ): Future[BucketUsageResponse] =
authedRequestToObject[BucketUsageResponse](Get(rawlsBucketUsageUrl(ns, name)))
override def getWorkspaces(implicit userInfo: WithAccessToken): Future[Seq[WorkspaceListResponse]] =
- authedRequestToObject[Seq[WorkspaceListResponse]](Get(rawlsWorkpacesUrl), label = Some("HttpRawlsDAO.getWorkspaces"))
+ authedRequestToObject[Seq[WorkspaceListResponse]](Get(rawlsWorkpacesUrl),
+ label = Some("HttpRawlsDAO.getWorkspaces")
+ )
override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] =
authedRequestToObject[WorkspaceResponse](Get(getWorkspaceUrl(ns, name)))
- override def patchWorkspaceAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] =
+ override def patchWorkspaceAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
authedRequestToObject[WorkspaceDetails](Patch(getWorkspaceUrl(ns, name), attributeOperations))
- override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] =
+ override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
authedRequestToObject[WorkspaceDetails](Patch(getWorkspaceUrl(ns, name) + "/library", attributeOperations))
override def getWorkspaceACL(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceACL] =
authedRequestToObject[WorkspaceACL](Get(getWorkspaceAclUrl(ns, name)))
- override def patchWorkspaceACL(ns: String, name: String, aclUpdates: Seq[WorkspaceACLUpdate], inviteUsersNotFound: Boolean)(implicit userToken: WithAccessToken): Future[WorkspaceACLUpdateResponseList] =
- authedRequestToObject[WorkspaceACLUpdateResponseList](Patch(patchWorkspaceAclUrl(ns, name, inviteUsersNotFound), aclUpdates))
+ override def patchWorkspaceACL(ns: String,
+ name: String,
+ aclUpdates: Seq[WorkspaceACLUpdate],
+ inviteUsersNotFound: Boolean
+ )(implicit userToken: WithAccessToken): Future[WorkspaceACLUpdateResponseList] =
+ authedRequestToObject[WorkspaceACLUpdateResponseList](
+ Patch(patchWorkspaceAclUrl(ns, name, inviteUsersNotFound), aclUpdates)
+ )
// you must be an admin to execute this method
- override def getAllLibraryPublishedWorkspaces(implicit userToken: WithAccessToken): Future[Seq[WorkspaceDetails]] = {
+ override def getAllLibraryPublishedWorkspaces(implicit userToken: WithAccessToken): Future[Seq[WorkspaceDetails]] =
userAuthedRequest(Get(rawlsAdminWorkspaces)).flatMap { response =>
- if(response.status.isSuccess()) {
+ if (response.status.isSuccess()) {
Unmarshal(response).to[Seq[WorkspaceDetails]].map { srw =>
logger.info("admin workspace list reindexing: " + srw.length + " published workspaces")
srw
}
- }
- else {
+ } else {
logger.info(s"body of reindex error response: ${response.entity}")
- throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Could not unmarshal: " + response.entity))
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Could not unmarshal: " + response.entity)
+ )
}
}
- }
- override def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit userInfo: UserInfo): Future[Seq[Entity]] = {
+ override def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit
+ userInfo: UserInfo
+ ): Future[Seq[Entity]] =
authedRequestToObject[Seq[Entity]](Get(rawlsEntitiesOfTypeUrl(workspaceNamespace, workspaceName, entityType)), true)
- }
- override def queryEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String, query: EntityQuery)(implicit userToken: UserInfo): Future[EntityQueryResponse] = {
- val targetUri = FireCloudConfig.Rawls.entityQueryUriFromWorkspaceAndQuery(workspaceNamespace, workspaceName, entityType, Some(query))
+ override def queryEntitiesOfType(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ query: EntityQuery
+ )(implicit userToken: UserInfo): Future[EntityQueryResponse] = {
+ val targetUri = FireCloudConfig.Rawls.entityQueryUriFromWorkspaceAndQuery(workspaceNamespace,
+ workspaceName,
+ entityType,
+ Some(query)
+ )
authedRequestToObject[EntityQueryResponse](Get(targetUri), compressed = true)
}
- override def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit userToken: UserInfo): Future[Map[String, EntityTypeMetadata]] = {
+ override def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: UserInfo
+ ): Future[Map[String, EntityTypeMetadata]] = {
val url = encodeUri(FireCloudConfig.Rawls.entityPathFromWorkspace(workspaceNamespace, workspaceName))
authedRequestToObject[Map[String, EntityTypeMetadata]](Get(url), compressed = true)
}
- private def getWorkspaceUrl(ns: String, name: String) = encodeUri(FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name")
+ private def getWorkspaceUrl(ns: String, name: String) = encodeUri(
+ FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name"
+ )
- private def getWorkspaceCloneUrl(ns: String, name: String) = encodeUri(FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name/clone")
+ private def getWorkspaceCloneUrl(ns: String, name: String) = encodeUri(
+ FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name/clone"
+ )
private def getWorkspaceAclUrl(ns: String, name: String) = encodeUri(rawlsWorkspaceACLUrl(ns, name))
- private def patchWorkspaceAclUrl(ns: String, name: String, inviteUsersNotFound: Boolean) = rawlsWorkspaceACLUrl(ns, name) + rawlsWorkspaceACLQuerystring.format(inviteUsersNotFound)
+ private def patchWorkspaceAclUrl(ns: String, name: String, inviteUsersNotFound: Boolean) =
+ rawlsWorkspaceACLUrl(ns, name) + rawlsWorkspaceACLQuerystring.format(inviteUsersNotFound)
- private def workspaceCatalogUrl(ns: String, name: String) = encodeUri(FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name/catalog")
+ private def workspaceCatalogUrl(ns: String, name: String) = encodeUri(
+ FireCloudConfig.Rawls.authUrl + FireCloudConfig.Rawls.workspacesPath + s"/$ns/$name/catalog"
+ )
- override def getCatalog(ns: String, name: String)(implicit userToken: WithAccessToken): Future[Seq[WorkspaceCatalog]] =
+ override def getCatalog(ns: String, name: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[WorkspaceCatalog]] =
authedRequestToObject[Seq[WorkspaceCatalog]](Get(workspaceCatalogUrl(ns, name)), true)
- override def patchCatalog(ns: String, name: String, catalogUpdates: Seq[WorkspaceCatalog])(implicit userToken: WithAccessToken): Future[WorkspaceCatalogUpdateResponseList] =
- authedRequestToObject[WorkspaceCatalogUpdateResponseList](Patch(workspaceCatalogUrl(ns, name), catalogUpdates), true)
+ override def patchCatalog(ns: String, name: String, catalogUpdates: Seq[WorkspaceCatalog])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceCatalogUpdateResponseList] =
+ authedRequestToObject[WorkspaceCatalogUpdateResponseList](Patch(workspaceCatalogUrl(ns, name), catalogUpdates),
+ true
+ )
// If we ever need to getAllMethodConfigs, that's Uri(rawlsWorkspaceMethodConfigsUrl.format(ns, name)).withQuery("allRepos" -> "true")
- override def getAgoraMethodConfigs(ns: String, name: String)(implicit userToken: WithAccessToken): Future[Seq[AgoraConfigurationShort]] = {
+ override def getAgoraMethodConfigs(ns: String, name: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[AgoraConfigurationShort]] =
authedRequestToObject[Seq[AgoraConfigurationShort]](Get(rawlsWorkspaceMethodConfigsUrl(ns, name)), true)
- }
- override def getProjects(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMembership]] = {
+ override def getProjects(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMembership]] =
authedRequestToObject[Seq[RawlsBillingProjectMembership]](Get(FireCloudConfig.Rawls.authUrl + "/user/billing"))
- }
-
- override def getProjectMembers(projectId: String)(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMember]] = {
- authedRequestToObject[Seq[RawlsBillingProjectMember]](Get(FireCloudConfig.Rawls.authUrl + s"/billing/$projectId/members"), true)
- }
- override def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean] = {
+ override def getProjectMembers(
+ projectId: String
+ )(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMember]] =
+ authedRequestToObject[Seq[RawlsBillingProjectMember]](
+ Get(FireCloudConfig.Rawls.authUrl + s"/billing/$projectId/members"),
+ true
+ )
+
+ override def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean] = {
val url = editBillingMembershipURL(projectId, role, email)
userAuthedRequest(Put(url), true) flatMap { resp =>
@@ -157,7 +205,9 @@ class HttpRawlsDAO(implicit val system: ActorSystem, implicit val materializer:
}
}
- override def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean] = {
+ override def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean] = {
val url = editBillingMembershipURL(projectId, role, email)
userAuthedRequest(Delete(url), true) flatMap { resp =>
@@ -171,27 +221,42 @@ class HttpRawlsDAO(implicit val system: ActorSystem, implicit val materializer:
}
}
- override def batchUpsertEntities(workspaceNamespace: String, workspaceName: String, entityType: String, upserts: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse] = {
- val request = Post(FireCloudDirectiveUtils.encodeUri(Rawls.entityPathFromWorkspace(workspaceNamespace, workspaceName)+"/batchUpsert"),
- HttpEntity(MediaTypes.`application/json`,upserts.toJson.toString))
+ override def batchUpsertEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ upserts: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse] = {
+ val request = Post(
+ FireCloudDirectiveUtils.encodeUri(
+ Rawls.entityPathFromWorkspace(workspaceNamespace, workspaceName) + "/batchUpsert"
+ ),
+ HttpEntity(MediaTypes.`application/json`, upserts.toJson.toString)
+ )
userAuthedRequest(request)
}
- override def batchUpdateEntities(workspaceNamespace: String, workspaceName: String, entityType: String, updates: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse] = {
- val request = Post(FireCloudDirectiveUtils.encodeUri(Rawls.entityPathFromWorkspace(workspaceNamespace, workspaceName)+"/batchUpdate"),
- HttpEntity(MediaTypes.`application/json`,updates.toJson.toString))
+ override def batchUpdateEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ updates: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse] = {
+ val request = Post(
+ FireCloudDirectiveUtils.encodeUri(
+ Rawls.entityPathFromWorkspace(workspaceNamespace, workspaceName) + "/batchUpdate"
+ ),
+ HttpEntity(MediaTypes.`application/json`, updates.toJson.toString)
+ )
userAuthedRequest(request)
}
-
- private def editBillingMembershipURL(projectId: String, role: ProjectRole, email: String) = {
+ private def editBillingMembershipURL(projectId: String, role: ProjectRole, email: String) =
FireCloudConfig.Rawls.authUrl + s"/billing/$projectId/${role.toString}/${java.net.URLEncoder.encode(email, "UTF-8")}"
- }
override def status: Future[SubsystemStatus] = {
- val rawlsStatus = unAuthedRequestToObject[RawlsStatus](Get(Uri(FireCloudConfig.Rawls.baseUrl).withPath(Uri.Path("/status"))))
+ val rawlsStatus =
+ unAuthedRequestToObject[RawlsStatus](Get(Uri(FireCloudConfig.Rawls.baseUrl).withPath(Uri.Path("/status"))))
def parseRawlsMessages(rs: RawlsStatus): Option[List[String]] = {
val rawlsMessages = rs.systems.toList.flatMap {
@@ -202,26 +267,33 @@ class HttpRawlsDAO(implicit val system: ActorSystem, implicit val materializer:
if (rawlsMessages.nonEmpty) Some(rawlsMessages) else None
}
- rawlsStatus.map { status =>
- SubsystemStatus(status.ok, parseRawlsMessages(status))
- }.recoverWith { case e: FireCloudExceptionWithErrorReport if e.errorReport.statusCode == Some(StatusCodes.InternalServerError) =>
- // Rawls returns 500 on status check failures, but the JSON data should still be sent in the
- // response body and stored in the ErrorReport. Try to parse a RawlsStatus from the error report
- // (if it exists) so we can display it to the user. If this fails, then we will recover from the error below.
- Future(e.errorReport.message.parseJson.convertTo[RawlsStatus]).map { recoveredStatus =>
- SubsystemStatus(recoveredStatus.ok, parseRawlsMessages(recoveredStatus))
+ rawlsStatus
+ .map { status =>
+ SubsystemStatus(status.ok, parseRawlsMessages(status))
+ }
+ .recoverWith {
+ case e: FireCloudExceptionWithErrorReport
+ if e.errorReport.statusCode == Some(StatusCodes.InternalServerError) =>
+ // Rawls returns 500 on status check failures, but the JSON data should still be sent in the
+ // response body and stored in the ErrorReport. Try to parse a RawlsStatus from the error report
+ // (if it exists) so we can display it to the user. If this fails, then we will recover from the error below.
+ Future(e.errorReport.message.parseJson.convertTo[RawlsStatus]).map { recoveredStatus =>
+ SubsystemStatus(recoveredStatus.ok, parseRawlsMessages(recoveredStatus))
+ }
+ }
+ .recover { case NonFatal(e) =>
+ SubsystemStatus(false, Some(List(e.getMessage)))
}
- }.recover {
- case NonFatal(e) => SubsystemStatus(false, Some(List(e.getMessage)))
- }
}
- override def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Option[String]] = {
+ override def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Option[String]] =
authedRequestToObject[Option[String]](Delete(getWorkspaceUrl(workspaceNamespace, workspaceName)))
- }
- override def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
+ override def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
authedRequestToObject[WorkspaceDetails](Post(getWorkspaceCloneUrl(workspaceNamespace, workspaceName), cloneRequest))
- }
}
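
The status method reformatted above layers two recoveries: first try to salvage structured status detail from Rawls' 500 error report, then fall back to a plain failure message. A minimal sketch of that shape, with UpstreamError and parseStatusBody as illustrative stand-ins for the real FireCloudExceptionWithErrorReport and RawlsStatus JSON parsing:

import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

object StatusRecoverySketch {
  final case class SubsystemStatus(ok: Boolean, messages: Option[List[String]])

  // stand-in for the exception carrying the 500 response body
  final case class UpstreamError(body: String) extends RuntimeException(body)

  // stand-in for parsing a structured status out of the error report message
  def parseStatusBody(body: String): SubsystemStatus =
    SubsystemStatus(ok = false, messages = Some(List(body)))

  def status(call: Future[SubsystemStatus])(implicit ec: ExecutionContext): Future[SubsystemStatus] =
    call
      .recoverWith { case UpstreamError(body) =>
        // the upstream 500 still carries status JSON, so surface that structured detail first
        Future(parseStatusBody(body))
      }
      .recover { case NonFatal(e) =>
        // anything else (including an unparseable body) becomes a plain failure message
        SubsystemStatus(ok = false, messages = Some(List(e.getMessage)))
      }
}
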
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpSamDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpSamDAO.scala
index f84b26ad4..19cf8d7f6 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpSamDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpSamDAO.scala
@@ -12,7 +12,20 @@ import org.broadinstitute.dsde.firecloud.model.ErrorReportExtensions.FCErrorRepo
import org.broadinstitute.dsde.firecloud.model.ManagedGroupRoles.ManagedGroupRole
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.SamResource.UserPolicy
-import org.broadinstitute.dsde.firecloud.model.{AccessToken, FireCloudManagedGroupMembership, ManagedGroupRoles, RegistrationInfo, SamUser, SamUserAttributesRequest, SamUserRegistrationRequest, SamUserResponse, UserIdInfo, UserInfo, WithAccessToken, WorkbenchUserInfo}
+import org.broadinstitute.dsde.firecloud.model.{
+ AccessToken,
+ FireCloudManagedGroupMembership,
+ ManagedGroupRoles,
+ RegistrationInfo,
+ SamUser,
+ SamUserAttributesRequest,
+ SamUserRegistrationRequest,
+ SamUserResponse,
+ UserIdInfo,
+ UserInfo,
+ WithAccessToken,
+ WorkbenchUserInfo
+}
import org.broadinstitute.dsde.firecloud.utils.RestJsonClient
import org.broadinstitute.dsde.rawls.model.RawlsUserEmail
import org.broadinstitute.dsde.workbench.model.WorkbenchIdentityJsonSupport._
@@ -27,81 +40,108 @@ import scala.concurrent.{ExecutionContext, Future}
/**
* Created by mbemis on 8/21/17.
*/
-class HttpSamDAO( implicit val system: ActorSystem, val materializer: Materializer, implicit val executionContext: ExecutionContext )
- extends SamDAO with RestJsonClient with SprayJsonSupport {
-
- override def listWorkspaceResources(implicit userInfo: WithAccessToken): Future[Seq[UserPolicy]] = {
- authedRequestToObject[Seq[UserPolicy]](Get(samListResources("workspace")), label=Some("HttpSamDAO.listWorkspaceResources"))
- }
-
- override def registerUser(termsOfService: Option[String])(implicit userInfo: WithAccessToken): Future[RegistrationInfo] = {
- authedRequestToObject[RegistrationInfo](Post(samUserRegistrationUrl, termsOfService), label=Some("HttpSamDAO.registerUser"))
- }
-
- override def registerUserSelf(acceptsTermsOfService: Boolean)(implicit userInfo: WithAccessToken): Future[SamUserResponse] = {
- authedRequestToObject[SamUserResponse](Post(samUserRegisterSelfUrl, SamUserRegistrationRequest(acceptsTermsOfService, SamUserAttributesRequest(marketingConsent = Some(false)))), label = Some("HttpSamDAO.registerUserSelf"))
- }
-
- override def getRegistrationStatus(implicit userInfo: WithAccessToken): Future[RegistrationInfo] = {
- authedRequestToObject[RegistrationInfo](Get(samUserRegistrationUrl), label=Some("HttpSamDAO.getRegistrationStatus"))
- }
-
- override def getUserIds(email: RawlsUserEmail)(implicit userInfo: WithAccessToken): Future[UserIdInfo] = {
+class HttpSamDAO(implicit
+ val system: ActorSystem,
+ val materializer: Materializer,
+ implicit val executionContext: ExecutionContext
+) extends SamDAO
+ with RestJsonClient
+ with SprayJsonSupport {
+
+ override def listWorkspaceResources(implicit userInfo: WithAccessToken): Future[Seq[UserPolicy]] =
+ authedRequestToObject[Seq[UserPolicy]](Get(samListResources("workspace")),
+ label = Some("HttpSamDAO.listWorkspaceResources")
+ )
+
+ override def registerUser(
+ termsOfService: Option[String]
+ )(implicit userInfo: WithAccessToken): Future[RegistrationInfo] =
+ authedRequestToObject[RegistrationInfo](Post(samUserRegistrationUrl, termsOfService),
+ label = Some("HttpSamDAO.registerUser")
+ )
+
+ override def registerUserSelf(acceptsTermsOfService: Boolean)(implicit
+ userInfo: WithAccessToken
+ ): Future[SamUserResponse] =
+ authedRequestToObject[SamUserResponse](
+ Post(samUserRegisterSelfUrl,
+ SamUserRegistrationRequest(acceptsTermsOfService, SamUserAttributesRequest(marketingConsent = Some(false)))
+ ),
+ label = Some("HttpSamDAO.registerUserSelf")
+ )
+
+ override def getRegistrationStatus(implicit userInfo: WithAccessToken): Future[RegistrationInfo] =
+ authedRequestToObject[RegistrationInfo](Get(samUserRegistrationUrl),
+ label = Some("HttpSamDAO.getRegistrationStatus")
+ )
+
+ override def getUserIds(email: RawlsUserEmail)(implicit userInfo: WithAccessToken): Future[UserIdInfo] =
authedRequestToObject[UserIdInfo](Get(samGetUserIdsUrl.format(URLEncoder.encode(email.value, UTF_8.name))))
- }
// Sam's API only allows for 1000 user to be fetched at one time
- override def getUsersForIds(samUserIds: Seq[WorkbenchUserId])(implicit userInfo: WithAccessToken): Future[Seq[WorkbenchUserInfo]] = Future.sequence {
+ override def getUsersForIds(
+ samUserIds: Seq[WorkbenchUserId]
+ )(implicit userInfo: WithAccessToken): Future[Seq[WorkbenchUserInfo]] = Future
+ .sequence {
samUserIds.sliding(1000, 1000).toSeq.map { batch =>
adminAuthedRequestToObject[Seq[SamUser]](Post(samAdminGetUsersForIdsUrl, batch))
.map(_.map(user => WorkbenchUserInfo(user.id.value, user.email.value)))
}
- }.map(_.flatten)
+ }
+ .map(_.flatten)
override def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean] = {
implicit val accessToken = userInfo
- authedRequestToObject[List[String]](Get(samResourceRoles(managedGroupResourceTypeName, groupName.value)), label=Some("HttpSamDAO.isGroupMember")).map { allRoles =>
+ authedRequestToObject[List[String]](Get(samResourceRoles(managedGroupResourceTypeName, groupName.value)),
+ label = Some("HttpSamDAO.isGroupMember")
+ ).map { allRoles =>
allRoles.map(ManagedGroupRoles.withName).toSet.intersect(ManagedGroupRoles.membershipRoles).nonEmpty
}
}
- override def createGroup(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def createGroup(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] =
userAuthedRequestToUnit(Post(samManagedGroup(groupName)))
- }
- override def deleteGroup(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def deleteGroup(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] =
userAuthedRequestToUnit(Delete(samManagedGroup(groupName)))
- }
- override def listGroups(implicit userInfo: WithAccessToken): Future[List[FireCloudManagedGroupMembership]] = {
+ override def listGroups(implicit userInfo: WithAccessToken): Future[List[FireCloudManagedGroupMembership]] =
authedRequestToObject[List[FireCloudManagedGroupMembership]](Get(samManagedGroupsBase))
- }
- override def getGroupEmail(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[WorkbenchEmail] = {
+ override def getGroupEmail(groupName: WorkbenchGroupName)(implicit
+ userInfo: WithAccessToken
+ ): Future[WorkbenchEmail] =
authedRequestToObject[WorkbenchEmail](Get(samManagedGroup(groupName)))
- }
- override def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit userInfo: WithAccessToken): Future[List[WorkbenchEmail]] = {
+ override def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit
+ userInfo: WithAccessToken
+ ): Future[List[WorkbenchEmail]] =
authedRequestToObject[List[WorkbenchEmail]](Get(samManagedGroupPolicy(groupName, policyName)))
- }
- override def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit] =
userAuthedRequestToUnit(Put(samManagedGroupAlterMember(groupName, role, email)))
- }
- override def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit] =
userAuthedRequestToUnit(Delete(samManagedGroupAlterMember(groupName, role, email)))
- }
- override def overwriteGroupMembers(groupName: WorkbenchGroupName, role: ManagedGroupRole, memberList: List[WorkbenchEmail])(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def overwriteGroupMembers(groupName: WorkbenchGroupName,
+ role: ManagedGroupRole,
+ memberList: List[WorkbenchEmail]
+ )(implicit userInfo: WithAccessToken): Future[Unit] =
userAuthedRequestToUnit(Put(samManagedGroupPolicy(groupName, role), memberList))
- }
- override def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(
+ implicit userInfo: WithAccessToken
+ ): Future[Unit] =
userAuthedRequestToUnit(Put(samResourcePolicyAlterMember(resourceTypeName, resourceId, policyName, email)))
- }
- override def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(
+ implicit userInfo: WithAccessToken
+ ): Future[Unit] = {
implicit val booleanFormat = new RootJsonFormat[Boolean] {
override def read(json: JsValue): Boolean = implicitly[JsonFormat[Boolean]].read(json)
override def write(obj: Boolean): JsValue = implicitly[JsonFormat[Boolean]].write(obj)
@@ -110,50 +150,49 @@ class HttpSamDAO( implicit val system: ActorSystem, val materializer: Materializ
userAuthedRequestToUnit(Put(samResourcePolicy(resourceTypeName, resourceId, policyName) + "/public", public))
}
- override def requestGroupAccess(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def requestGroupAccess(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] =
userAuthedRequestToUnit(Post(samManagedGroupRequestAccess(groupName)))
- }
- private def userAuthedRequestToUnit(request: HttpRequest)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ private def userAuthedRequestToUnit(request: HttpRequest)(implicit userInfo: WithAccessToken): Future[Unit] =
userAuthedRequest(request).flatMap { resp =>
- if(resp.status.isSuccess) Future.successful({
+ if (resp.status.isSuccess) Future.successful {
resp.discardEntityBytes()
- })
+ }
else {
FCErrorReport(resp).flatMap { errorReport =>
Future.failed(new FireCloudExceptionWithErrorReport(errorReport))
}
}
}
- }
override def getPetServiceAccountTokenForUser(user: WithAccessToken, scopes: Seq[String]): Future[AccessToken] = {
implicit val accessToken = user
- authedRequestToObject[String](Post(samArbitraryPetTokenUrl, scopes), label=Some("HttpSamDAO.getPetServiceAccountTokenForUser")).map { quotedToken =>
+ authedRequestToObject[String](Post(samArbitraryPetTokenUrl, scopes),
+ label = Some("HttpSamDAO.getPetServiceAccountTokenForUser")
+ ).map { quotedToken =>
// Sam returns a quoted string. We need the token without the quotes.
- val token = if (quotedToken.startsWith("\"") && quotedToken.endsWith("\"") )
- quotedToken.substring(1,quotedToken.length-1)
- else
- quotedToken
+ val token =
+ if (quotedToken.startsWith("\"") && quotedToken.endsWith("\""))
+ quotedToken.substring(1, quotedToken.length - 1)
+ else
+ quotedToken
AccessToken.apply(token)
}
}
-
def getPetServiceAccountKeyForUser(user: WithAccessToken, project: GoogleProject): Future[String] = {
implicit val accessToken = user
- authedRequestToObject[String](Get(samPetKeyForProject.format(project.value)), label=Some("HttpSamDAO.getPetServiceAccountKeyForUser"))
+ authedRequestToObject[String](Get(samPetKeyForProject.format(project.value)),
+ label = Some("HttpSamDAO.getPetServiceAccountKeyForUser")
+ )
}
- override def status: Future[SubsystemStatus] = {
+ override def status: Future[SubsystemStatus] =
for {
response <- unAuthedRequest(Get(samStatusUrl))
ok = response.status.isSuccess
message <- if (ok) Future.successful(None) else Unmarshal(response.entity).to[String].map(Option(_))
- } yield {
- SubsystemStatus(ok, message.map(List(_)))
- }
- }
+ } yield SubsystemStatus(ok, message.map(List(_)))
}
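
getUsersForIds above works around Sam's 1000-users-per-request limit by batching with sliding and flattening the per-batch futures. A small sketch of that pattern, with fetchBatch standing in for the admin-authed Sam call and plain strings standing in for user ids and emails:

import scala.concurrent.{ExecutionContext, Future}

object SamBatchingSketch {
  // stand-in for the POST that resolves at most 1000 user ids per request
  def fetchBatch(ids: Seq[String])(implicit ec: ExecutionContext): Future[Seq[(String, String)]] =
    Future.successful(ids.map(id => id -> s"$id@example.org"))

  // sliding(1000, 1000) yields non-overlapping batches; Future.sequence runs them
  // and the per-batch results are flattened back into one sequence
  def fetchAll(ids: Seq[String])(implicit ec: ExecutionContext): Future[Seq[(String, String)]] =
    Future
      .sequence(ids.sliding(1000, 1000).toSeq.map(fetchBatch))
      .map(_.flatten)
}
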
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpShibbolethDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpShibbolethDAO.scala
index 60be9a619..5f0727ab7 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpShibbolethDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpShibbolethDAO.scala
@@ -8,8 +8,12 @@ import org.broadinstitute.dsde.firecloud.utils.RestJsonClient
import scala.concurrent.{ExecutionContext, Future}
-class HttpShibbolethDAO(implicit val system: ActorSystem, implicit val materializer: Materializer, implicit val executionContext: ExecutionContext)
- extends ShibbolethDAO with RestJsonClient with SprayJsonSupport {
+class HttpShibbolethDAO(implicit val system: ActorSystem,
+ implicit val materializer: Materializer,
+ implicit val executionContext: ExecutionContext
+) extends ShibbolethDAO
+ with RestJsonClient
+ with SprayJsonSupport {
override def getPublicKey(): Future[String] = {
val publicKeyUrl = FireCloudConfig.Shibboleth.publicKeyUrl
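
This file's only change is the class header, and the same scalafmt shape recurs across the surrounding DAOs: one constructor parameter per line, a dangling closing parenthesis, and each mixed-in trait on its own line. A rough illustration of that shape with throwaway names (ExampleDAO and its traits are not part of this codebase):

import scala.concurrent.ExecutionContext

trait ExampleClient
trait ExampleJsonSupport

class ExampleDAO(baseUrl: String)(implicit val executionContext: ExecutionContext,
                                  val clock: java.time.Clock
) extends ExampleClient
    with ExampleJsonSupport {
  def statusUrl: String = s"$baseUrl/status"
}
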
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpThurloeDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpThurloeDAO.scala
index a4dc01b46..b632fd5a2 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpThurloeDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpThurloeDAO.scala
@@ -17,34 +17,45 @@ import spray.json.DefaultJsonProtocol._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
+
/**
* Created by mbemis on 10/21/16.
*/
-class HttpThurloeDAO ( implicit val system: ActorSystem, implicit val executionContext: ExecutionContext, implicit val materializer: Materializer )
- extends ThurloeDAO with RestJsonClient with SprayJsonSupport {
-
- override def getAllKVPs(forUserId: String, callerToken: WithAccessToken): Future[Option[ProfileWrapper]] = {
+class HttpThurloeDAO(implicit val system: ActorSystem,
+ implicit val executionContext: ExecutionContext,
+ implicit val materializer: Materializer
+) extends ThurloeDAO
+ with RestJsonClient
+ with SprayJsonSupport {
+
+ override def getAllKVPs(forUserId: String, callerToken: WithAccessToken): Future[Option[ProfileWrapper]] =
wrapExceptions {
- val req = userAuthedRequest(Get(UserApiService.remoteGetAllURL.format(forUserId)), useFireCloudHeader = true, label = Some("HttpThurloeDAO.getAllKVPs"))(callerToken)
+ val req = userAuthedRequest(Get(UserApiService.remoteGetAllURL.format(forUserId)),
+ useFireCloudHeader = true,
+ label = Some("HttpThurloeDAO.getAllKVPs")
+ )(callerToken)
req flatMap { response =>
response.status match {
- case StatusCodes.OK => Unmarshal(response).to[ProfileWrapper].map(Option(_))
+ case StatusCodes.OK => Unmarshal(response).to[ProfileWrapper].map(Option(_))
case StatusCodes.NotFound => Future.successful(None)
- case _ => throw new FireCloudException("Unable to get user KVPs from profile service")
+ case _ => throw new FireCloudException("Unable to get user KVPs from profile service")
}
}
}
- }
override def getAllUserValuesForKey(key: String): Future[Map[String, String]] = {
- val queryUri = Uri(UserApiService.remoteGetQueryURL).withQuery(Query(("key"->key)))
+ val queryUri = Uri(UserApiService.remoteGetQueryURL).withQuery(Query("key" -> key))
wrapExceptions {
- adminAuthedRequest(Get(queryUri), false, true, label = Some("HttpThurloeDAO.getAllUserValuesForKey")).flatMap(x => Unmarshal(x).to[Seq[ThurloeKeyValue]]).map { tkvs =>
- val resultOptions = tkvs.map { tkv => (tkv.userId, tkv.keyValuePair.flatMap { kvp => kvp.value }) }
- val actualResultsOnly = resultOptions collect { case (Some(firecloudSubjId), Some(thurloeValue)) => (firecloudSubjId, thurloeValue) }
- actualResultsOnly.toMap
- }
+ adminAuthedRequest(Get(queryUri), false, true, label = Some("HttpThurloeDAO.getAllUserValuesForKey"))
+ .flatMap(x => Unmarshal(x).to[Seq[ThurloeKeyValue]])
+ .map { tkvs =>
+ val resultOptions = tkvs.map(tkv => (tkv.userId, tkv.keyValuePair.flatMap(kvp => kvp.value)))
+ val actualResultsOnly = resultOptions collect { case (Some(firecloudSubjId), Some(thurloeValue)) =>
+ (firecloudSubjId, thurloeValue)
+ }
+ actualResultsOnly.toMap
+ }
}
}
@@ -65,11 +76,22 @@ class HttpThurloeDAO ( implicit val system: ActorSystem, implicit val executionC
* @param callerToken auth token of the user making the call
* @return success/failure of save
*/
- override def saveKeyValues(forUserId: String, callerToken: WithAccessToken, keyValues: Map[String, String]): Future[Try[Unit]] = {
- val thurloeKeyValues = ThurloeKeyValues(Option(forUserId), Option(keyValues.map { case (key, value) => FireCloudKeyValue(Option(key), Option(value)) }.toSeq))
+ override def saveKeyValues(forUserId: String,
+ callerToken: WithAccessToken,
+ keyValues: Map[String, String]
+ ): Future[Try[Unit]] = {
+ val thurloeKeyValues = ThurloeKeyValues(Option(forUserId),
+ Option(keyValues.map { case (key, value) =>
+ FireCloudKeyValue(Option(key), Option(value))
+ }.toSeq)
+ )
wrapExceptions {
- userAuthedRequest(Post(UserApiService.remoteSetKeyURL, thurloeKeyValues), compressed = false, useFireCloudHeader = true, label = Some("HttpThurloeDAO.saveKeyValues"))(callerToken) map { response =>
- if(response.status.isSuccess) Try(())
+ userAuthedRequest(Post(UserApiService.remoteSetKeyURL, thurloeKeyValues),
+ compressed = false,
+ useFireCloudHeader = true,
+ label = Some("HttpThurloeDAO.saveKeyValues")
+ )(callerToken) map { response =>
+ if (response.status.isSuccess) Try(())
else Try(throw new FireCloudException(s"Unable to update user profile"))
}
}
@@ -80,57 +102,62 @@ class HttpThurloeDAO ( implicit val system: ActorSystem, implicit val executionC
saveKeyValues(userInfo, profilePropertyMap).map(_ => ())
}
- override def deleteKeyValue(forUserId: String, keyName: String, callerToken: WithAccessToken): Future[Try[Unit]] = {
+ override def deleteKeyValue(forUserId: String, keyName: String, callerToken: WithAccessToken): Future[Try[Unit]] =
wrapExceptions {
- userAuthedRequest(Delete(UserApiService.remoteDeleteKeyURL.format(forUserId, keyName)), useFireCloudHeader = true, label = Some("HttpThurloeDAO.deleteKeyValue"))(callerToken) map { response =>
- if(response.status.isSuccess) Try(())
+ userAuthedRequest(Delete(UserApiService.remoteDeleteKeyURL.format(forUserId, keyName)),
+ useFireCloudHeader = true,
+ label = Some("HttpThurloeDAO.deleteKeyValue")
+ )(callerToken) map { response =>
+ if (response.status.isSuccess) Try(())
else Try(throw new FireCloudException(s"Unable to delete key ${keyName} from user profile"))
}
}
- }
- private def wrapExceptions[T](codeBlock: => Future[T]): Future[T] = {
- codeBlock.recover {
- case t: Throwable => {
- throw new FireCloudExceptionWithErrorReport(ErrorReport.apply(StatusCodes.InternalServerError, t))
- }
+ private def wrapExceptions[T](codeBlock: => Future[T]): Future[T] =
+ codeBlock.recover { case t: Throwable =>
+ throw new FireCloudExceptionWithErrorReport(ErrorReport.apply(StatusCodes.InternalServerError, t))
}
- }
override def bulkUserQuery(userIds: List[String], keySelection: List[String]): Future[List[ProfileWrapper]] = {
- val userIdParams:List[(String,String)] = userIds.map(("userId", _))
- val keyParams:List[(String,String)] = keySelection.map(("key", _))
+ val userIdParams: List[(String, String)] = userIds.map(("userId", _))
+ val keyParams: List[(String, String)] = keySelection.map(("key", _))
val allQueryParams = keyParams ++ userIdParams
val queryUri = Uri(UserApiService.remoteGetQueryURL).withQuery(Query(allQueryParams.toMap))
// default uri length for Spray - which Thurloe uses - is 2048 chars
- assert(queryUri.toString().length < 2048, s"generated url is too long at ${queryUri.toString().length} chars.")
+ assert(queryUri.toString().length < 2048, s"generated url is too long at ${queryUri.toString().length} chars.")
- val req = adminAuthedRequest(Get(queryUri), useFireCloudHeader = true,label = Some("HttpThurloeDAO.bulkUserQuery"))
+ val req = adminAuthedRequest(Get(queryUri), useFireCloudHeader = true, label = Some("HttpThurloeDAO.bulkUserQuery"))
req flatMap { response =>
response.status match {
case StatusCodes.OK =>
- val profileKVPsF:Future[List[ProfileKVP]] = Unmarshal(response).to[List[ProfileKVP]]
- val groupedByUserF:Future[Map[String, List[ProfileKVP]]] = profileKVPsF.map(x => x.groupBy(_.userId))
- groupedByUserF.map{ groupedByUser =>
- groupedByUser.map {
- case (userId: String, kvps: List[ProfileKVP]) => ProfileWrapper(userId, kvps.map(_.keyValuePair))
+ val profileKVPsF: Future[List[ProfileKVP]] = Unmarshal(response).to[List[ProfileKVP]]
+ val groupedByUserF: Future[Map[String, List[ProfileKVP]]] = profileKVPsF.map(x => x.groupBy(_.userId))
+ groupedByUserF.map { groupedByUser =>
+ groupedByUser.map { case (userId: String, kvps: List[ProfileKVP]) =>
+ ProfileWrapper(userId, kvps.map(_.keyValuePair))
}.toList
}
- case _ => throw new FireCloudException(s"Unable to execute bulkUserQuery from profile service: ${response.status} $response")
+ case _ =>
+ throw new FireCloudException(
+ s"Unable to execute bulkUserQuery from profile service: ${response.status} $response"
+ )
}
}
}
override def status: Future[SubsystemStatus] = {
- val thurloeStatus = unAuthedRequestToObject[ThurloeStatus](Get(Uri(FireCloudConfig.Thurloe.baseUrl).withPath(Uri.Path("/status"))), useFireCloudHeader = true)
+ val thurloeStatus = unAuthedRequestToObject[ThurloeStatus](
+ Get(Uri(FireCloudConfig.Thurloe.baseUrl).withPath(Uri.Path("/status"))),
+ useFireCloudHeader = true
+ )
thurloeStatus map { thurloeStatus =>
thurloeStatus.status match {
- case "up" => SubsystemStatus(ok = true, None)
+ case "up" => SubsystemStatus(ok = true, None)
case "down" => SubsystemStatus(ok = false, thurloeStatus.error.map(List(_)))
}
}
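
Two small patterns from the hunks above, sketched with simplified stand-in types (KVP and ProfileWrapper here carry plain strings rather than the real FireCloudKeyValue model): getAllUserValuesForKey keeps only rows where both the FireCloud subject id and the Thurloe value are defined, and bulkUserQuery groups flat key/value rows by user id into one wrapper per user.

object ThurloeGroupingSketch {
  final case class KVP(userId: Option[String], value: Option[String])
  final case class ProfileWrapper(userId: String, pairs: List[(String, String)])

  // keep only rows where both sides are present, via collect on a partial function
  def definedOnly(rows: Seq[KVP]): Map[String, String] =
    rows.collect { case KVP(Some(userId), Some(value)) => userId -> value }.toMap

  // group flat (userId, value) rows into one wrapper per user
  def groupByUser(rows: List[(String, String)]): List[ProfileWrapper] =
    rows.groupBy(_._1).map { case (userId, kvps) => ProfileWrapper(userId, kvps) }.toList
}
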
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/OntologyDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/OntologyDAO.scala
index 6b0f1dd39..a36d4fd55 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/OntologyDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/OntologyDAO.scala
@@ -22,6 +22,6 @@ trait OntologyDAO extends ReportsSubsystemStatus {
def autocomplete(term: String): List[TermResource]
- override def serviceName:Subsystem = OntologyDAO.serviceName
+ override def serviceName: Subsystem = OntologyDAO.serviceName
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/RawlsDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/RawlsDAO.scala
index d14a22a22..e5355afc7 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/RawlsDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/RawlsDAO.scala
@@ -38,13 +38,22 @@ trait RawlsDAO extends LazyLogging with ReportsSubsystemStatus {
lazy val rawlsAdminUrl = FireCloudConfig.Rawls.authUrl + "/user/role/admin"
lazy val rawlsCuratorUrl = FireCloudConfig.Rawls.authUrl + "/user/role/curator"
lazy val rawlsWorkpacesUrl = FireCloudConfig.Rawls.workspacesUrl
- lazy val rawlsAdminWorkspaces = FireCloudConfig.Rawls.authUrl + "/admin/workspaces?attributeName=library:published&valueBoolean=true"
- def rawlsWorkspaceACLUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/acl")
+ lazy val rawlsAdminWorkspaces =
+ FireCloudConfig.Rawls.authUrl + "/admin/workspaces?attributeName=library:published&valueBoolean=true"
+ def rawlsWorkspaceACLUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(
+ FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/acl"
+ )
lazy val rawlsWorkspaceACLQuerystring = "?inviteUsersNotFound=%s"
- def rawlsWorkspaceMethodConfigsUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/methodconfigs")
- def rawlsBucketUsageUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/bucketUsage")
+ def rawlsWorkspaceMethodConfigsUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(
+ FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/methodconfigs"
+ )
+ def rawlsBucketUsageUrl(workspaceNamespace: String, workspaceName: String): String = encodeUri(
+ FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/bucketUsage"
+ )
- def rawlsEntitiesOfTypeUrl(workspaceNamespace: String, workspaceName: String, entityType: String): String = encodeUri(FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/entities/$entityType")
+ def rawlsEntitiesOfTypeUrl(workspaceNamespace: String, workspaceName: String, entityType: String): String = encodeUri(
+ FireCloudConfig.Rawls.workspacesUrl + s"/$workspaceNamespace/$workspaceName/entities/$entityType"
+ )
def isAdmin(userInfo: UserInfo): Future[Boolean]
@@ -56,45 +65,79 @@ trait RawlsDAO extends LazyLogging with ReportsSubsystemStatus {
def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse]
- def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails]
+ def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceDetails]
- def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails]
+ def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceDetails]
// you must be an admin to execute this method
def getAllLibraryPublishedWorkspaces(implicit userToken: WithAccessToken): Future[Seq[WorkspaceDetails]]
def getWorkspaceACL(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceACL]
- def patchWorkspaceACL(ns: String, name: String, aclUpdates: Seq[WorkspaceACLUpdate], inviteUsersNotFound: Boolean)(implicit userToken: WithAccessToken): Future[WorkspaceACLUpdateResponseList]
+ def patchWorkspaceACL(ns: String, name: String, aclUpdates: Seq[WorkspaceACLUpdate], inviteUsersNotFound: Boolean)(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceACLUpdateResponseList]
- def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit userToken: UserInfo): Future[Seq[Entity]]
+ def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit
+ userToken: UserInfo
+ ): Future[Seq[Entity]]
- def queryEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String, query: EntityQuery)(implicit userToken: UserInfo): Future[EntityQueryResponse]
+ def queryEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String, query: EntityQuery)(
+ implicit userToken: UserInfo
+ ): Future[EntityQueryResponse]
- def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit userToken: UserInfo): Future[Map[String, EntityTypeMetadata]]
+ def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: UserInfo
+ ): Future[Map[String, EntityTypeMetadata]]
- def getCatalog(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Seq[WorkspaceCatalog]]
+ def getCatalog(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[WorkspaceCatalog]]
- def patchCatalog(workspaceNamespace: String, workspaceName: String, updates: Seq[WorkspaceCatalog])(implicit userToken: WithAccessToken): Future[WorkspaceCatalogUpdateResponseList]
+ def patchCatalog(workspaceNamespace: String, workspaceName: String, updates: Seq[WorkspaceCatalog])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceCatalogUpdateResponseList]
- def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Seq[AgoraConfigurationShort]]
+ def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[AgoraConfigurationShort]]
- def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Option[String]]
+ def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Option[String]]
- def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(implicit userToken: WithAccessToken): Future[WorkspaceDetails]
+ def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceDetails]
def getProjects(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMembership]]
def getProjectMembers(projectId: String)(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMember]]
- def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean]
+ def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean]
- def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean]
+ def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean]
- def batchUpsertEntities(workspaceNamespace: String, workspaceName: String, entityType: String, upserts: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse]
+ def batchUpsertEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ upserts: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse]
- def batchUpdateEntities(workspaceNamespace: String, workspaceName: String, entityType: String, updates: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse]
+ def batchUpdateEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ updates: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse]
- override def serviceName:Subsystem = RawlsDAO.serviceName
+ override def serviceName: Subsystem = RawlsDAO.serviceName
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ResearchPurposeSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ResearchPurposeSupport.scala
index 45fa2e6fc..e0a86aaf3 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ResearchPurposeSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ResearchPurposeSupport.scala
@@ -4,11 +4,14 @@ import org.broadinstitute.dsde.firecloud.model.DataUse.ResearchPurpose
import org.elasticsearch.index.query.BoolQueryBuilder
trait ResearchPurposeSupport {
+
/**
* Build a query filter based on research purpose.
*
* @param researchPurpose description of the research purpose to match
* @param makeAttributeName function to transform an attribute name; e.g. add a prefix to avoid naming collisions with other attributes in the search index
*/
- def researchPurposeFilters(researchPurpose: ResearchPurpose, makeAttributeName: String => String = identity): BoolQueryBuilder
+ def researchPurposeFilters(researchPurpose: ResearchPurpose,
+ makeAttributeName: String => String = identity
+ ): BoolQueryBuilder
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SamDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SamDAO.scala
index ba647ffad..7fc0d4fd7 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SamDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SamDAO.scala
@@ -6,7 +6,17 @@ import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.model.ManagedGroupRoles.ManagedGroupRole
import org.broadinstitute.dsde.firecloud.model.SamResource.UserPolicy
-import org.broadinstitute.dsde.firecloud.model.{AccessToken, FireCloudManagedGroupMembership, RegistrationInfo, RegistrationInfoV2, SamUserResponse, UserIdInfo, UserInfo, WithAccessToken, WorkbenchUserInfo}
+import org.broadinstitute.dsde.firecloud.model.{
+ AccessToken,
+ FireCloudManagedGroupMembership,
+ RegistrationInfo,
+ RegistrationInfoV2,
+ SamUserResponse,
+ UserIdInfo,
+ UserInfo,
+ WithAccessToken,
+ WorkbenchUserInfo
+}
import org.broadinstitute.dsde.rawls.model.{ErrorReportSource, RawlsUserEmail}
import org.broadinstitute.dsde.workbench.model.google.GoogleProject
import org.broadinstitute.dsde.workbench.model.{WorkbenchEmail, WorkbenchGroupName, WorkbenchUserId}
@@ -40,17 +50,30 @@ trait SamDAO extends LazyLogging with ReportsSubsystemStatus {
val samManagedGroupsBase: String = FireCloudConfig.Sam.baseUrl + "/api/groups"
val samManagedGroupBase: String = FireCloudConfig.Sam.baseUrl + "/api/group"
def samManagedGroup(groupName: WorkbenchGroupName): String = samManagedGroupBase + s"/$groupName"
- def samManagedGroupRequestAccess(groupName: WorkbenchGroupName): String = samManagedGroup(groupName) + "/requestAccess"
- def samManagedGroupPolicy(groupName: WorkbenchGroupName, policyName: ManagedGroupRole): String = samManagedGroup(groupName) + s"/$policyName"
- def samManagedGroupAlterMember(groupName: WorkbenchGroupName, policyName: ManagedGroupRole, email: WorkbenchEmail): String = samManagedGroupPolicy(groupName, policyName) + s"/${URLEncoder.encode(email.value, UTF_8.name)}"
+ def samManagedGroupRequestAccess(groupName: WorkbenchGroupName): String =
+ samManagedGroup(groupName) + "/requestAccess"
+ def samManagedGroupPolicy(groupName: WorkbenchGroupName, policyName: ManagedGroupRole): String =
+ samManagedGroup(groupName) + s"/$policyName"
+ def samManagedGroupAlterMember(groupName: WorkbenchGroupName,
+ policyName: ManagedGroupRole,
+ email: WorkbenchEmail
+ ): String = samManagedGroupPolicy(groupName, policyName) + s"/${URLEncoder.encode(email.value, UTF_8.name)}"
val samResourceBase: String = FireCloudConfig.Sam.baseUrl + s"/api/resource"
- def samResource(resourceTypeName: String, resourceId: String): String = samResourceBase + s"/$resourceTypeName/$resourceId"
- def samResourceRoles(resourceTypeName: String, resourceId: String): String = samResource(resourceTypeName, resourceId) + "/roles"
- def samResourcePolicies(resourceTypeName: String, resourceId: String): String = samResource(resourceTypeName, resourceId) + "/policies"
- def samResourcePolicy(resourceTypeName: String, resourceId: String, policyName: String): String = samResourcePolicies(resourceTypeName, resourceId) + s"/$policyName"
- def samResourcePolicyAlterMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail): String = samResourcePolicy(resourceTypeName, resourceId, policyName) + s"/${URLEncoder.encode(email.value, UTF_8.name)}"
-
+ def samResource(resourceTypeName: String, resourceId: String): String =
+ samResourceBase + s"/$resourceTypeName/$resourceId"
+ def samResourceRoles(resourceTypeName: String, resourceId: String): String =
+ samResource(resourceTypeName, resourceId) + "/roles"
+ def samResourcePolicies(resourceTypeName: String, resourceId: String): String =
+ samResource(resourceTypeName, resourceId) + "/policies"
+ def samResourcePolicy(resourceTypeName: String, resourceId: String, policyName: String): String =
+ samResourcePolicies(resourceTypeName, resourceId) + s"/$policyName"
+ def samResourcePolicyAlterMember(resourceTypeName: String,
+ resourceId: String,
+ policyName: String,
+ email: WorkbenchEmail
+ ): String =
+ samResourcePolicy(resourceTypeName, resourceId, policyName) + s"/${URLEncoder.encode(email.value, UTF_8.name)}"
val samResourcesBase: String = FireCloudConfig.Sam.baseUrl + s"/api/resources/v1"
def samListResources(resourceTypeName: String): String = samResourcesBase + s"/$resourceTypeName"
@@ -62,7 +85,9 @@ trait SamDAO extends LazyLogging with ReportsSubsystemStatus {
def getUserIds(email: RawlsUserEmail)(implicit userInfo: WithAccessToken): Future[UserIdInfo]
- def getUsersForIds(samUserIds: Seq[WorkbenchUserId])(implicit userInfo: WithAccessToken): Future[Seq[WorkbenchUserInfo]]
+ def getUsersForIds(samUserIds: Seq[WorkbenchUserId])(implicit
+ userInfo: WithAccessToken
+ ): Future[Seq[WorkbenchUserInfo]]
def listWorkspaceResources(implicit userInfo: WithAccessToken): Future[Seq[UserPolicy]]
@@ -71,14 +96,26 @@ trait SamDAO extends LazyLogging with ReportsSubsystemStatus {
def listGroups(implicit userInfo: WithAccessToken): Future[List[FireCloudManagedGroupMembership]]
def getGroupEmail(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[WorkbenchEmail]
def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean]
- def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit userInfo: WithAccessToken): Future[List[WorkbenchEmail]]
- def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit]
- def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit]
- def overwriteGroupMembers(groupName: WorkbenchGroupName, role: ManagedGroupRole, memberList: List[WorkbenchEmail])(implicit userInfo: WithAccessToken): Future[Unit]
+ def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit
+ userInfo: WithAccessToken
+ ): Future[List[WorkbenchEmail]]
+ def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit]
+ def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit]
+ def overwriteGroupMembers(groupName: WorkbenchGroupName, role: ManagedGroupRole, memberList: List[WorkbenchEmail])(
+ implicit userInfo: WithAccessToken
+ ): Future[Unit]
def requestGroupAccess(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit]
- def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit]
- def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(implicit userInfo: WithAccessToken): Future[Unit]
+ def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit]
+ def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit]
def getPetServiceAccountTokenForUser(user: WithAccessToken, scopes: Seq[String]): Future[AccessToken]
def getPetServiceAccountKeyForUser(user: WithAccessToken, project: GoogleProject): Future[String]
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SearchDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SearchDAO.scala
index 2d77b2251..8f22711a0 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SearchDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/SearchDAO.scala
@@ -23,12 +23,18 @@ trait SearchDAO extends LazyLogging with ReportsSubsystemStatus {
def createIndex(): Unit
def deleteIndex(): Unit
- def bulkIndex(docs: Seq[Document], refresh:Boolean = false): LibraryBulkIndexResponse
+ def bulkIndex(docs: Seq[Document], refresh: Boolean = false): LibraryBulkIndexResponse
def indexDocument(doc: Document): Unit
def deleteDocument(id: String): Unit
- def findDocuments(criteria: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse]
- def suggestionsFromAll(criteria: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse]
+ def findDocuments(criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse]
+ def suggestionsFromAll(criteria: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse]
def suggestionsForFieldPopulate(field: String, text: String): Future[Seq[String]]
- override def serviceName:Subsystem = SearchDAO.serviceName
+ override def serviceName: Subsystem = SearchDAO.serviceName
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ShareLogDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ShareLogDAO.scala
index 8da622b6e..ea943e52d 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ShareLogDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ShareLogDAO.scala
@@ -22,7 +22,6 @@ trait ShareLogDAO extends ElasticSearchDAOSupport {
*/
def logShare(userId: String, sharee: String, shareType: ShareType.Value): Share
-
/**
* Logs records of a user sharing a workspace, group, or method with users.
*
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ThurloeDAO.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ThurloeDAO.scala
index 461563c1b..5c5d929fa 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ThurloeDAO.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/dataaccess/ThurloeDAO.scala
@@ -46,5 +46,5 @@ trait ThurloeDAO extends LazyLogging with ReportsSubsystemStatus {
def deleteKeyValue(forUserId: String, keyName: String, callerToken: WithAccessToken): Future[Try[Unit]]
- override def serviceName:Subsystem = ThurloeDAO.serviceName
+ override def serviceName: Subsystem = ThurloeDAO.serviceName
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/elastic/ElasticUtils.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/elastic/ElasticUtils.scala
index 31da099e8..fd6b28e76 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/elastic/ElasticUtils.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/elastic/ElasticUtils.scala
@@ -9,7 +9,7 @@ import org.elasticsearch.transport.client.PreBuiltTransportClient
import akka.http.scaladsl.model.Uri.Authority
object ElasticUtils {
- def buildClient(servers:Seq[Authority], clusterName: String): TransportClient = {
+ def buildClient(servers: Seq[Authority], clusterName: String): TransportClient = {
val settings = Settings.builder
.put("cluster.name", clusterName)
.build
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/DataUse.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/DataUse.scala
index 6642489b4..71ca58bba 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/DataUse.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/DataUse.scala
@@ -1,7 +1,12 @@
package org.broadinstitute.dsde.firecloud.model
import akka.http.scaladsl.model.Uri
-import org.broadinstitute.dsde.rawls.model.{Attribute, AttributeFormat, AttributeName, PlainArrayAttributeListSerializer}
+import org.broadinstitute.dsde.rawls.model.{
+ Attribute,
+ AttributeFormat,
+ AttributeName,
+ PlainArrayAttributeListSerializer
+}
import org.broadinstitute.dsde.rawls.model.WorkspaceJsonSupport.AttributeNameFormat
import spray.json._
import spray.json.DefaultJsonProtocol._
@@ -9,28 +14,32 @@ import spray.json.JsValue
object DataUse {
- private final val doid_prefix = "http://purl.obolibrary.org/obo/DOID_"
+ final private val doid_prefix = "http://purl.obolibrary.org/obo/DOID_"
- case class ResearchPurpose(
- DS: Seq[DiseaseOntologyNodeId],
- NMDS: Boolean,
- NCTRL: Boolean,
- NAGR: Boolean,
- POA: Boolean,
- NCU: Boolean)
+ case class ResearchPurpose(DS: Seq[DiseaseOntologyNodeId],
+ NMDS: Boolean,
+ NCTRL: Boolean,
+ NAGR: Boolean,
+ POA: Boolean,
+ NCU: Boolean
+ )
object ResearchPurpose {
- def default = {
- new ResearchPurpose(Seq.empty[DiseaseOntologyNodeId], NMDS=false, NCTRL=false, NAGR=false, POA=false, NCU=false)
- }
+ def default =
+ new ResearchPurpose(Seq.empty[DiseaseOntologyNodeId],
+ NMDS = false,
+ NCTRL = false,
+ NAGR = false,
+ POA = false,
+ NCU = false
+ )
- def apply(request: ResearchPurposeRequest): ResearchPurpose = {
+ def apply(request: ResearchPurposeRequest): ResearchPurpose =
requestToResearchPurpose(request)
- }
}
case class DiseaseOntologyNodeId(uri: Uri, numericId: Int)
object DiseaseOntologyNodeId {
- def apply(stringid:String) = {
+ def apply(stringid: String) = {
require(stringid.startsWith(doid_prefix), s"Disease Ontology node id must be in the form '${doid_prefix}NNN'")
val uri = Uri(stringid)
val numericId = stringid.stripPrefix(doid_prefix).toInt
@@ -38,32 +47,38 @@ object DataUse {
}
}
- case class ResearchPurposeRequest(
- DS: Option[Seq[String]],
- NMDS: Option[Boolean],
- NCTRL: Option[Boolean],
- NAGR: Option[Boolean],
- POA: Option[Boolean],
- NCU: Option[Boolean],
- prefix: Option[String])
+ case class ResearchPurposeRequest(DS: Option[Seq[String]],
+ NMDS: Option[Boolean],
+ NCTRL: Option[Boolean],
+ NAGR: Option[Boolean],
+ POA: Option[Boolean],
+ NCU: Option[Boolean],
+ prefix: Option[String]
+ )
object ResearchPurposeRequest {
- def empty: ResearchPurposeRequest = {
- new ResearchPurposeRequest(DS = None, NMDS = None, NCTRL = None, NAGR = None, POA = None, NCU = None, prefix = None)
- }
+ def empty: ResearchPurposeRequest =
+ new ResearchPurposeRequest(DS = None,
+ NMDS = None,
+ NCTRL = None,
+ NAGR = None,
+ POA = None,
+ NCU = None,
+ prefix = None
+ )
}
- def requestToResearchPurpose(r: ResearchPurposeRequest): ResearchPurpose = {
+ def requestToResearchPurpose(r: ResearchPurposeRequest): ResearchPurpose =
ResearchPurpose(
DS = r.DS match {
case Some(ds) => ds.map(DiseaseOntologyNodeId(_))
- case None => Seq.empty[DiseaseOntologyNodeId]
+ case None => Seq.empty[DiseaseOntologyNodeId]
},
NMDS = r.NMDS.getOrElse(false),
NCTRL = r.NCTRL.getOrElse(false),
NAGR = r.NAGR.getOrElse(false),
POA = r.POA.getOrElse(false),
- NCU = r.NCU.getOrElse(false))
- }
+ NCU = r.NCU.getOrElse(false)
+ )
case class StructuredDataRequest(generalResearchUse: Boolean,
healthMedicalBiomedicalUseRequired: Boolean,
@@ -76,17 +91,20 @@ object DataUse {
genderUseRequired: String,
pediatricResearchRequired: Boolean,
irbRequired: Boolean,
- prefix: Option[String])
+ prefix: Option[String]
+ )
case class StructuredDataResponse(consentCodes: Array[String],
dulvn: Int,
prefix: String,
- structuredUseRestriction: Map[AttributeName, Attribute]) {
+ structuredUseRestriction: Map[AttributeName, Attribute]
+ ) {
def formatWithPrefix(): Map[String, JsValue] = {
implicit val impAttributeFormat = new AttributeFormat with PlainArrayAttributeListSerializer
Map(prefix + "consentCodes" -> consentCodes.toJson,
- prefix + "dulvn" -> dulvn.toJson,
- prefix + "structuredUseRestriction" -> structuredUseRestriction.toJson)
+ prefix + "dulvn" -> dulvn.toJson,
+ prefix + "structuredUseRestriction" -> structuredUseRestriction.toJson
+ )
}
}
}
@@ -115,4 +133,3 @@ object ConsentCodes extends Enumeration {
val allPreviousDurFieldNames = duRestrictionFieldNames ++ Seq(DS, "RS-POP", "futureUseDate")
val diseaseLabelsAttributeName: AttributeName = AttributeName.withLibraryNS(DS)
}
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearch.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearch.scala
index 11ceb7b3c..9fba7f03b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearch.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearch.scala
@@ -28,16 +28,15 @@ case class AttributeDetail(
typeahead: Option[String] = None
)
-
case class ESDatasetProperty(properties: Map[String, ESPropertyFields])
// trait def, with util factories
trait ESPropertyFields {
- def suggestField(`type`:String) = ESInnerField(`type`,
- analyzer = Some("autocomplete"),
- search_analyzer = Some("standard"),
- include_in_all = Some(false),
- store = Some(true)
+ def suggestField(`type`: String) = ESInnerField(`type`,
+ analyzer = Some("autocomplete"),
+ search_analyzer = Some("standard"),
+ include_in_all = Some(false),
+ store = Some(true)
)
// https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-suggesters-completion.html
def completionField = ESInnerField(
@@ -45,23 +44,21 @@ trait ESPropertyFields {
analyzer = Option("simple"),
search_analyzer = Option("simple")
)
- def keywordField(`type`:String) = ESInnerField("keyword")
- def sortField(`type`:String) = ESInnerField(`type`,
- analyzer = Some("sort_analyzer"),
- include_in_all = Some(false),
- fielddata = Some(true)
- )
+ def keywordField(`type`: String) = ESInnerField("keyword")
+ def sortField(`type`: String) =
+ ESInnerField(`type`, analyzer = Some("sort_analyzer"), include_in_all = Some(false), fielddata = Some(true))
}
// top-level field defs, for facet and non-facet types
-case class ESType(`type`: String, fields: Option[Map[String,ESInnerField]], copy_to: Option[String] = None ) extends ESPropertyFields
+case class ESType(`type`: String, fields: Option[Map[String, ESInnerField]], copy_to: Option[String] = None)
+ extends ESPropertyFields
object ESType extends ESPropertyFields {
- def apply(`type`: String, hasPopulateSuggest: Boolean, hasSearchSuggest: Boolean, isAggregatable: Boolean):ESType = {
- val innerFields = Map.empty[String,ESInnerField] ++
+ def apply(`type`: String, hasPopulateSuggest: Boolean, hasSearchSuggest: Boolean, isAggregatable: Boolean): ESType = {
+ val innerFields = Map.empty[String, ESInnerField] ++
(if (`type`.equals("string"))
- Map("sort" -> sortField(`type`))
- else
- Map("sort" -> ESInnerField(`type`))) ++
+ Map("sort" -> sortField(`type`))
+ else
+ Map("sort" -> ESInnerField(`type`))) ++
(if (isAggregatable) Map("keyword" -> keywordField(`type`)) else Nil) ++
(if (hasPopulateSuggest) Map("suggestKeyword" -> keywordField(`type`)) else Nil)
if (hasSearchSuggest)
@@ -72,12 +69,10 @@ object ESType extends ESPropertyFields {
}
-case class ESNestedType(properties:Map[String,ESInnerField], `type`:String="nested") extends ESPropertyFields
+case class ESNestedType(properties: Map[String, ESInnerField], `type`: String = "nested") extends ESPropertyFields
-case class ESInternalType(
- `type`: String,
- index: String = "not_analyzed",
- include_in_all: Boolean = false) extends ESPropertyFields
+case class ESInternalType(`type`: String, index: String = "not_analyzed", include_in_all: Boolean = false)
+ extends ESPropertyFields
// def for ElasticSearch's multi-fields: https://www.elastic.co/guide/en/elasticsearch/reference/2.4/multi-fields.html
// technically, the top-level fields and inner fields are the same thing, and we *could* use the same class.
@@ -88,7 +83,8 @@ case class ESInnerField(`type`: String,
include_in_all: Option[Boolean] = None,
store: Option[Boolean] = None,
copy_to: Option[String] = None,
- fielddata: Option[Boolean] = None) extends ESPropertyFields
+ fielddata: Option[Boolean] = None
+) extends ESPropertyFields
// classes for sending documents to ES to be indexed
trait Indexable {
@@ -108,7 +104,6 @@ object Document {
def apply(id: String, jsonStr: String) = new Document(id, jsonStr.parseJson.asJsObject)
}
-
// classes to convert from json body and to json response
/**
@@ -119,38 +114,47 @@ object Document {
* @param from used for pagination, where to start the returned results
* @param size used for pagination, how many results to return
*/
-case class LibrarySearchParams(
- searchString: Option[String],
- filters: Map[String, Seq[String]],
- researchPurpose: Option[ResearchPurpose],
- fieldAggregations: Map[String, Int],
- from: Int = 0,
- size: Int = 10,
- sortField: Option[String] = None,
- sortDirection: Option[String] = None)
+case class LibrarySearchParams(searchString: Option[String],
+ filters: Map[String, Seq[String]],
+ researchPurpose: Option[ResearchPurpose],
+ fieldAggregations: Map[String, Int],
+ from: Int = 0,
+ size: Int = 10,
+ sortField: Option[String] = None,
+ sortDirection: Option[String] = None
+)
object LibrarySearchParams {
- def apply(searchString: Option[String], filters: Map[String, Seq[String]], researchPurpose: Option[ResearchPurpose], fieldAggregations: Map[String, Int], from: Option[Int], size: Option[Int], sortField: Option[String], sortDirection: Option[String]) = {
- new LibrarySearchParams(searchString, filters, researchPurpose, fieldAggregations, from.getOrElse(0), size.getOrElse(10), sortField, sortDirection)
- }
+ def apply(searchString: Option[String],
+ filters: Map[String, Seq[String]],
+ researchPurpose: Option[ResearchPurpose],
+ fieldAggregations: Map[String, Int],
+ from: Option[Int],
+ size: Option[Int],
+ sortField: Option[String],
+ sortDirection: Option[String]
+ ) =
+ new LibrarySearchParams(searchString,
+ filters,
+ researchPurpose,
+ fieldAggregations,
+ from.getOrElse(0),
+ size.getOrElse(10),
+ sortField,
+ sortDirection
+ )
}
-case class LibrarySearchResponse(
- searchParams: LibrarySearchParams,
- total: Int,
- results: Seq[JsValue],
- aggregations: Seq[LibraryAggregationResponse])
+case class LibrarySearchResponse(searchParams: LibrarySearchParams,
+ total: Int,
+ results: Seq[JsValue],
+ aggregations: Seq[LibraryAggregationResponse]
+)
-case class LibraryAggregationResponse(
- field: String,
- results: AggregationFieldResults)
+case class LibraryAggregationResponse(field: String, results: AggregationFieldResults)
-case class AggregationFieldResults(
- numOtherDocs: Int,
- buckets: Seq[AggregationTermResult])
+case class AggregationFieldResults(numOtherDocs: Int, buckets: Seq[AggregationTermResult])
case class AggregationTermResult(key: String, doc_count: Int)
-case class LibraryBulkIndexResponse(totalCount: Int, hasFailures: Boolean, failureMessages: Map[String,String])
-
-
+case class LibraryBulkIndexResponse(totalCount: Int, hasFailures: Boolean, failureMessages: Map[String, String])
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/EntityUpdateDefinition.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/EntityUpdateDefinition.scala
index 61a181ff6..acac09f7a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/EntityUpdateDefinition.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/EntityUpdateDefinition.scala
@@ -7,6 +7,4 @@ import spray.json._
* Created by tsharpe on 7/28/15.
*/
-case class EntityUpdateDefinition( name: String,
- entityType: String,
- operations: Seq[Map[String, Attribute]] )
+case class EntityUpdateDefinition(name: String, entityType: String, operations: Seq[Map[String, Attribute]])
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ErrorReport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ErrorReport.scala
index e51bb94c3..1c50d2beb 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ErrorReport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ErrorReport.scala
@@ -14,7 +14,9 @@ import scala.util.Try
object ErrorReportExtensions {
object FCErrorReport extends SprayJsonSupport {
- def apply(response: HttpResponse)(implicit ers: ErrorReportSource, executionContext: ExecutionContext, mat: Materializer): Future[ErrorReport] = {
+ def apply(
+ response: HttpResponse
+ )(implicit ers: ErrorReportSource, executionContext: ExecutionContext, mat: Materializer): Future[ErrorReport] =
// code prior to creation of this error report may have already consumed the response entity
response.entity match {
@@ -22,13 +24,14 @@ object ErrorReportExtensions {
val entityString = data.decodeString(java.nio.charset.Charset.defaultCharset())
Unmarshal(entityString).to[ErrorReport].map { re =>
new ErrorReport(ers.source, re.message, Option(response.status), Seq(re), Seq.empty, None)
- } recover {
- case _ =>
- new ErrorReport(ers.source, entityString, Option(response.status), Seq.empty, Seq.empty, None)
+ } recover { case _ =>
+ new ErrorReport(ers.source, entityString, Option(response.status), Seq.empty, Seq.empty, None)
}
case _ =>
val fallbackMessage = Try(response.toString()).toOption.getOrElse("Unexpected error")
- Future.successful(new ErrorReport(ers.source, fallbackMessage, Option(response.status), Seq.empty, Seq.empty, None))
+ Future.successful(
+ new ErrorReport(ers.source, fallbackMessage, Option(response.status), Seq.empty, Seq.empty, None)
+ )
}
//
// Unmarshal(response).to[ErrorReport].map { re =>
@@ -38,7 +41,6 @@ object ErrorReportExtensions {
// new ErrorReport(ers.source, message, Option(response.status), Seq.empty, Seq.empty, None)
// }
// }
- }
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/JWT.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/JWT.scala
index a5f11ca3f..f43dc0b52 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/JWT.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/JWT.scala
@@ -1,8 +1,5 @@
package org.broadinstitute.dsde.firecloud.model
-
case class JWTWrapper(
jwt: String
)
-
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/LinkedEraAccount.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/LinkedEraAccount.scala
index b31c78d2a..6b5ce9ba6 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/LinkedEraAccount.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/LinkedEraAccount.scala
@@ -4,22 +4,20 @@ import bio.terra.externalcreds.model.AdminLinkInfo
import org.joda.time.{DateTime, Instant}
object LinkedEraAccount {
- def apply(samUserId: String, nihLink: NihLink): LinkedEraAccount = {
+ def apply(samUserId: String, nihLink: NihLink): LinkedEraAccount =
LinkedEraAccount(samUserId, nihLink.linkedNihUsername, Instant.ofEpochSecond(nihLink.linkExpireTime).toDateTime)
- }
- def apply(adminLinkInfo: AdminLinkInfo): LinkedEraAccount = {
- LinkedEraAccount(adminLinkInfo.getUserId, adminLinkInfo.getLinkedExternalId, new DateTime(adminLinkInfo.getLinkExpireTime))
- }
+ def apply(adminLinkInfo: AdminLinkInfo): LinkedEraAccount =
+ LinkedEraAccount(adminLinkInfo.getUserId,
+ adminLinkInfo.getLinkedExternalId,
+ new DateTime(adminLinkInfo.getLinkExpireTime)
+ )
- def unapply(linkedEraAccount: LinkedEraAccount): AdminLinkInfo = {
+ def unapply(linkedEraAccount: LinkedEraAccount): AdminLinkInfo =
new AdminLinkInfo()
.userId(linkedEraAccount.userId)
.linkedExternalId(linkedEraAccount.linkedExternalId)
.linkExpireTime(linkedEraAccount.linkExpireTime.toDate)
- }
}
case class LinkedEraAccount(userId: String, linkedExternalId: String, linkExpireTime: DateTime)
-
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ManagedGroup.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ManagedGroup.scala
index 1801ca4ec..8599b77e0 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ManagedGroup.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ManagedGroup.scala
@@ -8,25 +8,24 @@ import org.broadinstitute.dsde.workbench.model.WorkbenchEmail
object ManagedGroupRoles {
sealed trait ManagedGroupRole {
- override def toString: String = {
+ override def toString: String =
this match {
- case Admin => "admin"
- case Member => "member"
+ case Admin => "admin"
+ case Member => "member"
case AdminNotifier => "admin-notifier"
- case _ => throw new Exception(s"invalid ManagedGroupRole [$this]")
+ case _ => throw new Exception(s"invalid ManagedGroupRole [$this]")
}
- }
def withName(name: String): ManagedGroupRole = ManagedGroupRoles.withName(name)
}
- //we'll match on singular and plural for these roles because there's some inconsistency introduced
- //between orch and sam. this maintains backwards compatibility and as a bonus is a bit more user-friendly
+ // we'll match on singular and plural for these roles because there's some inconsistency introduced
+ // between orch and sam. this maintains backwards compatibility and as a bonus is a bit more user-friendly
def withName(name: String): ManagedGroupRole = name.toLowerCase match {
- case role if role matches "(?i)admin(s?$)" => Admin
- case role if role matches "(?i)member(s?$)" => Member
+ case role if role matches "(?i)admin(s?$)" => Admin
+ case role if role matches "(?i)member(s?$)" => Member
case role if role matches "(?i)admin-notifier(s?$)" => AdminNotifier
- case _ => throw new Exception(s"invalid ManagedGroupRole [$name]")
+ case _ => throw new Exception(s"invalid ManagedGroupRole [$name]")
}
case object Admin extends ManagedGroupRole
@@ -36,5 +35,8 @@ object ManagedGroupRoles {
val membershipRoles: Set[ManagedGroupRole] = Set(Admin, Member)
}
-case class FireCloudManagedGroup(adminsEmails: List[WorkbenchEmail], membersEmails: List[WorkbenchEmail], groupEmail: WorkbenchEmail)
-case class FireCloudManagedGroupMembership(groupName: String, groupEmail: String, role: String)
\ No newline at end of file
+case class FireCloudManagedGroup(adminsEmails: List[WorkbenchEmail],
+ membersEmails: List[WorkbenchEmail],
+ groupEmail: WorkbenchEmail
+)
+case class FireCloudManagedGroupMembership(groupName: String, groupEmail: String, role: String)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelJsonProtocol.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelJsonProtocol.scala
index 907d9ff03..65074e67c 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelJsonProtocol.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelJsonProtocol.scala
@@ -28,9 +28,8 @@ import scala.util.{Failure, Success, Try}
object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
import spray.json.DefaultJsonProtocol._
- def optionalEntryIntReader(fieldName: String, data: Map[String,JsValue]): Option[Int] = {
+ def optionalEntryIntReader(fieldName: String, data: Map[String, JsValue]): Option[Int] =
optionalEntryReader[Option[Int]](fieldName, data, _.convertTo[Option[Int]], None)
- }
/**
* optionalEntryReader constructs a type from a json map for a field that may or may not be in the map.
@@ -43,26 +42,25 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
* @tparam T the type of the object the data represents and will be converted to
* @return on object of the specified type constructed from the data if field is in the map, or the default if not
*/
- def optionalEntryReader[T](fieldName: String, data: Map[String,JsValue], converter: JsValue => T, default: T): T = {
+ def optionalEntryReader[T](fieldName: String, data: Map[String, JsValue], converter: JsValue => T, default: T): T =
data.getOrElse(fieldName, None) match {
- case j:JsValue => Try(converter(j)).toOption.getOrElse(
- throw DeserializationException(s"unexpected json type for $fieldName")
- )
+ case j: JsValue =>
+ Try(converter(j)).toOption.getOrElse(
+ throw DeserializationException(s"unexpected json type for $fieldName")
+ )
case None => default
}
- }
implicit object impStatusCode extends JsonFormat[StatusCode] {
override def write(code: StatusCode): JsValue = JsNumber(code.intValue)
override def read(json: JsValue): StatusCode = json match {
case JsNumber(n) => n.intValue
- case _ => throw DeserializationException("unexpected json type")
+ case _ => throw DeserializationException("unexpected json type")
}
}
-
- implicit object impDiseaseOntologyNodeId extends RootJsonFormat[DiseaseOntologyNodeId] {
+ implicit object impDiseaseOntologyNodeId extends RootJsonFormat[DiseaseOntologyNodeId] {
override def write(obj: DiseaseOntologyNodeId): JsValue = JsString(obj.uri.toString)
override def read(json: JsValue): DiseaseOntologyNodeId = json match {
@@ -71,7 +69,9 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
}
}
implicit val impResearchPurpose: RootJsonFormat[ResearchPurpose] = jsonFormat6(ResearchPurpose.apply)
- implicit val impResearchPurposeRequest: RootJsonFormat[ResearchPurposeRequest] = jsonFormat7(ResearchPurposeRequest.apply)
+ implicit val impResearchPurposeRequest: RootJsonFormat[ResearchPurposeRequest] = jsonFormat7(
+ ResearchPurposeRequest.apply
+ )
implicit object impLibrarySearchParams extends RootJsonFormat[LibrarySearchParams] {
val SEARCH_STRING = "searchString"
@@ -85,44 +85,46 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
val SORT_DIR = "sortDirection"
override def write(params: LibrarySearchParams): JsValue = {
- val fields:Seq[Option[(String, JsValue)]] = Seq(
+ val fields: Seq[Option[(String, JsValue)]] = Seq(
Some(FILTERS -> params.filters.toJson),
- params.researchPurpose map {RESEARCH_PURPOSE -> _.toJson},
+ params.researchPurpose map { RESEARCH_PURPOSE -> _.toJson },
Some(FIELD_AGGREGATIONS -> params.fieldAggregations.toJson),
Some(FROM -> params.from.toJson),
Some(SIZE -> params.size.toJson),
- params.sortField map {SORT_FIELD -> JsString(_)},
- params.sortDirection map {SORT_DIR -> JsString(_)},
- params.searchString map {SEARCH_STRING -> JsString(_)}
+ params.sortField map { SORT_FIELD -> JsString(_) },
+ params.sortDirection map { SORT_DIR -> JsString(_) },
+ params.searchString map { SEARCH_STRING -> JsString(_) }
)
- JsObject( fields.filter(_.isDefined).map{_.get}.toMap )
+ JsObject(fields.filter(_.isDefined).map(_.get).toMap)
}
override def read(json: JsValue): LibrarySearchParams = {
val data = json.asJsObject.fields
val term = data.getOrElse(SEARCH_STRING, None) match {
case JsString(str) if str.trim == "" => None
- case JsString(str) => Some(str.trim)
- case None => None
+ case JsString(str) => Some(str.trim)
+ case None => None
case _ => throw DeserializationException(s"unexpected json type for $SEARCH_STRING")
}
- val filters = optionalEntryReader[Map[String, Seq[String]]](FILTERS, data, _.convertTo[Map[String, Seq[String]]], Map.empty)
- val aggs = optionalEntryReader[Map[String, Int]](FIELD_AGGREGATIONS, data, _.convertTo[Map[String, Int]], Map.empty)
+ val filters =
+ optionalEntryReader[Map[String, Seq[String]]](FILTERS, data, _.convertTo[Map[String, Seq[String]]], Map.empty)
+ val aggs =
+ optionalEntryReader[Map[String, Int]](FIELD_AGGREGATIONS, data, _.convertTo[Map[String, Int]], Map.empty)
val from = optionalEntryIntReader(FROM, data)
val size = optionalEntryIntReader(SIZE, data)
val researchPurposeOption = data.get(RESEARCH_PURPOSE) map (_.convertTo[ResearchPurpose])
val sortField = data.get(SORT_FIELD) match {
- case Some(x:JsString) => Some(x.value)
- case _ => None
+ case Some(x: JsString) => Some(x.value)
+ case _ => None
}
val sortDirection = data.get(SORT_DIR) match {
- case Some(x:JsString) => Some(x.value)
- case _ => None
+ case Some(x: JsString) => Some(x.value)
+ case _ => None
}
LibrarySearchParams(term, filters, researchPurposeOption, aggs, from, size, sortField, sortDirection)
@@ -136,19 +138,19 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
implicit object impESPropertyFields extends JsonFormat[ESPropertyFields] {
override def write(input: ESPropertyFields): JsValue = input match {
- case estype: ESType => estype.toJson
+ case estype: ESType => estype.toJson
case esinternaltype: ESInternalType => esinternaltype.toJson
- case esinnerfield: ESInnerField => esinnerfield.toJson
- case esnestedtype: ESNestedType => esnestedtype.toJson
- case _ => throw new SerializationException("unexpected ESProperty type")
+ case esinnerfield: ESInnerField => esinnerfield.toJson
+ case esnestedtype: ESNestedType => esnestedtype.toJson
+ case _ => throw new SerializationException("unexpected ESProperty type")
}
override def read(json: JsValue): ESPropertyFields = {
val data = json.asJsObject.fields
data match {
case x if x.contains("properties") => ESNestedTypeFormat.read(json)
- case x if x.contains("fields") => ESTypeFormat.read(json)
- case _ => ESInternalTypeFormat.read(json)
+ case x if x.contains("fields") => ESTypeFormat.read(json)
+ case _ => ESInternalTypeFormat.read(json)
}
}
}
@@ -162,10 +164,12 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
val LINE_NUMBER = "lineNumber"
def write(stackTraceElement: StackTraceElement) =
- JsObject(CLASS_NAME -> JsString(stackTraceElement.getClassName),
+ JsObject(
+ CLASS_NAME -> JsString(stackTraceElement.getClassName),
METHOD_NAME -> JsString(stackTraceElement.getMethodName),
FILE_NAME -> JsString(stackTraceElement.getFileName),
- LINE_NUMBER -> JsNumber(stackTraceElement.getLineNumber))
+ LINE_NUMBER -> JsNumber(stackTraceElement.getLineNumber)
+ )
def read(json: JsValue) =
json.asJsObject.getFields(CLASS_NAME, METHOD_NAME, FILE_NAME, LINE_NUMBER) match {
@@ -178,30 +182,47 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
// Build error about missing implicit for Spray parameter unmarshaller? Add an entry here.
implicit val impMethod: RootJsonFormat[Method] = jsonFormat11(OrchMethodRepository.Method.apply)
implicit val impConfiguration: RootJsonFormat[Configuration] = jsonFormat10(OrchMethodRepository.Configuration)
- implicit val impAgoraConfigurationShort: RootJsonFormat[AgoraConfigurationShort] = jsonFormat4(OrchMethodRepository.AgoraConfigurationShort)
+ implicit val impAgoraConfigurationShort: RootJsonFormat[AgoraConfigurationShort] = jsonFormat4(
+ OrchMethodRepository.AgoraConfigurationShort
+ )
implicit val impUIWorkspaceResponse: RootJsonFormat[UIWorkspaceResponse] = jsonFormat6(UIWorkspaceResponse)
- //implicit val impEntity = jsonFormat5(Entity)
+ // implicit val impEntity = jsonFormat5(Entity)
implicit val impEntityCreateResult: RootJsonFormat[EntityCreateResult] = jsonFormat4(EntityCreateResult)
- implicit val impEntityCopyWithoutDestinationDefinition: RootJsonFormat[EntityCopyWithoutDestinationDefinition] = jsonFormat3(EntityCopyWithoutDestinationDefinition)
+ implicit val impEntityCopyWithoutDestinationDefinition: RootJsonFormat[EntityCopyWithoutDestinationDefinition] =
+ jsonFormat3(EntityCopyWithoutDestinationDefinition)
implicit val impEntityId: RootJsonFormat[EntityId] = jsonFormat2(EntityId)
implicit val impDestination: RootJsonFormat[MethodConfigurationId] = jsonFormat3(MethodConfigurationId)
- implicit val impMethodConfigurationCopy: RootJsonFormat[MethodConfigurationCopy] = jsonFormat4(MethodConfigurationCopy)
- implicit val impConfigurationCopyIngest: RootJsonFormat[CopyConfigurationIngest] = jsonFormat5(CopyConfigurationIngest)
- implicit val impMethodConfigurationPublish: RootJsonFormat[MethodConfigurationPublish] = jsonFormat3(MethodConfigurationPublish)
- implicit val impPublishConfigurationIngest: RootJsonFormat[PublishConfigurationIngest] = jsonFormat4(PublishConfigurationIngest)
- implicit val impMethodConfigurationName: RootJsonFormat[OrchMethodConfigurationName] = jsonFormat2(OrchMethodConfigurationName.apply)
+ implicit val impMethodConfigurationCopy: RootJsonFormat[MethodConfigurationCopy] = jsonFormat4(
+ MethodConfigurationCopy
+ )
+ implicit val impConfigurationCopyIngest: RootJsonFormat[CopyConfigurationIngest] = jsonFormat5(
+ CopyConfigurationIngest
+ )
+ implicit val impMethodConfigurationPublish: RootJsonFormat[MethodConfigurationPublish] = jsonFormat3(
+ MethodConfigurationPublish
+ )
+ implicit val impPublishConfigurationIngest: RootJsonFormat[PublishConfigurationIngest] = jsonFormat4(
+ PublishConfigurationIngest
+ )
+ implicit val impMethodConfigurationName: RootJsonFormat[OrchMethodConfigurationName] = jsonFormat2(
+ OrchMethodConfigurationName.apply
+ )
implicit val impFireCloudPermission: RootJsonFormat[FireCloudPermission] = jsonFormat2(FireCloudPermission)
implicit val impAgoraPermission: RootJsonFormat[AgoraPermission] = jsonFormat2(AgoraPermission)
implicit val impEntityAccessControl: RootJsonFormat[EntityAccessControl] = jsonFormat4(EntityAccessControl)
- implicit val impEntityAccessControlAgora: RootJsonFormat[EntityAccessControlAgora] = jsonFormat3(EntityAccessControlAgora)
+ implicit val impEntityAccessControlAgora: RootJsonFormat[EntityAccessControlAgora] = jsonFormat3(
+ EntityAccessControlAgora
+ )
implicit val impAccessEntry: RootJsonFormat[AccessEntry] = jsonFormat4(AccessEntry)
implicit val impPermissionReport: RootJsonFormat[PermissionReport] = jsonFormat2(PermissionReport)
- implicit val impPermissionReportRequest: RootJsonFormat[PermissionReportRequest] = jsonFormat2(PermissionReportRequest)
+ implicit val impPermissionReportRequest: RootJsonFormat[PermissionReportRequest] = jsonFormat2(
+ PermissionReportRequest
+ )
implicit val impMethodAclPair: RootJsonFormat[MethodAclPair] = jsonFormat3(MethodAclPair)
implicit val impEntityMetadata: RootJsonFormat[EntityMetadata] = jsonFormat3(EntityMetadata)
@@ -221,8 +242,12 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
implicit val impShibbolethToken: RootJsonFormat[ShibbolethToken] = jsonFormat2(ShibbolethToken)
implicit val impRegisterRequest: RootJsonFormat[RegisterRequest] = jsonFormat2(RegisterRequest)
- implicit val impSamUserAttributesRequest: RootJsonFormat[SamUserAttributesRequest] = jsonFormat1(SamUserAttributesRequest)
- implicit val impSamUserRegistrationRequest: RootJsonFormat[SamUserRegistrationRequest] = jsonFormat2(SamUserRegistrationRequest)
+ implicit val impSamUserAttributesRequest: RootJsonFormat[SamUserAttributesRequest] = jsonFormat1(
+ SamUserAttributesRequest
+ )
+ implicit val impSamUserRegistrationRequest: RootJsonFormat[SamUserRegistrationRequest] = jsonFormat2(
+ SamUserRegistrationRequest
+ )
implicit val impJWTWrapper: RootJsonFormat[JWTWrapper] = jsonFormat1(JWTWrapper)
@@ -246,35 +271,44 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
implicit val impCwdsResponse: RootJsonFormat[CwdsResponse] = jsonFormat3(CwdsResponse)
implicit val impCwdsListResponse: RootJsonFormat[CwdsListResponse] = jsonFormat4(CwdsListResponse)
- implicit val impWorkspaceStorageCostEstimate: RootJsonFormat[WorkspaceStorageCostEstimate] = jsonFormat2(WorkspaceStorageCostEstimate)
+ implicit val impWorkspaceStorageCostEstimate: RootJsonFormat[WorkspaceStorageCostEstimate] = jsonFormat2(
+ WorkspaceStorageCostEstimate
+ )
implicit object impManagedGroupRoleFormat extends RootJsonFormat[ManagedGroupRole] {
override def write(obj: ManagedGroupRole): JsValue = JsString(obj.toString)
override def read(json: JsValue): ManagedGroupRole = json match {
case JsString(name) => ManagedGroupRoles.withName(name)
- case _ => throw new DeserializationException("could not deserialize project role")
+ case _ => throw new DeserializationException("could not deserialize project role")
}
}
implicit val impFireCloudManagedGroup: RootJsonFormat[FireCloudManagedGroup] = jsonFormat3(FireCloudManagedGroup)
- implicit val impFireCloudManagedGroupMembership: RootJsonFormat[FireCloudManagedGroupMembership] = jsonFormat3(FireCloudManagedGroupMembership)
+ implicit val impFireCloudManagedGroupMembership: RootJsonFormat[FireCloudManagedGroupMembership] = jsonFormat3(
+ FireCloudManagedGroupMembership
+ )
implicit val impResourceId: ValueObjectFormat[ResourceId] = ValueObjectFormat(ResourceId)
implicit val impAccessPolicyName: ValueObjectFormat[AccessPolicyName] = ValueObjectFormat(AccessPolicyName)
implicit val impUserPolicy: RootJsonFormat[UserPolicy] = jsonFormat5(UserPolicy)
-
-
- implicit val AttributeDetailFormat: RootJsonFormat[AttributeDetail] = rootFormat(lazyFormat(jsonFormat5(AttributeDetail)))
+ implicit val AttributeDetailFormat: RootJsonFormat[AttributeDetail] = rootFormat(
+ lazyFormat(jsonFormat5(AttributeDetail))
+ )
implicit val AttributeDefinitionFormat: RootJsonFormat[AttributeDefinition] = jsonFormat1(AttributeDefinition)
-
implicit val impAggregationTermResult: RootJsonFormat[AggregationTermResult] = jsonFormat2(AggregationTermResult)
- implicit val impAggregationFieldResults: RootJsonFormat[AggregationFieldResults] = jsonFormat2(AggregationFieldResults)
- implicit val impLibraryAggregationResponse: RootJsonFormat[LibraryAggregationResponse] = jsonFormat2(LibraryAggregationResponse)
+ implicit val impAggregationFieldResults: RootJsonFormat[AggregationFieldResults] = jsonFormat2(
+ AggregationFieldResults
+ )
+ implicit val impLibraryAggregationResponse: RootJsonFormat[LibraryAggregationResponse] = jsonFormat2(
+ LibraryAggregationResponse
+ )
implicit val impLibrarySearchResponse: RootJsonFormat[LibrarySearchResponse] = jsonFormat4(LibrarySearchResponse)
- implicit val impLibraryBulkIndexResponse: RootJsonFormat[LibraryBulkIndexResponse] = jsonFormat3(LibraryBulkIndexResponse)
+ implicit val impLibraryBulkIndexResponse: RootJsonFormat[LibraryBulkIndexResponse] = jsonFormat3(
+ LibraryBulkIndexResponse
+ )
implicit val impStructuredDataRequest: RootJsonFormat[StructuredDataRequest] = jsonFormat12(StructuredDataRequest)
implicit val impStructuredDataResponse: RootJsonFormat[StructuredDataResponse] = jsonFormat4(StructuredDataResponse)
@@ -286,20 +320,21 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
implicit val impThurloeStatus: RootJsonFormat[ThurloeStatus] = jsonFormat2(ThurloeStatus)
// don't make this implicit! It would be pulled in by anything including ModelJsonProtocol._
- val entityExtractionRejectionHandler = RejectionHandler.newBuilder().handle {
- case MalformedRequestContentRejection(errorMsg, _) =>
+ val entityExtractionRejectionHandler = RejectionHandler
+ .newBuilder()
+ .handle { case MalformedRequestContentRejection(errorMsg, _) =>
complete(BadRequest, errorMsg)
- }.result()
+ }
+ .result()
// See http://stackoverflow.com/questions/24526103/generic-spray-client and
// https://gist.github.com/mikemckibben/fad4328de85a79a06bf3
- implicit def rootEitherFormat[A : RootJsonFormat, B : RootJsonFormat]: RootJsonFormat[Either[A, B]] = new RootJsonFormat[Either[A, B]] {
- val format = DefaultJsonProtocol.eitherFormat[A, B]
- def write(either: Either[A, B]) = format.write(either)
- def read(value: JsValue) = format.read(value)
- }
-
-
+ implicit def rootEitherFormat[A: RootJsonFormat, B: RootJsonFormat]: RootJsonFormat[Either[A, B]] =
+ new RootJsonFormat[Either[A, B]] {
+ val format = DefaultJsonProtocol.eitherFormat[A, B]
+ def write(either: Either[A, B]) = format.write(either)
+ def read(value: JsValue) = format.read(value)
+ }
// following are horribly copied-and-pasted from rawls core, since they're not available as shared models
implicit object ProjectStatusFormat extends RootJsonFormat[CreationStatuses.CreationStatus] {
@@ -307,7 +342,7 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
override def read(json: JsValue): CreationStatuses.CreationStatus = json match {
case JsString(name) => CreationStatuses.withName(name)
- case _ => throw new DeserializationException("could not deserialize project status")
+ case _ => throw new DeserializationException("could not deserialize project status")
}
}
@@ -316,24 +351,29 @@ object ModelJsonProtocol extends WorkspaceJsonSupport with SprayJsonSupport {
override def read(json: JsValue): ProjectRole = json match {
case JsString(name) => ProjectRoles.withName(name)
- case _ => throw new DeserializationException("could not deserialize project role")
+ case _ => throw new DeserializationException("could not deserialize project role")
}
}
- implicit val impRawlsBillingProjectMember: RootJsonFormat[RawlsBillingProjectMember] = jsonFormat2(RawlsBillingProjectMember)
+ implicit val impRawlsBillingProjectMember: RootJsonFormat[RawlsBillingProjectMember] = jsonFormat2(
+ RawlsBillingProjectMember
+ )
// END copy/paste from rawls
- implicit val impRawlsBillingProjectMembership: RootJsonFormat[RawlsBillingProjectMembership] = jsonFormat4(RawlsBillingProjectMembership)
+ implicit val impRawlsBillingProjectMembership: RootJsonFormat[RawlsBillingProjectMembership] = jsonFormat4(
+ RawlsBillingProjectMembership
+ )
- implicit val impCreateRawlsBillingProjectFullRequestFormat: RootJsonFormat[CreateRawlsBillingProjectFullRequest] = jsonFormat2(CreateRawlsBillingProjectFullRequest)
+ implicit val impCreateRawlsBillingProjectFullRequestFormat: RootJsonFormat[CreateRawlsBillingProjectFullRequest] =
+ jsonFormat2(CreateRawlsBillingProjectFullRequest)
implicit object ShareTypeFormat extends RootJsonFormat[ShareType.Value] {
override def write(obj: ShareType.Value): JsValue = JsString(obj.toString)
override def read(json: JsValue): ShareType.Value = json match {
case JsString(name) => ShareType.withName(name)
- case _ => throw DeserializationException("could not deserialize share type")
+ case _ => throw DeserializationException("could not deserialize share type")
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelSchema.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelSchema.scala
index 89a9fd07f..f65ee2344 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelSchema.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ModelSchema.scala
@@ -9,7 +9,6 @@ import DefaultJsonProtocol._
import org.broadinstitute.dsde.firecloud.{FireCloudConfig, FireCloudException, FireCloudExceptionWithErrorReport}
import org.broadinstitute.dsde.rawls.model.ErrorReport
-
/**
* Created with IntelliJ IDEA.
* User: hussein
@@ -19,75 +18,68 @@ import org.broadinstitute.dsde.rawls.model.ErrorReport
trait ModelSchema {
def getCollectionMemberType(entityType: String): Try[Option[String]]
- def isCollectionType (entityType: String): Boolean
- def getPlural (entityType: String): Try[String]
+ def isCollectionType(entityType: String): Boolean
+ def getPlural(entityType: String): Try[String]
def getRequiredAttributes(entityType: String): Try[Map[String, String]]
def getTypeSchema(entityType: String): Try[EntityMetadata]
def supportsBackwardsCompatibleIds(): Boolean
- def isAttributeArray(value: String): Boolean = {
+ def isAttributeArray(value: String): Boolean =
Try(value.parseJson.convertTo[JsArray]).isSuccess
- }
- def isEntityTypeInSchema(entityType: String): Boolean = {
+ def isEntityTypeInSchema(entityType: String): Boolean =
Try(this.getCollectionMemberType(entityType)) match {
case Failure(_) => false
- case _ => true
+ case _ => true
}
- }
}
-
-
object SchemaTypes {
sealed trait SchemaType
final case object FIRECLOUD extends SchemaType { override def toString = "firecloud" }
final case object FLEXIBLE extends SchemaType { override def toString = "flexible" }
- def withName(name: String): SchemaType = {
+ def withName(name: String): SchemaType =
name.toLowerCase match {
case "firecloud" => FIRECLOUD
- case "flexible" => FLEXIBLE
+ case "flexible" => FLEXIBLE
case _ => throw new FireCloudException(s"Invalid schema type '$name', supported types are: firecloud, flexible")
}
- }
}
-
-
object ModelSchemaRegistry {
// add new schema types from most specific to most general
- val schemas: Map[SchemaTypes.SchemaType, ModelSchema] = Map(SchemaTypes.FIRECLOUD -> FirecloudModelSchema, SchemaTypes.FLEXIBLE -> FlexibleModelSchema)
+ val schemas: Map[SchemaTypes.SchemaType, ModelSchema] =
+ Map(SchemaTypes.FIRECLOUD -> FirecloudModelSchema, SchemaTypes.FLEXIBLE -> FlexibleModelSchema)
- def getModelForSchemaType(schemaType: SchemaTypes.SchemaType): ModelSchema = schemas.getOrElse(schemaType, schemas.last._2)
+ def getModelForSchemaType(schemaType: SchemaTypes.SchemaType): ModelSchema =
+ schemas.getOrElse(schemaType, schemas.last._2)
}
-
object FlexibleModelSchema extends ModelSchema {
- def getCollectionMemberType(entityType: String): Try[Option[String]] = {
+ def getCollectionMemberType(entityType: String): Try[Option[String]] =
Success(Some(entityType.replace("_set", "")).filter(_ => isCollectionType(entityType)))
- }
- def isCollectionType(entityType: String): Boolean = {
+ def isCollectionType(entityType: String): Boolean =
entityType.endsWith("_set")
- }
- def getRequiredAttributes(entityType: String): Try[Map[String, String]] = {
+ def getRequiredAttributes(entityType: String): Try[Map[String, String]] =
Success(Map.empty)
- }
- def getPlural(entityType: String): Try[String] = {
+ def getPlural(entityType: String): Try[String] =
Success(pluralize(entityType))
- }
- private def pluralize(entityType: String): String = {
+ private def pluralize(entityType: String): String =
entityType + "s"
- }
- def getTypeSchema(entityType: String): Try[EntityMetadata] = {
- Success(EntityMetadata(pluralize(entityType), Map.empty, Some(entityType+"_members").filter(_ => isCollectionType(entityType))))
- }
+ def getTypeSchema(entityType: String): Try[EntityMetadata] =
+ Success(
+ EntityMetadata(pluralize(entityType),
+ Map.empty,
+ Some(entityType + "_members").filter(_ => isCollectionType(entityType))
+ )
+ )
def supportsBackwardsCompatibleIds(): Boolean = false
}
@@ -95,31 +87,31 @@ object FlexibleModelSchema extends ModelSchema {
object FirecloudModelSchema extends ModelSchema {
object EntityTypes {
- val types : Map[String, EntityMetadata] = ModelJsonProtocol.impModelSchema.read(
- Source.fromURL(getClass.getResource(FireCloudConfig.Rawls.model)).mkString.parseJson ).schema
+ val types: Map[String, EntityMetadata] = ModelJsonProtocol.impModelSchema
+ .read(Source.fromURL(getClass.getResource(FireCloudConfig.Rawls.model)).mkString.parseJson)
+ .schema
}
- def getTypeSchema(entityType: String): Try[EntityMetadata] = {
+ def getTypeSchema(entityType: String): Try[EntityMetadata] =
EntityTypes.types.get(entityType) map (Success(_)) getOrElse
- Failure(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.BadRequest, "Unknown firecloud model entity type: " + entityType)))
- }
+ Failure(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.BadRequest, "Unknown firecloud model entity type: " + entityType)
+ )
+ )
- def getCollectionMemberType(entityType: String): Try[Option[String]] = {
+ def getCollectionMemberType(entityType: String): Try[Option[String]] =
getTypeSchema(entityType).map(_.memberType)
- }
- def getRequiredAttributes(entityType: String): Try[Map[String, String]] = {
+ def getRequiredAttributes(entityType: String): Try[Map[String, String]] =
getTypeSchema(entityType).map(_.requiredAttributes)
- }
- def isCollectionType(entityType: String): Boolean = {
+ def isCollectionType(entityType: String): Boolean =
     // if the option is None, returns false; if Some, evaluates the function
- EntityTypes.types.get(entityType) exists(_.memberType.isDefined)
- }
+ EntityTypes.types.get(entityType) exists (_.memberType.isDefined)
- def getPlural(entityType: String): Try[String] = {
+ def getPlural(entityType: String): Try[String] =
getTypeSchema(entityType).map(_.plural)
- }
def supportsBackwardsCompatibleIds(): Boolean = true
}
@@ -137,5 +129,4 @@ case class EntityMetadata(
memberType: Option[String]
)
-case class EntityModel(schema : Map[String, EntityMetadata]) //entity name -> stuff about it
-
+case class EntityModel(schema: Map[String, EntityMetadata]) //entity name -> stuff about it
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Ontology.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Ontology.scala
index 0595977c8..520bfd9c8 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Ontology.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Ontology.scala
@@ -1,6 +1,5 @@
package org.broadinstitute.dsde.firecloud.model
-
object Ontology {
case class TermResource(
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepository.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepository.scala
index f71698c70..cece3df30 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepository.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepository.scala
@@ -16,7 +16,7 @@ object OrchMethodRepository {
def toPath(entityType: EntityType): String = entityType match {
case Workflow | Task => "methods"
- case Configuration => "configurations"
+ case Configuration => "configurations"
}
}
@@ -49,16 +49,21 @@ object OrchMethodRepository {
def toShortString: String = s"Method($namespace,$name,$snapshotId)"
}
- case class AgoraConfigurationShort(
- name: String,
- rootEntityType: String,
- methodRepoMethod: AgoraMethod,
- namespace: String)
+ case class AgoraConfigurationShort(name: String,
+ rootEntityType: String,
+ methodRepoMethod: AgoraMethod,
+ namespace: String
+ )
object Method {
def apply(mrm: AgoraMethod): Method = apply(mrm = mrm, managers = None, public = None)
- def apply(mrm: AgoraMethod, managers:Option[Seq[String]], public:Option[Boolean]): Method =
- new Method(Some(mrm.methodNamespace), Some(mrm.methodName), Some(mrm.methodVersion), managers=managers, public=public)
+ def apply(mrm: AgoraMethod, managers: Option[Seq[String]], public: Option[Boolean]): Method =
+ new Method(Some(mrm.methodNamespace),
+ Some(mrm.methodName),
+ Some(mrm.methodVersion),
+ managers = managers,
+ public = public
+ )
}
// represents a method/config permission as exposed to the user from the orchestration layer
@@ -87,7 +92,11 @@ object OrchMethodRepository {
case class MethodAclPair(method: AgoraMethod, acls: Seq[FireCloudPermission], message: Option[String] = None)
- case class EntityAccessControl(method:Option[Method], referencedBy: OrchMethodConfigurationName, acls: Seq[FireCloudPermission], message: Option[String] = None)
+ case class EntityAccessControl(method: Option[Method],
+ referencedBy: OrchMethodConfigurationName,
+ acls: Seq[FireCloudPermission],
+ message: Option[String] = None
+ )
object ACLNames {
val NoAccess = "NO ACCESS"
@@ -98,13 +107,11 @@ object OrchMethodRepository {
// yes we could manually sort these, but I prefer using .sorted - it's a one-time init, and it eliminates human mistakes
val ListNoAccess = List("Nothing")
val ListReader = List("Read")
- val ListOwner = List("Read","Write","Create","Redact","Manage").sorted
+ val ListOwner = List("Read", "Write", "Create", "Redact", "Manage").sorted
val ListAll = List("All")
}
- def validatePublicOrEmail(email:String): Boolean = {
+ def validatePublicOrEmail(email: String): Boolean =
"public".equals(email) || Try(new InternetAddress(email).validate()).isSuccess
- }
-
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/PermissionReport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/PermissionReport.scala
index a6f2d3b6b..1f132397f 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/PermissionReport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/PermissionReport.scala
@@ -6,12 +6,12 @@ import org.broadinstitute.dsde.rawls.model.AccessEntry
/**
* Created by davidan on 7/5/17.
*/
-case class PermissionReport (
+case class PermissionReport(
workspaceACL: Map[String, AccessEntry],
referencedMethods: Seq[EntityAccessControl]
)
-case class PermissionReportRequest (
+case class PermissionReportRequest(
users: Option[Seq[String]],
configs: Option[Seq[OrchMethodConfigurationName]]
)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Profile.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Profile.scala
index 174146cfa..c1f84881a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Profile.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Profile.scala
@@ -3,36 +3,30 @@ package org.broadinstitute.dsde.firecloud.model
import scala.language.postfixOps
import scala.util.Try
-case class FireCloudKeyValue(
- key: Option[String] = None,
- value: Option[String] = None)
+case class FireCloudKeyValue(key: Option[String] = None, value: Option[String] = None)
-case class ThurloeKeyValue(
- userId: Option[String] = None,
- keyValuePair: Option[FireCloudKeyValue] = None)
+case class ThurloeKeyValue(userId: Option[String] = None, keyValuePair: Option[FireCloudKeyValue] = None)
-case class ThurloeKeyValues(
- userId: Option[String] = None,
- keyValuePairs: Option[Seq[FireCloudKeyValue]] = None)
+case class ThurloeKeyValues(userId: Option[String] = None, keyValuePairs: Option[Seq[FireCloudKeyValue]] = None)
case class ProfileWrapper(userId: String, keyValuePairs: List[FireCloudKeyValue])
case class ProfileKVP(userId: String, keyValuePair: FireCloudKeyValue)
-case class BasicProfile (
- firstName: String,
- lastName: String,
- title: String,
- contactEmail: Option[String],
- institute: String,
- programLocationCity: String,
- programLocationState: String,
- programLocationCountry: String,
- termsOfService: Option[String],
- researchArea: Option[String],
- department: Option[String],
- interestInTerra: Option[String],
- ) extends mappedPropVals {
+case class BasicProfile(
+ firstName: String,
+ lastName: String,
+ title: String,
+ contactEmail: Option[String],
+ institute: String,
+ programLocationCity: String,
+ programLocationState: String,
+ programLocationCountry: String,
+ termsOfService: Option[String],
+ researchArea: Option[String],
+ department: Option[String],
+ interestInTerra: Option[String]
+) extends mappedPropVals {
require(ProfileValidator.nonEmpty(firstName), "first name must be non-empty")
require(ProfileValidator.nonEmpty(lastName), "last name must be non-empty")
require(ProfileValidator.nonEmpty(title), "title must be non-empty")
@@ -43,21 +37,21 @@ case class BasicProfile (
require(ProfileValidator.nonEmpty(programLocationCountry), "program location country must be non-empty")
}
-case class Profile (
- firstName: String,
- lastName: String,
- title: String,
- contactEmail: Option[String],
- institute: String,
- programLocationCity: String,
- programLocationState: String,
- programLocationCountry: String,
- researchArea: Option[String],
- linkedNihUsername: Option[String] = None,
- linkExpireTime: Option[Long] = None,
- department: Option[String],
- interestInTerra: Option[String],
- ) extends mappedPropVals {
+case class Profile(
+ firstName: String,
+ lastName: String,
+ title: String,
+ contactEmail: Option[String],
+ institute: String,
+ programLocationCity: String,
+ programLocationState: String,
+ programLocationCountry: String,
+ researchArea: Option[String],
+ linkedNihUsername: Option[String] = None,
+ linkExpireTime: Option[Long] = None,
+ department: Option[String],
+ interestInTerra: Option[String]
+) extends mappedPropVals {
require(ProfileValidator.nonEmpty(firstName), "first name must be non-empty")
require(ProfileValidator.nonEmpty(lastName), "last name must be non-empty")
require(ProfileValidator.nonEmpty(title), "title must be non-empty")
@@ -71,17 +65,24 @@ case class Profile (
object Profile {
// increment this number every time you make a change to the user-provided profile fields
- val currentVersion:Int = 5
-
- val requiredKeys = List("firstName", "lastName", "title", "institute", "department", "programLocationCity",
- "programLocationState", "programLocationCountry")
+ val currentVersion: Int = 5
+
+ val requiredKeys = List("firstName",
+ "lastName",
+ "title",
+ "institute",
+ "department",
+ "programLocationCity",
+ "programLocationState",
+ "programLocationCountry"
+ )
def apply(wrapper: ProfileWrapper): Profile = {
val mappedKVPs: Map[String, String] = (wrapper.keyValuePairs collect {
case fckv: FireCloudKeyValue if fckv.key.nonEmpty && fckv.value.nonEmpty => fckv.key.get -> fckv.value.get
}).toMap
- requiredKeys foreach {req =>
+ requiredKeys foreach { req =>
assert(mappedKVPs.contains(req), s"Profile for user ${wrapper.userId} must contain a key-value entry for $req")
}
@@ -98,7 +99,7 @@ object Profile {
linkedNihUsername = mappedKVPs.get("linkedNihUsername"),
linkExpireTime = mappedKVPs.get("linkExpireTime") match {
case Some(time) => Some(time.toLong)
- case _ => None
+ case _ => None
},
department = mappedKVPs.get("department"),
interestInTerra = mappedKVPs.get("interestInTerra")
@@ -108,7 +109,8 @@ object Profile {
}
object NihLink {
- def apply(linkedEraAccount: LinkedEraAccount): NihLink = NihLink(linkedEraAccount.linkedExternalId, linkedEraAccount.linkExpireTime.getMillis / 1000)
+ def apply(linkedEraAccount: LinkedEraAccount): NihLink =
+ NihLink(linkedEraAccount.linkedExternalId, linkedEraAccount.linkExpireTime.getMillis / 1000)
}
case class NihLink(linkedNihUsername: String, linkExpireTime: Long) extends mappedPropVals {
@@ -127,36 +129,32 @@ object ProfileValidator {
def nonEmpty(field: String): Boolean = !field.trim.isEmpty
def nonEmpty(field: Option[String]): Boolean = !field.getOrElse("").trim.isEmpty
def emptyOrValidEmail(field: Option[String]): Boolean = field match {
- case None => true
- case Some(x) if x.isEmpty => true
+ case None => true
+ case Some(x) if x.isEmpty => true
case Some(x) if emailRegex.findFirstMatchIn(x).isDefined => true
- case _ => false
+ case _ => false
}
}
object ProfileUtils {
- def getString(key: String, profileWrapper: ProfileWrapper): Option[String] = {
+ def getString(key: String, profileWrapper: ProfileWrapper): Option[String] =
profileWrapper.keyValuePairs.collectFirst {
case fckv if fckv.key.contains(key) => fckv.value
}.flatten
- }
- def getLong(key: String, profileWrapper: ProfileWrapper): Option[Long] = {
+ def getLong(key: String, profileWrapper: ProfileWrapper): Option[Long] =
getString(key, profileWrapper).flatMap(x => Try(x.toLong).toOption)
- }
}
trait mappedPropVals {
- def propertyValueMap: Map[String, String] = {
- this.getClass.getDeclaredFields map {
- f =>
- f.setAccessible(true)
- f.get(this) match {
- case x: String => f.getName -> x
- case y: Option[_] => f.getName -> y.asInstanceOf[Option[_]].getOrElse("").toString
- case z => f.getName -> z.toString
- }
+ def propertyValueMap: Map[String, String] =
+ this.getClass.getDeclaredFields map { f =>
+ f.setAccessible(true)
+ f.get(this) match {
+ case x: String => f.getName -> x
+ case y: Option[_] => f.getName -> y.asInstanceOf[Option[_]].getOrElse("").toString
+ case z => f.getName -> z.toString
+ }
} toMap
- }
}
case class TerraPreference(preferTerra: Boolean, preferTerraLastUpdated: Long)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Project.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Project.scala
index ecf4a9975..56ed8f859 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Project.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Project.scala
@@ -8,7 +8,11 @@ object Project {
// following are horribly copied-and-pasted from rawls core, since they're not available as shared models
case class CreateRawlsBillingProjectFullRequest(projectName: String, billingAccount: String)
- case class RawlsBillingProjectMembership(projectName: RawlsBillingProjectName, role: ProjectRoles.ProjectRole, creationStatus: CreationStatuses.CreationStatus, message: Option[String] = None)
+ case class RawlsBillingProjectMembership(projectName: RawlsBillingProjectName,
+ role: ProjectRoles.ProjectRole,
+ creationStatus: CreationStatuses.CreationStatus,
+ message: Option[String] = None
+ )
case class RawlsBillingProjectMember(email: RawlsUserEmail, role: ProjectRoles.ProjectRole)
@@ -21,15 +25,15 @@ object Project {
def toName(status: CreationStatus): String = status match {
case Creating => "Creating"
- case Ready => "Ready"
- case Error => "Error"
+ case Ready => "Ready"
+ case Error => "Error"
}
def withName(name: String): CreationStatus = name.toLowerCase match {
case "creating" => Creating
- case "ready" => Ready
- case "error" => Error
- case _ => throw new FireCloudException(s"invalid CreationStatus [${name}]")
+ case "ready" => Ready
+ case "error" => Error
+ case _ => throw new FireCloudException(s"invalid CreationStatus [${name}]")
}
case object Creating extends CreationStatus
@@ -49,13 +53,13 @@ object Project {
def toName(role: ProjectRole): String = role match {
case Owner => "Owner"
- case User => "User"
+ case User => "User"
}
def withName(name: String): ProjectRole = name.toLowerCase match {
case "owner" => Owner
- case "user" => User
- case _ => throw new FireCloudException(s"invalid ProjectRole [${name}]")
+ case "user" => User
+ case _ => throw new FireCloudException(s"invalid ProjectRole [${name}]")
}
case object Owner extends ProjectRole
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamResource.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamResource.scala
index 7dcdf9ec8..0aad372e4 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamResource.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamResource.scala
@@ -6,5 +6,10 @@ object SamResource {
case class ResourceId(value: String) extends ValueObject
case class AccessPolicyName(value: String) extends ValueObject
- case class UserPolicy(resourceId: ResourceId, public: Boolean, accessPolicyName: AccessPolicyName, missingAuthDomainGroups: Set[WorkbenchGroupName], authDomainGroups: Set[WorkbenchGroupName])
+ case class UserPolicy(resourceId: ResourceId,
+ public: Boolean,
+ accessPolicyName: AccessPolicyName,
+ missingAuthDomainGroups: Set[WorkbenchGroupName],
+ authDomainGroups: Set[WorkbenchGroupName]
+ )
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUser.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUser.scala
index f3e4333aa..37b7f7304 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUser.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUser.scala
@@ -11,4 +11,5 @@ case class SamUser(id: WorkbenchUserId,
enabled: Boolean,
createdAt: Instant,
registeredAt: Option[Instant],
- updatedAt: Instant)
+ updatedAt: Instant
+)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserRegistrationRequest.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserRegistrationRequest.scala
index 97d59c6fc..3581300bf 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserRegistrationRequest.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserRegistrationRequest.scala
@@ -3,6 +3,6 @@ package org.broadinstitute.dsde.firecloud.model
import org.broadinstitute.dsde.workbench.model.ErrorReport
case class SamUserRegistrationRequest(
- acceptsTermsOfService: Boolean,
- userAttributes: SamUserAttributesRequest
- )
+ acceptsTermsOfService: Boolean,
+ userAttributes: SamUserAttributesRequest
+)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserResponse.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserResponse.scala
index 8328b91f0..81925bb74 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserResponse.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/SamUserResponse.scala
@@ -5,12 +5,12 @@ import org.broadinstitute.dsde.workbench.model.{AzureB2CId, GoogleSubjectId, Wor
import java.time.Instant
final case class SamUserResponse(
- id: WorkbenchUserId,
- googleSubjectId: Option[GoogleSubjectId],
- email: WorkbenchEmail,
- azureB2CId: Option[AzureB2CId],
- allowed: Boolean,
- createdAt: Instant,
- registeredAt: Option[Instant],
- updatedAt: Instant
- ) {}
+ id: WorkbenchUserId,
+ googleSubjectId: Option[GoogleSubjectId],
+ email: WorkbenchEmail,
+ azureB2CId: Option[AzureB2CId],
+ allowed: Boolean,
+ createdAt: Instant,
+ registeredAt: Option[Instant],
+ updatedAt: Instant
+) {}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ShareLog.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ShareLog.scala
index e0971f20e..e92e87de6 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/ShareLog.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/ShareLog.scala
@@ -11,5 +11,3 @@ object ShareLog {
}
case class Share(userId: String, sharee: String, shareType: ShareType.Value, timestamp: Option[Instant] = None)
}
-
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/UserInfo.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/UserInfo.scala
index d247464cb..5dda795ad 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/UserInfo.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/UserInfo.scala
@@ -20,7 +20,7 @@ import scala.util.Try
* UserInfo.
*/
-trait WithAccessToken { val accessToken : OAuth2BearerToken }
+trait WithAccessToken { val accessToken: OAuth2BearerToken }
/**
* Represents an authenticated user.
@@ -31,9 +31,14 @@ trait WithAccessToken { val accessToken : OAuth2BearerToken }
* @param googleAccessTokenThroughB2C if this is a Google login through B2C, contains the opaque
* Google access token. Empty otherwise.
*/
-case class UserInfo(userEmail: String, accessToken: OAuth2BearerToken, accessTokenExpiresIn: Long, id: String, googleAccessTokenThroughB2C: Option[OAuth2BearerToken] = None) extends WithAccessToken {
+case class UserInfo(userEmail: String,
+ accessToken: OAuth2BearerToken,
+ accessTokenExpiresIn: Long,
+ id: String,
+ googleAccessTokenThroughB2C: Option[OAuth2BearerToken] = None
+) extends WithAccessToken {
def isB2C: Boolean =
- // B2C ids are uuids, while google ids are numeric
+ // B2C ids are uuids, while google ids are numeric
Try(BigInt(id)).isFailure
}
@@ -43,14 +48,17 @@ object UserInfo {
}
case class AccessToken(accessToken: OAuth2BearerToken) extends WithAccessToken
-object AccessToken{
+object AccessToken {
def apply(tokenStr: String) = new AccessToken(OAuth2BearerToken(tokenStr))
}
// response from Google has other fields, but these are the ones we care about
case class OAuthUser(sub: String, email: String)
-case class RegistrationInfo(userInfo: WorkbenchUserInfo, enabled: WorkbenchEnabled, messages:Option[List[String]] = None)
+case class RegistrationInfo(userInfo: WorkbenchUserInfo,
+ enabled: WorkbenchEnabled,
+ messages: Option[List[String]] = None
+)
case class RegistrationInfoV2(userSubjectId: String, userEmail: String, enabled: Boolean)
case class UserIdInfo(userSubjectId: String, userEmail: String, googleSubjectId: String)
@@ -65,4 +73,3 @@ case class Curator(curator: Boolean)
// indicates whether or not the user can import (workflow|data|etc) into a workspace - the user
// must have either a writable workspace or the ability to create a workspace (ready billing project)
case class UserImportPermission(billingProject: Boolean, writableWorkspace: Boolean)
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Workspace.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Workspace.scala
index 85c891f12..f36eb853a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/Workspace.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/Workspace.scala
@@ -4,13 +4,13 @@ import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.AgoraConfigu
import org.broadinstitute.dsde.rawls.model._
import org.joda.time.DateTime
-case class UIWorkspaceResponse(
- accessLevel: Option[String] = None,
- canShare: Option[Boolean] = None,
- catalog: Option[Boolean] = None,
- workspace: Option[WorkspaceDetails] = None,
- workspaceSubmissionStats: Option[WorkspaceSubmissionStats] = None,
- owners: Option[List[String]] = None)
+case class UIWorkspaceResponse(accessLevel: Option[String] = None,
+ canShare: Option[Boolean] = None,
+ catalog: Option[Boolean] = None,
+ workspace: Option[WorkspaceDetails] = None,
+ workspaceSubmissionStats: Option[WorkspaceSubmissionStats] = None,
+ owners: Option[List[String]] = None
+)
case class EntityCreateResult(entityType: String, entityName: String, succeeded: Boolean, message: String)
@@ -18,7 +18,7 @@ case class EntityCopyWithoutDestinationDefinition(
sourceWorkspace: WorkspaceName,
entityType: String,
entityNames: Seq[String]
- )
+)
case class EntityId(entityType: String, entityName: String)
@@ -31,78 +31,68 @@ case class ImportOptions(tdrSyncPermissions: Option[Boolean] = None, isUpsert: O
case class AsyncImportRequest(url: String, filetype: String, options: Option[ImportOptions] = None)
// the response payload received by users from Orchestration for async PFB/TSV/TDR snapshot imports
-case class AsyncImportResponse(url: String,
- jobId: String,
- workspace: WorkspaceName)
+case class AsyncImportResponse(url: String, jobId: String, workspace: WorkspaceName)
// the response payload received by Orchestration from cWDS
-case class CwdsResponse(
- jobId: String,
- status: String,
- message: Option[String])
+case class CwdsResponse(jobId: String, status: String, message: Option[String])
-case class CwdsListResponse(jobId: String,
- status: String,
- filetype: String,
- message: Option[String])
+case class CwdsListResponse(jobId: String, status: String, filetype: String, message: Option[String])
-case class MethodConfigurationId(
- name: Option[String] = None,
- namespace: Option[String] = None,
- workspaceName: Option[WorkspaceName] = None)
+case class MethodConfigurationId(name: Option[String] = None,
+ namespace: Option[String] = None,
+ workspaceName: Option[WorkspaceName] = None
+)
-case class OrchMethodConfigurationName(
- namespace: String,
- name: String)
+case class OrchMethodConfigurationName(namespace: String, name: String)
object OrchMethodConfigurationName {
- def apply(mcs:MethodConfigurationShort) =
+ def apply(mcs: MethodConfigurationShort) =
new OrchMethodConfigurationName(mcs.namespace, mcs.name)
def apply(mcs: AgoraConfigurationShort) =
new OrchMethodConfigurationName(mcs.namespace, mcs.name)
}
-case class MethodConfigurationCopy(
- methodRepoNamespace: Option[String] = None,
- methodRepoName: Option[String] = None,
- methodRepoSnapshotId: Option[Int] = None,
- destination: Option[MethodConfigurationId] = None)
-
-case class MethodConfigurationPublish(
- methodRepoNamespace: Option[String] = None,
- methodRepoName: Option[String] = None,
- source: Option[MethodConfigurationId] = None)
-
-case class CopyConfigurationIngest(
- configurationNamespace: Option[String],
- configurationName: Option[String],
- configurationSnapshotId: Option[Int],
- destinationNamespace: Option[String],
- destinationName: Option[String])
-
-case class PublishConfigurationIngest(
- configurationNamespace: Option[String],
- configurationName: Option[String],
- sourceNamespace: Option[String],
- sourceName: Option[String])
-
-case class OrchSubmissionRequest(
- methodConfigurationNamespace: Option[String],
- methodConfigurationName: Option[String],
- entityType: Option[String],
- entityName: Option[String],
- expression: Option[String],
- useCallCache: Option[Boolean],
- deleteIntermediateOutputFiles: Option[Boolean],
- useReferenceDisks: Option[Boolean],
- memoryRetryMultiplier: Option[Double],
- userComment: Option[String],
- workflowFailureMode: Option[String])
-
-case class RawlsGroupMemberList(
- userEmails: Option[Seq[String]] = None,
- subGroupEmails: Option[Seq[String]] = None,
- userSubjectIds: Option[Seq[String]] = None,
- subGroupNames: Option[Seq[String]] = None)
+case class MethodConfigurationCopy(methodRepoNamespace: Option[String] = None,
+ methodRepoName: Option[String] = None,
+ methodRepoSnapshotId: Option[Int] = None,
+ destination: Option[MethodConfigurationId] = None
+)
+
+case class MethodConfigurationPublish(methodRepoNamespace: Option[String] = None,
+ methodRepoName: Option[String] = None,
+ source: Option[MethodConfigurationId] = None
+)
+
+case class CopyConfigurationIngest(configurationNamespace: Option[String],
+ configurationName: Option[String],
+ configurationSnapshotId: Option[Int],
+ destinationNamespace: Option[String],
+ destinationName: Option[String]
+)
+
+case class PublishConfigurationIngest(configurationNamespace: Option[String],
+ configurationName: Option[String],
+ sourceNamespace: Option[String],
+ sourceName: Option[String]
+)
+
+case class OrchSubmissionRequest(methodConfigurationNamespace: Option[String],
+ methodConfigurationName: Option[String],
+ entityType: Option[String],
+ entityName: Option[String],
+ expression: Option[String],
+ useCallCache: Option[Boolean],
+ deleteIntermediateOutputFiles: Option[Boolean],
+ useReferenceDisks: Option[Boolean],
+ memoryRetryMultiplier: Option[Double],
+ userComment: Option[String],
+ workflowFailureMode: Option[String]
+)
+
+case class RawlsGroupMemberList(userEmails: Option[Seq[String]] = None,
+ subGroupEmails: Option[Seq[String]] = None,
+ userSubjectIds: Option[Seq[String]] = None,
+ subGroupNames: Option[Seq[String]] = None
+)
case class WorkspaceStorageCostEstimate(estimate: String, lastUpdated: Option[DateTime])
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/model/package.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/model/package.scala
index b748bc965..4b0d2fdce 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/model/package.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/model/package.scala
@@ -17,8 +17,7 @@ package object model {
Rejection handler: if the response from the rejection is not already json, make it json.
*/
implicit val defaultErrorReportRejectionHandler: RejectionHandler = RejectionHandler.default.mapRejectionResponse {
- case resp@HttpResponse(statusCode, _, ent: HttpEntity.Strict, _) => {
-
+ case resp @ HttpResponse(statusCode, _, ent: HttpEntity.Strict, _) =>
// since all Akka default rejection responses are Strict this will handle all rejections
val entityString = ent.data.utf8String
Try(entityString.parseJson) match {
@@ -27,9 +26,10 @@ package object model {
case Failure(_) =>
// N.B. this handler previously manually escaped double quotes in the entityString. We don't need to do that,
// since the .toJson below handles escaping internally.
- resp.withEntity(HttpEntity(ContentTypes.`application/json`, ErrorReport(statusCode, entityString).toJson.prettyPrint))
+ resp.withEntity(
+ HttpEntity(ContentTypes.`application/json`, ErrorReport(statusCode, entityString).toJson.prettyPrint)
+ )
}
- }
}
/*
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/AgoraPermissionService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/AgoraPermissionService.scala
index 01c8f5533..bb1d0af15 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/AgoraPermissionService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/AgoraPermissionService.scala
@@ -6,7 +6,12 @@ import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.Application
import org.broadinstitute.dsde.firecloud.dataaccess.AgoraDAO
import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.ACLNames._
-import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{AgoraPermission, EntityAccessControlAgora, FireCloudPermission, MethodAclPair}
+import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{
+ AgoraPermission,
+ EntityAccessControlAgora,
+ FireCloudPermission,
+ MethodAclPair
+}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.{RequestCompleteWithErrorReport, UserInfo}
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
@@ -18,52 +23,50 @@ import scala.concurrent.{ExecutionContext, Future}
object AgoraPermissionService {
// convenience method to translate a FireCloudPermission object to an AgoraPermission object
- def toAgoraPermission(fireCloudPermission: FireCloudPermission):AgoraPermission = {
+ def toAgoraPermission(fireCloudPermission: FireCloudPermission): AgoraPermission =
AgoraPermission(Some(fireCloudPermission.user), Some(toAgoraRoles(fireCloudPermission.role)))
- }
// convenience method to translate an AgoraPermission to a FireCloudPermission object
- def toFireCloudPermission(agoraPermission: AgoraPermission):FireCloudPermission = {
+ def toFireCloudPermission(agoraPermission: AgoraPermission): FireCloudPermission =
// if the Agora permission has a None/empty/whitespace user, the following will throw
// an IllegalArgumentException trying to create the FireCloudPermission. That exception
// will be caught elsewhere.
FireCloudPermission(agoraPermission.user.getOrElse(""), toFireCloudRole(agoraPermission.roles))
- }
// translation between a FireCloud role and a list of Agora roles
- def toAgoraRoles(fireCloudRole:String) = {
+ def toAgoraRoles(fireCloudRole: String) =
fireCloudRole match {
case NoAccess => ListNoAccess
- case Reader => ListReader
- case Owner => ListOwner // Could use "All" instead but this is more precise
- case _ => ListNoAccess
+ case Reader => ListReader
+ case Owner => ListOwner // Could use "All" instead but this is more precise
+ case _ => ListNoAccess
}
- }
// translation between a list of Agora roles and a FireCloud role
- def toFireCloudRole(agoraRoles:Option[List[String]]) = {
+ def toFireCloudRole(agoraRoles: Option[List[String]]) =
agoraRoles match {
case None => NoAccess
- case Some(r) => {
+ case Some(r) =>
r.sorted match {
case ListNoAccess => NoAccess
- case ListReader => Reader
- case ListOwner => Owner
- case ListAll => Owner
- case _ => NoAccess
+ case ListReader => Reader
+ case ListOwner => Owner
+ case ListAll => Owner
+ case _ => NoAccess
}
- }
}
- }
def constructor(app: Application)(userInfo: UserInfo)(implicit executionContext: ExecutionContext) =
new AgoraPermissionService(userInfo, app.agoraDAO)
}
-class AgoraPermissionService(userInfo: UserInfo, val agoraDAO: AgoraDAO)(implicit val executionContext: ExecutionContext) extends LazyLogging with SprayJsonSupport {
+class AgoraPermissionService(userInfo: UserInfo, val agoraDAO: AgoraDAO)(implicit
+ val executionContext: ExecutionContext
+) extends LazyLogging
+ with SprayJsonSupport {
- def getAgoraPermission(url: String): Future[PerRequestMessage] = {
+ def getAgoraPermission(url: String): Future[PerRequestMessage] =
agoraDAO.getPermission(url)(userInfo).map { agoraPermissions =>
try {
val fireCloudPermissions = agoraPermissions.map(_.toFireCloudPermission)
@@ -71,15 +74,16 @@ class AgoraPermissionService(userInfo: UserInfo, val agoraDAO: AgoraDAO)(implici
} catch {
// TODO: more specific and graceful error-handling
case e: Exception =>
- RequestCompleteWithErrorReport(InternalServerError, "Failed to interpret methods " +
- "server response: " + e.getMessage)
+ RequestCompleteWithErrorReport(InternalServerError,
+ "Failed to interpret methods " +
+ "server response: " + e.getMessage
+ )
}
- } recoverWith {
- case e: Throwable => Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
+ } recoverWith { case e: Throwable =>
+ Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
}
- }
- def createAgoraPermission(url: String, agoraPermissions: List[AgoraPermission]): Future[PerRequestMessage] = {
+ def createAgoraPermission(url: String, agoraPermissions: List[AgoraPermission]): Future[PerRequestMessage] =
agoraDAO.createPermission(url, agoraPermissions)(userInfo).map { agoraPermissions =>
try {
val fireCloudPermissions = agoraPermissions.map(_.toFireCloudPermission)
@@ -87,28 +91,34 @@ class AgoraPermissionService(userInfo: UserInfo, val agoraDAO: AgoraDAO)(implici
} catch {
// TODO: more specific and graceful error-handling
case e: Exception =>
- RequestCompleteWithErrorReport(InternalServerError, "Failed to interpret methods " +
- "server response: " + e.getMessage)
+ RequestCompleteWithErrorReport(InternalServerError,
+ "Failed to interpret methods " +
+ "server response: " + e.getMessage
+ )
}
- } recoverWith {
- case e: Throwable => Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
+ } recoverWith { case e: Throwable =>
+ Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
}
- }
- def batchInsertAgoraPermissions(inputs: List[EntityAccessControlAgora]): Future[PerRequestMessage] = {
- agoraDAO.batchCreatePermissions(inputs)(userInfo).map { agoraResponse =>
- try {
- val fcResponse = agoraResponse.map { eaca =>
- val mrm = MethodRepoMethod(eaca.entity.namespace.get, eaca.entity.name.get, eaca.entity.snapshotId.get)
- MethodAclPair(mrm, eaca.acls.map(_.toFireCloudPermission), eaca.message)
+ def batchInsertAgoraPermissions(inputs: List[EntityAccessControlAgora]): Future[PerRequestMessage] =
+ agoraDAO
+ .batchCreatePermissions(inputs)(userInfo)
+ .map { agoraResponse =>
+ try {
+ val fcResponse = agoraResponse.map { eaca =>
+ val mrm = MethodRepoMethod(eaca.entity.namespace.get, eaca.entity.name.get, eaca.entity.snapshotId.get)
+ MethodAclPair(mrm, eaca.acls.map(_.toFireCloudPermission), eaca.message)
+ }
+ RequestComplete(OK, fcResponse)
+ } catch {
+ case e: Exception =>
+ RequestCompleteWithErrorReport(InternalServerError,
+ "Failed to interpret methods " +
+ "server response: " + e.getMessage
+ )
}
- RequestComplete(OK, fcResponse)
- } catch {
- case e: Exception => RequestCompleteWithErrorReport(InternalServerError, "Failed to interpret methods " +
- "server response: " + e.getMessage)
}
- }.recoverWith {
- case e: Throwable => Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
- }
- }
+ .recoverWith { case e: Throwable =>
+ Future(RequestCompleteWithErrorReport(InternalServerError, e.getMessage))
+ }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/AttributeSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/AttributeSupport.scala
index 7279c7aa3..d3c9b3b7b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/AttributeSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/AttributeSupport.scala
@@ -8,24 +8,29 @@ import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations._
* Created by putnam on 11/1/16.
*/
trait AttributeSupport {
+
/**
* given a set of existing attributes and a set of new attributes, calculate the attribute operations
* that need to be performed
*/
- def generateAttributeOperations(existingAttrs: AttributeMap, newAttrs: AttributeMap, attributeFilter: AttributeName => Boolean): Seq[AttributeUpdateOperation] = {
+ def generateAttributeOperations(existingAttrs: AttributeMap,
+ newAttrs: AttributeMap,
+ attributeFilter: AttributeName => Boolean
+ ): Seq[AttributeUpdateOperation] = {
val oldKeys = existingAttrs.keySet.filter(attributeFilter)
val newFields = newAttrs.filter { case (name: AttributeName, _) => attributeFilter(name) }
// remove any attributes that currently exist on the workspace, but are not in the user's packet
// for any array attributes, we remove them and recreate them entirely. Add the array attrs.
- val keysToRemove: Set[AttributeName] = oldKeys.diff(newFields.keySet) ++ newFields.filter( _._2.isInstanceOf[AttributeList[_]] ).keySet
+ val keysToRemove: Set[AttributeName] =
+ oldKeys.diff(newFields.keySet) ++ newFields.filter(_._2.isInstanceOf[AttributeList[_]]).keySet
val removeOperations = keysToRemove.map(RemoveAttribute).toSeq
val updateOperations = newFields.toSeq flatMap {
- case (key, value:AttributeValue) => Seq(AddUpdateAttribute(key, value))
- case (key, value:AttributeEntityReference) => Seq(AddUpdateAttribute(key, value))
+ case (key, value: AttributeValue) => Seq(AddUpdateAttribute(key, value))
+ case (key, value: AttributeEntityReference) => Seq(AddUpdateAttribute(key, value))
- case (key, value:AttributeList[Attribute @unchecked]) => value.list.map(x => AddListMember(key, x))
+ case (key, value: AttributeList[Attribute @unchecked]) => value.list.map(x => AddListMember(key, x))
}
// handle removals before upserts
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupport.scala
index 8822cd86d..2b1fa7266 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupport.scala
@@ -3,7 +3,7 @@ package org.broadinstitute.dsde.firecloud.service
import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.{FireCloudConfig, FireCloudException}
import org.broadinstitute.dsde.firecloud.dataaccess.OntologyDAO
-import org.broadinstitute.dsde.firecloud.model.{ConsentCodes}
+import org.broadinstitute.dsde.firecloud.model.ConsentCodes
import org.broadinstitute.dsde.firecloud.model.DataUse._
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
import org.broadinstitute.dsde.rawls.model.WorkspaceJsonSupport.AttributeNameFormat
@@ -13,7 +13,7 @@ import spray.json._
import scala.util.Try
-case class UseRestriction(structured: Map[AttributeName, Attribute], display: Map[AttributeName, Attribute])
+case class UseRestriction(structured: Map[AttributeName, Attribute], display: Map[AttributeName, Attribute])
trait DataUseRestrictionSupport extends LazyLogging {
@@ -39,20 +39,22 @@ trait DataUseRestrictionSupport extends LazyLogging {
* @param attributes The Attributes
* @return A structured data use restriction Attribute Map
*/
- def transformStructuredUseRestrictionAttribute(attributes: Map[AttributeName, Attribute]): Map[AttributeName, Attribute] = {
+ def transformStructuredUseRestrictionAttribute(
+ attributes: Map[AttributeName, Attribute]
+ ): Map[AttributeName, Attribute] = {
implicit val impAttributeFormat: AttributeFormat with PlainArrayAttributeListSerializer =
new AttributeFormat with PlainArrayAttributeListSerializer
attributes match {
case x if x.isEmpty => Map.empty[AttributeName, Attribute]
case existingAttrs =>
-
val existingKeyNames = existingAttrs.keys.map(_.name).toSeq
// Missing boolean codes default to false
- val booleanAttrs: Map[AttributeName, Attribute] = ((ConsentCodes.booleanCodes ++ ConsentCodes.genderCodes) diff existingKeyNames).map { code =>
- AttributeName.withDefaultNS(code) -> AttributeBoolean(false)
- }.toMap
+ val booleanAttrs: Map[AttributeName, Attribute] =
+ ((ConsentCodes.booleanCodes ++ ConsentCodes.genderCodes) diff existingKeyNames).map { code =>
+ AttributeName.withDefaultNS(code) -> AttributeBoolean(false)
+ }.toMap
// Missing list codes default to empty lists
val listAttrs: Map[AttributeName, Attribute] = (Seq(ConsentCodes.DS) diff existingKeyNames).map { code =>
@@ -64,35 +66,43 @@ trait DataUseRestrictionSupport extends LazyLogging {
}
}
- def generateStructuredAndDisplayAttributes(workspace: WorkspaceDetails, ontologyDAO: OntologyDAO): UseRestriction = {
- getDataUseAttributes(workspace) match {
- case None => UseRestriction(Map.empty[AttributeName, Attribute],Map.empty[AttributeName, Attribute])
- case Some(request) => {
+ def generateStructuredAndDisplayAttributes(workspace: WorkspaceDetails, ontologyDAO: OntologyDAO): UseRestriction =
+ getDataUseAttributes(workspace) match {
+ case None => UseRestriction(Map.empty[AttributeName, Attribute], Map.empty[AttributeName, Attribute])
+ case Some(request) =>
val consentMap = generateUseRestrictionBooleanMap(request)
- val structuredAttribute = if (workspace.attributes.getOrElse(Map.empty).isEmpty) Map.empty[AttributeName, Attribute] else transformStructuredUseRestrictionAttribute(consentMap ++ generateUseRestrictionDSStructuredMap(request))
- val displayAttribute = transformUseRestrictionDisplayAttribute(consentMap ++ generateUseRestrictionDSDisplayMap(request), ontologyDAO)
+ val structuredAttribute =
+ if (workspace.attributes.getOrElse(Map.empty).isEmpty) Map.empty[AttributeName, Attribute]
+ else transformStructuredUseRestrictionAttribute(consentMap ++ generateUseRestrictionDSStructuredMap(request))
+ val displayAttribute = transformUseRestrictionDisplayAttribute(
+ consentMap ++ generateUseRestrictionDSDisplayMap(request),
+ ontologyDAO
+ )
UseRestriction(structured = structuredAttribute, display = displayAttribute)
- }
}
- }
-
- def generateStructuredUseRestrictionAttribute(request: StructuredDataRequest, ontologyDAO: OntologyDAO): Map[String, JsValue] = {
+ def generateStructuredUseRestrictionAttribute(request: StructuredDataRequest,
+ ontologyDAO: OntologyDAO
+ ): Map[String, JsValue] =
generateStructuredDataResponse(request, ontologyDAO).formatWithPrefix()
- }
- def generateStructuredDataResponse(request: StructuredDataRequest, ontologyDAO: OntologyDAO): StructuredDataResponse = {
+ def generateStructuredDataResponse(request: StructuredDataRequest,
+ ontologyDAO: OntologyDAO
+ ): StructuredDataResponse = {
val diseaseCodesArray = getDiseaseNames(request.diseaseUseRequired, ontologyDAO)
val booleanConsentMap = generateUseRestrictionBooleanMap(request)
val diseaseSpecificMap = generateUseRestrictionDSStructuredMap(request)
// convert to array of consent codes
val consentCodes = booleanConsentMap.filter(_._2.value).map(_._1.name).toArray ++ diseaseCodesArray
- StructuredDataResponse(consentCodes, FireCloudConfig.Duos.dulvn, request.prefix.getOrElse(""), booleanConsentMap ++ diseaseSpecificMap)
+ StructuredDataResponse(consentCodes,
+ FireCloudConfig.Duos.dulvn,
+ request.prefix.getOrElse(""),
+ booleanConsentMap ++ diseaseSpecificMap
+ )
}
-
- def generateUseRestrictionBooleanMap(request: StructuredDataRequest): Map[AttributeName, AttributeBoolean] = {
+ def generateUseRestrictionBooleanMap(request: StructuredDataRequest): Map[AttributeName, AttributeBoolean] =
Map(
AttributeName.withDefaultNS(ConsentCodes.GRU) -> AttributeBoolean(request.generalResearchUse),
AttributeName.withDefaultNS(ConsentCodes.HMB) -> AttributeBoolean(request.healthMedicalBiomedicalUseRequired),
@@ -102,16 +112,22 @@ trait DataUseRestrictionSupport extends LazyLogging {
AttributeName.withDefaultNS(ConsentCodes.NAGR) -> AttributeBoolean(request.aggregateLevelDataProhibited),
AttributeName.withDefaultNS(ConsentCodes.NCTRL) -> AttributeBoolean(request.controlsUseProhibited),
AttributeName.withDefaultNS(ConsentCodes.RSPD) -> AttributeBoolean(request.pediatricResearchRequired),
- AttributeName.withDefaultNS(ConsentCodes.IRB) -> AttributeBoolean(request.irbRequired)) ++ getGenderCodeMap(request.genderUseRequired)
- }
+ AttributeName.withDefaultNS(ConsentCodes.IRB) -> AttributeBoolean(request.irbRequired)
+ ) ++ getGenderCodeMap(request.genderUseRequired)
- def generateUseRestrictionDSStructuredMap(request: StructuredDataRequest): Map[AttributeName, Attribute] = {
- Map(AttributeName.withDefaultNS(ConsentCodes.DS) -> AttributeValueList(request.diseaseUseRequired.toIndexedSeq.map(DiseaseOntologyNodeId(_).numericId).map(AttributeNumber(_))))
- }
+ def generateUseRestrictionDSStructuredMap(request: StructuredDataRequest): Map[AttributeName, Attribute] =
+ Map(
+ AttributeName.withDefaultNS(ConsentCodes.DS) -> AttributeValueList(
+ request.diseaseUseRequired.toIndexedSeq.map(DiseaseOntologyNodeId(_).numericId).map(AttributeNumber(_))
+ )
+ )
- def generateUseRestrictionDSDisplayMap(request: StructuredDataRequest): Map[AttributeName, Attribute] = {
- Map(AttributeName.withDefaultNS(ConsentCodes.DSURL) -> AttributeValueList(request.diseaseUseRequired.toIndexedSeq.map(AttributeString(_))))
- }
+ def generateUseRestrictionDSDisplayMap(request: StructuredDataRequest): Map[AttributeName, Attribute] =
+ Map(
+ AttributeName.withDefaultNS(ConsentCodes.DSURL) -> AttributeValueList(
+ request.diseaseUseRequired.toIndexedSeq.map(AttributeString(_))
+ )
+ )
/**
* Create a display-friendly version of the structured data use restriction in the form of a
@@ -120,18 +136,20 @@ trait DataUseRestrictionSupport extends LazyLogging {
* @param attributes The Attributes
* @return An Attribute Map representing a data use display
*/
- def transformUseRestrictionDisplayAttribute(attributes: Map[AttributeName, Attribute], ontologyDAO: OntologyDAO): Map[AttributeName, Attribute] = {
+ def transformUseRestrictionDisplayAttribute(attributes: Map[AttributeName, Attribute],
+ ontologyDAO: OntologyDAO
+ ): Map[AttributeName, Attribute] = {
- val booleanCodes:Seq[String] = attributes.collect {
- case (attr: AttributeName, AttributeBoolean(true)) => attr.name
+ val booleanCodes: Seq[String] = attributes.collect { case (attr: AttributeName, AttributeBoolean(true)) =>
+ attr.name
}.toSeq
-
- val dsLabels:Seq[String] = (attributes.get(AttributeName.withDefaultNS(ConsentCodes.DSURL)) collect {
- case value: AttributeValueList => value.list.collect {
- case a: AttributeString => a.value
- }
- }).getOrElse(Seq.empty[String])
+ val dsLabels: Seq[String] =
+ (attributes.get(AttributeName.withDefaultNS(ConsentCodes.DSURL)) collect { case value: AttributeValueList =>
+ value.list.collect { case a: AttributeString =>
+ a.value
+ }
+ }).getOrElse(Seq.empty[String])
val diseaseDisplayNames = getDiseaseNames(dsLabels.toArray, ontologyDAO)
@@ -143,39 +161,42 @@ trait DataUseRestrictionSupport extends LazyLogging {
Map.empty[AttributeName, Attribute]
}
- def replaceDataUseAttributes(existing: AttributeMap, preferred: AttributeMap): AttributeMap = {
+ def replaceDataUseAttributes(existing: AttributeMap, preferred: AttributeMap): AttributeMap =
// delete pre-existing DU codes, then add the DU codes from ORSP
(existing -
structuredUseRestrictionAttributeName -
consentCodesAttributeName --
ConsentCodes.allPreviousDurFieldNames.map(AttributeName.withLibraryNS)) ++ preferred
- }
- private def getDiseaseNames(diseaseCodes: Array[String], ontologyDAO: OntologyDAO): Array[String] = {
- diseaseCodes.map { nodeid =>
+ private def getDiseaseNames(diseaseCodes: Array[String], ontologyDAO: OntologyDAO): Array[String] =
+ diseaseCodes.map { nodeid =>
ontologyDAO.search(nodeid) match {
case termResource :: Nil => ConsentCodes.DS + ":" + termResource.label
- case _ => throw new FireCloudException(s"DS code $nodeid did not match any diseases.")
+ case _ => throw new FireCloudException(s"DS code $nodeid did not match any diseases.")
}
}
- }
- private def getGenderCodeMap(rsg: String): Map[AttributeName, AttributeBoolean] = {
+ private def getGenderCodeMap(rsg: String): Map[AttributeName, AttributeBoolean] =
rsg.toLowerCase match {
case "female" =>
- Map(AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(true),
+ Map(
+ AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(true),
AttributeName.withDefaultNS(ConsentCodes.RSFM) -> AttributeBoolean(true),
- AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(false))
+ AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(false)
+ )
case "male" =>
- Map(AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(true),
+ Map(
+ AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(true),
AttributeName.withDefaultNS(ConsentCodes.RSFM) -> AttributeBoolean(false),
- AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(true))
+ AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(true)
+ )
case _ =>
- Map(AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(false),
+ Map(
+ AttributeName.withDefaultNS(ConsentCodes.RSG) -> AttributeBoolean(false),
AttributeName.withDefaultNS(ConsentCodes.RSFM) -> AttributeBoolean(false),
- AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(false))
+ AttributeName.withDefaultNS(ConsentCodes.RSM) -> AttributeBoolean(false)
+ )
}
- }
// TODO: this method needs to respect attribute namespaces: see GAWB-3173
private def getDataUseAttributes(workspace: WorkspaceDetails): Option[StructuredDataRequest] = {
@@ -183,55 +204,56 @@ trait DataUseRestrictionSupport extends LazyLogging {
case (attr, value) if ConsentCodes.duRestrictionFieldNames.contains(attr.name) => (attr.name, value)
}
- def getBooleanPayloadValues(consentCode: String): Boolean = {
+ def getBooleanPayloadValues(consentCode: String): Boolean =
dataUseAttributes.get(consentCode) match {
case Some(att: AttributeBoolean) => att.value
- case _ => false
+ case _ => false
}
- }
- def getDiseaseArray: Array[String] = {
+ def getDiseaseArray: Array[String] =
dataUseAttributes.get(ConsentCodes.DSURL) match {
- case Some(attList: AttributeValueList) => {
- attList.list.collect {
- case a: AttributeString => Try(DiseaseOntologyNodeId(a.value)).toOption.map(_.uri.toString)
- }.flatten.toArray
- }
+ case Some(attList: AttributeValueList) =>
+ attList.list
+ .collect { case a: AttributeString =>
+ Try(DiseaseOntologyNodeId(a.value)).toOption.map(_.uri.toString)
+ }
+ .flatten
+ .toArray
case _ => Array.empty
}
- }
- def getGenderString: String = {
+ def getGenderString: String =
dataUseAttributes.get(ConsentCodes.RSG) match {
case Some(att: AttributeString) => att.value
- case _ => "None"
+ case _ => "None"
}
- }
- def getNagr: Boolean = {
+ def getNagr: Boolean =
dataUseAttributes.get(ConsentCodes.NAGR) match {
case Some(att: AttributeString) if att.value.toLowerCase == "yes" => true
- case Some(att: AttributeBoolean) => att.value
- case _ => false
+ case Some(att: AttributeBoolean) => att.value
+ case _ => false
}
- }
if (dataUseAttributes.isEmpty)
None
else
- Some(StructuredDataRequest(
- generalResearchUse = getBooleanPayloadValues(ConsentCodes.GRU),
- healthMedicalBiomedicalUseRequired = getBooleanPayloadValues(ConsentCodes.HMB),
- diseaseUseRequired = getDiseaseArray,
- commercialUseProhibited = getBooleanPayloadValues(ConsentCodes.NCU),
- forProfitUseProhibited = getBooleanPayloadValues(ConsentCodes.NPU),
- methodsResearchProhibited = getBooleanPayloadValues(ConsentCodes.NMDS),
- aggregateLevelDataProhibited = getNagr,
- controlsUseProhibited = getBooleanPayloadValues(ConsentCodes.NCTRL),
- genderUseRequired = getGenderString,
- pediatricResearchRequired = getBooleanPayloadValues(ConsentCodes.RSPD),
- irbRequired = getBooleanPayloadValues(ConsentCodes.IRB),
- prefix = Some(AttributeName.libraryNamespace + AttributeName.delimiter)))
+ Some(
+ StructuredDataRequest(
+ generalResearchUse = getBooleanPayloadValues(ConsentCodes.GRU),
+ healthMedicalBiomedicalUseRequired = getBooleanPayloadValues(ConsentCodes.HMB),
+ diseaseUseRequired = getDiseaseArray,
+ commercialUseProhibited = getBooleanPayloadValues(ConsentCodes.NCU),
+ forProfitUseProhibited = getBooleanPayloadValues(ConsentCodes.NPU),
+ methodsResearchProhibited = getBooleanPayloadValues(ConsentCodes.NMDS),
+ aggregateLevelDataProhibited = getNagr,
+ controlsUseProhibited = getBooleanPayloadValues(ConsentCodes.NCTRL),
+ genderUseRequired = getGenderString,
+ pediatricResearchRequired = getBooleanPayloadValues(ConsentCodes.RSPD),
+ irbRequired = getBooleanPayloadValues(ConsentCodes.IRB),
+ prefix = Some(AttributeName.libraryNamespace + AttributeName.delimiter)
+ )
+ )
}
}
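
Aside: the getGenderCodeMap block reformatted above folds a free-text value into three boolean consent flags. A minimal, self-contained sketch of that pattern, with plain String keys standing in for the rawls AttributeName/AttributeBoolean types and the RSG/RSFM/RSM keys used purely as illustrative stand-ins for the ConsentCodes constants:

object GenderCodeSketch {
  // The "female" / "male" / fallback cases mirror getGenderCodeMap above.
  def genderCodeFlags(rsg: String): Map[String, Boolean] = {
    val (g, fm, m) = rsg.toLowerCase match {
      case "female" => (true, true, false)
      case "male"   => (true, false, true)
      case _        => (false, false, false)
    }
    Map("RSG" -> g, "RSFM" -> fm, "RSM" -> m)
  }
}
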
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeActor.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeActor.scala
index 8acd81af4..e26c73f2f 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeActor.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeActor.scala
@@ -1,7 +1,7 @@
package org.broadinstitute.dsde.firecloud.service
import akka.actor.ActorSystem
-import akka.http.scaladsl.model.headers.{Connection, ContentDispositionTypes, `Content-Disposition`}
+import akka.http.scaladsl.model.headers.{`Content-Disposition`, Connection, ContentDispositionTypes}
import akka.http.scaladsl.model._
import akka.stream._
import akka.stream.scaladsl.{Source => AkkaSource, _}
@@ -23,24 +23,34 @@ import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.language.postfixOps
-case class ExportEntitiesByTypeArguments (
- userInfo: UserInfo,
- workspaceNamespace: String,
- workspaceName: String,
- entityType: String,
- attributeNames: Option[IndexedSeq[String]],
- model: Option[String]
- )
+case class ExportEntitiesByTypeArguments(
+ userInfo: UserInfo,
+ workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ attributeNames: Option[IndexedSeq[String]],
+ model: Option[String]
+)
object ExportEntitiesByTypeActor {
sealed trait ExportEntitiesByTypeMessage
case object ExportEntities extends ExportEntitiesByTypeMessage
- def constructor(app: Application, system: ActorSystem)(exportArgs: ExportEntitiesByTypeArguments)(implicit executionContext: ExecutionContext) = {
- new ExportEntitiesByTypeActor(app.rawlsDAO, app.googleServicesDAO, exportArgs.userInfo, exportArgs.workspaceNamespace,
- exportArgs.workspaceName, exportArgs.entityType, exportArgs.attributeNames, exportArgs.model, system)
- }
+ def constructor(app: Application, system: ActorSystem)(exportArgs: ExportEntitiesByTypeArguments)(implicit
+ executionContext: ExecutionContext
+ ) =
+ new ExportEntitiesByTypeActor(
+ app.rawlsDAO,
+ app.googleServicesDAO,
+ exportArgs.userInfo,
+ exportArgs.workspaceNamespace,
+ exportArgs.workspaceName,
+ exportArgs.entityType,
+ exportArgs.attributeNames,
+ exportArgs.model,
+ system
+ )
}
/**
@@ -60,12 +70,13 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
entityType: String,
attributeNames: Option[IndexedSeq[String]],
model: Option[String],
- argSystem: ActorSystem)
- (implicit protected val executionContext: ExecutionContext) extends LazyLogging {
+ argSystem: ActorSystem
+)(implicit protected val executionContext: ExecutionContext)
+ extends LazyLogging {
implicit val timeout: Timeout = Timeout(1 minute)
implicit val userInfo: UserInfo = argUserInfo
- implicit val system:ActorSystem = argSystem
+ implicit val system: ActorSystem = argSystem
implicit val modelSchema: ModelSchema = model match {
case Some(name) => ModelSchemaRegistry.getModelForSchemaType(SchemaTypes.withName(name))
@@ -113,10 +124,12 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
// verify write permissions
val isPermitted = workspaceResponse.accessLevel match {
case Some(accessLevel) => accessLevel >= WorkspaceAccessLevels.Write
- case None => false
+ case None => false
}
if (!isPermitted)
- throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.Forbidden, s"You must have at least write access."))
+ throw new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.Forbidden, s"You must have at least write access.")
+ )
val now = Instant.now()
val fileNameBase = s"tsvexport/$entityType/$entityType-${now.toEpochMilli}"
@@ -133,9 +146,13 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
}
}.recover {
case f: FireCloudExceptionWithErrorReport => throw f // re-throw as-is
- case t =>
+ case t =>
// wrap in FireCloudExceptionWithErrorReport
- throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, s"FireCloudException: Error generating entity download: ${t.getMessage}"))
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError,
+ s"FireCloudException: Error generating entity download: ${t.getMessage}"
+ )
+ )
}
/**
@@ -144,7 +161,7 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
* @see [[streamEntities()]]
* @see [[streamEntitiesToWorkspaceBucket()]]
*/
- private def entitiesToTempFile(): Future[File] = {
+ private def entitiesToTempFile(): Future[File] =
entityTypeMetadata flatMap { metadata =>
val entityQueries = getEntityQueries(metadata, entityType)
if (modelSchema.isCollectionType(entityType)) {
@@ -154,8 +171,6 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
streamSingularType(entityQueries, metadata, headers)
}
}
- }
-
/*
* Helper Methods
@@ -165,44 +180,56 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
private def handleStandardException(t: Throwable): Future[HttpResponse] = {
val errorReport = t match {
case f: FireCloudExceptionWithErrorReport => f.errorReport
- case _ => ErrorReport(StatusCodes.InternalServerError, s"FireCloudException: Error generating entity download: ${t.getMessage}")
+ case _ =>
+ ErrorReport(StatusCodes.InternalServerError,
+ s"FireCloudException: Error generating entity download: ${t.getMessage}"
+ )
}
- Future(HttpResponse(
- status = errorReport.statusCode.getOrElse(StatusCodes.InternalServerError),
- entity = HttpEntity(ContentTypes.`application/json`, errorReport.toJson.compactPrint)))
+ Future(
+ HttpResponse(
+ status = errorReport.statusCode.getOrElse(StatusCodes.InternalServerError),
+ entity = HttpEntity(ContentTypes.`application/json`, errorReport.toJson.compactPrint)
+ )
+ )
}
- private def streamSingularType(entityQueries: Seq[EntityQuery], metadata: EntityTypeMetadata, entityHeaders: IndexedSeq[String]): Future[File] = {
+ private def streamSingularType(entityQueries: Seq[EntityQuery],
+ metadata: EntityTypeMetadata,
+ entityHeaders: IndexedSeq[String]
+ ): Future[File] = {
val tempEntityFile: File = File.newTemporaryFile(prefix = entityType)
val entitySink: Sink[ByteString, Future[IOResult]] = FileIO.toPath(tempEntityFile.path)
// Run the Split Entity Flow that pipes entities through the two flows to the two file sinks
// Result of this will be a tuple of Future[IOResult] that represents the success or failure of
// streaming content to the file sinks.
- val fileStreamIOResults: Future[IOResult] = {
- RunnableGraph.fromGraph(GraphDSL.createGraph(entitySink) { implicit builder =>
- (eSink) =>
+ val fileStreamIOResults: Future[IOResult] =
+ RunnableGraph
+ .fromGraph(GraphDSL.createGraph(entitySink) { implicit builder => eSink =>
import GraphDSL.Implicits._
// Sources
val querySource: Outlet[EntityQuery] = builder.add(AkkaSource(entityQueries.to(LazyList))).out
- val entityHeaderSource: Outlet[ByteString] = builder.add(AkkaSource.single(ByteString(entityHeaders.mkString("\t") + "\n"))).out
+ val entityHeaderSource: Outlet[ByteString] =
+ builder.add(AkkaSource.single(ByteString(entityHeaders.mkString("\t") + "\n"))).out
// Flows
- val queryFlow: FlowShape[EntityQuery, Seq[Entity]] = builder.add(Flow[EntityQuery].mapAsync(1) { query => getEntitiesFromQuery(query) })
+ val queryFlow: FlowShape[EntityQuery, Seq[Entity]] = builder.add(Flow[EntityQuery].mapAsync(1) { query =>
+ getEntitiesFromQuery(query)
+ })
val splitter: UniformFanOutShape[Seq[Entity], Seq[Entity]] = builder.add(Broadcast[Seq[Entity]](1))
val entityFlow: FlowShape[Seq[Entity], ByteString] = builder.add(Flow[Seq[Entity]].map { entities =>
val rows = TSVFormatter.makeEntityRows(entityType, entities, entityHeaders)
- ByteString(rows.map { _.mkString("\t")}.mkString("\n") + "\n")
+ ByteString(rows.map(_.mkString("\t")).mkString("\n") + "\n")
})
val eConcat: UniformFanInShape[ByteString, ByteString] = builder.add(Concat[ByteString]())
// Graph
- entityHeaderSource ~> eConcat
- querySource ~> queryFlow ~> splitter ~> entityFlow ~> eConcat ~> eSink
+ entityHeaderSource ~> eConcat
+ querySource ~> queryFlow ~> splitter ~> entityFlow ~> eConcat ~> eSink
ClosedShape
- }).run()
- }
+ })
+ .run()
// Check that each file is completed
val fileStreamResult = for {
@@ -211,9 +238,12 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
fileStreamResult map { _ =>
tempEntityFile
- } recover {
- case _:Exception =>
- throw new FireCloudExceptionWithErrorReport(ErrorReport(s"FireCloudException: Unable to stream tsv file to user for $workspaceNamespace:$workspaceName:$entityType"))
+ } recover { case _: Exception =>
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(
+ s"FireCloudException: Unable to stream tsv file to user for $workspaceNamespace:$workspaceName:$entityType"
+ )
+ )
}
}
@@ -227,44 +257,49 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
val membershipSink: Sink[ByteString, Future[IOResult]] = FileIO.toPath(tempMembershipFile.path)
// Headers
- val entityHeaders: IndexedSeq[String] = TSVFormatter.makeEntityHeaders(entityType, metadata.attributeNames, attributeNames)
+ val entityHeaders: IndexedSeq[String] =
+ TSVFormatter.makeEntityHeaders(entityType, metadata.attributeNames, attributeNames)
val membershipHeaders: IndexedSeq[String] = TSVFormatter.makeMembershipHeaders(entityType)
// Run the Split Entity Flow that pipes entities through the two flows to the two file sinks
// Result of this will be a tuple of Future[IOResult] that represents the success or failure of
// streaming content to the file sinks.
- val fileStreamIOResults: (Future[IOResult], Future[IOResult]) = {
- RunnableGraph.fromGraph(GraphDSL.createGraph(entitySink, membershipSink)((_, _)) { implicit builder =>
- (eSink, mSink) =>
+ val fileStreamIOResults: (Future[IOResult], Future[IOResult]) =
+ RunnableGraph
+ .fromGraph(GraphDSL.createGraph(entitySink, membershipSink)((_, _)) { implicit builder => (eSink, mSink) =>
import GraphDSL.Implicits._
// Sources
val querySource: Outlet[EntityQuery] = builder.add(AkkaSource(entityQueries.to(LazyList))).out
- val entityHeaderSource: Outlet[ByteString] = builder.add(AkkaSource.single(ByteString(entityHeaders.mkString("\t") + "\n"))).out
- val membershipHeaderSource: Outlet[ByteString] = builder.add(AkkaSource.single(ByteString(membershipHeaders.mkString("\t") + "\n"))).out
+ val entityHeaderSource: Outlet[ByteString] =
+ builder.add(AkkaSource.single(ByteString(entityHeaders.mkString("\t") + "\n"))).out
+ val membershipHeaderSource: Outlet[ByteString] =
+ builder.add(AkkaSource.single(ByteString(membershipHeaders.mkString("\t") + "\n"))).out
// Flows
- val queryFlow: FlowShape[EntityQuery, Seq[Entity]] = builder.add(Flow[EntityQuery].mapAsync(1) { query => getEntitiesFromQuery(query) })
+ val queryFlow: FlowShape[EntityQuery, Seq[Entity]] = builder.add(Flow[EntityQuery].mapAsync(1) { query =>
+ getEntitiesFromQuery(query)
+ })
val splitter: UniformFanOutShape[Seq[Entity], Seq[Entity]] = builder.add(Broadcast[Seq[Entity]](2))
val entityFlow: FlowShape[Seq[Entity], ByteString] = builder.add(Flow[Seq[Entity]].map { entities =>
val rows = TSVFormatter.makeEntityRows(entityType, entities, entityHeaders)
- ByteString(rows.map { _.mkString("\t")}.mkString("\n") + "\n")
+ ByteString(rows.map(_.mkString("\t")).mkString("\n") + "\n")
})
val membershipFlow: FlowShape[Seq[Entity], ByteString] = builder.add(Flow[Seq[Entity]].map { entities =>
val rows = TSVFormatter.makeMembershipRows(entityType, entities)
- ByteString(rows.map { _.mkString("\t")}.mkString("\n") + "\n")
+ ByteString(rows.map(_.mkString("\t")).mkString("\n") + "\n")
})
val eConcat: UniformFanInShape[ByteString, ByteString] = builder.add(Concat[ByteString]())
val mConcat: UniformFanInShape[ByteString, ByteString] = builder.add(Concat[ByteString]())
// Graph
- entityHeaderSource ~> eConcat
- querySource ~> queryFlow ~> splitter ~> entityFlow ~> eConcat ~> eSink
- membershipHeaderSource ~> mConcat
+ entityHeaderSource ~> eConcat
+ querySource ~> queryFlow ~> splitter ~> entityFlow ~> eConcat ~> eSink
+ membershipHeaderSource ~> mConcat
splitter ~> membershipFlow ~> mConcat ~> mSink
ClosedShape
- }).run()
- }
+ })
+ .run()
// Check that each file is completed
val fileStreamResult = for {
@@ -277,23 +312,30 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
val zipFile: Future[File] = writeFilesToZip(tempEntityFile, tempMembershipFile)
// The output to the user
zipFile
- } recover {
- case _:Exception =>
- throw new FireCloudExceptionWithErrorReport(ErrorReport(s"FireCloudException: Unable to stream zip file to user for $workspaceNamespace:$workspaceName:$entityType"))
+ } recover { case _: Exception =>
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(
+ s"FireCloudException: Unable to stream zip file to user for $workspaceNamespace:$workspaceName:$entityType"
+ )
+ )
}
}
- private def writeFilesToZip(entityTSV: File, membershipTSV: File): Future[File] = {
+ private def writeFilesToZip(entityTSV: File, membershipTSV: File): Future[File] =
try {
val zipFile = File.newTemporaryDirectory()
- membershipTSV.moveTo(zipFile/s"${entityType}_membership.tsv")
- entityTSV.moveTo(zipFile/s"${entityType}_entity.tsv")
+ membershipTSV.moveTo(zipFile / s"${entityType}_membership.tsv")
+ entityTSV.moveTo(zipFile / s"${entityType}_entity.tsv")
zipFile.zip()
- Future { zipFile.zip() }
+ Future(zipFile.zip())
} catch {
- case t: Throwable => Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, s"FireCloudException: Unable to create zip file.", t)))
+ case t: Throwable =>
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, s"FireCloudException: Unable to create zip file.", t)
+ )
+ )
}
- }
private def getEntityQueries(metadata: EntityTypeMetadata, entityType: String): Seq[EntityQuery] = {
val pageSize = FireCloudConfig.Rawls.defaultPageSize
@@ -301,21 +343,30 @@ class ExportEntitiesByTypeActor(rawlsDAO: RawlsDAO,
val sortField = "name" // Anything else and Rawls execution time blows up due to a join (GAWB-2350)
val pages = Math.ceil(filteredCount.toDouble / pageSize.toDouble).toInt
(1 to pages) map { page =>
- EntityQuery(page = page, pageSize = pageSize, sortField = sortField, sortDirection = SortDirections.Ascending, filterTerms = None)
+ EntityQuery(page = page,
+ pageSize = pageSize,
+ sortField = sortField,
+ sortDirection = SortDirections.Ascending,
+ filterTerms = None
+ )
}
}
- private def entityTypeMetadata: Future[EntityTypeMetadata] = {
- rawlsDAO.getEntityTypes(workspaceNamespace, workspaceName).
- map(_.getOrElse(entityType,
- throw new FireCloudExceptionWithErrorReport(ErrorReport(s"Unable to collect entity metadata for $workspaceNamespace:$workspaceName:$entityType")))
+ private def entityTypeMetadata: Future[EntityTypeMetadata] =
+ rawlsDAO
+ .getEntityTypes(workspaceNamespace, workspaceName)
+ .map(
+ _.getOrElse(
+ entityType,
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(s"Unable to collect entity metadata for $workspaceNamespace:$workspaceName:$entityType")
+ )
+ )
)
- }
- private def getEntitiesFromQuery(query: EntityQuery): Future[Seq[Entity]] = {
- rawlsDAO.queryEntitiesOfType(workspaceNamespace, workspaceName, entityType, query) map {
- response => response.results
+ private def getEntitiesFromQuery(query: EntityQuery): Future[Seq[Entity]] =
+ rawlsDAO.queryEntitiesOfType(workspaceNamespace, workspaceName, entityType, query) map { response =>
+ response.results
}
- }
}
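
Aside: the GraphDSL edits above are formatting-only. As a reference for the underlying streaming shape (header bytes first, then every page of entity rows, into a single file sink), here is a minimal sketch using plain combinators instead of GraphDSL; entity pages are faked as Maps, the file and actor-system names are arbitrary, and Akka Streams 2.6+ is assumed:

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.{Concat, FileIO, Flow, Sink, Source}
import akka.util.ByteString
import java.nio.file.Paths
import scala.concurrent.Future

object TsvExportSketch extends App {
  implicit val system: ActorSystem = ActorSystem("tsv-export-sketch")

  val headers = Vector("sample_id", "participant")
  // Each element stands in for one page of entities returned by a paged query.
  val pages = Source(List(Seq(Map("sample_id" -> "s1", "participant" -> "p1"))))

  val headerBytes = Source.single(ByteString(headers.mkString("\t") + "\n"))
  val rowBytes = pages.via(Flow[Seq[Map[String, String]]].map { entities =>
    ByteString(entities.map(e => headers.map(h => e.getOrElse(h, "")).mkString("\t")).mkString("\n") + "\n")
  })
  val fileSink: Sink[ByteString, Future[IOResult]] = FileIO.toPath(Paths.get("sample.tsv"))

  // Headers first, then all row pages, concatenated into one TSV file.
  Source.combine(headerBytes, rowBytes)(Concat(_)).runWith(fileSink)
}
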
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudDirectives.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudDirectives.scala
index fa13c57d4..97c7144d3 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudDirectives.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudDirectives.scala
@@ -1,7 +1,7 @@
package org.broadinstitute.dsde.firecloud.service
import akka.http.scaladsl.client.RequestBuilding
-import akka.http.scaladsl.model.headers.{Authorization, `Content-Type`}
+import akka.http.scaladsl.model.headers.{`Content-Type`, Authorization}
import akka.http.scaladsl.model.{HttpMethod, Uri}
import akka.http.scaladsl.server.{Directives, Route}
import org.broadinstitute.dsde.firecloud.utils.RestJsonClient
@@ -15,7 +15,7 @@ object FireCloudDirectiveUtils {
def toUri(url: String) = url match {
case pattern(theScheme, theHost, thePort, thePath) =>
- val p: Int = Try(thePort.replace(":","").toInt).toOption.getOrElse(0)
+ val p: Int = Try(thePort.replace(":", "").toInt).toOption.getOrElse(0)
Uri.from(scheme = theScheme, port = p, host = theHost, path = thePath)
}
toUri(path).toString
@@ -40,9 +40,8 @@ object FireCloudDirectiveUtils {
trait FireCloudDirectives extends Directives with RequestBuilding with RestJsonClient {
- def passthrough(unencodedPath: String, methods: HttpMethod*): Route = {
+ def passthrough(unencodedPath: String, methods: HttpMethod*): Route =
passthrough(Uri(unencodedPath), methods: _*)
- }
// Danger: it is a common mistake to pass in a URI that omits the query parameters included in the original request to Orch.
// To preserve the query, extract it and attach it to the passthrough URI using `.withQuery(query)`.
@@ -52,17 +51,21 @@ trait FireCloudDirectives extends Directives with RequestBuilding with RestJsonC
def encodeUri(path: String): String = FireCloudDirectiveUtils.encodeUri(path)
- private def generateExternalHttpRequestForMethod(uri: Uri, inMethod: HttpMethod) = {
+ private def generateExternalHttpRequestForMethod(uri: Uri, inMethod: HttpMethod) =
method(inMethod) { requestContext =>
val outgoingRequest = requestContext.request
.withUri(uri)
- .withHeaders(requestContext.request.headers.filter(
- hdr => FireCloudDirectiveUtils.allowedPassthroughHeaders.contains(hdr.lowercaseName())))
- requestContext.complete(unAuthedRequest(outgoingRequest)) //NOTE: This is actually AUTHED because we pass through the Authorization header
+ .withHeaders(
+ requestContext.request.headers.filter(hdr =>
+ FireCloudDirectiveUtils.allowedPassthroughHeaders.contains(hdr.lowercaseName())
+ )
+ )
+ requestContext.complete(
+ unAuthedRequest(outgoingRequest)
+ ) // NOTE: This is actually AUTHED because we pass through the Authorization header
}
- }
def withResourceFileContents(path: String)(innerRoute: String => Route): Route =
- innerRoute( FileUtils.readAllTextFromResource(path) )
+ innerRoute(FileUtils.readAllTextFromResource(path))
}
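
Aside: a small sketch of the passthrough rewrite above, pointing an incoming request at a target URI while preserving its query string and keeping only allow-listed headers. The header names below are illustrative; the real allow-list lives in FireCloudDirectiveUtils.allowedPassthroughHeaders:

object PassthroughSketch {
  import akka.http.scaladsl.model.{HttpRequest, Uri}

  // Illustrative allow-list; lowercase names are compared against header.lowercaseName().
  val allowedPassthroughHeaders: Set[String] = Set("authorization", "x-firecloud-id")

  def rewriteForPassthrough(request: HttpRequest, target: Uri): HttpRequest =
    request
      .withUri(target.withQuery(request.uri.query()))
      .withHeaders(request.headers.filter(h => allowedPassthroughHeaders.contains(h.lowercaseName())))
}
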
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudRequestBuilding.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudRequestBuilding.scala
index 2f1bc2d91..c4a9dadf4 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudRequestBuilding.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/FireCloudRequestBuilding.scala
@@ -9,12 +9,11 @@ import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.headers.{Authorization, HttpCredentials, OAuth2BearerToken, RawHeader}
import akka.http.scaladsl.server.RequestContext
-
trait FireCloudRequestBuilding extends RequestBuilding {
val fireCloudHeader = RawHeader("X-FireCloud-Id", FireCloudConfig.FireCloud.fireCloudId)
- def authHeaders(credentials:Option[HttpCredentials]): HttpRequest => HttpRequest = {
+ def authHeaders(credentials: Option[HttpCredentials]): HttpRequest => HttpRequest =
credentials match {
// if we have authorization credentials, apply them to the outgoing request
case Some(c) => addCredentials(c) ~> addFireCloudCredentials
@@ -22,7 +21,6 @@ trait FireCloudRequestBuilding extends RequestBuilding {
// alternately, we could throw an error here, since we assume some authorization should exist.
case None => (r: HttpRequest) => r ~> addFireCloudCredentials
}
- }
def authHeaders(requestContext: RequestContext): HttpRequest => HttpRequest = {
// inspect headers for a pre-existing Authorization: header
@@ -32,9 +30,8 @@ trait FireCloudRequestBuilding extends RequestBuilding {
authHeaders(authorizationHeader)
}
- def authHeaders(accessToken: WithAccessToken): HttpRequest => HttpRequest = {
+ def authHeaders(accessToken: WithAccessToken): HttpRequest => HttpRequest =
authHeaders(Some(accessToken.accessToken))
- }
// with great power comes great responsibility!
def addAdminCredentials = addCredentials(OAuth2BearerToken(HttpGoogleServicesDAO.getAdminUserAccessToken))
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryService.scala
index 992af3919..5a3f68072 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryService.scala
@@ -19,7 +19,11 @@ import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.{impLibraryBulk
import org.broadinstitute.dsde.firecloud.model.SamResource.UserPolicy
import org.broadinstitute.dsde.rawls.model.WorkspaceJsonSupport.{AttributeNameFormat, WorkspaceDetailsFormat}
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
-import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddListMember, AttributeUpdateOperation, RemoveAttribute}
+import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{
+ AddListMember,
+ AttributeUpdateOperation,
+ RemoveAttribute
+}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
@@ -34,13 +38,18 @@ object LibraryService {
new LibraryService(userInfo, app.rawlsDAO, app.samDAO, app.searchDAO, app.ontologyDAO)
}
-
-class LibraryService (protected val argUserInfo: UserInfo,
- val rawlsDAO: RawlsDAO,
- val samDao: SamDAO,
- val searchDAO: SearchDAO,
- val ontologyDAO: OntologyDAO)
- (implicit protected val executionContext: ExecutionContext) extends LibraryServiceSupport with AttributeSupport with PermissionsSupport with SprayJsonSupport with LazyLogging with WorkspacePublishingSupport {
+class LibraryService(protected val argUserInfo: UserInfo,
+ val rawlsDAO: RawlsDAO,
+ val samDao: SamDAO,
+ val searchDAO: SearchDAO,
+ val ontologyDAO: OntologyDAO
+)(implicit protected val executionContext: ExecutionContext)
+ extends LibraryServiceSupport
+ with AttributeSupport
+ with PermissionsSupport
+ with SprayJsonSupport
+ with LazyLogging
+ with WorkspacePublishingSupport {
lazy val log = LoggerFactory.getLogger(getClass)
@@ -50,51 +59,66 @@ class LibraryService (protected val argUserInfo: UserInfo,
// we need to use the plain-array deserialization.
implicit val impAttributeFormat: AttributeFormat = new AttributeFormat with PlainArrayAttributeListSerializer
- def updateDiscoverableByGroups(ns: String, name: String, newGroups: Seq[String]): Future[PerRequestMessage] = {
- if (newGroups.forall { g => FireCloudConfig.ElasticSearch.discoverGroupNames.contains(g) }) {
+ def updateDiscoverableByGroups(ns: String, name: String, newGroups: Seq[String]): Future[PerRequestMessage] =
+ if (newGroups.forall(g => FireCloudConfig.ElasticSearch.discoverGroupNames.contains(g))) {
rawlsDAO.getWorkspace(ns, name) flatMap { workspaceResponse =>
// this is technically vulnerable to a race condition in which the workspace attributes have changed
// between the time we retrieved them and here, where we update them.
val remove = Seq(RemoveAttribute(discoverableWSAttribute))
val operations = newGroups map (group => AddListMember(discoverableWSAttribute, AttributeString(group)))
- internalPatchWorkspaceAndRepublish(ns, name, remove ++ operations, isPublished(workspaceResponse)) map (RequestComplete(_))
+ internalPatchWorkspaceAndRepublish(ns,
+ name,
+ remove ++ operations,
+ isPublished(workspaceResponse)
+ ) map (RequestComplete(_))
}
} else {
- Future(RequestCompleteWithErrorReport(BadRequest, s"groups must be subset of allowable groups: %s".format(FireCloudConfig.ElasticSearch.discoverGroupNames.toArray.mkString(", "))))
+ Future(
+ RequestCompleteWithErrorReport(BadRequest,
+ s"groups must be subset of allowable groups: %s".format(
+ FireCloudConfig.ElasticSearch.discoverGroupNames.toArray.mkString(", ")
+ )
+ )
+ )
}
- }
- def getDiscoverableByGroups(ns: String, name: String): Future[PerRequestMessage] = {
+ def getDiscoverableByGroups(ns: String, name: String): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(ns, name) map { workspaceResponse =>
val groups = workspaceResponse.workspace.attributes.getOrElse(Map.empty).get(discoverableWSAttribute) match {
- case Some(vals:AttributeValueList) => vals.list.collect{
- case s:AttributeString => s.value
- }
+ case Some(vals: AttributeValueList) =>
+ vals.list.collect { case s: AttributeString =>
+ s.value
+ }
case _ => List.empty[String]
}
RequestComplete(OK, groups.sortBy(_.toLowerCase))
}
- }
private def isInvalid(attrsJsonString: String): (Boolean, Option[String]) = {
val validationResult = Try(schemaValidate(attrsJsonString))
validationResult match {
case Failure(ve: ValidationException) => (true, Some(getSchemaValidationMessages(ve).mkString("; ")))
- case Failure(e) => (true, Some(e.getMessage))
- case Success(x) => (false, None)
+ case Failure(e) => (true, Some(e.getMessage))
+ case Success(x) => (false, None)
}
}
- def updateLibraryMetadata(ns: String, name: String, attrsJsonString: String, validate: Boolean): Future[PerRequestMessage] = {
+ def updateLibraryMetadata(ns: String,
+ name: String,
+ attrsJsonString: String,
+ validate: Boolean
+ ): Future[PerRequestMessage] =
// we accept a string here, not a JsValue so we can most granularly handle json parsing
Try(attrsJsonString.parseJson.asJsObject.convertTo[AttributeMap]) match {
- case Failure(ex:ParsingException) => Future(RequestCompleteWithErrorReport(BadRequest, "Invalid json supplied", ex))
+ case Failure(ex: ParsingException) =>
+ Future(RequestCompleteWithErrorReport(BadRequest, "Invalid json supplied", ex))
case Failure(e) => Future(RequestCompleteWithErrorReport(BadRequest, BadRequest.defaultMessage, e))
case Success(attrs) =>
val userAttrs = attrs.get(AttributeName.withLibraryNS("dulvn")) match {
- case Some(AttributeNull) | None => attrs ++ Map(AttributeName.withLibraryNS("dulvn") -> AttributeNumber(FireCloudConfig.Duos.dulvn))
- case _ => attrs
+ case Some(AttributeNull) | None =>
+ attrs ++ Map(AttributeName.withLibraryNS("dulvn") -> AttributeNumber(FireCloudConfig.Duos.dulvn))
+ case _ => attrs
}
val (invalid, errorMessage): (Boolean, Option[String]) = isInvalid(attrsJsonString)
rawlsDAO.getWorkspace(ns, name) flatMap { workspaceResponse =>
@@ -105,72 +129,78 @@ class LibraryService (protected val argUserInfo: UserInfo,
// because not all editors can update discoverableByGroups, if the request does not include discoverableByGroups
// or if it is not being changed, don't include it in the update operations (less restrictive permissions will
// be checked by rawls)
- val modDiscoverability = userAttrs.contains(discoverableWSAttribute) && isDiscoverableDifferent(workspaceResponse, userAttrs)
+ val modDiscoverability =
+ userAttrs.contains(discoverableWSAttribute) && isDiscoverableDifferent(workspaceResponse, userAttrs)
val skipAttributes =
if (modDiscoverability)
Seq(publishedFlag)
else
- // if discoverable by groups is not being changed, then skip it (i.e. don't delete from ws)
+ // if discoverable by groups is not being changed, then skip it (i.e. don't delete from ws)
Seq(publishedFlag, discoverableWSAttribute)
// this is technically vulnerable to a race condition in which the workspace attributes have changed
// between the time we retrieved them and here, where we update them.
- val allOperations = generateAttributeOperations(workspaceResponse.workspace.attributes.getOrElse(Map.empty), userAttrs,
- k => k.namespace == AttributeName.libraryNamespace && !skipAttributes.contains(k))
+ val allOperations = generateAttributeOperations(
+ workspaceResponse.workspace.attributes.getOrElse(Map.empty),
+ userAttrs,
+ k => k.namespace == AttributeName.libraryNamespace && !skipAttributes.contains(k)
+ )
internalPatchWorkspaceAndRepublish(ns, name, allOperations, published) map (RequestComplete(_))
}
}
}
- }
- def getLibraryMetadata(ns: String, name: String): Future[PerRequestMessage] = {
+ def getLibraryMetadata(ns: String, name: String): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(ns, name) flatMap { workspaceResponse =>
val allAttrs = workspaceResponse.workspace.attributes.getOrElse(Map.empty)
val libAttrs = allAttrs.filter {
- case ((LibraryService.publishedFlag,v)) => false
- case ((k,v)) if k.namespace == AttributeName.libraryNamespace => true
- case _ => false
+ case ((LibraryService.publishedFlag, v)) => false
+ case ((k, v)) if k.namespace == AttributeName.libraryNamespace => true
+ case _ => false
}
Future(RequestComplete(OK, libAttrs))
}
- }
/*
* Will republish if it is currently in the published state.
*/
- private def internalPatchWorkspaceAndRepublish(ns: String, name: String, allOperations: Seq[AttributeUpdateOperation], isPublished: Boolean): Future[WorkspaceDetails] = {
+ private def internalPatchWorkspaceAndRepublish(ns: String,
+ name: String,
+ allOperations: Seq[AttributeUpdateOperation],
+ isPublished: Boolean
+ ): Future[WorkspaceDetails] =
for {
newws <- rawlsDAO.updateLibraryAttributes(ns, name, allOperations)
_ <- republishDocument(newws, ontologyDAO, searchDAO)
} yield newws
- }
// should only be used to change published state
- def setWorkspaceIsPublished(ns: String, name: String, publishArg: Boolean): Future[PerRequestMessage] = {
+ def setWorkspaceIsPublished(ns: String, name: String, publishArg: Boolean): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(ns, name) flatMap { workspaceResponse =>
val currentPublished = isPublished(workspaceResponse)
// only need to validate metadata if we are actually publishing
- val (invalid, errorMessage) = if (publishArg && !currentPublished)
- isInvalid(workspaceResponse.workspace.attributes.getOrElse(Map.empty).toJson.compactPrint)
- else
- (false, None)
+ val (invalid, errorMessage) =
+ if (publishArg && !currentPublished)
+ isInvalid(workspaceResponse.workspace.attributes.getOrElse(Map.empty).toJson.compactPrint)
+ else
+ (false, None)
if (currentPublished == publishArg)
- // user request would result in no change; just return as noop.
- Future(RequestComplete(NoContent))
+ // user request would result in no change; just return as noop.
+ Future(RequestComplete(NoContent))
else if (invalid)
- // user requested a publish, but metadata is invalid; return error.
- Future(RequestCompleteWithErrorReport(BadRequest, errorMessage.getOrElse(BadRequest.defaultMessage)))
+ // user requested a publish, but metadata is invalid; return error.
+ Future(RequestCompleteWithErrorReport(BadRequest, errorMessage.getOrElse(BadRequest.defaultMessage)))
else {
// user requested a change in published flag, and metadata is valid; make the change.
- setWorkspacePublishedStatus(workspaceResponse.workspace, publishArg, rawlsDAO, ontologyDAO, searchDAO) map { ws =>
- RequestComplete(ws)
+ setWorkspacePublishedStatus(workspaceResponse.workspace, publishArg, rawlsDAO, ontologyDAO, searchDAO) map {
+ ws =>
+ RequestComplete(ws)
}
}
}
- }
- def adminIndexAllWorkspaces(): Future[PerRequestMessage] = {
+ def adminIndexAllWorkspaces(): Future[PerRequestMessage] =
asAdmin {
logger.info("reindex: requesting workspaces from rawls ...")
rawlsDAO.getAllLibraryPublishedWorkspaces flatMap { workspaces: Seq[WorkspaceDetails] =>
@@ -194,9 +224,11 @@ class LibraryService (protected val argUserInfo: UserInfo,
}
}
}
- }
- def searchFor(criteria: LibrarySearchParams, searchMethod:(LibrarySearchParams, Seq[String], Map[String, UserPolicy])=>Future[LibrarySearchResponse]): Future[PerRequestMessage] ={
+ def searchFor(
+ criteria: LibrarySearchParams,
+ searchMethod: (LibrarySearchParams, Seq[String], Map[String, UserPolicy]) => Future[LibrarySearchResponse]
+ ): Future[PerRequestMessage] = {
val workspacePoliciesFuture: Future[Map[String, UserPolicy]] = samDao.listWorkspaceResources map { policyList =>
(policyList map { policy =>
(policy.resourceId.value, policy)
@@ -208,24 +240,20 @@ class LibraryService (protected val argUserInfo: UserInfo,
workspacePolicyMap <- workspacePoliciesFuture
userGroups <- userGroupsFuture
searchResults <- searchMethod(criteria, userGroups, workspacePolicyMap)
- } yield {
- RequestComplete(searchResults)
- }
+ } yield RequestComplete(searchResults)
}
- def findDocuments(criteria: LibrarySearchParams): Future[PerRequestMessage] = {
+ def findDocuments(criteria: LibrarySearchParams): Future[PerRequestMessage] =
searchFor(criteria, searchDAO.findDocuments)
- }
- def suggest(criteria: LibrarySearchParams): Future[PerRequestMessage] = {
+ def suggest(criteria: LibrarySearchParams): Future[PerRequestMessage] =
searchFor(criteria, searchDAO.suggestionsFromAll)
- }
- def populateSuggest(field: String, text: String): Future[PerRequestMessage] = {
- searchDAO.suggestionsForFieldPopulate(field, text) map {RequestComplete(_)} recoverWith {
- case e: FireCloudException => Future(RequestCompleteWithErrorReport(BadRequest, s"suggestions not available for field %s".format(field)))
+ def populateSuggest(field: String, text: String): Future[PerRequestMessage] =
+ searchDAO.suggestionsForFieldPopulate(field, text) map { RequestComplete(_) } recoverWith {
+ case e: FireCloudException =>
+ Future(RequestCompleteWithErrorReport(BadRequest, s"suggestions not available for field %s".format(field)))
}
- }
private def errorMessageFromSearchException(ex: Throwable): String = {
// elasticsearch errors are often nested, try to dig into them safely to find a message
@@ -241,8 +269,8 @@ class LibraryService (protected val argUserInfo: UserInfo,
}
Option(message) match {
- case Some(m:String) => m
- case _ => "Unknown error during search."
+ case Some(m: String) => m
+ case _ => "Unknown error during search."
}
}
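
Aside: updateDiscoverableByGroups above replaces a list-valued attribute by removing it and re-adding each member. A sketch of that remove-then-add pattern with simplified stand-ins for the rawls AttributeUpdateOperations model (the attribute and group names in the example are hypothetical):

object AttributeUpdateSketch {
  // Simplified stand-ins, not the real rawls model classes.
  sealed trait AttributeUpdateOperation
  final case class RemoveAttribute(name: String) extends AttributeUpdateOperation
  final case class AddListMember(name: String, value: String) extends AttributeUpdateOperation

  // Replace a list-valued attribute wholesale: drop it, then re-add every new member.
  def replaceListAttribute(name: String, newValues: Seq[String]): Seq[AttributeUpdateOperation] =
    RemoveAttribute(name) +: newValues.map(AddListMember(name, _))

  // e.g. replaceListAttribute("library:discoverableByGroups", Seq("some_group"))
}
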
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSupport.scala
index ac07de901..59770870a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSupport.scala
@@ -8,7 +8,11 @@ import org.broadinstitute.dsde.firecloud.model.Ontology.TermParent
import org.broadinstitute.dsde.firecloud.model.{ConsentCodes, Document, ElasticSearch, UserInfo, WithAccessToken}
import org.broadinstitute.dsde.firecloud.service.LibraryService.orspIdAttribute
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
-import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddUpdateAttribute, AttributeUpdateOperation, RemoveAttribute}
+import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{
+ AddUpdateAttribute,
+ AttributeUpdateOperation,
+ RemoveAttribute
+}
import org.broadinstitute.dsde.rawls.model._
import org.everit.json.schema.loader.SchemaLoader
import org.everit.json.schema.{Schema, ValidationException}
@@ -29,12 +33,14 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
implicit val userToken: WithAccessToken
- def updatePublishAttribute(value: Boolean): Seq[AttributeUpdateOperation] = {
+ def updatePublishAttribute(value: Boolean): Seq[AttributeUpdateOperation] =
if (value) Seq(AddUpdateAttribute(LibraryService.publishedFlag, AttributeBoolean(true)))
else Seq(RemoveAttribute(LibraryService.publishedFlag))
- }
- def indexableDocuments(workspaces: Seq[WorkspaceDetails], ontologyDAO: OntologyDAO)(implicit userToken: WithAccessToken, ec: ExecutionContext): Future[Seq[Document]] = {
+ def indexableDocuments(workspaces: Seq[WorkspaceDetails], ontologyDAO: OntologyDAO)(implicit
+ userToken: WithAccessToken,
+ ec: ExecutionContext
+ ): Future[Seq[Document]] = {
// find all the ontology nodes in this list of workspaces
val nodes = uniqueWorkspaceStringAttributes(workspaces, AttributeName.withLibraryNS("diseaseOntologyID"))
@@ -52,7 +58,8 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
ws.attributes.getOrElse(Map.empty).get(orspIdAttribute) match {
case Some(_) =>
// if so, remove explicit DU attributes
- val newAttrs = replaceDataUseAttributes(ws.attributes.getOrElse(Map.empty), Map.empty[AttributeName, Attribute])
+ val newAttrs =
+ replaceDataUseAttributes(ws.attributes.getOrElse(Map.empty), Map.empty[AttributeName, Attribute])
ws.copy(attributes = Option(newAttrs))
case _ =>
// this workspace does not have an ORSP id; leave it untouched
@@ -62,24 +69,33 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
Future.successful(annotatedWorkspaces map { w => indexableDocument(w, parentMap, ontologyDAO) })
}
- private def indexableDocument(workspace: WorkspaceDetails, parentCache: Map[String,Seq[TermParent]], ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext): Document = {
- val attrfields_subset = workspace.attributes.getOrElse(Map.empty).filter(_._1.namespace == AttributeName.libraryNamespace)
+ private def indexableDocument(workspace: WorkspaceDetails,
+ parentCache: Map[String, Seq[TermParent]],
+ ontologyDAO: OntologyDAO
+ )(implicit ec: ExecutionContext): Document = {
+ val attrfields_subset =
+ workspace.attributes.getOrElse(Map.empty).filter(_._1.namespace == AttributeName.libraryNamespace)
val attrfields = attrfields_subset map { case (attr, value) =>
attr.name match {
case "discoverableByGroups" => AttributeName.withDefaultNS(ElasticSearch.fieldDiscoverableByGroups) -> value
- case _ => attr -> value
+ case _ => attr -> value
}
}
val idfields = Map(
AttributeName.withDefaultNS("name") -> AttributeString(workspace.name),
AttributeName.withDefaultNS("namespace") -> AttributeString(workspace.namespace),
AttributeName.withDefaultNS("workspaceId") -> AttributeString(workspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(workspace.authorizationDomain.getOrElse(Set.empty).map(group => AttributeString(group.membersGroupName.value)).toSeq)
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ workspace.authorizationDomain
+ .getOrElse(Set.empty)
+ .map(group => AttributeString(group.membersGroupName.value))
+ .toSeq
+ )
)
val tagfields = workspace.attributes.getOrElse(Map.empty).get(AttributeName.withTagsNS()) match {
case Some(t) => Map(AttributeName.withTagsNS() -> t)
- case None => Map()
+ case None => Map()
}
val durAttributeNames = ConsentCodes.allPreviousDurFieldNames.map(AttributeName.withLibraryNS)
@@ -92,7 +108,9 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
case Some(id: AttributeString) =>
val parents = parentCache.get(id.value)
val parentFields = if (parents.isDefined) {
- fields + (AttributeName.withDefaultNS("parents") -> AttributeValueRawJson(parents.get.map(_.toESTermParent).toJson.compactPrint))
+ fields + (AttributeName.withDefaultNS("parents") -> AttributeValueRawJson(
+ parents.get.map(_.toESTermParent).toJson.compactPrint
+ ))
} else {
fields
}
@@ -102,13 +120,17 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
}
def uniqueWorkspaceStringAttributes(workspaces: Seq[WorkspaceDetails], attributeName: AttributeName): Set[String] = {
- val valueSeq:Seq[String] = workspaces.collect {
- case w if w.attributes.getOrElse(Map.empty).contains(attributeName) =>
- w.attributes.getOrElse(Map.empty)(attributeName)
- }.collect {
- case s:AttributeString => s.value
- }
- logger.debug(s"found ${valueSeq.size} workspaces with ${AttributeName.toDelimitedName(attributeName)} string attributes")
+ val valueSeq: Seq[String] = workspaces
+ .collect {
+ case w if w.attributes.getOrElse(Map.empty).contains(attributeName) =>
+ w.attributes.getOrElse(Map.empty)(attributeName)
+ }
+ .collect { case s: AttributeString =>
+ s.value
+ }
+ logger.debug(
+ s"found ${valueSeq.size} workspaces with ${AttributeName.toDelimitedName(attributeName)} string attributes"
+ )
val valueSet = valueSeq.toSet
logger.debug(s"found ${valueSet.size} unique ${AttributeName.toDelimitedName(attributeName)} values")
@@ -118,7 +140,7 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
// wraps the ontologyDAO call, handles Nones/nulls, and returns a [Future[Seq].
// the Seq is populated if the leaf node exists and has parents; Seq is empty otherwise.
- def lookupParentNodes(leafId:String, ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext):Seq[TermParent] = {
+ def lookupParentNodes(leafId: String, ontologyDAO: OntologyDAO)(implicit ec: ExecutionContext): Seq[TermParent] =
Try(ontologyDAO.search(leafId)) match {
case Success(terms) if terms.nonEmpty =>
terms.head.parents.getOrElse(Seq.empty)
@@ -127,7 +149,6 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
logger.warn(s"exception getting term and parents from ontology: ${ex.getMessage}")
Seq.empty[TermParent]
}
- }
def defaultSchema: String = FileUtils.readAllTextFromResource(LibraryService.schemaLocation)
@@ -135,34 +156,35 @@ trait LibraryServiceSupport extends DataUseRestrictionSupport with LazyLogging {
def schemaValidate(data: JsObject): Unit = validateJsonSchema(data.compactPrint, defaultSchema)
def validateJsonSchema(data: String, schemaStr: String): Unit = {
- val rawSchema:JSONObject = new JSONObject(new JSONTokener(schemaStr))
- val schema:Schema = SchemaLoader.load(rawSchema)
+ val rawSchema: JSONObject = new JSONObject(new JSONTokener(schemaStr))
+ val schema: Schema = SchemaLoader.load(rawSchema)
schema.validate(new JSONObject(data))
}
- def getSchemaValidationMessages(ve: ValidationException): Seq[String] = {
+ def getSchemaValidationMessages(ve: ValidationException): Seq[String] =
Seq(ve.getPointerToViolation + ": " + ve.getErrorMessage) ++
(ve.getCausingExceptions.asScala flatMap getSchemaValidationMessages)
- }
- def getEffectiveDiscoverGroups(samDAO: SamDAO)(implicit ec: ExecutionContext, userInfo:UserInfo): Future[Seq[String]] = {
+ def getEffectiveDiscoverGroups(
+ samDAO: SamDAO
+ )(implicit ec: ExecutionContext, userInfo: UserInfo): Future[Seq[String]] =
samDAO.listGroups(userInfo) map { groupMemberships =>
groupMemberships map (_.groupName) intersect FireCloudConfig.ElasticSearch.discoverGroupNames.asScala
}
- }
// this method will determine if the user is making a change to discoverableByGroups
// if the attribute does not exist on the workspace, it is the same as the empty list
def isDiscoverableDifferent(workspaceResponse: WorkspaceResponse, userAttrs: AttributeMap): Boolean = {
- def convert(list: Option[Attribute]): Seq[AttributeValue] = {
+ def convert(list: Option[Attribute]): Seq[AttributeValue] =
list match {
- case Some(x:AttributeValueList) => x.list
- case _ => Seq.empty[AttributeValue]
+ case Some(x: AttributeValueList) => x.list
+ case _ => Seq.empty[AttributeValue]
}
- }
- val current = convert(workspaceResponse.workspace.attributes.getOrElse(Map.empty).get(LibraryService.discoverableWSAttribute))
+ val current = convert(
+ workspaceResponse.workspace.attributes.getOrElse(Map.empty).get(LibraryService.discoverableWSAttribute)
+ )
val newvals = convert(userAttrs.get(LibraryService.discoverableWSAttribute))
current.toSet != newvals.toSet
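
Aside: the validateJsonSchema / getSchemaValidationMessages pair above reads naturally as one routine; a self-contained sketch of that flow using the same everit json-schema calls (the object name is arbitrary):

object SchemaValidationSketch {
  import org.everit.json.schema.loader.SchemaLoader
  import org.everit.json.schema.{Schema, ValidationException}
  import org.json.{JSONObject, JSONTokener}
  import scala.jdk.CollectionConverters._

  // Flatten a (possibly nested) ValidationException into readable messages.
  def validationMessages(ve: ValidationException): Seq[String] =
    Seq(ve.getPointerToViolation + ": " + ve.getErrorMessage) ++
      ve.getCausingExceptions.asScala.flatMap(validationMessages)

  // Validate `data` against `schemaStr`; an empty result means the document passed.
  def validate(data: String, schemaStr: String): Seq[String] =
    try {
      val schema: Schema = SchemaLoader.load(new JSONObject(new JSONTokener(schemaStr)))
      schema.validate(new JSONObject(data))
      Seq.empty
    } catch {
      case ve: ValidationException => validationMessages(ve)
    }
}
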
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ManagedGroupService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ManagedGroupService.scala
index 42096af82..03c59d562 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ManagedGroupService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ManagedGroupService.scala
@@ -8,7 +8,12 @@ import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.{Application, FireCloudConfig}
import org.broadinstitute.dsde.firecloud.dataaccess.SamDAO
import org.broadinstitute.dsde.firecloud.model.ManagedGroupRoles.ManagedGroupRole
-import org.broadinstitute.dsde.firecloud.model.{FireCloudManagedGroup, FireCloudManagedGroupMembership, ManagedGroupRoles, WithAccessToken}
+import org.broadinstitute.dsde.firecloud.model.{
+ FireCloudManagedGroup,
+ FireCloudManagedGroupMembership,
+ ManagedGroupRoles,
+ WithAccessToken
+}
import org.broadinstitute.dsde.firecloud.service.ManagedGroupService._
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
@@ -26,53 +31,64 @@ object ManagedGroupService {
}
-class ManagedGroupService(samDAO: SamDAO, implicit val userToken: WithAccessToken)(implicit protected val executionContext: ExecutionContext)
- extends LazyLogging with SprayJsonSupport {
+class ManagedGroupService(samDAO: SamDAO, implicit val userToken: WithAccessToken)(implicit
+ protected val executionContext: ExecutionContext
+) extends LazyLogging
+ with SprayJsonSupport {
def createGroup(groupName: WorkbenchGroupName): Future[PerRequestMessage] = {
val membersList = for {
_ <- samDAO.createGroup(groupName)
listMembers <- listGroupMembersInternal(groupName)
- _ <- samDAO.setPolicyPublic(samDAO.managedGroupResourceTypeName, groupName.value, ManagedGroupRoles.AdminNotifier.toString, true)
+ _ <- samDAO.setPolicyPublic(samDAO.managedGroupResourceTypeName,
+ groupName.value,
+ ManagedGroupRoles.AdminNotifier.toString,
+ true
+ )
} yield listMembers
membersList.map(response => RequestComplete(StatusCodes.Created, response))
}
- def deleteGroup(groupName: WorkbenchGroupName): Future[PerRequestMessage] = {
+ def deleteGroup(groupName: WorkbenchGroupName): Future[PerRequestMessage] =
samDAO.deleteGroup(groupName).map(_ => RequestComplete(StatusCodes.NoContent))
- }
- def listGroups(): Future[PerRequestMessage] = {
- samDAO.listGroups.map(response => RequestComplete(StatusCodes.OK, response.map { group =>
- group.copy(role = group.role.capitalize)
- }))
- }
+ def listGroups(): Future[PerRequestMessage] =
+ samDAO.listGroups.map(response =>
+ RequestComplete(StatusCodes.OK,
+ response.map { group =>
+ group.copy(role = group.role.capitalize)
+ }
+ )
+ )
- def listGroupMembers(groupName: WorkbenchGroupName): Future[PerRequestMessage] = {
+ def listGroupMembers(groupName: WorkbenchGroupName): Future[PerRequestMessage] =
listGroupMembersInternal(groupName).map(response => RequestComplete(StatusCodes.OK, response))
- }
- def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail): Future[PerRequestMessage] = {
+ def addGroupMember(groupName: WorkbenchGroupName,
+ role: ManagedGroupRole,
+ email: WorkbenchEmail
+ ): Future[PerRequestMessage] =
samDAO.addGroupMember(groupName, role, email).map(_ => RequestComplete(StatusCodes.NoContent))
- }
- def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail): Future[PerRequestMessage] = {
+ def removeGroupMember(groupName: WorkbenchGroupName,
+ role: ManagedGroupRole,
+ email: WorkbenchEmail
+ ): Future[PerRequestMessage] =
samDAO.removeGroupMember(groupName, role, email).map(_ => RequestComplete(StatusCodes.NoContent))
- }
- def overwriteGroupMembers(groupName: WorkbenchGroupName, role: ManagedGroupRole, membersList: List[WorkbenchEmail]): Future[PerRequestMessage] = {
+ def overwriteGroupMembers(groupName: WorkbenchGroupName,
+ role: ManagedGroupRole,
+ membersList: List[WorkbenchEmail]
+ ): Future[PerRequestMessage] =
samDAO.overwriteGroupMembers(groupName, role, membersList).map(_ => RequestComplete(StatusCodes.NoContent))
- }
- def requestGroupAccess(groupName: WorkbenchGroupName): Future[PerRequestMessage] = {
+ def requestGroupAccess(groupName: WorkbenchGroupName): Future[PerRequestMessage] =
samDAO.requestGroupAccess(groupName).map(_ => RequestComplete(StatusCodes.NoContent))
- }
- private def listGroupMembersInternal(groupName: WorkbenchGroupName): Future[FireCloudManagedGroup] = {
+ private def listGroupMembersInternal(groupName: WorkbenchGroupName): Future[FireCloudManagedGroup] =
for {
adminsEmails <- samDAO.listGroupPolicyEmails(groupName, ManagedGroupRoles.Admin)
membersEmails <- samDAO.listGroupPolicyEmails(groupName, ManagedGroupRoles.Member)
groupEmail <- samDAO.getGroupEmail(groupName)
} yield FireCloudManagedGroup(adminsEmails, membersEmails, groupEmail)
- }
}
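
Aside: createGroup above is a plain Future pipeline; a minimal sketch of that sequencing with the Sam calls abstracted into function parameters (all names here are illustrative):

object CreateGroupSketch {
  import scala.concurrent.{ExecutionContext, Future}

  final case class GroupMembership(admins: List[String], members: List[String], groupEmail: String)

  // Create the group, read back its membership, then open up the notifier policy,
  // in the same order as createGroup above.
  def createGroup(name: String)(create: String => Future[Unit],
                                listMembers: String => Future[GroupMembership],
                                makePolicyPublic: String => Future[Unit]
  )(implicit ec: ExecutionContext): Future[GroupMembership] =
    for {
      _ <- create(name)
      membership <- listMembers(name)
      _ <- makePolicyPublic(name)
    } yield membership
}
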
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/NamespaceService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/NamespaceService.scala
index 29a22c777..a701e8482 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/NamespaceService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/NamespaceService.scala
@@ -17,8 +17,9 @@ object NamespaceService {
new NamespaceService(userInfo, app.agoraDAO)
}
-class NamespaceService (protected val argUserInfo: UserInfo, val agoraDAO: AgoraDAO)(implicit protected val executionContext: ExecutionContext)
- extends SprayJsonSupport {
+class NamespaceService(protected val argUserInfo: UserInfo, val agoraDAO: AgoraDAO)(implicit
+ protected val executionContext: ExecutionContext
+) extends SprayJsonSupport {
implicit val userInfo: UserInfo = argUserInfo
@@ -27,16 +28,18 @@ class NamespaceService (protected val argUserInfo: UserInfo, val agoraDAO: Agora
delegatePermissionsResponse(agoraPermissions)
}
- def postFireCloudPermissions(ns: String, entity: String, permissions: List[FireCloudPermission]): Future[PerRequestMessage] = {
+ def postFireCloudPermissions(ns: String,
+ entity: String,
+ permissions: List[FireCloudPermission]
+ ): Future[PerRequestMessage] = {
val agoraPermissionsToPost = permissions map { permission => AgoraPermissionService.toAgoraPermission(permission) }
val agoraPermissionsPosted = agoraDAO.postNamespacePermissions(ns, entity, agoraPermissionsToPost)
delegatePermissionsResponse(agoraPermissionsPosted)
}
- private def delegatePermissionsResponse(agoraPerms: Future[List[AgoraPermission]]): Future[PerRequestMessage] = {
- agoraPerms map {
- perms =>
- RequestComplete(OK, perms map AgoraPermissionService.toFireCloudPermission)
+ private def delegatePermissionsResponse(agoraPerms: Future[List[AgoraPermission]]): Future[PerRequestMessage] =
+ agoraPerms map { perms =>
+ RequestComplete(OK, perms map AgoraPermissionService.toFireCloudPermission)
} recover {
case e: FireCloudExceptionWithErrorReport =>
// RequestComplete(e.errorReport.statusCode.getOrElse(InternalServerError), e.errorReport)
@@ -44,6 +47,5 @@ class NamespaceService (protected val argUserInfo: UserInfo, val agoraDAO: Agora
case e: Throwable =>
RequestCompleteWithErrorReport(InternalServerError, e.getMessage)
}
- }
}
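
Aside: delegatePermissionsResponse above maps the happy path and recovers failures into an error payload rather than letting the Future fail. A sketch of that pattern with plain strings standing in for PerRequestMessage and ErrorReport:

object RecoverSketch {
  import scala.concurrent.{ExecutionContext, Future}

  def toResponse(perms: Future[List[String]])(implicit ec: ExecutionContext): Future[String] =
    perms.map(p => s"200 OK: ${p.mkString(", ")}").recover { case e: Throwable =>
      s"500 Internal Server Error: ${e.getMessage}"
    }
}
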
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/NihService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/NihService.scala
index 6993624ab..1d0a2d63a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/NihService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/NihService.scala
@@ -4,12 +4,23 @@ import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.StatusCodes._
import com.typesafe.scalalogging.LazyLogging
-import org.broadinstitute.dsde.firecloud.dataaccess.{ExternalCredsDAO, GoogleServicesDAO, SamDAO, ShibbolethDAO, ThurloeDAO}
+import org.broadinstitute.dsde.firecloud.dataaccess.{
+ ExternalCredsDAO,
+ GoogleServicesDAO,
+ SamDAO,
+ ShibbolethDAO,
+ ThurloeDAO
+}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
import org.broadinstitute.dsde.firecloud.utils.DateUtils
-import org.broadinstitute.dsde.firecloud.{Application, FireCloudConfig, FireCloudException, FireCloudExceptionWithErrorReport}
+import org.broadinstitute.dsde.firecloud.{
+ Application,
+ FireCloudConfig,
+ FireCloudException,
+ FireCloudExceptionWithErrorReport
+}
import org.broadinstitute.dsde.rawls.model.ErrorReport
import org.broadinstitute.dsde.workbench.model.{WorkbenchEmail, WorkbenchGroupName, WorkbenchUserId}
import org.slf4j.LoggerFactory
@@ -21,16 +32,12 @@ import scala.concurrent.{ExecutionContext, Future}
import scala.io.Source
import scala.util.{Failure, Success, Try}
+case class NihStatus(linkedNihUsername: Option[String] = None,
+ datasetPermissions: Set[NihDatasetPermission],
+ linkExpireTime: Option[Long] = None
+)
-case class NihStatus(
- linkedNihUsername: Option[String] = None,
- datasetPermissions: Set[NihDatasetPermission],
- linkExpireTime: Option[Long] = None)
-
-case class NihAllowlist(
- name: String,
- groupToSync: WorkbenchGroupName,
- fileName: String)
+case class NihAllowlist(name: String, groupToSync: WorkbenchGroupName, fileName: String)
case class NihDatasetPermission(name: String, authorized: Boolean)
@@ -44,8 +51,14 @@ object NihService {
new NihService(app.samDAO, app.thurloeDAO, app.googleServicesDAO, app.shibbolethDAO, app.ecmDAO)
}
-class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao: GoogleServicesDAO, val shibbolethDao: ShibbolethDAO, val ecmDao: ExternalCredsDAO)
- (implicit val executionContext: ExecutionContext) extends LazyLogging with SprayJsonSupport {
+class NihService(val samDao: SamDAO,
+ val thurloeDao: ThurloeDAO,
+ val googleDao: GoogleServicesDAO,
+ val shibbolethDao: ShibbolethDAO,
+ val ecmDao: ExternalCredsDAO
+)(implicit val executionContext: ExecutionContext)
+ extends LazyLogging
+ with SprayJsonSupport {
lazy val log = LoggerFactory.getLogger(getClass)
@@ -53,34 +66,40 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
private val nihAllowlists: Set[NihAllowlist] = FireCloudConfig.Nih.whitelists
- def getNihStatus(userInfo: UserInfo): Future[PerRequestMessage] = {
+ def getNihStatus(userInfo: UserInfo): Future[PerRequestMessage] =
getNihStatusFromEcm(userInfo).flatMap {
case Some(nihStatus) =>
logger.info("Found eRA Commons link in ECM for user " + userInfo.id)
Future.successful(RequestComplete(nihStatus))
- case None => getNihStatusFromThurloe(userInfo).map {
- case Some(nihStatus) =>
- logger.info("Found eRA Commons link in Thurloe for user " + userInfo.id)
- RequestComplete(nihStatus)
- case None => RequestComplete(NotFound)
- }
+ case None =>
+ getNihStatusFromThurloe(userInfo).map {
+ case Some(nihStatus) =>
+ logger.info("Found eRA Commons link in Thurloe for user " + userInfo.id)
+ RequestComplete(nihStatus)
+ case None => RequestComplete(NotFound)
+ }
}
- }
- private def getNihStatusFromEcm(userInfo: UserInfo): Future[Option[NihStatus]] = {
+ private def getNihStatusFromEcm(userInfo: UserInfo): Future[Option[NihStatus]] =
ecmDao.getLinkedAccount(userInfo).flatMap {
- case Some(linkedAccount) => getAllAllowlistGroupMemberships(userInfo).map { allowlistMembership =>
- Some(NihStatus(Some(linkedAccount.linkedExternalId), allowlistMembership, Some(linkedAccount.linkExpireTime.getMillis / 1000L)))
+ case Some(linkedAccount) =>
+ getAllAllowlistGroupMemberships(userInfo).map { allowlistMembership =>
+ Some(
+ NihStatus(Some(linkedAccount.linkedExternalId),
+ allowlistMembership,
+ Some(linkedAccount.linkExpireTime.getMillis / 1000L)
+ )
+ )
}
case None => Future.successful(None)
}
- }
- private def getNihStatusFromThurloe(userInfo: UserInfo): Future[Option[NihStatus]] = {
+ private def getNihStatusFromThurloe(userInfo: UserInfo): Future[Option[NihStatus]] =
thurloeDao.getAllKVPs(userInfo.id, userInfo) flatMap {
case Some(profileWrapper) =>
ProfileUtils.getString("linkedNihUsername", profileWrapper) match {
- case Some(linkedNihUsername) => getAllAllowlistGroupMemberships(userInfo).map { allowlistMembership =>
+ case Some(linkedNihUsername) =>
+ getAllAllowlistGroupMemberships(userInfo).map { allowlistMembership =>
val linkExpireTime = ProfileUtils.getLong("linkExpireTime", profileWrapper)
Some(NihStatus(Some(linkedNihUsername), allowlistMembership, linkExpireTime))
}
@@ -88,18 +107,21 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
}
case None => Future.successful(None)
}
- }
private def getAllAllowlistGroupMemberships(userInfo: UserInfo): Future[Set[NihDatasetPermission]] = {
val groupMemberships = samDao.listGroups(userInfo)
groupMemberships.map { groups =>
val samGroupNames = groups.map(g => WorkbenchGroupName(g.groupName)).toSet
- nihAllowlists.map(allowlist => NihDatasetPermission(allowlist.name, samGroupNames.contains(allowlist.groupToSync)))
+ nihAllowlists.map(allowlist =>
+ NihDatasetPermission(allowlist.name, samGroupNames.contains(allowlist.groupToSync))
+ )
}
}
private def downloadNihAllowlist(allowlist: NihAllowlist): Set[String] = {
- val usersList = Source.fromInputStream(googleDao.getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, allowlist.fileName))
+ val usersList = Source.fromInputStream(
+ googleDao.getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, allowlist.fileName)
+ )
usersList.getLines().toSet
}
@@ -128,7 +150,9 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
// The list of users that, according to ECM, have active links
allLinkedAccounts <- ecmDao.getActiveLinkedEraAccounts(getAdminAccessToken)
      // The list of linked accounts for which the user appears in the allowlist
- allowlistLinkedAccounts = allLinkedAccounts.filter(linkedAccount => allowlistEraUsernames.contains(linkedAccount.linkedExternalId))
+ allowlistLinkedAccounts = allLinkedAccounts.filter(linkedAccount =>
+ allowlistEraUsernames.contains(linkedAccount.linkedExternalId)
+ )
// The users from Sam for the linked accounts on the allowlist
users <- samDao.getUsersForIds(allowlistLinkedAccounts.map(la => WorkbenchUserId(la.userId)))(getAdminAccessToken)
} yield users.map(user => WorkbenchEmail(user.userEmail)).toSet
@@ -141,7 +165,7 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
mapping.collect { case (fcUser, nihUser) if allowlistEraUsernames contains nihUser => fcUser }.toSeq
}
- //Sam APIs don't consume subject IDs. Now we must look up the emails in Thurloe...
+ // Sam APIs don't consume subject IDs. Now we must look up the emails in Thurloe...
members <- thurloeDao.getAllUserValuesForKey("email").map { keyValues =>
keyValues.view.filterKeys(subjectId => subjectIds.contains(subjectId)).values.map(WorkbenchEmail).toList
}
@@ -157,23 +181,25 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
members = ecmEmails ++ thurloeEmails
_ <- ensureAllowlistGroupsExists()
// The request to Sam to completely overwrite the group with the list of actively linked users on the allowlist
- _ <- samDao.overwriteGroupMembers(nihAllowlist.groupToSync, ManagedGroupRoles.Member, members.toList)(getAdminAccessToken) recoverWith {
- case e: Exception => throw new FireCloudException(s"Error synchronizing NIH allowlist: ${e.getMessage}")
+ _ <- samDao.overwriteGroupMembers(nihAllowlist.groupToSync, ManagedGroupRoles.Member, members.toList)(
+ getAdminAccessToken
+ ) recoverWith { case e: Exception =>
+ throw new FireCloudException(s"Error synchronizing NIH allowlist: ${e.getMessage}")
}
} yield ()
}
- private def linkNihAccountEcm(userInfo: UserInfo, nihLink: NihLink): Future[Try[Unit]] = {
- ecmDao.putLinkedEraAccount(LinkedEraAccount(userInfo.id, nihLink))(getAdminAccessToken)
- .flatMap(_ => {
- logger.info("Successfully linked NIH account in ECM for user " + userInfo.id)
- Future.successful(Success(()))
- }).recoverWith {
- case e =>
+ private def linkNihAccountEcm(userInfo: UserInfo, nihLink: NihLink): Future[Try[Unit]] =
+ ecmDao
+ .putLinkedEraAccount(LinkedEraAccount(userInfo.id, nihLink))(getAdminAccessToken)
+ .flatMap { _ =>
+ logger.info("Successfully linked NIH account in ECM for user " + userInfo.id)
+ Future.successful(Success(()))
+ }
+ .recoverWith { case e =>
logger.warn("Failed to link NIH account in ECM for user" + userInfo.id)
Future.successful(Failure(e))
- }
- }
+ }
private def linkNihAccountThurloe(userInfo: UserInfo, nihLink: NihLink): Future[Try[Unit]] = {
val profilePropertyMap = nihLink.propertyValueMap
@@ -181,16 +207,14 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
thurloeDao.saveKeyValues(userInfo, profilePropertyMap)
}
- private def unlinkNihAccount(userInfo: UserInfo): Future[Unit] = {
+ private def unlinkNihAccount(userInfo: UserInfo): Future[Unit] =
for {
_ <- unlinkNihAccountEcm(userInfo)
_ <- unlinkNihAccountThurloe(userInfo)
} yield ()
- }
- private def unlinkNihAccountEcm(userInfo: UserInfo): Future[Unit] = {
+ private def unlinkNihAccountEcm(userInfo: UserInfo): Future[Unit] =
ecmDao.deleteLinkedEraAccount(userInfo)(getAdminAccessToken)
- }
private def unlinkNihAccountThurloe(userInfo: UserInfo): Future[Unit] = {
val nihKeys = Set("linkedNihUsername", "linkExpireTime")
@@ -198,37 +222,46 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
Future.traverse(nihKeys) { nihKey =>
thurloeDao.deleteKeyValue(userInfo.id, nihKey, userInfo)
} map { results =>
- val failedKeys = results.collect {
- case Failure(exception) => exception.getMessage
+ val failedKeys = results.collect { case Failure(exception) =>
+ exception.getMessage
}
- if(failedKeys.nonEmpty) {
- throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, s"Unable to unlink NIH account: ${failedKeys.mkString(",")}"))
+ if (failedKeys.nonEmpty) {
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, s"Unable to unlink NIH account: ${failedKeys.mkString(",")}")
+ )
}
}
}
- def unlinkNihAccountAndSyncSelf(userInfo: UserInfo): Future[Unit] = {
+ def unlinkNihAccountAndSyncSelf(userInfo: UserInfo): Future[Unit] =
for {
_ <- unlinkNihAccount(userInfo)
_ <- ensureAllowlistGroupsExists()
- _ <- Future.traverse(nihAllowlists) {
- allowlist => removeUserFromNihAllowlistGroup(WorkbenchEmail(userInfo.userEmail), allowlist).recoverWith {
- case _: Exception => throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Unable to unlink NIH account"))
+ _ <- Future.traverse(nihAllowlists) { allowlist =>
+ removeUserFromNihAllowlistGroup(WorkbenchEmail(userInfo.userEmail), allowlist).recoverWith {
+ case _: Exception =>
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Unable to unlink NIH account")
+ )
}
}
} yield {}
- }
def updateNihLinkAndSyncSelf(userInfo: UserInfo, jwtWrapper: JWTWrapper): Future[PerRequestMessage] = {
val res = for {
shibbolethPublicKey <- shibbolethDao.getPublicKey()
- decodedToken <- Future.fromTry(Jwt.decodeRawAll(jwtWrapper.jwt, shibbolethPublicKey, Seq(JwtAlgorithm.RS256))).recoverWith {
- // The exception's error message contains the raw JWT. For an abundance of security, don't
- // log the error message - even though if we reached this point, the JWT is invalid. It could
- // still contain sensitive info.
- case _: Throwable => Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.BadRequest, "Failed to decode JWT")))
- }
+ decodedToken <- Future
+ .fromTry(Jwt.decodeRawAll(jwtWrapper.jwt, shibbolethPublicKey, Seq(JwtAlgorithm.RS256)))
+ .recoverWith {
+ // The exception's error message contains the raw JWT. For an abundance of security, don't
+ // log the error message - even though if we reached this point, the JWT is invalid. It could
+ // still contain sensitive info.
+ case _: Throwable =>
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.BadRequest, "Failed to decode JWT"))
+ )
+ }
nihLink = decodedToken match {
case (_, rawTokenFromShibboleth, _) =>
rawTokenFromShibboleth.parseJson.convertTo[ShibbolethToken].toNihLink
@@ -237,23 +270,25 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
ecmLinkResult <- linkNihAccountEcm(userInfo, nihLink)
_ <- ensureAllowlistGroupsExists()
- allowlistSyncResults <- Future.traverse(nihAllowlists) {
- allowlist => syncNihAllowlistForUser(WorkbenchEmail(userInfo.userEmail), nihLink.linkedNihUsername, allowlist)
+ allowlistSyncResults <- Future.traverse(nihAllowlists) { allowlist =>
+ syncNihAllowlistForUser(WorkbenchEmail(userInfo.userEmail), nihLink.linkedNihUsername, allowlist)
.map(NihDatasetPermission(allowlist.name, _))
}
- } yield {
+ } yield
if (thurloeLinkResult.isSuccess && ecmLinkResult.isSuccess) {
- RequestComplete(OK, NihStatus(Option(nihLink.linkedNihUsername), allowlistSyncResults, Option(nihLink.linkExpireTime)))
+ RequestComplete(
+ OK,
+ NihStatus(Option(nihLink.linkedNihUsername), allowlistSyncResults, Option(nihLink.linkExpireTime))
+ )
} else {
(thurloeLinkResult, ecmLinkResult) match {
- case (Failure(t), Success(_)) => logger.error("Failed to link NIH Account in Thurloe", t)
- case (Success(_), Failure(t)) => logger.error("Failed to link NIH Account in ECM", t)
+ case (Failure(t), Success(_)) => logger.error("Failed to link NIH Account in Thurloe", t)
+ case (Success(_), Failure(t)) => logger.error("Failed to link NIH Account in ECM", t)
case (Failure(t1), Failure(t2)) => logger.error("Failed to link NIH Account in Thurloe and ECM", t1, t2)
case _ => // unreachable case due to the if-condition above, but this case avoids compile warnings
}
RequestCompleteWithErrorReport(InternalServerError, "Error updating NIH link")
}
- }
res.recoverWith {
case e: FireCloudExceptionWithErrorReport if e.errorReport.statusCode == Option(BadRequest) =>
@@ -261,45 +296,55 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
}
}
- private def syncNihAllowlistForUser(userEmail: WorkbenchEmail, linkedNihUserName: String, nihAllowlist: NihAllowlist): Future[Boolean] = {
+ private def syncNihAllowlistForUser(userEmail: WorkbenchEmail,
+ linkedNihUserName: String,
+ nihAllowlist: NihAllowlist
+ ): Future[Boolean] = {
val allowlistUsers = downloadNihAllowlist(nihAllowlist)
- if(allowlistUsers contains linkedNihUserName) {
+ if (allowlistUsers contains linkedNihUserName) {
for {
_ <- samDao.addGroupMember(nihAllowlist.groupToSync, ManagedGroupRoles.Member, userEmail)(getAdminAccessToken)
} yield true
} else {
for {
- _ <- samDao.removeGroupMember(nihAllowlist.groupToSync, ManagedGroupRoles.Member, userEmail)(getAdminAccessToken)
+ _ <- samDao.removeGroupMember(nihAllowlist.groupToSync, ManagedGroupRoles.Member, userEmail)(
+ getAdminAccessToken
+ )
} yield false
}
}
- private def removeUserFromNihAllowlistGroup(userEmail: WorkbenchEmail, nihAllowlist: NihAllowlist): Future[Unit] = {
+ private def removeUserFromNihAllowlistGroup(userEmail: WorkbenchEmail, nihAllowlist: NihAllowlist): Future[Unit] =
samDao.removeGroupMember(nihAllowlist.groupToSync, ManagedGroupRoles.Member, userEmail)(getAdminAccessToken)
- }
- private def ensureAllowlistGroupsExists(): Future[Unit] = {
+ private def ensureAllowlistGroupsExists(): Future[Unit] =
samDao.listGroups(getAdminAccessToken).flatMap { groups =>
- val missingGroupNames = nihAllowlists.map(_.groupToSync.value.toLowerCase()) -- groups.map(_.groupName.toLowerCase).toSet
+ val missingGroupNames =
+ nihAllowlists.map(_.groupToSync.value.toLowerCase()) -- groups.map(_.groupName.toLowerCase).toSet
if (missingGroupNames.isEmpty) {
Future.successful(())
} else {
- Future.traverse(missingGroupNames) { groupName =>
- samDao.createGroup(WorkbenchGroupName(groupName))(getAdminAccessToken).recover {
- case fce: FireCloudExceptionWithErrorReport if fce.errorReport.statusCode.contains(StatusCodes.Conflict) => // somebody else made it
+ Future
+ .traverse(missingGroupNames) { groupName =>
+ samDao.createGroup(WorkbenchGroupName(groupName))(getAdminAccessToken).recover {
+ case fce: FireCloudExceptionWithErrorReport
+ if fce.errorReport.statusCode.contains(StatusCodes.Conflict) => // somebody else made it
+ }
}
- }.map(_ => ())
+ .map(_ => ())
}
}
- }
def filterForCurrentUsers(usernames: Map[String, String], expirations: Map[String, String]): Map[String, String] = {
- val currentFcUsers = expirations.map {
- case (fcUser, expStr: String) => fcUser -> Try { expStr.toLong }.toOption
- }.collect {
- case (fcUser, Some(exp: Long)) if DateUtils.now < exp => fcUser
- }.toSet
+ val currentFcUsers = expirations
+ .map { case (fcUser, expStr: String) =>
+ fcUser -> Try(expStr.toLong).toOption
+ }
+ .collect {
+ case (fcUser, Some(exp: Long)) if DateUtils.now < exp => fcUser
+ }
+ .toSet
usernames.filter { case (fcUser, nihUser) => currentFcUsers.contains(fcUser) }
}
@@ -312,8 +357,6 @@ class NihService(val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleDao:
for {
usernames <- nihUsernames
expirations <- nihExpireTimes
- } yield {
- filterForCurrentUsers(usernames, expirations)
- }
+ } yield filterForCurrentUsers(usernames, expirations)
}
}
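
Note on the NihService hunks above: getNihStatus consults ECM first and only falls back to Thurloe when ECM reports no link, returning NotFound if both miss. A minimal standalone sketch of that lookup-with-fallback shape, using stand-in types and hypothetical lookup functions rather than the project's DAOs:

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object FallbackLookupSketch extends App {
  // Stand-in result type; the real service builds an NihStatus from ECM or Thurloe data.
  final case class Status(source: String)

  // Hypothetical lookups standing in for ecmDao.getLinkedAccount and thurloeDao.getAllKVPs.
  def lookupPrimary(userId: String): Future[Option[Status]]  = Future.successful(None)
  def lookupFallback(userId: String): Future[Option[Status]] = Future.successful(Some(Status("thurloe")))

  // Same shape as getNihStatus: query the fallback store only when the primary has no record;
  // a miss in both stores stays None (the service maps that to NotFound).
  def lookupWithFallback(userId: String): Future[Option[Status]] =
    lookupPrimary(userId).flatMap {
      case found @ Some(_) => Future.successful(found)
      case None            => lookupFallback(userId)
    }

  println(Await.result(lookupWithFallback("user-1"), 5.seconds)) // Some(Status(thurloe))
}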
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/OntologyService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/OntologyService.scala
index eb9884b69..7d05accef 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/OntologyService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/OntologyService.scala
@@ -19,21 +19,25 @@ object OntologyService {
}
-class OntologyService(val ontologyDAO: OntologyDAO, val researchPurposeSupport: ResearchPurposeSupport)
- (implicit protected val executionContext: ExecutionContext)
- extends DataUseRestrictionSupport with SprayJsonSupport with LazyLogging {
+class OntologyService(val ontologyDAO: OntologyDAO, val researchPurposeSupport: ResearchPurposeSupport)(implicit
+ protected val executionContext: ExecutionContext
+) extends DataUseRestrictionSupport
+ with SprayJsonSupport
+ with LazyLogging {
- def buildStructuredUseRestrictionAttribute(request: StructuredDataRequest): Future[PerRequestMessage] = {
+ def buildStructuredUseRestrictionAttribute(request: StructuredDataRequest): Future[PerRequestMessage] =
Future(RequestComplete(generateStructuredUseRestrictionAttribute(request, ontologyDAO)))
- }
- def autocompleteOntology(term: String): Future[PerRequestMessage] = {
+ def autocompleteOntology(term: String): Future[PerRequestMessage] =
Future(RequestComplete(ontologyDAO.autocomplete(term)))
- }
def buildResearchPurposeQuery(request: ResearchPurposeRequest): Future[PerRequestMessage] = {
import spray.json._
def addPrefix(name: String): String = request.prefix.getOrElse("") + name
- Future(RequestComplete(researchPurposeSupport.researchPurposeFilters(ResearchPurpose(request), addPrefix).toString.parseJson))
+ Future(
+ RequestComplete(
+ researchPurposeSupport.researchPurposeFilters(ResearchPurpose(request), addPrefix).toString.parseJson
+ )
+ )
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/PerRequest.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/PerRequest.scala
index 7e89431e4..3b4741f97 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/PerRequest.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/PerRequest.scala
@@ -8,21 +8,37 @@ import scala.concurrent.ExecutionContext
object PerRequest {
- implicit def requestCompleteMarshaller(implicit executionContext: ExecutionContext): ToResponseMarshaller[PerRequestMessage] = Marshaller {
- _: ExecutionContext => {
- case requestComplete@ RequestComplete(errorReport: ErrorReport) =>
- requestComplete.marshaller(requestComplete.response).map(_.map(_.map(_.withStatus(errorReport.statusCode.getOrElse(StatusCodes.InternalServerError)))))
+ implicit def requestCompleteMarshaller(implicit
+ executionContext: ExecutionContext
+ ): ToResponseMarshaller[PerRequestMessage] = Marshaller { _: ExecutionContext =>
+ {
+ case requestComplete @ RequestComplete(errorReport: ErrorReport) =>
+ requestComplete
+ .marshaller(requestComplete.response)
+ .map(_.map(_.map(_.withStatus(errorReport.statusCode.getOrElse(StatusCodes.InternalServerError)))))
case requestComplete: RequestComplete[_] =>
requestComplete.marshaller(requestComplete.response)
- case requestComplete@ RequestCompleteWithHeaders(errorReport: ErrorReport, _) =>
- requestComplete.marshaller(requestComplete.response).map(_.map(_.map(_.mapHeaders(_ ++ requestComplete.headers).withStatus(errorReport.statusCode.getOrElse(StatusCodes.InternalServerError)))))
+ case requestComplete @ RequestCompleteWithHeaders(errorReport: ErrorReport, _) =>
+ requestComplete
+ .marshaller(requestComplete.response)
+ .map(
+ _.map(
+ _.map(
+ _.mapHeaders(_ ++ requestComplete.headers)
+ .withStatus(errorReport.statusCode.getOrElse(StatusCodes.InternalServerError))
+ )
+ )
+ )
case requestComplete: RequestCompleteWithHeaders[_] =>
- requestComplete.marshaller(requestComplete.response).map(_.map(_.map(_.mapHeaders(_ ++ requestComplete.headers))))
+ requestComplete
+ .marshaller(requestComplete.response)
+ .map(_.map(_.map(_.mapHeaders(_ ++ requestComplete.headers))))
}
}
sealed trait PerRequestMessage
+
/**
* Report complete, follows same pattern as spray.routing.RequestContext.complete; examples of how to call
* that method should apply here too. E.g. even though this method has only one parameter, it can be called
@@ -35,16 +51,22 @@ object PerRequest {
* tuple where the first element is StatusCode: RequestCompleteWithHeaders((StatusCode.Created, results), header).
* Note that this is here so that RequestComplete above can behave like spray.routing.RequestContext.complete.
*/
- case class RequestCompleteWithHeaders[T](response: T, headers: HttpHeader*)(implicit val marshaller: ToResponseMarshaller[T]) extends PerRequestMessage
+ case class RequestCompleteWithHeaders[T](response: T, headers: HttpHeader*)(implicit
+ val marshaller: ToResponseMarshaller[T]
+ ) extends PerRequestMessage
/** allows for pattern matching with extraction of marshaller */
private object RequestComplete_ {
- def unapply[T >: Any](requestComplete: RequestComplete[T]) = Some((requestComplete.response, requestComplete.marshaller))
+ def unapply[T >: Any](requestComplete: RequestComplete[T]) = Some(
+ (requestComplete.response, requestComplete.marshaller)
+ )
}
/** allows for pattern matching with extraction of marshaller */
private object RequestCompleteWithHeaders_ {
- def unapply[T >: Any](requestComplete: RequestCompleteWithHeaders[T]) = Some((requestComplete.response, requestComplete.headers, requestComplete.marshaller))
+ def unapply[T >: Any](requestComplete: RequestCompleteWithHeaders[T]) = Some(
+ (requestComplete.response, requestComplete.headers, requestComplete.marshaller)
+ )
}
}
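
Note on the PerRequest scaladoc above: RequestComplete takes a single response parameter that may itself be a (StatusCode, payload) tuple, and RequestCompleteWithHeaders additionally accepts varargs headers. A small sketch of that calling convention with simplified stand-in case classes (the real ones also capture an implicit ToResponseMarshaller):

object PerRequestCallConventionSketch extends App {
  // Simplified stand-ins, not the project's marshaller-backed classes.
  final case class StatusCode(intValue: Int)
  final case class HttpHeader(name: String, value: String)
  final case class RequestComplete[T](response: T)
  final case class RequestCompleteWithHeaders[T](response: T, headers: HttpHeader*)

  val results = List("a", "b")

  // Plain payload.
  val ok = RequestComplete(results)

  // Still one response parameter, but it can be a (status, payload) tuple, as the scaladoc describes.
  val created = RequestCompleteWithHeaders((StatusCode(201), results), HttpHeader("Location", "/things/1"))

  println(ok)
  println(created)
}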
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/PermissionReportService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/PermissionReportService.scala
index 3032ec38f..2bee80c36 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/PermissionReportService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/PermissionReportService.scala
@@ -7,14 +7,18 @@ import org.broadinstitute.dsde.firecloud.{Application, FireCloudExceptionWithErr
import org.broadinstitute.dsde.firecloud.dataaccess.{AgoraDAO, RawlsDAO}
import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository._
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
-import org.broadinstitute.dsde.firecloud.model.{OrchMethodConfigurationName, PermissionReport, PermissionReportRequest, UserInfo}
+import org.broadinstitute.dsde.firecloud.model.{
+ OrchMethodConfigurationName,
+ PermissionReport,
+ PermissionReportRequest,
+ UserInfo
+}
import org.broadinstitute.dsde.firecloud.service.PerRequest.RequestComplete
import org.broadinstitute.dsde.rawls.model.{AccessEntry, WorkspaceACL}
import akka.http.scaladsl.model.StatusCodes._
import scala.concurrent.ExecutionContext
-
object PermissionReportService {
def constructor(app: Application)(userInfo: UserInfo)(implicit executionContext: ExecutionContext) =
@@ -22,44 +26,58 @@ object PermissionReportService {
}
-class PermissionReportService (protected val argUserInfo: UserInfo, val rawlsDAO: RawlsDAO, val agoraDAO: AgoraDAO) (implicit protected val executionContext: ExecutionContext) extends LazyLogging {
+class PermissionReportService(protected val argUserInfo: UserInfo, val rawlsDAO: RawlsDAO, val agoraDAO: AgoraDAO)(
+ implicit protected val executionContext: ExecutionContext
+) extends LazyLogging {
import PermissionReportService._
implicit val userInfo: UserInfo = argUserInfo
-
+
def getPermissionReport(workspaceNamespace: String, workspaceName: String, reportInput: PermissionReportRequest) = {
// start the requests to get workspace users and workspace configs in parallel
val futureWorkspaceACL = rawlsDAO.getWorkspaceACL(workspaceNamespace, workspaceName) recover {
// User is forbidden from listing ACLs for this workspace, but may still be able to read
// the configs/methods. Continue with empty workspace ACLs.
- case fcex:FireCloudExceptionWithErrorReport if fcex.errorReport.statusCode.contains(Forbidden) =>
+ case fcex: FireCloudExceptionWithErrorReport if fcex.errorReport.statusCode.contains(Forbidden) =>
WorkspaceACL(Map.empty[String, AccessEntry])
// all other exceptions are considered fatal
}
val futureWorkspaceConfigs = rawlsDAO.getAgoraMethodConfigs(workspaceNamespace, workspaceName) map { configs =>
// filter to just those the user requested
if (reportInput.configs.isEmpty || reportInput.configs.get.isEmpty) configs
- else configs.filter( x => reportInput.configs.get.contains(OrchMethodConfigurationName(x)))
+ else configs.filter(x => reportInput.configs.get.contains(OrchMethodConfigurationName(x)))
}
for {
workspaceACL <- futureWorkspaceACL
workspaceConfigs <- futureWorkspaceConfigs
- methodACLs <- agoraDAO.getMultiEntityPermissions(AgoraEntityType.Workflow,
- (workspaceConfigs map {config => Method(config.methodRepoMethod)}).distinct.toList)
+ methodACLs <- agoraDAO.getMultiEntityPermissions(
+ AgoraEntityType.Workflow,
+ (workspaceConfigs map { config => Method(config.methodRepoMethod) }).distinct.toList
+ )
} yield {
// filter the workspace users to what the user requested
- val wsAcl = if (reportInput.users.isEmpty || reportInput.users.get.isEmpty) workspaceACL.acl
- else workspaceACL.acl.filter( x => reportInput.users.get.contains(x._1) )
+ val wsAcl =
+ if (reportInput.users.isEmpty || reportInput.users.get.isEmpty) workspaceACL.acl
+ else workspaceACL.acl.filter(x => reportInput.users.get.contains(x._1))
val translatedMethodAcl = workspaceConfigs map { config =>
val methodLookup = Method(config.methodRepoMethod)
val agoraMethodReference = methodACLs.find(_.entity.toShortString == methodLookup.toShortString)
agoraMethodReference match {
case Some(agora) =>
- EntityAccessControl(Some(Method(config.methodRepoMethod, agora.entity.managers, agora.entity.public)),
- OrchMethodConfigurationName(config), agora.acls map AgoraPermissionService.toFireCloudPermission, agora.message)
- case None => EntityAccessControl(None, OrchMethodConfigurationName(config), Seq.empty[FireCloudPermission], Some("referenced method not found."))
+ EntityAccessControl(
+ Some(Method(config.methodRepoMethod, agora.entity.managers, agora.entity.public)),
+ OrchMethodConfigurationName(config),
+ agora.acls map AgoraPermissionService.toFireCloudPermission,
+ agora.message
+ )
+ case None =>
+ EntityAccessControl(None,
+ OrchMethodConfigurationName(config),
+ Seq.empty[FireCloudPermission],
+ Some("referenced method not found.")
+ )
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/RegisterService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/RegisterService.scala
index bab848893..12c1d36f6 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/RegisterService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/RegisterService.scala
@@ -6,7 +6,13 @@ import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
import org.broadinstitute.dsde.firecloud.{Application, FireCloudConfig, FireCloudExceptionWithErrorReport}
-import org.broadinstitute.dsde.workbench.model.Notifications.{ActivationNotification, AzurePreviewActivationNotification, AzurePreviewActivationNotificationType, Notification, NotificationFormat}
+import org.broadinstitute.dsde.workbench.model.Notifications.{
+ ActivationNotification,
+ AzurePreviewActivationNotification,
+ AzurePreviewActivationNotificationType,
+ Notification,
+ NotificationFormat
+}
import org.broadinstitute.dsde.rawls.model.ErrorReport
import akka.http.scaladsl.model.StatusCodes
import org.broadinstitute.dsde.firecloud.FireCloudConfig.Sam
@@ -26,64 +32,70 @@ object RegisterService {
}
-class RegisterService(val rawlsDao: RawlsDAO, val samDao: SamDAO, val thurloeDao: ThurloeDAO, val googleServicesDAO: GoogleServicesDAO)
- (implicit protected val executionContext: ExecutionContext) extends LazyLogging {
+class RegisterService(val rawlsDao: RawlsDAO,
+ val samDao: SamDAO,
+ val thurloeDao: ThurloeDAO,
+ val googleServicesDAO: GoogleServicesDAO
+)(implicit protected val executionContext: ExecutionContext)
+ extends LazyLogging {
def createUserWithProfile(userInfo: UserInfo, registerRequest: RegisterRequest): Future[PerRequestMessage] =
for {
registerResult <- registerUser(userInfo, registerRequest.acceptsTermsOfService)
// We are using the equivalent value from sam registration to force the order of operations for the thurloe calls
- registrationResultUserInfo = userInfo.copy(userEmail = registerResult.email.value, id = registerResult.id.value)
+ registrationResultUserInfo = userInfo.copy(userEmail = registerResult.email.value, id = registerResult.id.value)
_ <- saveProfileInThurloeAndSendRegistrationEmail(registrationResultUserInfo, registerRequest.profile)
} yield RequestComplete(StatusCodes.OK, registerResult)
- def createUpdateProfile(userInfo: UserInfo, basicProfile: BasicProfile): Future[PerRequestMessage] = {
+ def createUpdateProfile(userInfo: UserInfo, basicProfile: BasicProfile): Future[PerRequestMessage] =
for {
isRegistered <- isRegistered(userInfo)
- userStatus <- if (!isRegistered.enabled.google || !isRegistered.enabled.ldap) {
- for {
- registerResult <- registerUser(userInfo, basicProfile.termsOfService)
- registrationResultUserInfo = userInfo.copy(userEmail = registerResult.userInfo.userEmail, id = registerResult.userInfo.userSubjectId)
- _ <- saveProfileInThurloeAndSendRegistrationEmail(registrationResultUserInfo, basicProfile)
- } yield registerResult
- } else {
- /* when updating the profile in Thurloe, make sure to send the update under the same user id as the profile
+ userStatus <-
+ if (!isRegistered.enabled.google || !isRegistered.enabled.ldap) {
+ for {
+ registerResult <- registerUser(userInfo, basicProfile.termsOfService)
+ registrationResultUserInfo = userInfo.copy(userEmail = registerResult.userInfo.userEmail,
+ id = registerResult.userInfo.userSubjectId
+ )
+ _ <- saveProfileInThurloeAndSendRegistrationEmail(registrationResultUserInfo, basicProfile)
+ } yield registerResult
+ } else {
+ /* when updating the profile in Thurloe, make sure to send the update under the same user id as the profile
        was originally created with. A given user can have a Sam id, a Google subject id, and a b2c Azure id; if we
send profile updates under a different id, Thurloe will create duplicate keys for the user.
Because the original profile was created during registration using `userInfo.userSubjectId` (see
`registrationResultUserInfo` above), we use that same id here.
- */
- thurloeDao.saveProfile(userInfo.copy(id = isRegistered.userInfo.userSubjectId), basicProfile) map (_ => isRegistered)
- }
- } yield {
- RequestComplete(StatusCodes.OK, userStatus)
- }
- }
+ */
+ thurloeDao.saveProfile(userInfo.copy(id = isRegistered.userInfo.userSubjectId), basicProfile) map (_ =>
+ isRegistered
+ )
+ }
+ } yield RequestComplete(StatusCodes.OK, userStatus)
private def saveProfileInThurloeAndSendRegistrationEmail(userInfo: UserInfo, profile: BasicProfile): Future[Unit] = {
val otherValues = Map("isRegistrationComplete" -> Profile.currentVersion.toString, "email" -> userInfo.userEmail)
- thurloeDao.saveProfile(userInfo, profile).andThen {
- case Success(_) => thurloeDao.saveKeyValues(userInfo, otherValues) andThen {
- case Success(_) => googleServicesDAO.publishMessages(FireCloudConfig.Notification.fullyQualifiedNotificationTopic, Seq(NotificationFormat.write(generateWelcomeEmail(userInfo)).compactPrint))
+ thurloeDao.saveProfile(userInfo, profile).andThen { case Success(_) =>
+ thurloeDao.saveKeyValues(userInfo, otherValues) andThen { case Success(_) =>
+ googleServicesDAO.publishMessages(FireCloudConfig.Notification.fullyQualifiedNotificationTopic,
+ Seq(NotificationFormat.write(generateWelcomeEmail(userInfo)).compactPrint)
+ )
}
}
}
- private def isRegistered(userInfo: UserInfo): Future[RegistrationInfo] = {
+ private def isRegistered(userInfo: UserInfo): Future[RegistrationInfo] =
samDao.getRegistrationStatus(userInfo) recover {
case e: FireCloudExceptionWithErrorReport if e.errorReport.statusCode == Option(StatusCodes.NotFound) =>
RegistrationInfo(WorkbenchUserInfo(userInfo.id, userInfo.userEmail), WorkbenchEnabled(false, false, false))
}
- }
- def generateWelcomeEmail(userInfo: UserInfo): Notification = {
- //If the user is a B2C user and does not have a Google access token, we can safely assume that they're an Azure user
+ def generateWelcomeEmail(userInfo: UserInfo): Notification =
+ // If the user is a B2C user and does not have a Google access token, we can safely assume that they're an Azure user
userInfo.googleAccessTokenThroughB2C match {
case None if userInfo.isB2C => AzurePreviewActivationNotification(WorkbenchUserId(userInfo.id))
- case _ => ActivationNotification(WorkbenchUserId(userInfo.id))
+ case _ => ActivationNotification(WorkbenchUserId(userInfo.id))
}
- }
private def registerUser(userInfo: UserInfo, termsOfService: Option[String]): Future[RegistrationInfo] =
samDao.registerUser(termsOfService)(userInfo)
@@ -99,11 +111,10 @@ class RegisterService(val rawlsDao: RawlsDAO, val samDao: SamDAO, val thurloeDao
validKeyPrefixes.exists(prefix => key.startsWith(prefix)) || validKeys.contains(key)
}
- def updateProfilePreferences(userInfo: UserInfo, preferences: Map[String, String]): Future[PerRequestMessage] = {
+ def updateProfilePreferences(userInfo: UserInfo, preferences: Map[String, String]): Future[PerRequestMessage] =
if (preferences.keys.forall(isValidPreferenceKey)) {
thurloeDao.saveKeyValues(userInfo, preferences).map(_ => RequestComplete(StatusCodes.NoContent))
} else {
throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.BadRequest, "illegal preference key"))
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ShareLogService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ShareLogService.scala
index 2a0df535a..db2bb87c2 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/ShareLogService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/ShareLogService.scala
@@ -15,9 +15,12 @@ object ShareLogService {
() => new ShareLogService(app.shareLogDAO)
}
-class ShareLogService(val shareLogDAO: ShareLogDAO)(implicit protected val executionContext: ExecutionContext) extends SprayJsonSupport {
+class ShareLogService(val shareLogDAO: ShareLogDAO)(implicit protected val executionContext: ExecutionContext)
+ extends SprayJsonSupport {
implicit val impAttributeFormat: AttributeFormat = new AttributeFormat with PlainArrayAttributeListSerializer
- def getSharees(userId: String, shareType: Option[ShareType.Value] = None) = Future(RequestComplete(shareLogDAO.getShares(userId, shareType).map(_.sharee)))
+ def getSharees(userId: String, shareType: Option[ShareType.Value] = None) = Future(
+ RequestComplete(shareLogDAO.getShares(userId, shareType).map(_.sharee))
+ )
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/StatusService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/StatusService.scala
index dc036bd62..d0838dae7 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/StatusService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/StatusService.scala
@@ -17,21 +17,19 @@ import scala.concurrent.duration._
* Created by anichols on 4/5/17.
*/
object StatusService {
- def constructor(healthMonitor: ActorRef)()(implicit executionContext: ExecutionContext): StatusService = {
+ def constructor(healthMonitor: ActorRef)()(implicit executionContext: ExecutionContext): StatusService =
new StatusService(healthMonitor)
- }
}
-class StatusService (val healthMonitor: ActorRef)
- (implicit protected val executionContext: ExecutionContext) extends SprayJsonSupport {
+class StatusService(val healthMonitor: ActorRef)(implicit protected val executionContext: ExecutionContext)
+ extends SprayJsonSupport {
implicit val timeout: Timeout = Timeout(1.minute) // timeout for the ask to healthMonitor for GetCurrentStatus
- def collectStatusInfo(): Future[PerRequestMessage] = {
+ def collectStatusInfo(): Future[PerRequestMessage] =
(healthMonitor ? GetCurrentStatus).mapTo[StatusCheckResponse].map { statusCheckResponse =>
// if we've successfully reached this point, always return a 200, so the load balancers
// don't think orchestration is down. the statusCheckResponse will still contain ok: true|false
// in its payload, depending on the status of subsystems.
RequestComplete(StatusCodes.OK, statusCheckResponse)
}
- }
}
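
Note on StatusService above: collectStatusInfo asks the health-monitor actor for GetCurrentStatus, narrows the reply with mapTo, and always answers 200 so load balancers do not mark orchestration down. A standalone sketch of that ask/mapTo shape, assuming classic akka-actor on the classpath and demo message types in place of the workbench health-monitor ones:

import akka.actor.{Actor, ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.Await
import scala.concurrent.duration._

object AskPatternSketch extends App {
  // Demo messages standing in for the health monitor's GetCurrentStatus / StatusCheckResponse.
  case object GetCurrentStatus
  final case class StatusCheckResponse(ok: Boolean)

  class DemoHealthMonitor extends Actor {
    def receive: Receive = { case GetCurrentStatus => sender() ! StatusCheckResponse(ok = true) }
  }

  implicit val system: ActorSystem = ActorSystem("status-demo")
  implicit val timeout: Timeout = Timeout(1.minute) // same timeout the service uses for the ask

  val monitor = system.actorOf(Props(new DemoHealthMonitor))
  // ask returns Future[Any]; mapTo narrows it to the expected reply type.
  val response = Await.result((monitor ? GetCurrentStatus).mapTo[StatusCheckResponse], 10.seconds)
  println(response) // StatusCheckResponse(true); the HTTP layer would still return 200 either way

  system.terminate()
}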
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupport.scala
index 09cbc2c01..b7bffc333 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupport.scala
@@ -2,7 +2,11 @@ package org.broadinstitute.dsde.firecloud.service
import org.broadinstitute.dsde.firecloud.{FireCloudException, FireCloudExceptionWithErrorReport}
import org.broadinstitute.dsde.rawls.model._
-import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddUpdateAttribute, AttributeUpdateOperation, RemoveAttribute}
+import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{
+ AddUpdateAttribute,
+ AttributeUpdateOperation,
+ RemoveAttribute
+}
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.PerRequest.PerRequestMessage
import org.broadinstitute.dsde.firecloud.utils.{TSVLoadFile, TSVParser}
@@ -20,41 +24,52 @@ import scala.util.{Failure, Success, Try}
*/
object TsvTypes {
sealed trait TsvType
- case object ENTITY extends TsvType { override def toString = "entity" } // insert or update, must have required columns
+ case object ENTITY extends TsvType {
+ override def toString = "entity"
+ } // insert or update, must have required columns
case object UPDATE extends TsvType { override def toString = "update" } // update only, entity must preexist
case object MEMBERSHIP extends TsvType { override def toString = "membership" } // add members to a set
- def withName(name: String): TsvType = {
+ def withName(name: String): TsvType =
name match {
- case "entity" => ENTITY
- case "update" => UPDATE
+ case "entity" => ENTITY
+ case "update" => UPDATE
case "membership" => MEMBERSHIP
- case _ => throw new FireCloudException(s"Invalid TSV type '$name', supported types are: membership, entity, update")
+ case _ =>
+ throw new FireCloudException(s"Invalid TSV type '$name', supported types are: membership, entity, update")
}
- }
}
trait TSVFileSupport {
+
/**
* Attempts to parse a string into a TSVLoadFile.
* Bails with a 400 Bad Request if the TSV is invalid. */
- def withTSVFile(tsvString:String)(op: TSVLoadFile => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
+ def withTSVFile(
+ tsvString: String
+ )(op: TSVLoadFile => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] =
Try(TSVParser.parse(tsvString)) match {
- case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
+ case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(tsvFile) => op(tsvFile)
}
- }
- def checkNumberOfRows(tsv: TSVLoadFile, rows: Int)(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
+ def checkNumberOfRows(tsv: TSVLoadFile, rows: Int)(
+ op: => Future[PerRequestMessage]
+ )(implicit ec: ExecutionContext): Future[PerRequestMessage] =
if ((tsv.tsvData.length + (if (tsv.headers.isEmpty) 0 else 1)) != rows) {
- Future(RequestCompleteWithErrorReport(BadRequest,
- "Your file does not have the correct number of rows. There should be " + rows.toString))
+ Future(
+ RequestCompleteWithErrorReport(
+ BadRequest,
+ "Your file does not have the correct number of rows. There should be " + rows.toString
+ )
+ )
} else {
op
}
- }
- def checkFirstRowDistinct( tsv: TSVLoadFile )(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
+ def checkFirstRowDistinct(
+ tsv: TSVLoadFile
+ )(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
val attributeNames = Seq(tsv.headers.head.stripPrefix("workspace:")) ++ tsv.headers.tail
if (attributeNames.size != attributeNames.distinct.size) {
Future(RequestCompleteWithErrorReport(BadRequest, "Duplicated attribute keys are not allowed"))
@@ -66,12 +81,20 @@ trait TSVFileSupport {
/**
* Bail with a 400 Bad Request if the first column of the tsv has duplicate values.
* Otherwise, carry on. */
- def checkFirstColumnDistinct( tsv: TSVLoadFile )(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
+ def checkFirstColumnDistinct(
+ tsv: TSVLoadFile
+ )(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
val entitiesToUpdate = tsv.tsvData.map(_.headOption.get)
val distinctEntities = entitiesToUpdate.distinct
- if ( entitiesToUpdate.size != distinctEntities.size ) {
- Future( RequestCompleteWithErrorReport(BadRequest,
- "Duplicated entities are not allowed in TSV: " + entitiesToUpdate.diff(distinctEntities).distinct.mkString(", ")) )
+ if (entitiesToUpdate.size != distinctEntities.size) {
+ Future(
+ RequestCompleteWithErrorReport(BadRequest,
+ "Duplicated entities are not allowed in TSV: " + entitiesToUpdate
+ .diff(distinctEntities)
+ .distinct
+ .mkString(", ")
+ )
+ )
} else {
op
}
@@ -81,41 +104,51 @@ trait TSVFileSupport {
* Collection type entities have typed members enforced by the schema. If the provided entity type exists, returns
* Some( its_member_type ) if it's a collection, or None if it isn't.
* Bails with a 400 Bad Request if the provided entity type is unknown to the schema. */
- def withMemberCollectionType(entityType: String, modelSchema: ModelSchema)(op: Option[String] => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
+ def withMemberCollectionType(entityType: String, modelSchema: ModelSchema)(
+ op: Option[String] => Future[PerRequestMessage]
+ )(implicit ec: ExecutionContext): Future[PerRequestMessage] =
modelSchema.getCollectionMemberType(entityType) match {
- case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
+ case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(memberTypeOpt) => op(memberTypeOpt)
}
- }
/**
* Bail with a 400 Bad Request if the tsv is trying to set members on a collection type.
* Otherwise, carry on. */
- def checkNoCollectionMemberAttribute( tsv: TSVLoadFile, memberTypeOpt: Option[String] )(op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
- if( memberTypeOpt.isDefined && tsv.headers.contains(memberTypeOpt.get + "_id") ) {
- Future( RequestCompleteWithErrorReport(BadRequest,
- "Can't set collection members along with other attributes; please use two-column TSV format or remove " +
- memberTypeOpt.get + "_id from your tsv.") )
+ def checkNoCollectionMemberAttribute(tsv: TSVLoadFile, memberTypeOpt: Option[String])(
+ op: => Future[PerRequestMessage]
+ )(implicit ec: ExecutionContext): Future[PerRequestMessage] =
+ if (memberTypeOpt.isDefined && tsv.headers.contains(memberTypeOpt.get + "_id")) {
+ Future(
+ RequestCompleteWithErrorReport(
+ BadRequest,
+ "Can't set collection members along with other attributes; please use two-column TSV format or remove " +
+ memberTypeOpt.get + "_id from your tsv."
+ )
+ )
} else {
op
}
- }
- def validateMembershipTSV(tsv: TSVLoadFile, membersType: Option[String]) (op: => Future[PerRequestMessage])(implicit ec: ExecutionContext): Future[PerRequestMessage] = {
- //This magical list of conditions determines whether the TSV is populating the "members" attribute of a collection type entity.
- if( membersType.isEmpty ) {
+ def validateMembershipTSV(tsv: TSVLoadFile, membersType: Option[String])(op: => Future[PerRequestMessage])(implicit
+ ec: ExecutionContext
+ ): Future[PerRequestMessage] =
+ // This magical list of conditions determines whether the TSV is populating the "members" attribute of a collection type entity.
+ if (membersType.isEmpty) {
Future(
- RequestCompleteWithErrorReport(BadRequest,"Invalid membership TSV. Entity type must be a collection type") )
- } else if( tsv.headers.length != 2 ){
+ RequestCompleteWithErrorReport(BadRequest, "Invalid membership TSV. Entity type must be a collection type")
+ )
+ } else if (tsv.headers.length != 2) {
+ Future(RequestCompleteWithErrorReport(BadRequest, "Invalid membership TSV. Must have exactly two columns"))
+ } else if (tsv.headers != Seq(tsv.firstColumnHeader, membersType.get)) {
Future(
- RequestCompleteWithErrorReport(BadRequest, "Invalid membership TSV. Must have exactly two columns") )
- } else if( tsv.headers != Seq(tsv.firstColumnHeader, membersType.get) ) {
- Future(
- RequestCompleteWithErrorReport(BadRequest, "Invalid membership TSV. Second column header should be " + membersType.get) )
+ RequestCompleteWithErrorReport(BadRequest,
+ "Invalid membership TSV. Second column header should be " + membersType.get
+ )
+ )
} else {
op
}
- }
/*
Takes a TSVLoadFile for **workspace attributes** and turns it into sequence of AttributeUpdateOperation
@@ -131,7 +164,6 @@ trait TSVFileSupport {
}
}
-
val upsertAttrOperation: (String, AttributeString) = "op" -> AttributeString("AddUpdateAttribute")
val removeAttrOperation: (String, AttributeString) = "op" -> AttributeString("RemoveAttribute")
val addListMemberOperation: (String, AttributeString) = "op" -> AttributeString("AddListMember")
@@ -146,40 +178,40 @@ trait TSVFileSupport {
/*
Creates an AttributeValue whose implementation is more closely tied to the value of the input.
*/
- def stringToTypedAttribute(value: String): Attribute = {
- Try (java.lang.Integer.parseInt(value)) match {
+ def stringToTypedAttribute(value: String): Attribute =
+ Try(java.lang.Integer.parseInt(value)) match {
case Success(intValue) => AttributeNumber(intValue)
- case Failure(_) => Try (java.lang.Double.parseDouble(value)) match {
- // because we represent AttributeNumber as a BigDecimal, and BigDecimal has no concept of infinity or NaN,
- // if we find infinite/NaN numbers here, don't save them as AttributeNumber; instead let them fall through
- // to AttributeString.
- case Success(doubleValue) if !Double.NegativeInfinity.equals(doubleValue)
- && !Double.PositiveInfinity.equals(doubleValue)
- && !Double.NaN.equals(doubleValue)
- && !matchesLiteral(value) =>
- AttributeNumber(doubleValue)
- case _ => Try(BooleanUtils.toBoolean(value.toLowerCase, "true", "false")) match {
- case Success(booleanValue) => AttributeBoolean(booleanValue)
- case Failure(_) =>
- Try(value.parseJson.convertTo[AttributeEntityReference]) match {
- case Success(ref) => ref
- case Failure(_) => AttributeString(value)
+ case Failure(_) =>
+ Try(java.lang.Double.parseDouble(value)) match {
+ // because we represent AttributeNumber as a BigDecimal, and BigDecimal has no concept of infinity or NaN,
+ // if we find infinite/NaN numbers here, don't save them as AttributeNumber; instead let them fall through
+ // to AttributeString.
+ case Success(doubleValue)
+ if !Double.NegativeInfinity.equals(doubleValue)
+ && !Double.PositiveInfinity.equals(doubleValue)
+ && !Double.NaN.equals(doubleValue)
+ && !matchesLiteral(value) =>
+ AttributeNumber(doubleValue)
+ case _ =>
+ Try(BooleanUtils.toBoolean(value.toLowerCase, "true", "false")) match {
+ case Success(booleanValue) => AttributeBoolean(booleanValue)
+ case Failure(_) =>
+ Try(value.parseJson.convertTo[AttributeEntityReference]) match {
+ case Success(ref) => ref
+ case Failure(_) => AttributeString(value)
+ }
}
}
- }
}
- }
- def checkForJson(value: String): Attribute = {
+ def checkForJson(value: String): Attribute =
Try(value.parseJson) match {
- case Success(_: JsObject) => AttributeValueRawJson(value)
- case _ => AttributeString(value)
- }
- }
+ case Success(_: JsObject) => AttributeValueRawJson(value)
+ case _ => AttributeString(value)
+ }
- def matchesLiteral(value: String): Boolean = {
+ def matchesLiteral(value: String): Boolean =
value.toLowerCase().endsWith("d") || value.toLowerCase().endsWith("f")
- }
/**
* colInfo is a list of (headerName, refType), where refType is the type of the entity if the headerName is an AttributeRef
@@ -190,35 +222,48 @@ trait TSVFileSupport {
* to tell the TSV uploader to honor the blanks and delete those values. To preserve backwards compatibility, we will now allow
* the user to optionally set deleteEmptyValues to true. The default is the original behavior.
* */
- def setAttributesOnEntity(entityType: String, memberTypeOpt: Option[String], row: Seq[String], colInfo: Seq[(String,Option[String])], modelSchema: ModelSchema, deleteEmptyValues: Boolean = false): EntityUpdateDefinition = {
- //Iterate over the attribute names and their values
- //I (hussein) think the refTypeOpt.isDefined is to ensure that if required attributes are left empty, the empty
- //string gets passed to Rawls, which should error as they're required?
- val ops = for { (attributeValue, (attributeName, refTypeOpt)) <- row.tail zip colInfo if refTypeOpt.isDefined || (attributeValue.nonEmpty || deleteEmptyValues)} yield {
- refTypeOpt match {
- case Some(refType) => Seq(Map(upsertAttrOperation, nameEntry(attributeName), valEntry(AttributeEntityReference(refType, attributeValue))))
- case None =>
- attributeValue match {
- case "__DELETE__" => Seq(Map(removeAttrOperation, nameEntry(attributeName)))
- case value if deleteEmptyValues && value.trim.isEmpty => Seq(Map(removeAttrOperation, nameEntry(attributeName)))
- case value if modelSchema.isAttributeArray(value) => generateAttributeArrayOperations(value, attributeName)
- case _ => Seq(Map(upsertAttrOperation, nameEntry(attributeName), valEntry(stringToTypedAttribute(attributeValue))))
- }
- }
+ def setAttributesOnEntity(entityType: String,
+ memberTypeOpt: Option[String],
+ row: Seq[String],
+ colInfo: Seq[(String, Option[String])],
+ modelSchema: ModelSchema,
+ deleteEmptyValues: Boolean = false
+ ): EntityUpdateDefinition = {
+ // Iterate over the attribute names and their values
+ // I (hussein) think the refTypeOpt.isDefined is to ensure that if required attributes are left empty, the empty
+ // string gets passed to Rawls, which should error as they're required?
+ val ops = for {
+ (attributeValue, (attributeName, refTypeOpt)) <- row.tail zip colInfo
+ if refTypeOpt.isDefined || (attributeValue.nonEmpty || deleteEmptyValues)
+ } yield refTypeOpt match {
+ case Some(refType) =>
+ Seq(
+ Map(upsertAttrOperation,
+ nameEntry(attributeName),
+ valEntry(AttributeEntityReference(refType, attributeValue))
+ )
+ )
+ case None =>
+ attributeValue match {
+ case "__DELETE__" => Seq(Map(removeAttrOperation, nameEntry(attributeName)))
+ case value if deleteEmptyValues && value.trim.isEmpty =>
+ Seq(Map(removeAttrOperation, nameEntry(attributeName)))
+ case value if modelSchema.isAttributeArray(value) => generateAttributeArrayOperations(value, attributeName)
+ case _ =>
+ Seq(Map(upsertAttrOperation, nameEntry(attributeName), valEntry(stringToTypedAttribute(attributeValue))))
+ }
}
- //If we're upserting a collection type entity, add an AddListMember( members_attr, null ) operation.
- //This will force the members_attr attribute to exist if it's being created for the first time.
+ // If we're upserting a collection type entity, add an AddListMember( members_attr, null ) operation.
+ // This will force the members_attr attribute to exist if it's being created for the first time.
val collectionMemberAttrOp: Option[Map[String, Attribute]] =
- if (modelSchema.isCollectionType(entityType)) {
- val membersAttributeName = modelSchema.getPlural(memberTypeOpt.get).get
- Some(Map(
- createRefListOperation,
- "attributeListName"->AttributeString(membersAttributeName)))
- } else {
- None
- }
- EntityUpdateDefinition(row.headOption.get,entityType,ops.flatten ++ collectionMemberAttrOp )
+ if (modelSchema.isCollectionType(entityType)) {
+ val membersAttributeName = modelSchema.getPlural(memberTypeOpt.get).get
+ Some(Map(createRefListOperation, "attributeListName" -> AttributeString(membersAttributeName)))
+ } else {
+ None
+ }
+ EntityUpdateDefinition(row.headOption.get, entityType, ops.flatten ++ collectionMemberAttrOp)
}
def generateAttributeArrayOperations(attributeValue: String, attributeName: String): Seq[Map[String, Attribute]] = {
@@ -227,9 +272,11 @@ trait TSVFileSupport {
def addListEntry(attrVal: AttributeListElementable) =
Map(addListMemberOperation, listNameEntry(attributeName), listValEntry(attrVal))
- //if the list is empty, short-circuit and just replace any existing list with an empty list
- if(listElements.isEmpty) {
- Seq(Map(removeAttrOperation, nameEntry(attributeName)), Map(createAttrValueListOperation, nameEntry(attributeName)))
+ // if the list is empty, short-circuit and just replace any existing list with an empty list
+ if (listElements.isEmpty) {
+ Seq(Map(removeAttrOperation, nameEntry(attributeName)),
+ Map(createAttrValueListOperation, nameEntry(attributeName))
+ )
} else {
// validate that all elements in the list are the same datatype.
@@ -237,15 +284,17 @@ trait TSVFileSupport {
// be equal
val headClass = listElements.head.getClass
if (listElements.exists(_.getClass != headClass) && !listElements.forall(_.isInstanceOf[JsBoolean])) {
- throw new FireCloudExceptionWithErrorReport(ErrorReport(BadRequest, "Mixed-type entity attribute lists are not supported."))
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(BadRequest, "Mixed-type entity attribute lists are not supported.")
+ )
}
// since we know all list elements are the same datatype, we can match on them individually
val addElements = listElements map {
- case jsstr:JsString => addListEntry(AttributeString(jsstr.value))
- case jsnum:JsNumber => addListEntry(AttributeNumber(jsnum.value))
- case jsbool:JsBoolean => addListEntry(AttributeBoolean(jsbool.value))
- case jsobj:JsObject =>
+ case jsstr: JsString => addListEntry(AttributeString(jsstr.value))
+ case jsnum: JsNumber => addListEntry(AttributeNumber(jsnum.value))
+ case jsbool: JsBoolean => addListEntry(AttributeBoolean(jsbool.value))
+ case jsobj: JsObject =>
val entRefAttempt = Try(jsobj.convertTo[AttributeEntityReference])
entRefAttempt match {
case Success(ref) => addListEntry(ref)
@@ -253,7 +302,7 @@ trait TSVFileSupport {
throw new FireCloudExceptionWithErrorReport(ErrorReport(BadRequest, UNSUPPORTED_ARRAY_TYPE_ERROR_MSG))
}
case jsArray: JsArray => addListEntry(AttributeValueRawJson(jsArray.compactPrint))
- case _ =>
+ case _ =>
// if we hit this case, it means we have a homogenous array, but the elements' datatype
// is not one we support
throw new FireCloudExceptionWithErrorReport(ErrorReport(BadRequest, UNSUPPORTED_ARRAY_TYPE_ERROR_MSG))
@@ -263,6 +312,7 @@ trait TSVFileSupport {
}
}
- val UNSUPPORTED_ARRAY_TYPE_ERROR_MSG = "Only arrays of strings, numbers, booleans, or entity references are supported."
+ val UNSUPPORTED_ARRAY_TYPE_ERROR_MSG =
+ "Only arrays of strings, numbers, booleans, or entity references are supported."
}
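
Note on TSVFileSupport above: stringToTypedAttribute cascades through Int, then Double (rejecting NaN/Infinity and values that merely look like numeric literals such as "1f" or "2d"), then Boolean, then an entity reference, before falling back to a plain string. A standalone sketch of that cascade with a simplified stand-in ADT (and without the entity-reference JSON step):

import scala.util.{Success, Try}

object TypedAttributeSketch extends App {
  // Simplified stand-ins for the rawls Attribute hierarchy.
  sealed trait Attr
  final case class AttrNumber(value: BigDecimal) extends Attr
  final case class AttrBoolean(value: Boolean) extends Attr
  final case class AttrString(value: String) extends Attr

  // Values like "1d" or "2f" parse as doubles but should stay strings.
  def matchesLiteral(value: String): Boolean =
    value.toLowerCase.endsWith("d") || value.toLowerCase.endsWith("f")

  def stringToTyped(value: String): Attr =
    Try(java.lang.Integer.parseInt(value)) match {
      case Success(i) => AttrNumber(BigDecimal(i))
      case _ =>
        Try(java.lang.Double.parseDouble(value)) match {
          // BigDecimal cannot represent NaN/Infinity, so those fall through to strings.
          case Success(d) if !d.isNaN && !d.isInfinity && !matchesLiteral(value) => AttrNumber(BigDecimal(d))
          case _ =>
            Try(value.toLowerCase match {
              case "true"  => true
              case "false" => false
            }) match {
              case Success(b) => AttrBoolean(b)
              case _          => AttrString(value) // the real code also tries an entity-reference JSON parse here
            }
        }
    }

  List("42", "3.14", "NaN", "1f", "true", "sample_1").foreach(v => println(v -> stringToTyped(v)))
}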
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/UserService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/UserService.scala
index e5d3e7343..71042874f 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/UserService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/UserService.scala
@@ -7,9 +7,19 @@ import akka.http.scaladsl.model.StatusCodes
import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.{Application, FireCloudConfig}
import org.broadinstitute.dsde.firecloud.dataaccess.{GoogleServicesDAO, RawlsDAO, ThurloeDAO}
-import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.{impProfileWrapper, impTerraPreference, impUserImportPermission}
+import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.{
+ impProfileWrapper,
+ impTerraPreference,
+ impUserImportPermission
+}
import org.broadinstitute.dsde.firecloud.model.Project.CreationStatuses
-import org.broadinstitute.dsde.firecloud.model.{ProfileWrapper, RequestCompleteWithErrorReport, TerraPreference, UserImportPermission, UserInfo}
+import org.broadinstitute.dsde.firecloud.model.{
+ ProfileWrapper,
+ RequestCompleteWithErrorReport,
+ TerraPreference,
+ UserImportPermission,
+ UserInfo
+}
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
import org.broadinstitute.dsde.rawls.model.WorkspaceAccessLevels
import org.parboiled.common.FileUtils
@@ -18,8 +28,6 @@ import spray.json.DefaultJsonProtocol
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
-
-
object UserService {
val TerraPreferenceKey = "preferTerra"
val TerraPreferenceLastUpdatedKey = "preferTerraLastUpdated"
@@ -34,8 +42,14 @@ object UserService {
}
-class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO: GoogleServicesDAO, userToken: UserInfo)(implicit protected val executionContext: ExecutionContext)
- extends LazyLogging with SprayJsonSupport with DefaultJsonProtocol {
+class UserService(rawlsDAO: RawlsDAO,
+ thurloeDAO: ThurloeDAO,
+ googleServicesDAO: GoogleServicesDAO,
+ userToken: UserInfo
+)(implicit protected val executionContext: ExecutionContext)
+ extends LazyLogging
+ with SprayJsonSupport
+ with DefaultJsonProtocol {
def importPermission(): Future[PerRequestMessage] = {
// start two requests, in parallel, to fire off workspace list and billing project list
@@ -48,42 +62,40 @@ class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO:
// canCompute, so the effort is somewhat high.
for {
hasProject <- billingProjects.map(_.exists(_.creationStatus == CreationStatuses.Ready))
- hasWorkspace <- workspaces.map { ws => ws.exists(_.accessLevel.compare(WorkspaceAccessLevels.Write) >= 0) }
- } yield
- RequestComplete(StatusCodes.OK, UserImportPermission(
- billingProject = hasProject,
- writableWorkspace = hasWorkspace))
+ hasWorkspace <- workspaces.map(ws => ws.exists(_.accessLevel.compare(WorkspaceAccessLevels.Write) >= 0))
+ } yield RequestComplete(StatusCodes.OK,
+ UserImportPermission(billingProject = hasProject, writableWorkspace = hasWorkspace)
+ )
}
- private def getProfileValue(profileWrapper: ProfileWrapper, targetKey: String): Option[String] = {
+ private def getProfileValue(profileWrapper: ProfileWrapper, targetKey: String): Option[String] =
profileWrapper.keyValuePairs
.find(_.key.contains(targetKey)) // .find returns Option[FireCloudKeyValue]
.flatMap(_.value) // .value returns Option[String]
- }
def getTerraPreference: Future[PerRequestMessage] = {
// so, so many nested Options ...
- val futurePref: Future[TerraPreference] = thurloeDAO.getAllKVPs(userToken.id, userToken) map { // .getAllKVPs returns Option[ProfileWrapper]
- case None => TerraPreference(preferTerra = true, 0)
- case Some(wrapper) => {
- val pref: Boolean = Try(getProfileValue(wrapper, UserService.TerraPreferenceKey).getOrElse("true").toBoolean)
- .toOption.getOrElse(true)
- val updated: Long = Try(getProfileValue(wrapper, UserService.TerraPreferenceLastUpdatedKey).getOrElse("0").toLong)
- .toOption.getOrElse(0L)
- TerraPreference(pref, updated)
+ val futurePref: Future[TerraPreference] =
+ thurloeDAO.getAllKVPs(userToken.id, userToken) map { // .getAllKVPs returns Option[ProfileWrapper]
+ case None => TerraPreference(preferTerra = true, 0)
+ case Some(wrapper) =>
+ val pref: Boolean = Try(
+ getProfileValue(wrapper, UserService.TerraPreferenceKey).getOrElse("true").toBoolean
+ ).toOption.getOrElse(true)
+ val updated: Long = Try(
+ getProfileValue(wrapper, UserService.TerraPreferenceLastUpdatedKey).getOrElse("0").toLong
+ ).toOption.getOrElse(0L)
+ TerraPreference(pref, updated)
}
- }
futurePref map { pref: TerraPreference => RequestComplete(pref) }
}
- def setTerraPreference(): Future[PerRequestMessage] = {
+ def setTerraPreference(): Future[PerRequestMessage] =
writeTerraPreference(prefValue = true)
- }
- def deleteTerraPreference(): Future[PerRequestMessage] = {
+ def deleteTerraPreference(): Future[PerRequestMessage] =
writeTerraPreference(prefValue = false)
- }
private def writeTerraPreference(prefValue: Boolean): Future[PerRequestMessage] = {
val kvpsToUpdate = Map(
@@ -94,8 +106,10 @@ class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO:
logger.info(s"${userToken.userEmail} (${userToken.id}) setting Terra preference to $prefValue")
thurloeDAO.saveKeyValues(userToken, kvpsToUpdate) flatMap {
- case Failure(exception) => Future(RequestCompleteWithErrorReport(StatusCodes.InternalServerError,
- "could not save Terra preference", exception))
+ case Failure(exception) =>
+ Future(
+ RequestCompleteWithErrorReport(StatusCodes.InternalServerError, "could not save Terra preference", exception)
+ )
case Success(_) => getTerraPreference
}
}
@@ -113,32 +127,32 @@ class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO:
logger.info(s"${userToken.userEmail} (${userToken.id}) setting anonymousGroup to $anonymousGroupName")
thurloeDAO.saveKeyValues(userToken, kvpsToUpdate) flatMap {
- case Failure(exception) => Future(RequestCompleteWithErrorReport(StatusCodes.InternalServerError,
- "could not save Anonymous Group", exception))
- case Success(_) => {
+ case Failure(exception) =>
+ Future(
+ RequestCompleteWithErrorReport(StatusCodes.InternalServerError, "could not save Anonymous Group", exception)
+ )
+ case Success(_) =>
val futureAllKeys: Future[ProfileWrapper] = getAllKeysFromThurloe(userToken)
- futureAllKeys map { keys => RequestComplete(keys)}
- }
+ futureAllKeys map { keys => RequestComplete(keys) }
}
}
def getNewAnonymousGroupName: String = {
// randomly generate the anonymousGroupName, which follows format: terra-user-adjective-noun-endOfUUID@supportdomain.org
val anonymousGroupUUID: UUID = UUID.randomUUID()
- val anonymousGroupName: String = ( FireCloudConfig.FireCloud.supportPrefix
+ val anonymousGroupName: String = (FireCloudConfig.FireCloud.supportPrefix
+ getWord(anonymousGroupUUID.getMostSignificantBits(), UserService.randomAdjectiveList) + "-"
+ getWord(anonymousGroupUUID.getLeastSignificantBits(), UserService.randomNounList) + "-"
+ anonymousGroupUUID.toString().split("-")(4)
- + "@" + FireCloudConfig.FireCloud.supportDomain )
+ + "@" + FireCloudConfig.FireCloud.supportDomain)
anonymousGroupName
}
- private def getAllKeysFromThurloe(userToken: UserInfo): Future[ProfileWrapper] = {
+ private def getAllKeysFromThurloe(userToken: UserInfo): Future[ProfileWrapper] =
thurloeDAO.getAllKVPs(userToken.id, userToken) map { // .getAllKVPs returns Option[ProfileWrapper]
- case None => ProfileWrapper(userToken.id, List())
+ case None => ProfileWrapper(userToken.id, List())
case Some(wrapper) => wrapper
}
- }
/**
* creates a new anonymized Google group for the user and adds the user's contact email to the new Google group.
@@ -150,27 +164,26 @@ class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO:
def setupAnonymizedGoogleGroup(keys: ProfileWrapper, anonymousGroupName: String): Future[PerRequestMessage] = {
// define userEmail to add to google Group - check first for contactEmail, otherwise use user's login email
val userEmail = getProfileValue(keys, UserService.ContactEmailKey) match {
- case None | Some ("") => userToken.userEmail
- case Some (contactEmail) => contactEmail // if there is a non-empty value set for contactEmail, we assume contactEmail is a valid email
+ case None | Some("") => userToken.userEmail
+ case Some(contactEmail) =>
+ contactEmail // if there is a non-empty value set for contactEmail, we assume contactEmail is a valid email
}
// create the new anonymized Google group
- googleServicesDAO.createGoogleGroup(anonymousGroupName) match { // returns Option.empty if group creation not successful
- case None => {
+ googleServicesDAO
+ .createGoogleGroup(anonymousGroupName) match { // returns Option.empty if group creation not successful
+ case None =>
Future(RequestComplete(keys))
- }
- case Some(groupEmailName) => {
+ case Some(groupEmailName) =>
// if Google group creation was successful, add the user's email address to the group
- googleServicesDAO.addMemberToAnonymizedGoogleGroup(groupEmailName, userEmail) match { // returns Option.empty if user addition is not successful
- case None => {
+ googleServicesDAO
+ .addMemberToAnonymizedGoogleGroup(groupEmailName, userEmail) match { // returns Option.empty if user addition is not successful
+ case None =>
Future(RequestComplete(keys))
- }
- case Some(_) => {
+ case Some(_) =>
// only if the anonymized Google group was successfully created and user email added to group
writeAnonymousGroup(userToken, groupEmailName) // write new KVP to Thurloe
- }
}
- }
}
}
@@ -186,23 +199,20 @@ class UserService(rawlsDAO: RawlsDAO, thurloeDAO: ThurloeDAO, googleServicesDAO:
* @return
*/
def getAllUserKeys: Future[PerRequestMessage] = {
- val futureKeys:Future[ProfileWrapper] = getAllKeysFromThurloe(userToken)
+ val futureKeys: Future[ProfileWrapper] = getAllKeysFromThurloe(userToken)
futureKeys flatMap { keys: ProfileWrapper =>
getProfileValue(keys, UserService.AnonymousGroupKey) match { // getProfileValue returns Option[String]
- case None | Some("") if FireCloudConfig.GoogleCloud.enabled => {
+ case None | Some("") if FireCloudConfig.GoogleCloud.enabled =>
setupAnonymizedGoogleGroup(keys, getNewAnonymousGroupName)
- }
- case _ => {
+ case _ =>
Future(RequestComplete(keys))
- }
}
}
}
- def getUserProfileGoogle: Future[PerRequestMessage] = {
+ def getUserProfileGoogle: Future[PerRequestMessage] =
googleServicesDAO.getUserProfile(userToken).map { resp =>
RequestComplete(resp)
}
- }
}
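
For reference, a minimal standalone sketch of the parse-with-defaults pattern used by getTerraPreference above, with a plain Map standing in for Thurloe's ProfileWrapper; the TerraPreferenceSketch and PreferenceParsingSketch names are hypothetical.

import scala.util.Try

final case class TerraPreferenceSketch(preferTerra: Boolean, lastUpdated: Long)

object PreferenceParsingSketch {
  // Missing or malformed values fall back to the defaults (preferTerra = true, lastUpdated = 0L),
  // mirroring the Try(...).toOption.getOrElse(...) chain above.
  def parse(kvps: Map[String, String]): TerraPreferenceSketch = {
    val pref    = Try(kvps.getOrElse("preferTerra", "true").toBoolean).toOption.getOrElse(true)
    val updated = Try(kvps.getOrElse("preferTerraLastUpdated", "0").toLong).toOption.getOrElse(0L)
    TerraPreferenceSketch(pref, updated)
  }

  def main(args: Array[String]): Unit = {
    println(parse(Map.empty))                                                         // TerraPreferenceSketch(true,0)
    println(parse(Map("preferTerra" -> "false", "preferTerraLastUpdated" -> "oops"))) // TerraPreferenceSketch(false,0)
  }
}
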
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspacePublishingSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspacePublishingSupport.scala
index 69cddf55b..9f77719ce 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspacePublishingSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspacePublishingSupport.scala
@@ -15,18 +15,20 @@ trait WorkspacePublishingSupport extends LibraryServiceSupport {
implicit val userToken: WithAccessToken
- def publishDocument(ws: WorkspaceDetails, ontologyDAO: OntologyDAO, searchDAO: SearchDAO)(implicit userToken: WithAccessToken): Future[Unit] = {
+ def publishDocument(ws: WorkspaceDetails, ontologyDAO: OntologyDAO, searchDAO: SearchDAO)(implicit
+ userToken: WithAccessToken
+ ): Future[Unit] =
indexableDocuments(Seq(ws), ontologyDAO) map { ws =>
assert(ws.size == 1)
searchDAO.indexDocument(ws.head)
}
- }
- def removeDocument(ws: WorkspaceDetails, searchDAO: SearchDAO): Unit = {
+ def removeDocument(ws: WorkspaceDetails, searchDAO: SearchDAO): Unit =
searchDAO.deleteDocument(ws.workspaceId)
- }
- def republishDocument(ws: WorkspaceDetails, ontologyDAO: OntologyDAO, searchDAO: SearchDAO)(implicit userToken: WithAccessToken): Future[Unit] = {
+ def republishDocument(ws: WorkspaceDetails, ontologyDAO: OntologyDAO, searchDAO: SearchDAO)(implicit
+ userToken: WithAccessToken
+ ): Future[Unit] =
if (isPublished(ws)) {
// if already published, republish
// we do not need to delete before republish
@@ -34,17 +36,19 @@ trait WorkspacePublishingSupport extends LibraryServiceSupport {
} else {
Future.successful(())
}
- }
- def isPublished(workspaceResponse: WorkspaceResponse): Boolean = {
+ def isPublished(workspaceResponse: WorkspaceResponse): Boolean =
isPublished(workspaceResponse.workspace)
- }
- def isPublished(workspace: WorkspaceDetails): Boolean = {
+ def isPublished(workspace: WorkspaceDetails): Boolean =
workspace.attributes.getOrElse(Map.empty).get(publishedFlag).fold(false)(_.asInstanceOf[AttributeBoolean].value)
- }
- def setWorkspacePublishedStatus(ws: WorkspaceDetails, publishArg: Boolean, rawlsDAO: RawlsDAO, ontologyDAO: OntologyDAO, searchDAO: SearchDAO)(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
+ def setWorkspacePublishedStatus(ws: WorkspaceDetails,
+ publishArg: Boolean,
+ rawlsDAO: RawlsDAO,
+ ontologyDAO: OntologyDAO,
+ searchDAO: SearchDAO
+ )(implicit userToken: WithAccessToken): Future[WorkspaceDetails] =
rawlsDAO.updateLibraryAttributes(ws.namespace, ws.name, updatePublishAttribute(publishArg)) flatMap { workspace =>
val docPublishFuture = if (publishArg) {
publishDocument(workspace, ontologyDAO, searchDAO)
@@ -52,13 +56,11 @@ trait WorkspacePublishingSupport extends LibraryServiceSupport {
Future(removeDocument(workspace, searchDAO))
}
- docPublishFuture.map(_ => workspace).recover {
- case throwable: Throwable =>
- val message = s"Unable to update this workspace, ${ws.namespace}:${ws.name}, to $publishArg in elastic search."
- logger.error(message, throwable)
- throw new FireCloudException(message, throwable)
+ docPublishFuture.map(_ => workspace).recover { case throwable: Throwable =>
+ val message = s"Unable to update this workspace, ${ws.namespace}:${ws.name}, to $publishArg in elastic search."
+ logger.error(message, throwable)
+ throw new FireCloudException(message, throwable)
}
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceService.scala
index 20d595974..506bb96ef 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceService.scala
@@ -8,11 +8,20 @@ import org.broadinstitute.dsde.firecloud.dataaccess._
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.ShareLog.ShareType
import org.broadinstitute.dsde.firecloud.model.{RequestCompleteWithErrorReport, _}
-import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete, RequestCompleteWithHeaders}
+import org.broadinstitute.dsde.firecloud.service.PerRequest.{
+ PerRequestMessage,
+ RequestComplete,
+ RequestCompleteWithHeaders
+}
import org.broadinstitute.dsde.firecloud.utils.{PermissionsSupport, TSVFormatter, TSVLoadFile, TSVParser}
import org.broadinstitute.dsde.firecloud.{Application, FireCloudExceptionWithErrorReport}
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
-import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddListMember, AddUpdateAttribute, AttributeUpdateOperation, RemoveListMember}
+import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{
+ AddListMember,
+ AddUpdateAttribute,
+ AttributeUpdateOperation,
+ RemoveListMember
+}
import org.broadinstitute.dsde.rawls.model.WorkspaceACLJsonSupport._
import org.broadinstitute.dsde.rawls.model._
import org.broadinstitute.dsde.workbench.model.google.GoogleProject
@@ -26,65 +35,106 @@ import scala.util.{Failure, Success, Try}
*/
object WorkspaceService {
def constructor(app: Application)(userToken: WithAccessToken)(implicit executionContext: ExecutionContext) =
- new WorkspaceService(userToken, app.rawlsDAO, app.samDAO, app.thurloeDAO, app.googleServicesDAO, app.ontologyDAO, app.searchDAO, app.shareLogDAO)
+ new WorkspaceService(userToken,
+ app.rawlsDAO,
+ app.samDAO,
+ app.thurloeDAO,
+ app.googleServicesDAO,
+ app.ontologyDAO,
+ app.searchDAO,
+ app.shareLogDAO
+ )
}
-class WorkspaceService(protected val argUserToken: WithAccessToken, val rawlsDAO: RawlsDAO, val samDao: SamDAO, val thurloeDAO: ThurloeDAO, val googleServicesDAO: GoogleServicesDAO, val ontologyDAO: OntologyDAO, val searchDAO: SearchDAO, val shareLogDAO: ShareLogDAO)
- (implicit protected val executionContext: ExecutionContext) extends AttributeSupport with TSVFileSupport with PermissionsSupport with WorkspacePublishingSupport with SprayJsonSupport with LazyLogging {
+class WorkspaceService(protected val argUserToken: WithAccessToken,
+ val rawlsDAO: RawlsDAO,
+ val samDao: SamDAO,
+ val thurloeDAO: ThurloeDAO,
+ val googleServicesDAO: GoogleServicesDAO,
+ val ontologyDAO: OntologyDAO,
+ val searchDAO: SearchDAO,
+ val shareLogDAO: ShareLogDAO
+)(implicit protected val executionContext: ExecutionContext)
+ extends AttributeSupport
+ with TSVFileSupport
+ with PermissionsSupport
+ with WorkspacePublishingSupport
+ with SprayJsonSupport
+ with LazyLogging {
implicit val userToken: WithAccessToken = argUserToken
- def getStorageCostEstimate(workspaceNamespace: String, workspaceName: String): Future[RequestComplete[WorkspaceStorageCostEstimate]] = {
+ def getStorageCostEstimate(workspaceNamespace: String,
+ workspaceName: String
+ ): Future[RequestComplete[WorkspaceStorageCostEstimate]] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) flatMap { workspaceResponse =>
- samDao.getPetServiceAccountKeyForUser(userToken, GoogleProject(workspaceResponse.workspace.googleProject.value)) flatMap { petKey =>
+ samDao.getPetServiceAccountKeyForUser(userToken,
+ GoogleProject(workspaceResponse.workspace.googleProject.value)
+ ) flatMap { petKey =>
googleServicesDAO.getBucket(workspaceResponse.workspace.bucketName, petKey) match {
case Some(bucket) =>
rawlsDAO.getBucketUsage(workspaceNamespace, workspaceName).zip(googleServicesDAO.fetchPriceList) map {
- case (usage, priceList) =>
- val rate = priceList.prices.cpBigstoreStorage.getOrElse(bucket.getLocation.toLowerCase(), priceList.prices.cpBigstoreStorage("us"))
+ case (usage, priceList) =>
+ val rate = priceList.prices.cpBigstoreStorage.getOrElse(bucket.getLocation.toLowerCase(),
+ priceList.prices.cpBigstoreStorage("us")
+ )
// Convert bytes to GB since rate is based on GB.
val estimate: BigDecimal = BigDecimal(usage.usageInBytes) / (1024 * 1024 * 1024) * rate
RequestComplete(WorkspaceStorageCostEstimate(f"$$$estimate%.2f", usage.lastUpdated))
}
- case None => throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Unable to fetch bucket to calculate storage cost"))
+ case None =>
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Unable to fetch bucket to calculate storage cost")
+ )
}
}
}
- }
- def updateWorkspaceAttributes(workspaceNamespace: String, workspaceName: String, workspaceUpdateJson: Seq[AttributeUpdateOperation]) = {
+ def updateWorkspaceAttributes(workspaceNamespace: String,
+ workspaceName: String,
+ workspaceUpdateJson: Seq[AttributeUpdateOperation]
+ ) =
for {
ws <- rawlsDAO.patchWorkspaceAttributes(workspaceNamespace, workspaceName, workspaceUpdateJson)
_ <- republishDocument(ws, ontologyDAO, searchDAO)
} yield RequestComplete(ws)
- }
- def setWorkspaceAttributes(workspaceNamespace: String, workspaceName: String, newAttributes: AttributeMap) = {
+ def setWorkspaceAttributes(workspaceNamespace: String, workspaceName: String, newAttributes: AttributeMap) =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) flatMap { workspaceResponse =>
// this is technically vulnerable to a race condition in which the workspace attributes have changed
// between the time we retrieved them and here, where we update them.
- val allOperations = generateAttributeOperations(workspaceResponse.workspace.attributes.getOrElse(Map.empty), newAttributes, _.namespace != AttributeName.libraryNamespace)
+ val allOperations = generateAttributeOperations(workspaceResponse.workspace.attributes.getOrElse(Map.empty),
+ newAttributes,
+ _.namespace != AttributeName.libraryNamespace
+ )
for {
ws <- rawlsDAO.patchWorkspaceAttributes(workspaceNamespace, workspaceName, allOperations)
_ <- republishDocument(ws, ontologyDAO, searchDAO)
} yield RequestComplete(ws)
}
- }
- def getCatalog(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[PerRequestMessage] = {
+ def getCatalog(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[PerRequestMessage] =
asPermitted(workspaceNamespace, workspaceName, WorkspaceAccessLevels.Read, userInfo) {
rawlsDAO.getCatalog(workspaceNamespace, workspaceName) map (RequestComplete(_))
}
- }
- def updateCatalog(workspaceNamespace: String, workspaceName: String, updates: Seq[WorkspaceCatalog], userInfo: UserInfo): Future[PerRequestMessage] = {
+ def updateCatalog(workspaceNamespace: String,
+ workspaceName: String,
+ updates: Seq[WorkspaceCatalog],
+ userInfo: UserInfo
+ ): Future[PerRequestMessage] =
// can update if admin or owner of workspace
asPermitted(workspaceNamespace, workspaceName, WorkspaceAccessLevels.Owner, userInfo) {
rawlsDAO.patchCatalog(workspaceNamespace, workspaceName, updates) map (RequestComplete(_))
}
- }
- def updateWorkspaceACL(workspaceNamespace: String, workspaceName: String, aclUpdates: Seq[WorkspaceACLUpdate], originEmail: String, originId: String, inviteUsersNotFound: Boolean): Future[RequestComplete[WorkspaceACLUpdateResponseList]] = {
+ def updateWorkspaceACL(workspaceNamespace: String,
+ workspaceName: String,
+ aclUpdates: Seq[WorkspaceACLUpdate],
+ originEmail: String,
+ originId: String,
+ inviteUsersNotFound: Boolean
+ ): Future[RequestComplete[WorkspaceACLUpdateResponseList]] = {
def logShares(aclUpdateList: WorkspaceACLUpdateResponseList) = {
// this will log a share every time a workspace is shared with a user
// it will also log a share every time a workspace permission is changed
@@ -102,50 +152,73 @@ class WorkspaceService(protected val argUserToken: WithAccessToken, val rawlsDAO
}
}
- def exportWorkspaceAttributesTSV(workspaceNamespace: String, workspaceName: String, filename: String): Future[PerRequestMessage] = {
+ def exportWorkspaceAttributesTSV(workspaceNamespace: String,
+ workspaceName: String,
+ filename: String
+ ): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) map { workspaceResponse =>
val attributeFormat = new AttributeFormat with PlainArrayAttributeListSerializer
- val attributes = workspaceResponse.workspace.attributes.getOrElse(Map.empty).view.filterKeys(_ != AttributeName.withDefaultNS("description"))
- val headerString = "workspace:" + (attributes map { case (attName, _) => attName.name }).mkString(s"${TSVParser.DELIMITER}")
- val valueString = (attributes map { case (_, attValue) => TSVFormatter.tsvSafeAttribute(attValue) }).mkString(s"${TSVParser.DELIMITER}")
+ val attributes = workspaceResponse.workspace.attributes
+ .getOrElse(Map.empty)
+ .view
+ .filterKeys(_ != AttributeName.withDefaultNS("description"))
+ val headerString =
+ "workspace:" + (attributes map { case (attName, _) => attName.name }).mkString(s"${TSVParser.DELIMITER}")
+ val valueString = (attributes map { case (_, attValue) => TSVFormatter.tsvSafeAttribute(attValue) })
+ .mkString(s"${TSVParser.DELIMITER}")
// TODO: entity TSVs are downloaded as text/tab-separated-value, but workspace attributes are text/plain. Align these?
- RequestCompleteWithHeaders((StatusCodes.OK, headerString + "\n" + valueString),
+ RequestCompleteWithHeaders(
+ (StatusCodes.OK, headerString + "\n" + valueString),
`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> filename)),
- `Content-Type`(ContentTypes.`text/plain(UTF-8)`))
+ `Content-Type`(ContentTypes.`text/plain(UTF-8)`)
+ )
}
- }
- def importAttributesFromTSV(workspaceNamespace: String, workspaceName: String, tsvString: String): Future[PerRequestMessage] = {
+ def importAttributesFromTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsvString: String
+ ): Future[PerRequestMessage] =
withTSVFile(tsvString) { tsv =>
tsv.firstColumnHeader.split(":")(0) match {
case "workspace" =>
importWorkspaceAttributeTSV(workspaceNamespace, workspaceName, tsv)
case _ =>
- Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, "Invalid TSV. First column header should start with \"workspace\""))
+ Future.successful(
+ RequestCompleteWithErrorReport(StatusCodes.BadRequest,
+ "Invalid TSV. First column header should start with \"workspace\""
+ )
+ )
}
}
- }
- private def importWorkspaceAttributeTSV(workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile): Future[PerRequestMessage] = {
+ private def importWorkspaceAttributeTSV(workspaceNamespace: String,
+ workspaceName: String,
+ tsv: TSVLoadFile
+ ): Future[PerRequestMessage] =
checkNumberOfRows(tsv, 2) {
checkFirstRowDistinct(tsv) {
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) flatMap { workspaceResponse =>
Try(getWorkspaceAttributeCalls(tsv)) match {
- case Failure(regret) => Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest,
- "One or more of your values are not in the correct format"))
- case Success(attributeCalls) => rawlsDAO.patchWorkspaceAttributes(workspaceNamespace, workspaceName, attributeCalls) map (RequestComplete(_))
+ case Failure(regret) =>
+ Future.successful(
+ RequestCompleteWithErrorReport(StatusCodes.BadRequest,
+ "One or more of your values are not in the correct format"
+ )
+ )
+ case Success(attributeCalls) =>
+ rawlsDAO.patchWorkspaceAttributes(workspaceNamespace, workspaceName, attributeCalls) map (RequestComplete(
+ _
+ ))
}
}
}
}
- }
- def getTags(workspaceNamespace: String, workspaceName: String): Future[PerRequestMessage] = {
+ def getTags(workspaceNamespace: String, workspaceName: String): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) flatMap { workspaceResponse =>
val tags = getTagsFromWorkspace(workspaceResponse.workspace)
Future(RequestComplete(StatusCodes.OK, formatTags(tags)))
}
- }
def putTags(workspaceNamespace: String, workspaceName: String, tags: List[String]): Future[PerRequestMessage] = {
val attrList = AttributeValueList(tags map (tag => AttributeString(tag.trim)))
@@ -153,7 +226,10 @@ class WorkspaceService(protected val argUserToken: WithAccessToken, val rawlsDAO
patchAndRepublishWorkspace(workspaceNamespace, workspaceName, Seq(op))
}
- private def patchAndRepublishWorkspace(workspaceNamespace: String, workspaceName: String, ops: Seq[AttributeUpdateOperation]) = {
+ private def patchAndRepublishWorkspace(workspaceNamespace: String,
+ workspaceName: String,
+ ops: Seq[AttributeUpdateOperation]
+ ) =
for {
ws <- rawlsDAO.patchWorkspaceAttributes(workspaceNamespace, workspaceName, ops)
_ <- republishDocument(ws, ontologyDAO, searchDAO)
@@ -161,36 +237,46 @@ class WorkspaceService(protected val argUserToken: WithAccessToken, val rawlsDAO
val tags = getTagsFromWorkspace(ws)
RequestComplete(StatusCodes.OK, formatTags(tags))
}
- }
- def patchTags(workspaceNamespace: String, workspaceName: String, tags: List[String]): Future[PerRequestMessage] = {
+ def patchTags(workspaceNamespace: String, workspaceName: String, tags: List[String]): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName) flatMap { origWs =>
val origTags = getTagsFromWorkspace(origWs.workspace)
- val attrOps = (tags diff origTags) map (tag => AddListMember(AttributeName.withTagsNS(), AttributeString(tag.trim)))
+ val attrOps =
+ (tags diff origTags) map (tag => AddListMember(AttributeName.withTagsNS(), AttributeString(tag.trim)))
patchAndRepublishWorkspace(workspaceNamespace, workspaceName, attrOps)
}
- }
def deleteTags(workspaceNamespace: String, workspaceName: String, tags: List[String]): Future[PerRequestMessage] = {
val attrOps = tags map (tag => RemoveListMember(AttributeName.withTagsNS(), AttributeString(tag.trim)))
patchAndRepublishWorkspace(workspaceNamespace, workspaceName, attrOps)
}
- def unPublishSuccessMessage(workspaceNamespace: String, workspaceName: String): String = s" The workspace $workspaceNamespace:$workspaceName has been un-published."
+ def unPublishSuccessMessage(workspaceNamespace: String, workspaceName: String): String =
+ s" The workspace $workspaceNamespace:$workspaceName has been un-published."
- def deleteWorkspace(ns: String, name: String): Future[PerRequestMessage] = {
+ def deleteWorkspace(ns: String, name: String): Future[PerRequestMessage] =
rawlsDAO.getWorkspace(ns, name) flatMap { wsResponse =>
- val unpublishFuture: Future[WorkspaceDetails] = if (isPublished(wsResponse))
- setWorkspacePublishedStatus(wsResponse.workspace, publishArg = false, rawlsDAO, ontologyDAO, searchDAO)
- else
- Future.successful(wsResponse.workspace)
+ val unpublishFuture: Future[WorkspaceDetails] =
+ if (isPublished(wsResponse))
+ setWorkspacePublishedStatus(wsResponse.workspace, publishArg = false, rawlsDAO, ontologyDAO, searchDAO)
+ else
+ Future.successful(wsResponse.workspace)
unpublishFuture flatMap { ws =>
rawlsDAO.deleteWorkspace(ns, name) map { wsResponse =>
- RequestComplete(StatusCodes.Accepted, Some(List(wsResponse.getOrElse(""), unPublishSuccessMessage(ns, name)).mkString(" ")))
+ RequestComplete(StatusCodes.Accepted,
+ Some(List(wsResponse.getOrElse(""), unPublishSuccessMessage(ns, name)).mkString(" "))
+ )
}
} recover {
- case e: FireCloudExceptionWithErrorReport => RequestComplete(e.errorReport.statusCode.getOrElse(StatusCodes.InternalServerError), ErrorReport(message = s"You cannot delete this workspace: ${e.errorReport.message}"))
- case e: Throwable => RequestComplete(StatusCodes.InternalServerError, ErrorReport(message = s"You cannot delete this workspace: ${e.getMessage}"))
+ case e: FireCloudExceptionWithErrorReport =>
+ RequestComplete(
+ e.errorReport.statusCode.getOrElse(StatusCodes.InternalServerError),
+ ErrorReport(message = s"You cannot delete this workspace: ${e.errorReport.message}")
+ )
+ case e: Throwable =>
+ RequestComplete(StatusCodes.InternalServerError,
+ ErrorReport(message = s"You cannot delete this workspace: ${e.getMessage}")
+ )
}
} recoverWith {
// This case is only possible when a user owns a workspace, but has lost access to it because they have been removed
@@ -200,30 +286,26 @@ class WorkspaceService(protected val argUserToken: WithAccessToken, val rawlsDAO
// bother with unpublishing a workspace (that is strictly an Orch concept), but that is not a friendly UX, and we want to make our best
// attempt to unpublish the workspace if possible, although it is not critical. It is unlikely that this recoverWith would be
// reached for a published workspace anyway.
- case e: FireCloudExceptionWithErrorReport if e.errorReport.statusCode.contains(StatusCodes.NotFound) => {
+ case e: FireCloudExceptionWithErrorReport if e.errorReport.statusCode.contains(StatusCodes.NotFound) =>
rawlsDAO.deleteWorkspace(ns, name) map { wsResponse =>
RequestComplete(StatusCodes.Accepted, Some(wsResponse.getOrElse("")))
}
- }
}
- }
- def cloneWorkspace(namespace: String, name: String, cloneRequest: WorkspaceRequest): Future[PerRequestMessage] = {
+ def cloneWorkspace(namespace: String, name: String, cloneRequest: WorkspaceRequest): Future[PerRequestMessage] =
rawlsDAO.cloneWorkspace(namespace, name, cloneRequest).map { res =>
RequestComplete(StatusCodes.Created, res)
}
- }
- private def getTagsFromWorkspace(ws:WorkspaceDetails): Seq[String] = {
+ private def getTagsFromWorkspace(ws: WorkspaceDetails): Seq[String] =
ws.attributes.getOrElse(Map.empty).get(AttributeName.withTagsNS()) match {
- case Some(vals:AttributeValueList) => vals.list collect {
- case s:AttributeString => s.value
- }
+ case Some(vals: AttributeValueList) =>
+ vals.list collect { case s: AttributeString =>
+ s.value
+ }
case _ => Seq.empty[String]
}
- }
private def formatTags(tags: Seq[String]) = tags.toList.sortBy(_.toLowerCase)
-
}
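
For reference, a stripped-down sketch of the bytes-to-GB arithmetic inside getStorageCostEstimate above; the usage and per-GB rate values below are made up.

object StorageCostSketch {
  // Convert bytes to GB (the rate is per GB) and multiply, as in getStorageCostEstimate.
  def estimate(usageInBytes: Long, ratePerGb: BigDecimal): BigDecimal =
    BigDecimal(usageInBytes) / (1024 * 1024 * 1024) * ratePerGb

  def main(args: Array[String]): Unit = {
    val dollars = estimate(usageInBytes = 50L * 1024 * 1024 * 1024, ratePerGb = BigDecimal("0.02"))
    println(f"$$$dollars%.2f") // prints $1.00 for 50 GB at $0.02/GB
  }
}
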
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DateUtils.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DateUtils.scala
index 5f59d0585..70daa9a43 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DateUtils.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DateUtils.scala
@@ -1,65 +1,48 @@
package org.broadinstitute.dsde.firecloud.utils
-import org.joda.time.{Seconds, Hours, DateTime}
-
+import org.joda.time.{DateTime, Hours, Seconds}
object DateUtils {
val EPOCH = 1000L
-
- def nowPlus30Days: Long = {
+ def nowPlus30Days: Long =
nowDateTime.plusDays(30).getMillis / EPOCH
- }
- def nowMinus30Days: Long = {
+ def nowMinus30Days: Long =
nowDateTime.minusDays(30).getMillis / EPOCH
- }
- def nowPlus24Hours: Long = {
+ def nowPlus24Hours: Long =
nowDateTime.plusHours(24).getMillis / EPOCH
- }
- def nowMinus24Hours: Long = {
+ def nowMinus24Hours: Long =
nowDateTime.minusHours(24).getMillis / EPOCH
- }
- def nowPlus1Hour: Long = {
+ def nowPlus1Hour: Long =
nowDateTime.plusHours(1).getMillis / EPOCH
- }
- def nowMinus1Hour: Long = {
+ def nowMinus1Hour: Long =
nowDateTime.minusHours(1).getMillis / EPOCH
- }
- def hoursSince(seconds: Long): Int = {
+ def hoursSince(seconds: Long): Int =
Hours.hoursBetween(dtFromSeconds(seconds), nowDateTime).getHours
- }
- def hoursUntil(seconds: Long): Int = {
+ def hoursUntil(seconds: Long): Int =
Hours.hoursBetween(nowDateTime, dtFromSeconds(seconds)).getHours
- }
- def secondsSince(seconds: Long): Int = {
+ def secondsSince(seconds: Long): Int =
Seconds.secondsBetween(dtFromSeconds(seconds), nowDateTime).getSeconds
- }
-
- def now: Long = {
+ def now: Long =
nowDateTime.getMillis / EPOCH
- }
- def nowDateTime: DateTime = {
+ def nowDateTime: DateTime =
dtFromMillis(System.currentTimeMillis())
- }
- def dtFromMillis(millis: Long): DateTime = {
+ def dtFromMillis(millis: Long): DateTime =
new DateTime(millis)
- }
- def dtFromSeconds(seconds: Long): DateTime = {
+ def dtFromSeconds(seconds: Long): DateTime =
new DateTime(seconds * EPOCH)
- }
-
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DisabledServiceFactory.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DisabledServiceFactory.scala
index 94cfa04ce..249fa7bc6 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DisabledServiceFactory.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/DisabledServiceFactory.scala
@@ -1,7 +1,7 @@
package org.broadinstitute.dsde.firecloud.utils
import java.lang.reflect.Proxy
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.{classTag, ClassTag}
object DisabledServiceFactory {
@@ -18,7 +18,10 @@ object DisabledServiceFactory {
classTag[T].runtimeClass.getClassLoader,
Array(classTag[T].runtimeClass),
(_, method, _) =>
- if (method.getName.equals("isEnabled") && method.getParameterCount == 0 && method.getReturnType == classOf[Boolean])
+ if (
+ method.getName
+ .equals("isEnabled") && method.getParameterCount == 0 && method.getReturnType == classOf[Boolean]
+ )
false
else
throw new UnsupportedOperationException(s"${method.toGenericString} is disabled.")
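
For readers unfamiliar with the dynamic-proxy pattern reformatted above, a self-contained sketch: the proxy answers false to isEnabled and fails fast on any other call. The Service trait and object names below are hypothetical.

import java.lang.reflect.Proxy
import scala.reflect.{classTag, ClassTag}

trait Service {
  def isEnabled: Boolean
  def doWork(): String
}

object DisabledProxySketch {
  // Builds a T whose only working method is isEnabled (always false); everything else throws.
  def disabled[T: ClassTag]: T =
    Proxy
      .newProxyInstance(
        classTag[T].runtimeClass.getClassLoader,
        Array(classTag[T].runtimeClass),
        (_, method, _) =>
          if (method.getName == "isEnabled" && method.getParameterCount == 0)
            java.lang.Boolean.FALSE
          else
            throw new UnsupportedOperationException(s"${method.toGenericString} is disabled.")
      )
      .asInstanceOf[T]

  def main(args: Array[String]): Unit = {
    val svc = disabled[Service]
    println(svc.isEnabled) // false
    // svc.doWork() would throw UnsupportedOperationException
  }
}
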
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectives.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectives.scala
index 50d04c414..2061ef9d4 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectives.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectives.scala
@@ -18,7 +18,7 @@ import org.broadinstitute.dsde.workbench.util.FutureSupport.toFutureTry
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.{Failure, Success}
-trait EnabledUserDirectives extends LazyLogging with SprayJsonSupport {
+trait EnabledUserDirectives extends LazyLogging with SprayJsonSupport {
// Hardcode an ErrorReportSource to allow differentiating between enabled-user errors and other errors.
implicit val errorReportSource: ErrorReportSource = ErrorReportSource("Orchestration-enabled-check")
@@ -33,7 +33,9 @@ trait EnabledUserDirectives extends LazyLogging with SprayJsonSupport {
* @param samBaseUrl where to find Sam - used for unit testing
* @return n/a
*/
- def requireEnabledUser(userInfo: UserInfo, samBaseUrl: String = FireCloudConfig.Sam.baseUrl)(innerRoute: RequestContext => Future[RouteResult]): Route = {
+ def requireEnabledUser(userInfo: UserInfo, samBaseUrl: String = FireCloudConfig.Sam.baseUrl)(
+ innerRoute: RequestContext => Future[RouteResult]
+ ): Route =
extractUri { uri =>
onComplete(getUserEnabled(userInfo.accessToken.token, samBaseUrl)) {
case Success(true) =>
@@ -44,31 +46,43 @@ trait EnabledUserDirectives extends LazyLogging with SprayJsonSupport {
// the 401/"User is disabled." response mirrors what Sam returns in this case.
throwErrorReport(StatusCodes.Unauthorized, "User is disabled.")
case Failure(fcerr: FireCloudExceptionWithErrorReport) =>
- logger.error(s"FireCloudExceptionWithErrorReport exception checking enabled status for user ${userInfo.userEmail}: (${fcerr.getMessage}) while calling $uri", fcerr)
+ logger.error(
+ s"FireCloudExceptionWithErrorReport exception checking enabled status for user ${userInfo.userEmail}: (${fcerr.getMessage}) while calling $uri",
+ fcerr
+ )
// rebuild the FireCloudExceptionWithErrorReport to ensure we're not passing along stack traces
val code = fcerr.errorReport.statusCode.getOrElse(StatusCodes.InternalServerError)
throwErrorReport(code, fcerr.getMessage)
- case Failure(apiex:ApiException) =>
- logger.error(s"ApiException exception checking enabled status for user ${userInfo.userEmail}: (${apiex.getMessage}) while calling $uri", apiex)
+ case Failure(apiex: ApiException) =>
+ logger.error(
+ s"ApiException exception checking enabled status for user ${userInfo.userEmail}: (${apiex.getMessage}) while calling $uri",
+ apiex
+ )
val code = StatusCode.int2StatusCode(apiex.getCode)
if (code == StatusCodes.NotFound) {
throwErrorReport(StatusCodes.Unauthorized, "User is not registered.")
} else {
- val message = if (Option(apiex.getMessage).isEmpty || apiex.getMessage.isEmpty) code.defaultMessage() else apiex.getMessage
+ val message =
+ if (Option(apiex.getMessage).isEmpty || apiex.getMessage.isEmpty) code.defaultMessage()
+ else apiex.getMessage
throwErrorReport(code, message)
}
case Failure(ex) =>
- logger.error(s"Unexpected exception checking enabled status for user ${userInfo.userEmail}: (${ex.getMessage}) while calling $uri", ex)
+ logger.error(
+ s"Unexpected exception checking enabled status for user ${userInfo.userEmail}: (${ex.getMessage}) while calling $uri",
+ ex
+ )
throwErrorReport(StatusCodes.InternalServerError, ex.getMessage)
}
}
- }
-
private class SamApiCallback[T](functionName: String = "userStatusInfo") extends ApiCallback[T] {
private val promise = Promise[T]()
- override def onFailure(e: ApiException, statusCode: Int, responseHeaders: java.util.Map[String, java.util.List[String]]): Unit = {
+ override def onFailure(e: ApiException,
+ statusCode: Int,
+ responseHeaders: java.util.Map[String, java.util.List[String]]
+ ): Unit = {
val response = e.getResponseBody
// attempt to propagate an ErrorReport from Sam. If we can't understand Sam's response as an ErrorReport,
// create our own error message.
@@ -82,14 +96,21 @@ trait EnabledUserDirectives extends LazyLogging with SprayJsonSupport {
toFutureTry(Unmarshal(response).to[String]) map { maybeString =>
val stringErrMsg = maybeString match {
case Success(stringErr) => stringErr
- case Failure(_) => response
+ case Failure(_) => response
}
- throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCode.int2StatusCode(statusCode), s"Sam call to $functionName failed with error '$stringErrMsg'"))
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCode.int2StatusCode(statusCode),
+ s"Sam call to $functionName failed with error '$stringErrMsg'"
+ )
+ )
}
}
promise.failure(e)
}
- override def onSuccess(result: T, statusCode: Int, responseHeaders: java.util.Map[String, java.util.List[String]]): Unit = promise.success(result)
+ override def onSuccess(result: T,
+ statusCode: Int,
+ responseHeaders: java.util.Map[String, java.util.List[String]]
+ ): Unit = promise.success(result)
override def onUploadProgress(bytesWritten: Long, contentLength: Long, done: Boolean): Unit = ()
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupport.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupport.scala
index 58e785eda..6d18bd3fe 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupport.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupport.scala
@@ -19,57 +19,86 @@ trait PermissionsSupport {
protected val samDao: SamDAO
implicit protected val executionContext: ExecutionContext
- def tryIsAdmin(userInfo: UserInfo): Future[Boolean] = {
- rawlsDAO.isAdmin(userInfo) recoverWith { case t => throw new FireCloudException("Unable to query for admin status.", t) }
- }
+ def tryIsAdmin(userInfo: UserInfo): Future[Boolean] =
+ rawlsDAO.isAdmin(userInfo) recoverWith { case t =>
+ throw new FireCloudException("Unable to query for admin status.", t)
+ }
- def asAdmin(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] = {
+ def asAdmin(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] =
tryIsAdmin(userInfo) flatMap { isAdmin =>
- if (isAdmin) op else Future.failed(new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.Forbidden, "You must be an admin.")))
+ if (isAdmin) op
+ else
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.Forbidden, "You must be an admin.")
+ )
+ )
}
- }
- def tryIsCurator(userInfo: UserInfo): Future[Boolean] = {
- rawlsDAO.isLibraryCurator(userInfo) recoverWith { case t => throw new FireCloudException("Unable to query for library curator status.", t) }
- }
+ def tryIsCurator(userInfo: UserInfo): Future[Boolean] =
+ rawlsDAO.isLibraryCurator(userInfo) recoverWith { case t =>
+ throw new FireCloudException("Unable to query for library curator status.", t)
+ }
- def asCurator(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] = {
+ def asCurator(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] =
tryIsCurator(userInfo) flatMap { isCurator =>
- if (isCurator) op else Future.failed(new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.Forbidden, "You must be a library curator.")))
+ if (isCurator) op
+ else
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.Forbidden, "You must be a library curator.")
+ )
+ )
}
- }
- def asPermitted(ns: String, name: String, lvl: WorkspaceAccessLevel, userInfo: UserInfo)(op: => Future[PerRequestMessage]): Future[PerRequestMessage] = {
+ def asPermitted(ns: String, name: String, lvl: WorkspaceAccessLevel, userInfo: UserInfo)(
+ op: => Future[PerRequestMessage]
+ ): Future[PerRequestMessage] =
hasAccessOrAdmin(ns, name, lvl, userInfo) flatMap { isPermitted =>
- if (isPermitted) op else Future.failed(new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.Forbidden, s"You must be an admin or have at least ${lvl.toString} access.")))
+ if (isPermitted) op
+ else
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.Forbidden, s"You must be an admin or have at least ${lvl.toString} access.")
+ )
+ )
}
- }
- private def hasAccessOrAdmin(workspaceNamespace: String, workspaceName: String, neededLevel: WorkspaceAccessLevel, userInfo: UserInfo): Future[Boolean] = {
+ private def hasAccessOrAdmin(workspaceNamespace: String,
+ workspaceName: String,
+ neededLevel: WorkspaceAccessLevel,
+ userInfo: UserInfo
+ ): Future[Boolean] =
tryIsAdmin(userInfo) flatMap { isadmin =>
if (!isadmin) {
rawlsDAO.getWorkspace(workspaceNamespace, workspaceName)(userInfo.asInstanceOf[WithAccessToken]) map { ws =>
ws.accessLevel match {
case Some(accessLevel) => accessLevel >= neededLevel
- case None => false
+ case None => false
}
}
} else {
Future.successful(true)
}
}
- }
- def asGroupMember(group: String)(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] = {
+ def asGroupMember(
+ group: String
+ )(op: => Future[PerRequestMessage])(implicit userInfo: UserInfo): Future[PerRequestMessage] =
tryIsGroupMember(userInfo, group) flatMap { isGroupMember =>
- if (isGroupMember) op else Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.Forbidden, "You must be in the appropriate group.")))
+ if (isGroupMember) op
+ else
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.Forbidden, "You must be in the appropriate group.")
+ )
+ )
}
- }
- def tryIsGroupMember(userInfo: UserInfo, group: String): Future[Boolean] = {
- samDao.isGroupMember(WorkbenchGroupName(group), userInfo) recoverWith {
- case t: Throwable => throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Unable to query for group membership status."))
+ def tryIsGroupMember(userInfo: UserInfo, group: String): Future[Boolean] =
+ samDao.isGroupMember(WorkbenchGroupName(group), userInfo) recoverWith { case t: Throwable =>
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Unable to query for group membership status.")
+ )
}
- }
}
-
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/RestJsonClient.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/RestJsonClient.scala
index 98c74a911..7ee8c8da5 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/RestJsonClient.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/RestJsonClient.scala
@@ -4,7 +4,7 @@ import java.time.Instant
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.coding.Coders._
-import akka.http.scaladsl.model.headers.{HttpEncodings, `Accept-Encoding`}
+import akka.http.scaladsl.model.headers.{`Accept-Encoding`, HttpEncodings}
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, ResponseEntity}
import akka.http.scaladsl.unmarshalling.{Unmarshal, Unmarshaller}
import akka.stream.Materializer
@@ -26,30 +26,41 @@ trait RestJsonClient extends FireCloudRequestBuilding with PerformanceLogging {
implicit val materializer: Materializer
val http = Http(system)
- private final val NoPerfLabel: Instant = Instant.MIN
+ final private val NoPerfLabel: Instant = Instant.MIN
- def unAuthedRequest(req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None): Future[HttpResponse] = {
- implicit val userInfo:WithAccessToken = null
+ def unAuthedRequest(req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ ): Future[HttpResponse] = {
+ implicit val userInfo: WithAccessToken = null
doRequest(None)(req, compressed, useFireCloudHeader, label)
}
- def userAuthedRequest(req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None)
- (implicit userInfo: WithAccessToken): Future[HttpResponse] =
+ def userAuthedRequest(req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ )(implicit userInfo: WithAccessToken): Future[HttpResponse] =
doRequest(Option(addCredentials(userInfo.accessToken)))(req, compressed, useFireCloudHeader, label)
- def adminAuthedRequest(req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None): Future[HttpResponse] =
+ def adminAuthedRequest(req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ ): Future[HttpResponse] =
doRequest(Option(addAdminCredentials))(req, compressed, useFireCloudHeader, label)
- private def doRequest(addCreds: Option[RequestTransformer])(req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None): Future[HttpResponse] = {
+ private def doRequest(addCreds: Option[RequestTransformer])(req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ ): Future[HttpResponse] = {
val intermediateRequest = (compressed, useFireCloudHeader) match {
- case (true, true) => req.addHeader(`Accept-Encoding`(HttpEncodings.gzip)).addHeader(fireCloudHeader)
+ case (true, true) => req.addHeader(`Accept-Encoding`(HttpEncodings.gzip)).addHeader(fireCloudHeader)
case (true, false) => req.addHeader(`Accept-Encoding`(HttpEncodings.gzip))
case (false, true) => req.addHeader(fireCloudHeader)
- case _ => req
+ case _ => req
}
val finalRequest = addCreds.map(creds => creds(intermediateRequest)).getOrElse(intermediateRequest)
@@ -58,7 +69,7 @@ trait RestJsonClient extends FireCloudRequestBuilding with PerformanceLogging {
for {
response <- http.singleRequest(finalRequest)
- decodedResponse <- if(compressed) Future.successful(decodeResponse(response)) else Future.successful(response)
+ decodedResponse <- if (compressed) Future.successful(decodeResponse(response)) else Future.successful(response)
} yield {
if (tick != NoPerfLabel) {
val tock = Instant.now()
@@ -68,30 +79,45 @@ trait RestJsonClient extends FireCloudRequestBuilding with PerformanceLogging {
}
}
- def authedRequestToObject[T](req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None)
- (implicit userInfo: WithAccessToken, unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
+ def authedRequestToObject[T](req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ )(implicit
+ userInfo: WithAccessToken,
+ unmarshaller: Unmarshaller[ResponseEntity, T],
+ ers: ErrorReportSource
+ ): Future[T] =
requestToObject(true, req, compressed, useFireCloudHeader, label)
- }
- def unAuthedRequestToObject[T](req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None)
- (implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
- implicit val userInfo:WithAccessToken = null
+ def unAuthedRequestToObject[T](req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ )(implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
+ implicit val userInfo: WithAccessToken = null
requestToObject(false, req, compressed, useFireCloudHeader, label)
}
- def adminAuthedRequestToObject[T](req:HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false)
- (implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
+ def adminAuthedRequestToObject[T](req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false
+ )(implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] =
resultsToObject(adminAuthedRequest(req, compressed, useFireCloudHeader))
- }
- private def requestToObject[T](auth: Boolean, req: HttpRequest, compressed: Boolean = false, useFireCloudHeader: Boolean = false,
- label: Option[String] = None)
- (implicit userInfo: WithAccessToken, unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
+ private def requestToObject[T](auth: Boolean,
+ req: HttpRequest,
+ compressed: Boolean = false,
+ useFireCloudHeader: Boolean = false,
+ label: Option[String] = None
+ )(implicit
+ userInfo: WithAccessToken,
+ unmarshaller: Unmarshaller[ResponseEntity, T],
+ ers: ErrorReportSource
+ ): Future[T] = {
val tick = if (label.nonEmpty) Instant.now() else NoPerfLabel
- val resp = if(auth) {
+ val resp = if (auth) {
userAuthedRequest(req, compressed, useFireCloudHeader)
} else {
unAuthedRequest(req, compressed, useFireCloudHeader)
@@ -100,10 +126,11 @@ trait RestJsonClient extends FireCloudRequestBuilding with PerformanceLogging {
resultsToObject(resp, label, tick)
}
- private def resultsToObject[T](resp: Future[HttpResponse], label: Option[String] = None, tick: Instant = NoPerfLabel)
- (implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] = {
+ private def resultsToObject[T](resp: Future[HttpResponse],
+ label: Option[String] = None,
+ tick: Instant = NoPerfLabel
+ )(implicit unmarshaller: Unmarshaller[ResponseEntity, T], ers: ErrorReportSource): Future[T] =
resp flatMap { response =>
-
if (label.nonEmpty && tick != NoPerfLabel) {
val tock = Instant.now()
perfLogger.info(perfmsg(label.get, response.status.value, tick, tock))
@@ -113,31 +140,27 @@ trait RestJsonClient extends FireCloudRequestBuilding with PerformanceLogging {
case s if s.isSuccess =>
Unmarshal(response.entity).to[T].recoverWith {
case de: DeserializationException =>
- throw new FireCloudExceptionWithErrorReport(
- ErrorReport(s"could not deserialize response: ${de.msg}"))
- case e: Throwable => {
+ throw new FireCloudExceptionWithErrorReport(ErrorReport(s"could not deserialize response: ${de.msg}"))
+ case e: Throwable =>
FCErrorReport(response).map { errorReport =>
throw new FireCloudExceptionWithErrorReport(errorReport)
}
- }
}
- case f => {
+ case f =>
FCErrorReport(response).map { errorReport =>
- //we never consume the response body in this case, so we must discard the bytes here
+ // we never consume the response body in this case, so we must discard the bytes here
response.discardEntityBytes()
throw new FireCloudExceptionWithErrorReport(errorReport)
}
- }
}
}
- }
private def decodeResponse(response: HttpResponse): HttpResponse = {
val decoder = response.encoding match {
- case HttpEncodings.gzip => Gzip
- case HttpEncodings.deflate => Deflate
+ case HttpEncodings.gzip => Gzip
+ case HttpEncodings.deflate => Deflate
case HttpEncodings.identity => NoCoding
- case _ => NoCoding
+ case _ => NoCoding
}
decoder.decodeMessage(response)
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StandardUserInfoDirectives.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StandardUserInfoDirectives.scala
index 816474e15..d5070c51d 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StandardUserInfoDirectives.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StandardUserInfoDirectives.scala
@@ -16,9 +16,12 @@ trait StandardUserInfoDirectives extends UserInfoDirectives {
headerValueByName("OIDC_CLAIM_email") &
optionalHeaderValueByName("OAUTH2_CLAIM_google_id") &
optionalHeaderValueByName("OAUTH2_CLAIM_idp_access_token")
- ) tmap {
- case (token, userId, expiresIn, email, googleIdOpt, googleTokenOpt) => {
- UserInfo(email, OAuth2BearerToken(token), expiresIn.toLong, googleIdOpt.getOrElse(userId), googleTokenOpt.map(OAuth2BearerToken))
- }
- }
+ ) tmap { case (token, userId, expiresIn, email, googleIdOpt, googleTokenOpt) =>
+ UserInfo(email,
+ OAuth2BearerToken(token),
+ expiresIn.toLong,
+ googleIdOpt.getOrElse(userId),
+ googleTokenOpt.map(OAuth2BearerToken)
+ )
+ }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthrough.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthrough.scala
index ec061d665..3e98f5302 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthrough.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthrough.scala
@@ -3,7 +3,7 @@ package org.broadinstitute.dsde.firecloud.utils
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
-import akka.http.scaladsl.model.headers.{Host, `Timeout-Access`}
+import akka.http.scaladsl.model.headers.{`Timeout-Access`, Host}
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes, Uri}
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.directives.{BasicDirectives, RouteDirectives}
@@ -17,9 +17,7 @@ import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
-trait StreamingPassthrough
- extends BasicDirectives
- with RouteDirectives {
+trait StreamingPassthrough extends BasicDirectives with RouteDirectives {
// Log under the StreamingPassthrough class, not whatever class mixes this in.
protected lazy val streamingPassthroughLogger: Logger =
@@ -29,9 +27,8 @@ trait StreamingPassthrough
implicit val executionContext: ExecutionContext
val passthroughErrorReportSource: ErrorReportSource = ErrorReportSource("Orchestration")
- def escapePathSegment(pathString: String) = {
+ def escapePathSegment(pathString: String) =
UrlEscapers.urlPathSegmentEscaper().escape(pathString)
- }
/**
* Passes through, to remoteBaseUri, all requests that match or start with the
@@ -39,11 +36,10 @@ trait StreamingPassthrough
*
* @param remoteBaseUri the remote system to use as target for passthrough requests
*/
- def streamingPassthrough(remoteBaseUri: Uri): Route = {
+ def streamingPassthrough(remoteBaseUri: Uri): Route =
extractMatchedPath { localBasePath =>
passthroughImpl(localBasePath, remoteBaseUri)
}
- }
/**
* Passes through, to a remote server, all requests that match or start with the
@@ -55,9 +51,8 @@ trait StreamingPassthrough
* is the fully-qualified URL to a remote system
* to use as target for passthrough requests.
*/
- def streamingPassthrough(passthroughMapping: (Uri.Path, Uri)): Route = {
+ def streamingPassthrough(passthroughMapping: (Uri.Path, Uri)): Route =
passthroughImpl(passthroughMapping._1, passthroughMapping._2)
- }
/**
* Passes through, to a remote server, all requests that match or start with the
@@ -70,9 +65,8 @@ trait StreamingPassthrough
* to use as target for passthrough requests.
* @param pathOverride as the pre-defined path to use when constructing the remote path value
*/
- def streamingPassthroughWithPathRedirect(passthroughMapping: (Uri.Path, Uri), pathOverride: String): Route = {
+ def streamingPassthroughWithPathRedirect(passthroughMapping: (Uri.Path, Uri), pathOverride: String): Route =
passthroughImpl(passthroughMapping._1, passthroughMapping._2, Option(pathOverride))
- }
/**
* The passthrough implementation:
@@ -81,7 +75,10 @@ trait StreamingPassthrough
* - `remotePathOverride` to provide a pre-configured path if remote path structure is different from local
* - call the remote system and reply to the user via `routeResponse` streaming
*/
- private def passthroughImpl(localBasePath: Uri.Path, remoteBaseUri: Uri, remotePathOverride: Option[String] = None): Route = {
+ private def passthroughImpl(localBasePath: Uri.Path,
+ remoteBaseUri: Uri,
+ remotePathOverride: Option[String] = None
+ ): Route =
mapRequest(transformToPassthroughRequest(localBasePath, remoteBaseUri, remotePathOverride)) {
extractRequest { req =>
complete {
@@ -89,7 +86,6 @@ trait StreamingPassthrough
}
}
}
- }
/**
* Accepts an http request from an end user to Orchestration,
@@ -104,7 +100,9 @@ trait StreamingPassthrough
* @param req the request inbound to Orchestration
* @return the outbound request to be sent to another service
*/
- def transformToPassthroughRequest(localBasePath: Uri.Path, remoteBaseUri: Uri, remotePath: Option[String] = None)(req: HttpRequest): HttpRequest = {
+ def transformToPassthroughRequest(localBasePath: Uri.Path, remoteBaseUri: Uri, remotePath: Option[String] = None)(
+ req: HttpRequest
+ ): HttpRequest = {
// Convert the URI to the one suitable for the remote system
val targetUri = convertToRemoteUri(req.uri, localBasePath, remoteBaseUri, remotePath)
// Remove unwanted headers:
@@ -116,7 +114,7 @@ trait StreamingPassthrough
// so we remove and set it with targetUri host
val filteredHeaders = req.headers.filter { hdr =>
hdr.isNot(`Timeout-Access`.lowercaseName) &&
- hdr.isNot(Host.lowercaseName)
+ hdr.isNot(Host.lowercaseName)
}
val targetHeaders = filteredHeaders :+ Host(targetUri.authority.host)
@@ -140,7 +138,11 @@ trait StreamingPassthrough
* @param modifiedRemotePath a modified path value to be used over requestUri.path.toString if provided
* @return the URI suitable for sending to the remote system
*/
- def convertToRemoteUri(requestUri: Uri, localBasePath: Uri.Path, remoteBaseUri: Uri, modifiedRemotePath: Option[String] = None): Uri = {
+ def convertToRemoteUri(requestUri: Uri,
+ localBasePath: Uri.Path,
+ remoteBaseUri: Uri,
+ modifiedRemotePath: Option[String] = None
+ ): Uri = {
// Ensure the incoming request starts with the localBasePath. Abort if it doesn't.
// This condition should only be caused by developer error in which the streamingPassthrough
// directive is incorrectly configured inside a route.
@@ -159,10 +161,7 @@ trait StreamingPassthrough
// * the scheme, host, and port as defined in remoteBaseUri (host and port are combined into authority)
// * the path built from the remoteBaseUri path + remainder
// * everything else (querystring, fragment, userinfo) from the original request
- requestUri.copy(
- scheme = remoteBaseUri.scheme,
- authority = remoteBaseUri.authority,
- path = remotePath)
+ requestUri.copy(scheme = remoteBaseUri.scheme, authority = remoteBaseUri.authority, path = remotePath)
}
/**
@@ -173,7 +172,8 @@ trait StreamingPassthrough
* @return the Future-wrapped response from the remote server
*/
private def routeResponse(req: HttpRequest): Future[HttpResponse] = {
- val flowFuture = Source.single((req, NotUsed))
+ val flowFuture = Source
+ .single((req, NotUsed))
.via(Http().superPool[NotUsed]())
.runWith(Sink.head)
@@ -186,14 +186,11 @@ trait StreamingPassthrough
case Failure(ex) =>
// the remote server did not respond at all, so we have nothing to use for the reply;
// throw an error
- throw new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, ex)(passthroughErrorReportSource))
+ throw new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, ex)(passthroughErrorReportSource)
+ )
}
}
}
-
-
-
-
-
}
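Reviewer note: the re-wrapped `convertToRemoteUri` above keeps its original behavior: strip the matched local base path, re-root the remainder under the remote base, take scheme and authority from the remote, and keep the query string and fragment from the original request. A minimal sketch of that mapping, using hypothetical hosts purely for illustration (this mirrors the intent, not the exact implementation in this file):

    import akka.http.scaladsl.model.Uri

    val localBasePath = Uri.Path("/api/workflows/v1")                       // path matched locally
    val remoteBaseUri = Uri("https://cromiam.example.org/api/workflows/v1") // assumed remote base
    val incoming      = Uri("https://orch.example.org/api/workflows/v1/1234/status?expandSubWorkflows=true")

    // remainder of the local path after the matched base, re-rooted under the remote base
    val remainder  = incoming.path.dropChars(localBasePath.charCount)
    val remotePath = remoteBaseUri.path ++ remainder

    // scheme and authority from the remote; everything else stays as the user requested it
    val target = incoming.copy(scheme = remoteBaseUri.scheme, authority = remoteBaseUri.authority, path = remotePath)
    // target: https://cromiam.example.org/api/workflows/v1/1234/status?expandSubWorkflows=true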
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatter.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatter.scala
index 5274b595a..299ae3f70 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatter.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatter.scala
@@ -17,8 +17,8 @@ object TSVFormatter {
* @return Headers and rows combined.
*/
def exportToString(headers: IndexedSeq[String], rows: IndexedSeq[IndexedSeq[String]]): String = {
- val headerString:String = headers.mkString("\t") + "\n"
- val rowsString:String = rows.map{ _.mkString("\t") }.mkString("\n")
+ val headerString: String = headers.mkString("\t") + "\n"
+ val rowsString: String = rows.map(_.mkString("\t")).mkString("\n")
headerString + rowsString + "\n"
}
@@ -28,15 +28,13 @@ object TSVFormatter {
* @param entities Initial list of Entity
* @return new list of Entity
*/
- private def filterAttributeFromEntities(entities: Seq[Entity], attributeName: String): Seq[Entity] = {
- entities map {
- entity =>
- val attributes = entity.attributes filterNot {
- case (thisAttributeName, _) => thisAttributeName == AttributeName.withDefaultNS(attributeName)
- }
- entity.copy(attributes = attributes)
+ private def filterAttributeFromEntities(entities: Seq[Entity], attributeName: String): Seq[Entity] =
+ entities map { entity =>
+ val attributes = entity.attributes filterNot { case (thisAttributeName, _) =>
+ thisAttributeName == AttributeName.withDefaultNS(attributeName)
+ }
+ entity.copy(attributes = attributes)
}
- }
/**
* Generate a row of values in the same order as the headers.
@@ -46,17 +44,16 @@ object TSVFormatter {
* @return IndexedSeq of ordered data fields
*/
private def makeRow(entity: Entity, headerValues: IndexedSeq[String]): IndexedSeq[String] = {
- val rowMap: Map[Int, String] = entity.attributes map {
- case (attributeName, attribute) =>
- val columnPosition = headerValues.indexOf(AttributeName.toDelimitedName(attributeName))
- val cellValue = tsvSafeAttribute(attribute)
- columnPosition -> cellValue
+ val rowMap: Map[Int, String] = entity.attributes map { case (attributeName, attribute) =>
+ val columnPosition = headerValues.indexOf(AttributeName.toDelimitedName(attributeName))
+ val cellValue = tsvSafeAttribute(attribute)
+ columnPosition -> cellValue
}
// If there are entities that don't have a value for which there is a known header, that will
// be missing in the row. Fill up those positions with empty strings in that case.
val completedRowMap: IndexedSeq[(Int, String)] =
- IndexedSeq.range(1, headerValues.size).map {
- i => (i, rowMap.getOrElse(i, ""))
+ IndexedSeq.range(1, headerValues.size).map { i =>
+ (i, rowMap.getOrElse(i, ""))
}
// This rowMap manipulation:
@@ -78,8 +75,8 @@ object TSVFormatter {
// AttributeStringifier works for everything except single entity references;
// it even works for AttributeEntityReferenceList
val intermediateString = attribute match {
- case ref:AttributeEntityReference => attributeFormat.write(ref).compactPrint
- case _ => AttributeStringifier(attribute)
+ case ref: AttributeEntityReference => attributeFormat.write(ref).compactPrint
+ case _ => AttributeStringifier(attribute)
}
tsvSafeString(intermediateString)
}
@@ -91,13 +88,12 @@ object TSVFormatter {
* @param value The input value to make safe
* @return the safe value
*/
- def tsvSafeString(value: String): String = {
+ def tsvSafeString(value: String): String =
if (value.contains(TSVParser.DELIMITER)) {
s"\"$value\""
} else {
value
}
- }
/**
* Generate a header for a membership file.
@@ -105,10 +101,10 @@ object TSVFormatter {
* @param entityType The EntityType
* @return IndexedSeq of header Strings
*/
- def makeMembershipHeaders(entityType: String)(implicit modelSchema: ModelSchema): IndexedSeq[String] = {
- IndexedSeq[String](s"${TsvTypes.MEMBERSHIP}:${entityType}_id", modelSchema.getCollectionMemberType(entityType).get.getOrElse(
- entityType.replace("_set", "")))
- }
+ def makeMembershipHeaders(entityType: String)(implicit modelSchema: ModelSchema): IndexedSeq[String] =
+ IndexedSeq[String](s"${TsvTypes.MEMBERSHIP}:${entityType}_id",
+ modelSchema.getCollectionMemberType(entityType).get.getOrElse(entityType.replace("_set", ""))
+ )
/**
* Prepare an ordered list of row data for a membership file
@@ -117,19 +113,26 @@ object TSVFormatter {
* @param entities The Entity objects to convert to rows.
* @return Ordered list of rows
*/
- def makeMembershipRows(entityType: String, entities: Seq[Entity])(implicit modelSchema: ModelSchema): Seq[IndexedSeq[String]] = {
+ def makeMembershipRows(entityType: String, entities: Seq[Entity])(implicit
+ modelSchema: ModelSchema
+ ): Seq[IndexedSeq[String]] = {
val memberPlural = pluralizeMemberType(memberTypeFromEntityType(entityType, modelSchema), modelSchema)
- entities.filter {
+ entities
+ .filter {
_.entityType == entityType
- }.flatMap {
- entity =>
- entity.attributes.filter {
+ }
+ .flatMap { entity =>
+ entity.attributes
+ .filter {
// To make the membership file, we need the array of elements that correspond to the set type.
// All other top-level properties are not necessary and are only used for the data load file.
case (attributeName, _) => attributeName.equals(AttributeName.withDefaultNS(memberPlural))
- }.flatMap {
- case (_, AttributeEntityReference(`entityType`, entityName)) => Seq(IndexedSeq[String](entity.name, entityName))
- case (_, AttributeEntityReferenceList(refs)) => refs.map(ref => IndexedSeq[String](entity.name, ref.entityName))
+ }
+ .flatMap {
+ case (_, AttributeEntityReference(`entityType`, entityName)) =>
+ Seq(IndexedSeq[String](entity.name, entityName))
+ case (_, AttributeEntityReferenceList(refs)) =>
+ refs.map(ref => IndexedSeq[String](entity.name, ref.entityName))
case _ => Seq.empty
}
}
@@ -143,19 +146,25 @@ object TSVFormatter {
* @param requestedHeaders Which, if any, columns were requested. If none, return allHeaders (subject to sanitization)
* @return Entity name as first column header, followed by matching entity attribute labels
*/
- def makeEntityHeaders(entityType: String, allHeaders: Seq[String], requestedHeaders: Option[IndexedSeq[String]])(implicit modelSchema: ModelSchema): IndexedSeq[String] = {
+ def makeEntityHeaders(entityType: String, allHeaders: Seq[String], requestedHeaders: Option[IndexedSeq[String]])(
+ implicit modelSchema: ModelSchema
+ ): IndexedSeq[String] = {
// will throw exception if firecloud model was requested and the entity type
val memberPlural = pluralizeMemberType(memberTypeFromEntityType(entityType, modelSchema), modelSchema)
- val requestedHeadersSansId = requestedHeaders.
- // remove empty strings
- map(_.filter(_.length > 0)).
+ val requestedHeadersSansId = requestedHeaders
+ .
+ // remove empty strings
+ map(_.filter(_.length > 0))
+ .
// handle empty requested headers as no requested headers
- flatMap(rh => if (rh.isEmpty) None else Option(rh)).
+ flatMap(rh => if (rh.isEmpty) None else Option(rh))
+ .
// entity id always needs to be first and is handled differently so remove it from requestedHeaders
- map(_.filterNot(_.equalsIgnoreCase(entityType + "_id"))).
+ map(_.filterNot(_.equalsIgnoreCase(entityType + "_id")))
+ .
// filter out member attribute if a set type
- map { h => if (modelSchema.isCollectionType(entityType)) h.filterNot(_.equals(memberPlural)) else h }
+ map(h => if (modelSchema.isCollectionType(entityType)) h.filterNot(_.equals(memberPlural)) else h)
val filteredAllHeaders = if (modelSchema.isCollectionType(entityType)) {
allHeaders.filterNot(_.equals(memberPlural))
@@ -165,8 +174,9 @@ object TSVFormatter {
val requiredAttrsTry = modelSchema.getRequiredAttributes(entityType)
val entityHeader: String = requestedHeadersSansId match {
- // if not all required fields are requested, then this tsv is an update
- case Some(headers) if requiredAttrsTry.isSuccess && !requiredAttrsTry.get.keySet.forall(headers.contains) => s"${TsvTypes.UPDATE}:${entityType}_id"
+ // if not all required fields are requested, then this tsv is an update
+ case Some(headers) if requiredAttrsTry.isSuccess && !requiredAttrsTry.get.keySet.forall(headers.contains) =>
+ s"${TsvTypes.UPDATE}:${entityType}_id"
case _ => s"${TsvTypes.ENTITY}:${entityType}_id"
}
(entityHeader +: requestedHeadersSansId.getOrElse(filteredAllHeaders)).toIndexedSeq
@@ -180,7 +190,9 @@ object TSVFormatter {
* @param headers The universe of available column headers
* @return Ordered list of rows, each row entry value ordered by its corresponding header position
*/
- def makeEntityRows(entityType: String, entities: Seq[Entity], headers: IndexedSeq[String])(implicit modelSchema: ModelSchema): IndexedSeq[IndexedSeq[String]] = {
+ def makeEntityRows(entityType: String, entities: Seq[Entity], headers: IndexedSeq[String])(implicit
+ modelSchema: ModelSchema
+ ): IndexedSeq[IndexedSeq[String]] = {
// if we have a set entity, we need to filter out the attribute array of the members so that we only
// have top-level attributes to construct columns from.
val filteredEntities = if (modelSchema.isCollectionType(entityType)) {
@@ -191,12 +203,14 @@ object TSVFormatter {
}
// Turn them into rows
filteredEntities
- .filter { _.entityType == entityType }
- .map { entity => makeRow(entity, headers) }
+ .filter(_.entityType == entityType)
+ .map(entity => makeRow(entity, headers))
.toIndexedSeq
}
- def memberTypeFromEntityType(entityType: String, modelSchema: ModelSchema): String = modelSchema.getCollectionMemberType(entityType).get.getOrElse(entityType.replace("_set", ""))
- def pluralizeMemberType(memberType: String, modelSchema: ModelSchema): String = modelSchema.getPlural(memberType).getOrElse(memberType + "s")
+ def memberTypeFromEntityType(entityType: String, modelSchema: ModelSchema): String =
+ modelSchema.getCollectionMemberType(entityType).get.getOrElse(entityType.replace("_set", ""))
+ def pluralizeMemberType(memberType: String, modelSchema: ModelSchema): String =
+ modelSchema.getPlural(memberType).getOrElse(memberType + "s")
def isCollectionType(entityType: String, modelSchema: ModelSchema): Boolean = modelSchema.isCollectionType(entityType)
}
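Reviewer note: the TSV helpers above are reformatted only; quoting and joining behave as before. A self-contained sketch of that logic (a standalone re-implementation for illustration, not a call into TSVFormatter):

    // A cell containing the tab delimiter is wrapped in quotes, then headers and rows
    // are joined with tabs and newlines, ending with a trailing newline.
    val headers = IndexedSeq("sample_id", "notes")
    val rows    = IndexedSeq(IndexedSeq("s1", "ok"), IndexedSeq("s2", "contains\ta tab"))

    def tsvSafe(value: String): String =
      if (value.contains("\t")) s"\"$value\"" else value

    val tsv = headers.mkString("\t") + "\n" +
      rows.map(_.map(tsvSafe).mkString("\t")).mkString("\n") + "\n"
    // sample_id<TAB>notes
    // s1<TAB>ok
    // s2<TAB>"contains<TAB>a tab"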
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVParser.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVParser.scala
index ba90ef7bf..8aa60e0ec 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVParser.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/utils/TSVParser.scala
@@ -5,9 +5,9 @@ import scala.jdk.CollectionConverters._
import com.univocity.parsers.csv.{CsvParser, CsvParserSettings}
case class TSVLoadFile(
- firstColumnHeader:String, //The first header column, used to determine the type of entities being imported
- headers:Seq[String], //All the headers
- tsvData:Seq[Seq[String]] //List of rows of the TSV, broken out into fields
+ firstColumnHeader: String, // The first header column, used to determine the type of entities being imported
+ headers: Seq[String], // All the headers
+ tsvData: Seq[Seq[String]] // List of rows of the TSV, broken out into fields
)
object TSVParser {
@@ -21,7 +21,7 @@ object TSVParser {
// Automatically detect what the line separator is (e.g. \n for Unix, \r\n for Windows).
settings.setLineSeparatorDetectionEnabled(true)
settings.setMaxColumns(1024)
- //64 mb in bytes/4 (assumes 4 bytes per character)
+ // 64 mb in bytes/4 (assumes 4 bytes per character)
settings.setMaxCharsPerColumn(16777216)
settings.getFormat.setDelimiter(DELIMITER)
settings.setErrorContentLength(16384)
@@ -39,7 +39,7 @@ object TSVParser {
tsvLine.toList
}
- def parse(tsvString: String): TSVLoadFile = {
+ def parse(tsvString: String): TSVLoadFile =
makeParser.parseAll(new StringReader(tsvString)).asScala.toList match {
case h :: t =>
val tsvData = t.zipWithIndex.map { case (line, idx) => parseLine(line, idx, h.length) }
@@ -49,11 +49,10 @@ object TSVParser {
// consists only of delimiters, all values will be empty.
// NB: CsvParserSettings.setSkipEmptyLines, setIgnoreTrailingWhitespaces, and setIgnoreLeadingWhitespaces
// do not help with this use case, so we write our own implementation.
- val validData = tsvData.collect {
+ val validData = tsvData.collect {
case hasValues if hasValues.exists(_.nonEmpty) => hasValues
}
TSVLoadFile(h.head, h.toList, validData)
case _ => throw new RuntimeException("TSV parsing error: no header")
}
- }
}
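Reviewer note: converting `parse` from a block to an expression does not change its contract: the first parsed row becomes the headers, the remaining rows become data, and rows whose cells are all empty (e.g. a line of bare delimiters) are dropped. A hedged usage sketch, assuming TSVParser and TSVLoadFile from this file are on the classpath:

    val loadFile = TSVParser.parse("entity:sample_id\tbam\ns1\tgs://bucket/s1.bam\n\t\n")
    // expected:
    //   loadFile.firstColumnHeader == "entity:sample_id"
    //   loadFile.headers           == List("entity:sample_id", "bam")
    //   loadFile.tsvData           == List(List("s1", "gs://bucket/s1.bam"))  // the "\t"-only row is filtered out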
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CookieAuthedApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CookieAuthedApiService.scala
index 74c5f48d9..1387c5464 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CookieAuthedApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CookieAuthedApiService.scala
@@ -23,33 +23,47 @@ trait CookieAuthedApiService extends Directives with RequestBuilding with LazyLo
private def dummyUserInfo(tokenStr: String) = UserInfo("dummy", OAuth2BearerToken(tokenStr), -1, "dummy")
val cookieAuthedRoutes: Route =
- // download "proxies" for TSV files
- // Note that these endpoints work in the same way as ExportEntitiesApiService tsv download.
- path( "cookie-authed" / "workspaces" / Segment / Segment/ "entities" / Segment / "tsv" ) { (workspaceNamespace, workspaceName, entityType) =>
- // this endpoint allows an arbitrary number of attribute names in the POST body (GAWB-1435)
- // but the URL cannot be saved for later use (firecloud-app#80)
- post {
- formFields(Symbol("FCtoken"), Symbol("attributeNames").?, Symbol("model").?) { (tokenValue, attributeNamesString, modelString) =>
- val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
- val userInfo = dummyUserInfo(tokenValue)
- val exportArgs = ExportEntitiesByTypeArguments(userInfo, workspaceNamespace, workspaceName, entityType, attributeNames, modelString)
-
- complete { exportEntitiesByTypeConstructor(exportArgs).ExportEntities }
- }
- } ~
- // this endpoint allows saving the URL for later use (firecloud-app#80)
- // but it's possible to exceed the maximum URI length by specifying too many attributes (GAWB-1435)
- get {
- cookie("FCtoken") { tokenCookie =>
- parameters(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
+ // download "proxies" for TSV files
+ // Note that these endpoints work in the same way as ExportEntitiesApiService tsv download.
+ path("cookie-authed" / "workspaces" / Segment / Segment / "entities" / Segment / "tsv") {
+ (workspaceNamespace, workspaceName, entityType) =>
+ // this endpoint allows an arbitrary number of attribute names in the POST body (GAWB-1435)
+ // but the URL cannot be saved for later use (firecloud-app#80)
+ post {
+ formFields(Symbol("FCtoken"), Symbol("attributeNames").?, Symbol("model").?) {
+ (tokenValue, attributeNamesString, modelString) =>
val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
- val userInfo = dummyUserInfo(tokenCookie.value)
- val exportArgs = ExportEntitiesByTypeArguments(userInfo, workspaceNamespace, workspaceName, entityType, attributeNames, modelString)
+ val userInfo = dummyUserInfo(tokenValue)
+ val exportArgs = ExportEntitiesByTypeArguments(userInfo,
+ workspaceNamespace,
+ workspaceName,
+ entityType,
+ attributeNames,
+ modelString
+ )
- complete { exportEntitiesByTypeConstructor(exportArgs).ExportEntities }
+ complete(exportEntitiesByTypeConstructor(exportArgs).ExportEntities)
+ }
+ } ~
+ // this endpoint allows saving the URL for later use (firecloud-app#80)
+ // but it's possible to exceed the maximum URI length by specifying too many attributes (GAWB-1435)
+ get {
+ cookie("FCtoken") { tokenCookie =>
+ parameters(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
+ val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
+ val userInfo = dummyUserInfo(tokenCookie.value)
+ val exportArgs = ExportEntitiesByTypeArguments(userInfo,
+ workspaceNamespace,
+ workspaceName,
+ entityType,
+ attributeNames,
+ modelString
+ )
+
+ complete(exportEntitiesByTypeConstructor(exportArgs).ExportEntities)
+ }
}
}
- }
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiService.scala
index 8d73043e1..5b64922c8 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiService.scala
@@ -6,9 +6,11 @@ import org.broadinstitute.dsde.firecloud.utils.{StandardUserInfoDirectives, Stre
import akka.http.scaladsl.model.{HttpMethods, Uri}
import akka.http.scaladsl.server.Route
-trait CromIamApiService extends FireCloudRequestBuilding
- with FireCloudDirectives with StandardUserInfoDirectives
- with StreamingPassthrough {
+trait CromIamApiService
+ extends FireCloudRequestBuilding
+ with FireCloudDirectives
+ with StandardUserInfoDirectives
+ with StreamingPassthrough {
lazy val workflowRoot: String = FireCloudConfig.CromIAM.authUrl + "/workflows/v1"
lazy val womtoolRoute: String = FireCloudConfig.CromIAM.authUrl + "/womtool/v1"
@@ -29,13 +31,15 @@ trait CromIamApiService extends FireCloudRequestBuilding
defined as /workflows/{id}/genomics/{operation}, meaning that path reconstruction is necessary
  Since there's only one such route, it was simpler to have an explicit route defined for this edge case and have it evaluated
before the rest of the workflow routes.
- */
- val rawlsServiceRoute: Route = {
- pathPrefix("workflows" / Segment / Segment / "backend" / "metadata" / Segments) { (version, workflowId, operationSegments) =>
- val suffix = operationSegments.mkString("/")
- streamingPassthroughWithPathRedirect(Uri.Path(localBase) -> Uri(rawlsWorkflowRoot), s"/${workflowId}/genomics/${suffix}")
+ */
+ val rawlsServiceRoute: Route =
+ pathPrefix("workflows" / Segment / Segment / "backend" / "metadata" / Segments) {
+ (version, workflowId, operationSegments) =>
+ val suffix = operationSegments.mkString("/")
+ streamingPassthroughWithPathRedirect(Uri.Path(localBase) -> Uri(rawlsWorkflowRoot),
+ s"/${workflowId}/genomics/${suffix}"
+ )
}
- }
val cromIamServiceRoutes: Route =
pathPrefix("workflows" / Segment) { _ =>
@@ -53,14 +57,11 @@ trait CromIamApiService extends FireCloudRequestBuilding
}
}
-
val cromIamApiServiceRoutes = rawlsServiceRoute ~ cromIamServiceRoutes ~ womToolRoute
- val cromIamEngineRoutes: Route = {
- pathPrefix( "engine" / Segment ) { _ =>
+ val cromIamEngineRoutes: Route =
+ pathPrefix("engine" / Segment) { _ =>
streamingPassthrough(Uri.Path("/engine/v1") -> Uri(engineRoot))
}
- }
-
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiService.scala
index be5ef15cc..05c6686b0 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiService.scala
@@ -15,9 +15,12 @@ import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
import scala.util.Try
-trait EntityApiService extends FireCloudDirectives
- with StreamingPassthrough
- with FireCloudRequestBuilding with StandardUserInfoDirectives with RestJsonClient {
+trait EntityApiService
+ extends FireCloudDirectives
+ with StreamingPassthrough
+ with FireCloudRequestBuilding
+ with StandardUserInfoDirectives
+ with RestJsonClient {
implicit val executionContext: ExecutionContext
lazy val log = LoggerFactory.getLogger(getClass)
@@ -31,8 +34,13 @@ trait EntityApiService extends FireCloudDirectives
path("entities_with_type") {
get {
requireUserInfo() { userInfo =>
- //TODO: the model schema doesn't matter for this one. Ideally, make it Optional
- complete { entityServiceConstructor(FlexibleModelSchema).getEntitiesWithType(workspaceNamespace, workspaceName, userInfo) }
+ // TODO: the model schema doesn't matter for this one. Ideally, make it Optional
+ complete {
+ entityServiceConstructor(FlexibleModelSchema).getEntitiesWithType(workspaceNamespace,
+ workspaceName,
+ userInfo
+ )
+ }
}
}
} ~
@@ -47,16 +55,19 @@ trait EntityApiService extends FireCloudDirectives
requireUserInfo() { userInfo =>
parameter(Symbol("linkExistingEntities").?) { linkExistingEntities =>
entity(as[EntityCopyWithoutDestinationDefinition]) { copyRequest =>
- val linkExistingEntitiesBool = Try(linkExistingEntities.getOrElse("false").toBoolean).getOrElse(false)
- val copyMethodConfig = new EntityCopyDefinition(
- sourceWorkspace = copyRequest.sourceWorkspace,
- destinationWorkspace = WorkspaceName(workspaceNamespace, workspaceName),
- entityType = copyRequest.entityType,
- entityNames = copyRequest.entityNames)
- val extReq = Post(FireCloudConfig.Rawls.workspacesEntitiesCopyUrl(linkExistingEntitiesBool), copyMethodConfig)
-
+ val linkExistingEntitiesBool =
+ Try(linkExistingEntities.getOrElse("false").toBoolean).getOrElse(false)
+ val copyMethodConfig = new EntityCopyDefinition(
+ sourceWorkspace = copyRequest.sourceWorkspace,
+ destinationWorkspace = WorkspaceName(workspaceNamespace, workspaceName),
+ entityType = copyRequest.entityType,
+ entityNames = copyRequest.entityNames
+ )
+ val extReq = Post(FireCloudConfig.Rawls.workspacesEntitiesCopyUrl(linkExistingEntitiesBool),
+ copyMethodConfig
+ )
- complete { userAuthedRequest(extReq)(userInfo) }
+ complete(userAuthedRequest(extReq)(userInfo))
}
}
}
@@ -68,11 +79,17 @@ trait EntityApiService extends FireCloudDirectives
}
} ~
pathPrefix(Segment) { entityType =>
- streamingPassthrough(FireCloudConfig.Rawls.entityPathFromWorkspace(escapePathSegment(workspaceNamespace), escapePathSegment(workspaceName)) + "/" + entityType)
+ streamingPassthrough(
+ FireCloudConfig.Rawls.entityPathFromWorkspace(escapePathSegment(workspaceNamespace),
+ escapePathSegment(workspaceName)
+ ) + "/" + entityType
+ )
}
} ~
pathPrefix("entityQuery") {
- streamingPassthrough(entityQueryPathFromWorkspace(escapePathSegment(workspaceNamespace), escapePathSegment(workspaceName)))
+ streamingPassthrough(
+ entityQueryPathFromWorkspace(escapePathSegment(workspaceNamespace), escapePathSegment(workspaceName))
+ )
} ~
pathPrefix("entityTypes") {
extractRequest { req =>
@@ -84,19 +101,19 @@ trait EntityApiService extends FireCloudDirectives
patch {
passthrough(passthroughTarget, HttpMethods.PATCH)
} ~
- delete {
- passthrough(passthroughTarget, HttpMethods.DELETE)
- }
+ delete {
+ passthrough(passthroughTarget, HttpMethods.DELETE)
+ }
} ~
- pathPrefix("attributes") {
- path(Segment) { _ => // attributeName
- pathEnd {
- patch {
- passthrough(passthroughTarget, HttpMethods.PATCH)
+ pathPrefix("attributes") {
+ path(Segment) { _ => // attributeName
+ pathEnd {
+ patch {
+ passthrough(passthroughTarget, HttpMethods.PATCH)
+ }
}
}
}
- }
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ExportEntitiesApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ExportEntitiesApiService.scala
index e5e82e089..8b6722fbd 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ExportEntitiesApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ExportEntitiesApiService.scala
@@ -12,38 +12,54 @@ import org.broadinstitute.dsde.firecloud.utils.StandardUserInfoDirectives
import scala.concurrent.ExecutionContext
import scala.language.postfixOps
-trait ExportEntitiesApiService extends Directives with RequestBuilding with StandardUserInfoDirectives with LazyLogging {
+trait ExportEntitiesApiService
+ extends Directives
+ with RequestBuilding
+ with StandardUserInfoDirectives
+ with LazyLogging {
val exportEntitiesByTypeConstructor: ExportEntitiesByTypeArguments => ExportEntitiesByTypeActor
implicit val executionContext: ExecutionContext
val exportEntitiesRoutes: Route =
-
// Note that this endpoint works in the same way as CookieAuthedApiService tsv download.
- path( "api" / "workspaces" / Segment / Segment / "entities" / Segment / "tsv" ) { (workspaceNamespace, workspaceName, entityType) =>
- requireUserInfo() { userInfo =>
- get {
- parameters(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
- val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
- val exportArgs = ExportEntitiesByTypeArguments(userInfo, workspaceNamespace, workspaceName, entityType, attributeNames, modelString)
- complete {
- exportEntitiesByTypeConstructor(exportArgs).ExportEntities
+ path("api" / "workspaces" / Segment / Segment / "entities" / Segment / "tsv") {
+ (workspaceNamespace, workspaceName, entityType) =>
+ requireUserInfo() { userInfo =>
+ get {
+ parameters(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
+ val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
+ val exportArgs = ExportEntitiesByTypeArguments(userInfo,
+ workspaceNamespace,
+ workspaceName,
+ entityType,
+ attributeNames,
+ modelString
+ )
+ complete {
+ exportEntitiesByTypeConstructor(exportArgs).ExportEntities
+ }
}
- }
- } ~
- post {
- formFields(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
- val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
- val model = if (modelString.nonEmpty && StringUtils.isBlank(modelString.get)) None else modelString
- val exportArgs = ExportEntitiesByTypeArguments(userInfo, workspaceNamespace, workspaceName, entityType, attributeNames, model)
- complete {
- exportEntitiesByTypeConstructor(exportArgs).streamEntitiesToWorkspaceBucket() map { gcsPath =>
- RequestComplete(OK, s"gs://${gcsPath.bucketName}/${gcsPath.objectName.value}")
+ } ~
+ post {
+ formFields(Symbol("attributeNames").?, Symbol("model").?) { (attributeNamesString, modelString) =>
+ val attributeNames = attributeNamesString.map(_.split(",").toIndexedSeq)
+ val model = if (modelString.nonEmpty && StringUtils.isBlank(modelString.get)) None else modelString
+ val exportArgs = ExportEntitiesByTypeArguments(userInfo,
+ workspaceNamespace,
+ workspaceName,
+ entityType,
+ attributeNames,
+ model
+ )
+ complete {
+ exportEntitiesByTypeConstructor(exportArgs).streamEntitiesToWorkspaceBucket() map { gcsPath =>
+ RequestComplete(OK, s"gs://${gcsPath.bucketName}/${gcsPath.objectName.value}")
+ }
+ }
}
}
- }
}
- }
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiService.scala
index e11967d33..bd0324ce2 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiService.scala
@@ -37,11 +37,11 @@ trait Ga4ghApiService extends FireCloudDirectives {
passthrough(uri, HttpMethods.GET)
}
} ~
- path("tools" / Segment) { (id) =>
+ path("tools" / Segment) { id =>
val targetUri = Uri(s"$agoraGA4GH/tools/$id")
passthrough(targetUri, HttpMethods.GET)
} ~
- path("tools" / Segment / "versions") { (id) =>
+ path("tools" / Segment / "versions") { id =>
val targetUri = Uri(s"$agoraGA4GH/tools/$id/versions")
passthrough(targetUri, HttpMethods.GET)
} ~
@@ -57,9 +57,11 @@ trait Ga4ghApiService extends FireCloudDirectives {
val targetUri = Uri(s"$agoraGA4GH/tools/$id/versions/$versionId/$descriptorType/descriptor")
passthrough(targetUri, HttpMethods.GET)
} ~
- path("tools" / Segment / "versions" / Segment / Segment / "descriptor" / Segment) { (id, versionId, descriptorType, relativePath) =>
- val targetUri = Uri(s"$agoraGA4GH/tools/$id/versions/$versionId/$descriptorType/descriptor/$relativePath")
- passthrough(targetUri, HttpMethods.GET)
+ path("tools" / Segment / "versions" / Segment / Segment / "descriptor" / Segment) {
+ (id, versionId, descriptorType, relativePath) =>
+ val targetUri =
+ Uri(s"$agoraGA4GH/tools/$id/versions/$versionId/$descriptorType/descriptor/$relativePath")
+ passthrough(targetUri, HttpMethods.GET)
} ~
path("tools" / Segment / "versions" / Segment / Segment / "tests") { (id, versionId, descriptorType) =>
val targetUri = Uri(s"$agoraGA4GH/tools/$id/versions/$versionId/$descriptorType/tests")
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/HealthApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/HealthApiService.scala
index 5a86c0de3..a3017b0cc 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/HealthApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/HealthApiService.scala
@@ -12,13 +12,12 @@ trait HealthApiService extends FireCloudDirectives {
implicit val executionContext: ExecutionContext
lazy val log = LoggerFactory.getLogger(getClass)
- val healthServiceRoutes: Route = {
+ val healthServiceRoutes: Route =
path("health") {
complete(OK)
} ~
path("error") {
complete(ServiceUnavailable)
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiService.scala
index fa5d886a3..f955cc137 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiService.scala
@@ -13,9 +13,11 @@ import spray.json.DefaultJsonProtocol._
import scala.jdk.CollectionConverters._
import scala.concurrent.ExecutionContext
-trait LibraryApiService extends FireCloudDirectives
- with StandardUserInfoDirectives with EnabledUserDirectives
- with RestJsonClient {
+trait LibraryApiService
+ extends FireCloudDirectives
+ with StandardUserInfoDirectives
+ with EnabledUserDirectives
+ with RestJsonClient {
implicit val executionContext: ExecutionContext
@@ -26,22 +28,22 @@ trait LibraryApiService extends FireCloudDirectives
val libraryRoutes: Route =
pathPrefix("duos") {
- path("autocomplete" / Segment) { (searchTerm) =>
+ path("autocomplete" / Segment) { searchTerm =>
get {
- complete { ontologyServiceConstructor().autocompleteOntology(searchTerm) }
+ complete(ontologyServiceConstructor().autocompleteOntology(searchTerm))
}
} ~
path("researchPurposeQuery") {
post {
entity(as[ResearchPurposeRequest]) { researchPurposeRequest =>
- complete { ontologyServiceConstructor().buildResearchPurposeQuery(researchPurposeRequest) }
+ complete(ontologyServiceConstructor().buildResearchPurposeQuery(researchPurposeRequest))
}
}
} ~
path("structuredData") {
post {
entity(as[StructuredDataRequest]) { request =>
- complete { ontologyServiceConstructor().buildStructuredUseRestrictionAttribute(request) }
+ complete(ontologyServiceConstructor().buildStructuredUseRestrictionAttribute(request))
}
}
}
@@ -60,90 +62,98 @@ trait LibraryApiService extends FireCloudDirectives
get { requestContext =>
userAuthedRequest(Get(rawlsCuratorUrl))(userInfo).flatMap { response =>
response.status match {
- case OK => requestContext.complete(OK, Curator(true))
+ case OK => requestContext.complete(OK, Curator(true))
case NotFound => requestContext.complete(OK, Curator(false))
- case _ => requestContext.complete(response) // replay the root exception
+ case _ => requestContext.complete(response) // replay the root exception
}
}
}
} ~
- path("groups") {
- pathEndOrSingleSlash {
- get {
- requireEnabledUser(userInfo) {
- complete(OK, FireCloudConfig.ElasticSearch.discoverGroupNames.asScala.toSeq)
- }
- }
- }
- } ~
- pathPrefix(Segment / Segment) { (namespace, name) =>
- path("metadata") {
- put {
- parameter("validate" ? "false") { validationParam =>
- val doValidate = java.lang.Boolean.valueOf(validationParam) // for lenient parsing
- entity(as[String]) { rawAttrsString =>
- complete { libraryServiceConstructor(userInfo).updateLibraryMetadata(namespace, name, rawAttrsString, doValidate) }
- }
- }
- } ~ {
+ path("groups") {
+ pathEndOrSingleSlash {
get {
- complete { libraryServiceConstructor(userInfo).getLibraryMetadata(namespace, name) }
+ requireEnabledUser(userInfo) {
+ complete(OK, FireCloudConfig.ElasticSearch.discoverGroupNames.asScala.toSeq)
+ }
}
}
} ~
- path("discoverableGroups") {
- put {
- entity(as[Seq[String]]) { newGroups =>
- complete { libraryServiceConstructor(userInfo).updateDiscoverableByGroups(namespace, name, newGroups) }
+ pathPrefix(Segment / Segment) { (namespace, name) =>
+ path("metadata") {
+ put {
+ parameter("validate" ? "false") { validationParam =>
+ val doValidate = java.lang.Boolean.valueOf(validationParam) // for lenient parsing
+ entity(as[String]) { rawAttrsString =>
+ complete {
+ libraryServiceConstructor(userInfo).updateLibraryMetadata(namespace,
+ name,
+ rawAttrsString,
+ doValidate
+ )
+ }
+ }
+ }
+ } ~ {
+ get {
+ complete(libraryServiceConstructor(userInfo).getLibraryMetadata(namespace, name))
+ }
}
} ~
- get {
- complete { libraryServiceConstructor(userInfo).getDiscoverableByGroups(namespace, name) }
- }
- } ~
- path("published") {
- post {
- complete { libraryServiceConstructor(userInfo).setWorkspaceIsPublished(namespace, name, true) }
+ path("discoverableGroups") {
+ put {
+ entity(as[Seq[String]]) { newGroups =>
+ complete {
+ libraryServiceConstructor(userInfo).updateDiscoverableByGroups(namespace, name, newGroups)
+ }
+ }
+ } ~
+ get {
+ complete(libraryServiceConstructor(userInfo).getDiscoverableByGroups(namespace, name))
+ }
} ~
- delete {
- complete { libraryServiceConstructor(userInfo).setWorkspaceIsPublished(namespace, name, false) }
+ path("published") {
+ post {
+ complete(libraryServiceConstructor(userInfo).setWorkspaceIsPublished(namespace, name, true))
+ } ~
+ delete {
+ complete(libraryServiceConstructor(userInfo).setWorkspaceIsPublished(namespace, name, false))
+ }
}
- }
- } ~
- path("admin" / "reindex") {
- post {
- complete { libraryServiceConstructor(userInfo).adminIndexAllWorkspaces() }
- }
- } ~
- pathPrefix("search") {
- pathEndOrSingleSlash {
+ } ~
+ path("admin" / "reindex") {
post {
- entity(as[LibrarySearchParams]) { params =>
- complete { libraryServiceConstructor(userInfo).findDocuments(params) }
+ complete(libraryServiceConstructor(userInfo).adminIndexAllWorkspaces())
+ }
+ } ~
+ pathPrefix("search") {
+ pathEndOrSingleSlash {
+ post {
+ entity(as[LibrarySearchParams]) { params =>
+ complete(libraryServiceConstructor(userInfo).findDocuments(params))
+ }
}
}
- }
- } ~
- pathPrefix("suggest") {
- pathEndOrSingleSlash {
- post {
- entity(as[LibrarySearchParams]) { params =>
- complete { libraryServiceConstructor(userInfo).suggest(params) }
+ } ~
+ pathPrefix("suggest") {
+ pathEndOrSingleSlash {
+ post {
+ entity(as[LibrarySearchParams]) { params =>
+ complete(libraryServiceConstructor(userInfo).suggest(params))
+ }
}
}
- }
- } ~
- pathPrefix("populate" / "suggest" / Segment ) { (field) =>
- get {
- requireEnabledUser(userInfo) {
- parameter(Symbol("q")) { text =>
- complete {
- libraryServiceConstructor(userInfo).populateSuggest(field, text)
+ } ~
+ pathPrefix("populate" / "suggest" / Segment) { field =>
+ get {
+ requireEnabledUser(userInfo) {
+ parameter(Symbol("q")) { text =>
+ complete {
+ libraryServiceConstructor(userInfo).populateSuggest(field, text)
+ }
}
}
}
}
- }
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiService.scala
index 3c1c18429..b415c1e8e 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiService.scala
@@ -23,32 +23,44 @@ trait ManagedGroupApiService extends Directives with RequestBuilding with Standa
pathPrefix("groups") {
pathEnd {
get {
- complete { managedGroupServiceConstructor(userInfo).listGroups() }
+ complete(managedGroupServiceConstructor(userInfo).listGroups())
}
} ~
pathPrefix(Segment) { groupName =>
pathEnd {
get {
- complete { managedGroupServiceConstructor(userInfo).listGroupMembers(WorkbenchGroupName(groupName)) }
+ complete(managedGroupServiceConstructor(userInfo).listGroupMembers(WorkbenchGroupName(groupName)))
} ~
- post {
- complete { managedGroupServiceConstructor(userInfo).createGroup(WorkbenchGroupName(groupName)) }
- } ~
- delete {
- complete { managedGroupServiceConstructor(userInfo).deleteGroup(WorkbenchGroupName(groupName)) }
- }
+ post {
+ complete(managedGroupServiceConstructor(userInfo).createGroup(WorkbenchGroupName(groupName)))
+ } ~
+ delete {
+ complete(managedGroupServiceConstructor(userInfo).deleteGroup(WorkbenchGroupName(groupName)))
+ }
} ~
path("requestAccess") {
post {
- complete { managedGroupServiceConstructor(userInfo).requestGroupAccess(WorkbenchGroupName(groupName)) }
+ complete {
+ managedGroupServiceConstructor(userInfo).requestGroupAccess(WorkbenchGroupName(groupName))
+ }
}
} ~
path(Segment / Segment) { (role, email) =>
put {
- complete { managedGroupServiceConstructor(userInfo).addGroupMember(WorkbenchGroupName(groupName), ManagedGroupRoles.withName(role), WorkbenchEmail(email)) }
+ complete {
+ managedGroupServiceConstructor(userInfo).addGroupMember(WorkbenchGroupName(groupName),
+ ManagedGroupRoles.withName(role),
+ WorkbenchEmail(email)
+ )
+ }
} ~
delete {
- complete { managedGroupServiceConstructor(userInfo).removeGroupMember(WorkbenchGroupName(groupName), ManagedGroupRoles.withName(role), WorkbenchEmail(email)) }
+ complete {
+ managedGroupServiceConstructor(userInfo).removeGroupMember(WorkbenchGroupName(groupName),
+ ManagedGroupRoles.withName(role),
+ WorkbenchEmail(email)
+ )
+ }
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiService.scala
index ff0857729..817853b5b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiService.scala
@@ -24,26 +24,78 @@ object MethodConfigurationApiService {
val remoteCopyToMethodRepoConfigPath = FireCloudConfig.Rawls.authPrefix + "/methodconfigs/copyToMethodRepo"
val remoteCopyToMethodRepoConfigUrl = FireCloudConfig.Rawls.baseUrl + remoteCopyToMethodRepoConfigPath
- def remoteMethodConfigPath(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s".format(workspaceNamespace, workspaceName, configNamespace, configName)
- def remoteMethodConfigUrl(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.baseUrl + remoteMethodConfigPath(workspaceNamespace, workspaceName, configNamespace, configName)
+ def remoteMethodConfigPath(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s".format(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ def remoteMethodConfigUrl(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.baseUrl + remoteMethodConfigPath(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
- def remoteMethodConfigRenamePath(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s/rename".format(workspaceNamespace, workspaceName, configNamespace, configName)
- def remoteMethodConfigRenameUrl(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.baseUrl + remoteMethodConfigRenamePath(workspaceNamespace, workspaceName, configNamespace, configName)
+ def remoteMethodConfigRenamePath(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s/rename".format(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ def remoteMethodConfigRenameUrl(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.baseUrl + remoteMethodConfigRenamePath(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
- def remoteMethodConfigValidatePath(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s/validate".format(workspaceNamespace, workspaceName, configNamespace, configName)
- def remoteMethodConfigValidateUrl(workspaceNamespace:String, workspaceName:String, configNamespace:String, configName:String) =
- FireCloudConfig.Rawls.baseUrl + remoteMethodConfigValidatePath(workspaceNamespace, workspaceName, configNamespace, configName)
+ def remoteMethodConfigValidatePath(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.authPrefix + "/workspaces/%s/%s/methodconfigs/%s/%s/validate".format(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ def remoteMethodConfigValidateUrl(workspaceNamespace: String,
+ workspaceName: String,
+ configNamespace: String,
+ configName: String
+ ) =
+ FireCloudConfig.Rawls.baseUrl + remoteMethodConfigValidatePath(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
}
-trait MethodConfigurationApiService extends FireCloudDirectives with SprayJsonSupport with StandardUserInfoDirectives with RestJsonClient {
+trait MethodConfigurationApiService
+ extends FireCloudDirectives
+ with SprayJsonSupport
+ with StandardUserInfoDirectives
+ with RestJsonClient {
- private final val ApiPrefix = "workspaces"
+ final private val ApiPrefix = "workspaces"
lazy val log = LoggerFactory.getLogger(getClass)
val methodConfigurationRoutes: Route = requireUserInfo() { userInfo =>
@@ -62,15 +114,17 @@ trait MethodConfigurationApiService extends FireCloudDirectives with SprayJsonSu
methodRepoName = ingest.configurationName,
methodRepoNamespace = ingest.configurationNamespace,
methodRepoSnapshotId = ingest.configurationSnapshotId,
- destination = Option(MethodConfigurationId(
- name = ingest.destinationName,
- namespace = ingest.destinationNamespace,
- workspaceName = Option(WorkspaceName(
- namespace = workspaceNamespace,
- name = workspaceName)))))
+ destination = Option(
+ MethodConfigurationId(
+ name = ingest.destinationName,
+ namespace = ingest.destinationNamespace,
+ workspaceName = Option(WorkspaceName(namespace = workspaceNamespace, name = workspaceName))
+ )
+ )
+ )
val extReq = Post(MethodConfigurationApiService.remoteCopyFromMethodRepoConfigUrl, copyMethodConfig)
- complete { userAuthedRequest(extReq)(userInfo) }
+ complete(userAuthedRequest(extReq)(userInfo))
}
}
} ~ path("copyToMethodRepo") {
@@ -79,30 +133,56 @@ trait MethodConfigurationApiService extends FireCloudDirectives with SprayJsonSu
val copyMethodConfig = new MethodConfigurationPublish(
methodRepoName = ingest.configurationName,
methodRepoNamespace = ingest.configurationNamespace,
- source = Option(MethodConfigurationId(
- name = ingest.sourceName,
- namespace = ingest.sourceNamespace,
- workspaceName = Option(WorkspaceName(
- namespace = workspaceNamespace,
- name = workspaceName)))))
+ source = Option(
+ MethodConfigurationId(name = ingest.sourceName,
+ namespace = ingest.sourceNamespace,
+ workspaceName =
+ Option(WorkspaceName(namespace = workspaceNamespace, name = workspaceName))
+ )
+ )
+ )
val extReq = Post(MethodConfigurationApiService.remoteCopyToMethodRepoConfigUrl, copyMethodConfig)
- complete { userAuthedRequest(extReq)(userInfo) }
+ complete(userAuthedRequest(extReq)(userInfo))
}
}
} ~ pathPrefix(Segment / Segment) { (configNamespace, configName) =>
pathEnd {
passthrough(
- encodeUri(MethodConfigurationApiService.remoteMethodConfigUrl(workspaceNamespace, workspaceName, configNamespace, configName)),
- HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE)
+ encodeUri(
+ MethodConfigurationApiService.remoteMethodConfigUrl(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ ),
+ HttpMethods.GET,
+ HttpMethods.PUT,
+ HttpMethods.POST,
+ HttpMethods.DELETE
+ )
} ~
path("rename") {
- passthrough(encodeUri(MethodConfigurationApiService.remoteMethodConfigRenameUrl(workspaceNamespace, workspaceName, configNamespace, configName)),
- HttpMethods.POST)
+ passthrough(encodeUri(
+ MethodConfigurationApiService.remoteMethodConfigRenameUrl(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ ),
+ HttpMethods.POST
+ )
} ~
path("validate") {
- passthrough(encodeUri(MethodConfigurationApiService.remoteMethodConfigValidateUrl(workspaceNamespace, workspaceName, configNamespace, configName)),
- HttpMethods.GET)
+ passthrough(encodeUri(
+ MethodConfigurationApiService.remoteMethodConfigValidateUrl(workspaceNamespace,
+ workspaceName,
+ configNamespace,
+ configName
+ )
+ ),
+ HttpMethods.GET
+ )
}
}
}
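Reviewer note: the multi-line argument lists introduced above only re-wrap the existing `String.format` calls; the generated Rawls paths are unchanged. For instance, with hypothetical values and omitting the auth prefix:

    "/workspaces/%s/%s/methodconfigs/%s/%s/rename".format("broad", "ws1", "cfgNs", "cfg1")
    // "/workspaces/broad/ws1/methodconfigs/cfgNs/cfg1/rename"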
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiService.scala
index eb3a28f5d..829a75203 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiService.scala
@@ -30,11 +30,10 @@ trait MethodsApiService extends MethodsApiServiceUrls with FireCloudDirectives w
val agoraPermissionService: UserInfo => AgoraPermissionService
val methodsApiServiceRoutes: Route =
- // routes that are valid for both configurations and methods
- pathPrefix( "configurations|methods".r ) { agoraEntityType =>
-
+ // routes that are valid for both configurations and methods
+ pathPrefix("configurations|methods".r) { agoraEntityType =>
val passthroughBase = agoraEntityType match {
- case "methods" => remoteMethodsUrl
+ case "methods" => remoteMethodsUrl
case "configurations" => remoteConfigurationsUrl
}
@@ -43,37 +42,39 @@ trait MethodsApiService extends MethodsApiServiceUrls with FireCloudDirectives w
extract(_.request.method) { method =>
extract(_.request.uri.query()) { query =>
// only pass query params for GETs
- val targetUri = if (method == HttpMethods.GET)
- Uri(passthroughBase).withQuery(query)
- else
- Uri(passthroughBase)
+ val targetUri =
+ if (method == HttpMethods.GET)
+ Uri(passthroughBase).withQuery(query)
+ else
+ Uri(passthroughBase)
passthrough(targetUri, method)
}
}
}
} ~
- pathPrefix( Segment / Segment / IntNumber ) { (namespace, name, snapshotId) =>
+ pathPrefix(Segment / Segment / IntNumber) { (namespace, name, snapshotId) =>
pathEnd {
(get | delete) {
extract(_.request.method) { method =>
extract(_.request.uri.query()) { query =>
// only pass query params for GETs
val baseUri = Uri(s"$passthroughBase/${urlify(namespace, name)}/$snapshotId")
- val targetUri = if (method == HttpMethods.GET)
- baseUri.withQuery(query)
- else
- baseUri
+ val targetUri =
+ if (method == HttpMethods.GET)
+ baseUri.withQuery(query)
+ else
+ baseUri
passthrough(targetUri, method)
}
}
}
} ~
- path( "permissions") {
+ path("permissions") {
val url = s"$passthroughBase/${urlify(namespace, name)}/$snapshotId/permissions"
get {
requireUserInfo() { userInfo =>
// pass to AgoraPermissionHandler
- complete { agoraPermissionService(userInfo).getAgoraPermission(url) }
+ complete(agoraPermissionService(userInfo).getAgoraPermission(url))
}
} ~
post {
@@ -83,7 +84,10 @@ trait MethodsApiService extends MethodsApiServiceUrls with FireCloudDirectives w
entity(as[List[FireCloudPermission]]) { fireCloudPermissions =>
requireUserInfo() { userInfo =>
complete {
- agoraPermissionService(userInfo).createAgoraPermission(url, fireCloudPermissions.map(_.toAgoraPermission))
+ agoraPermissionService(userInfo).createAgoraPermission(
+ url,
+ fireCloudPermissions.map(_.toAgoraPermission)
+ )
}
}
}
@@ -93,44 +97,48 @@ trait MethodsApiService extends MethodsApiServiceUrls with FireCloudDirectives w
}
} ~
// routes that are only valid for methods
- pathPrefix( "methods" ) {
+ pathPrefix("methods") {
val passthroughBase = remoteMethodsUrl
- path( "definitions" ) {
+ path("definitions") {
get {
passthrough(s"$passthroughBase/definitions", HttpMethods.GET)
}
} ~
- path( "permissions") {
+ path("permissions") {
put {
handleRejections(entityExtractionRejectionHandler) {
entity(as[List[MethodAclPair]]) { fireCloudPermissions =>
- val agoraPermissions = fireCloudPermissions map { fc =>
- EntityAccessControlAgora(Method(fc.method), fc.acls.map(_.toAgoraPermission))
- }
- requireUserInfo() { userInfo =>
- complete { agoraPermissionService(userInfo).batchInsertAgoraPermissions(agoraPermissions) }
- }
+ val agoraPermissions = fireCloudPermissions map { fc =>
+ EntityAccessControlAgora(Method(fc.method), fc.acls.map(_.toAgoraPermission))
+ }
+ requireUserInfo() { userInfo =>
+ complete(agoraPermissionService(userInfo).batchInsertAgoraPermissions(agoraPermissions))
+ }
}
}
}
} ~
- pathPrefix( Segment / Segment ) { (namespace, name) =>
- path( "configurations" ) {
+ pathPrefix(Segment / Segment) { (namespace, name) =>
+ path("configurations") {
get {
- passthrough(s"$passthroughBase/${urlify(namespace,name)}/configurations", HttpMethods.GET)
+ passthrough(s"$passthroughBase/${urlify(namespace, name)}/configurations", HttpMethods.GET)
}
} ~
- pathPrefix( IntNumber ) { snapshotId =>
+ pathPrefix(IntNumber) { snapshotId =>
pathEnd {
post {
extract(_.request.uri.query()) { query =>
- passthrough(Uri(s"$passthroughBase/${urlify(namespace, name)}/$snapshotId").withQuery(query), HttpMethods.POST)
+ passthrough(Uri(s"$passthroughBase/${urlify(namespace, name)}/$snapshotId").withQuery(query),
+ HttpMethods.POST
+ )
}
}
} ~
- path( "configurations" ) {
+ path("configurations") {
get {
- passthrough(s"$passthroughBase/${urlify(namespace,name)}/$snapshotId/configurations", HttpMethods.GET)
+ passthrough(s"$passthroughBase/${urlify(namespace, name)}/$snapshotId/configurations",
+ HttpMethods.GET
+ )
}
}
}
@@ -143,7 +151,7 @@ trait MethodsApiService extends MethodsApiServiceUrls with FireCloudDirectives w
though it's largely untested and there may still be problems. Any entities created
after syntax validation should be just fine and the encoding won't touch them.
*/
- private def urlify(namespace:String, name:String) = enc(namespace) + "/" + enc(name)
- private def enc(in:String) = java.net.URLEncoder.encode(in,"utf-8").replace("+", "%20")
+ private def urlify(namespace: String, name: String) = enc(namespace) + "/" + enc(name)
+ private def enc(in: String) = java.net.URLEncoder.encode(in, "utf-8").replace("+", "%20")
}
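Reviewer note: `enc` and `urlify` are reformatted only; the Agora-compatible encoding is preserved. `URLEncoder` emits `+` for spaces, which is rewritten to `%20` so the result is valid inside a URI path segment. A small illustration with made-up inputs:

    def enc(in: String)                         = java.net.URLEncoder.encode(in, "utf-8").replace("+", "%20")
    def urlify(namespace: String, name: String) = enc(namespace) + "/" + enc(name)

    urlify("my namespace", "my method")  // "my%20namespace/my%20method"
    enc("a/b")                           // "a%2Fb" (slashes inside a segment are escaped too)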
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiService.scala
index 013ab1926..248b8b954 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiService.scala
@@ -21,13 +21,15 @@ trait NamespaceApiService extends Directives with RequestBuilding with StandardU
pathPrefix("api" / "methods|configurations".r / Segment / "permissions") { (agoraEntity, namespace) =>
requireUserInfo() { userInfo =>
get {
- complete { namespaceServiceConstructor(userInfo).getFireCloudPermissions(namespace, agoraEntity) }
+ complete(namespaceServiceConstructor(userInfo).getFireCloudPermissions(namespace, agoraEntity))
} ~
post {
// explicitly pull in the json-extraction error handler from ModelJsonProtocol
handleRejections(entityExtractionRejectionHandler) {
entity(as[List[FireCloudPermission]]) { permissions =>
- complete { namespaceServiceConstructor(userInfo).postFireCloudPermissions(namespace, agoraEntity, permissions) }
+ complete {
+ namespaceServiceConstructor(userInfo).postFireCloudPermissions(namespace, agoraEntity, permissions)
+ }
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiService.scala
index 7dd893ca9..4fa69a788 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiService.scala
@@ -21,11 +21,11 @@ trait NihApiService extends Directives with RequestBuilding with EnabledUserDire
val syncRoute: Route =
path("sync_whitelist" / Segment) { whitelistName =>
post {
- complete { nihServiceConstructor().syncAllowlistAllUsers(whitelistName) }
+ complete(nihServiceConstructor().syncAllowlistAllUsers(whitelistName))
}
} ~ path("sync_whitelist") {
post {
- complete { nihServiceConstructor().syncAllNihAllowlistsAllUsers() }
+ complete(nihServiceConstructor().syncAllNihAllowlistsAllUsers())
}
}
@@ -37,18 +37,20 @@ trait NihApiService extends Directives with RequestBuilding with EnabledUserDire
path("callback") {
post {
entity(as[JWTWrapper]) { jwtWrapper =>
- complete { nihServiceConstructor().updateNihLinkAndSyncSelf(userInfo, jwtWrapper) }
+ complete(nihServiceConstructor().updateNihLinkAndSyncSelf(userInfo, jwtWrapper))
}
}
} ~
- path ("status") {
- complete { nihServiceConstructor().getNihStatus(userInfo) }
- } ~
- path ("account") {
- delete {
- complete { nihServiceConstructor().unlinkNihAccountAndSyncSelf(userInfo).map(_ => StatusCodes.NoContent) }
+ path("status") {
+ complete(nihServiceConstructor().getNihStatus(userInfo))
+ } ~
+ path("account") {
+ delete {
+ complete {
+ nihServiceConstructor().unlinkNihAccountAndSyncSelf(userInfo).map(_ => StatusCodes.NoContent)
+ }
+ }
}
- }
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiService.scala
index db184171d..014af21f3 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiService.scala
@@ -6,13 +6,13 @@ import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.service.FireCloudDirectives
import org.broadinstitute.dsde.firecloud.utils.StandardUserInfoDirectives
-trait NotificationsApiService extends FireCloudDirectives with StandardUserInfoDirectives {
- private final val ApiPrefix = "api/notifications"
- private final val General = "general"
- private final val Workspace = "workspace"
- private final val RawlsNotifications = FireCloudConfig.Rawls.notificationsUrl
+trait NotificationsApiService extends FireCloudDirectives with StandardUserInfoDirectives {
+ final private val ApiPrefix = "api/notifications"
+ final private val General = "general"
+ final private val Workspace = "workspace"
+ final private val RawlsNotifications = FireCloudConfig.Rawls.notificationsUrl
- final val notificationsRoutes: Route = {
+ final val notificationsRoutes: Route =
get {
pathPrefix(separateOnSlashes(ApiPrefix)) {
path(General) {
@@ -23,5 +23,4 @@ trait NotificationsApiService extends FireCloudDirectives with StandardUserInfo
}
}
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/OauthApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/OauthApiService.scala
index afb083194..b94cbeb58 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/OauthApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/OauthApiService.scala
@@ -31,7 +31,7 @@ trait OauthApiService extends FireCloudDirectives with StandardUserInfoDirective
path("api" / "refresh-token-status") {
get {
requireUserInfo() { _ =>
- complete { RequestComplete(StatusCodes.OK, Map("requiresRefresh" -> false)) }
+ complete(RequestComplete(StatusCodes.OK, Map("requiresRefresh" -> false)))
}
}
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiService.scala
index b56319b70..143dcc91e 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiService.scala
@@ -8,11 +8,20 @@ import org.broadinstitute.dsde.firecloud.service.{FireCloudDirectives, RegisterS
import org.broadinstitute.dsde.firecloud.utils.{EnabledUserDirectives, StandardUserInfoDirectives}
import spray.json.DefaultJsonProtocol._
import akka.http.scaladsl.server.Route
-import org.broadinstitute.dsde.firecloud.service.RegisterService.{samTosBaseUrl, samTosDetailsUrl, samTosStatusUrl, samTosTextUrl}
+import org.broadinstitute.dsde.firecloud.service.RegisterService.{
+ samTosBaseUrl,
+ samTosDetailsUrl,
+ samTosStatusUrl,
+ samTosTextUrl
+}
import scala.concurrent.ExecutionContext
-trait RegisterApiService extends FireCloudDirectives with EnabledUserDirectives with RequestBuilding with StandardUserInfoDirectives {
+trait RegisterApiService
+ extends FireCloudDirectives
+ with EnabledUserDirectives
+ with RequestBuilding
+ with StandardUserInfoDirectives {
implicit val executionContext: ExecutionContext
@@ -20,15 +29,15 @@ trait RegisterApiService extends FireCloudDirectives with EnabledUserDirectives
val v1RegisterRoutes: Route =
pathPrefix("users" / "v1" / "registerWithProfile") {
- post {
- requireUserInfo() { userInfo =>
- entity(as[RegisterRequest]) { registerRequest =>
- complete {
- registerServiceConstructor().createUserWithProfile(userInfo, registerRequest)
- }
+ post {
+ requireUserInfo() { userInfo =>
+ entity(as[RegisterRequest]) { registerRequest =>
+ complete {
+ registerServiceConstructor().createUserWithProfile(userInfo, registerRequest)
}
}
}
+ }
}
val registerRoutes: Route =
@@ -37,7 +46,7 @@ trait RegisterApiService extends FireCloudDirectives with EnabledUserDirectives
post {
requireUserInfo() { userInfo =>
entity(as[BasicProfile]) { basicProfile =>
- complete { registerServiceConstructor().createUpdateProfile(userInfo, basicProfile) }
+ complete(registerServiceConstructor().createUpdateProfile(userInfo, basicProfile))
}
}
}
@@ -51,7 +60,7 @@ trait RegisterApiService extends FireCloudDirectives with EnabledUserDirectives
requireUserInfo() { userInfo =>
requireEnabledUser(userInfo) {
entity(as[Map[String, String]]) { preferences =>
- complete { registerServiceConstructor().updateProfilePreferences(userInfo, preferences) }
+ complete(registerServiceConstructor().updateProfilePreferences(userInfo, preferences))
}
}
}
@@ -59,41 +68,40 @@ trait RegisterApiService extends FireCloudDirectives with EnabledUserDirectives
}
}
- val tosRoutes: Route = {
+ val tosRoutes: Route =
pathPrefix("tos") {
path("text") {
passthrough(samTosTextUrl, GET)
}
} ~
- pathPrefix("register" / "user") {
- pathPrefix("v1" / "termsofservice") {
- pathEndOrSingleSlash {
- post {
- requireUserInfo() { _ =>
- passthrough(samTosBaseUrl, POST)
- }
+ pathPrefix("register" / "user") {
+ pathPrefix("v1" / "termsofservice") {
+ pathEndOrSingleSlash {
+ post {
+ requireUserInfo() { _ =>
+ passthrough(samTosBaseUrl, POST)
+ }
+ } ~
+ delete {
+ requireUserInfo() { _ =>
+ passthrough(samTosBaseUrl, DELETE)
+ }
+ }
} ~
- delete {
- requireUserInfo() { _ =>
- passthrough(samTosBaseUrl, DELETE)
+ path("status") {
+ get {
+ requireUserInfo() { _ =>
+ passthrough(samTosStatusUrl, GET)
+ }
+ }
}
- }
} ~
- path("status") {
- get {
- requireUserInfo() { _ =>
- passthrough(samTosStatusUrl, GET)
+ pathPrefix("v2" / "self" / "termsOfServiceDetails") {
+ get {
+ requireUserInfo() { _ =>
+ passthrough(samTosDetailsUrl, GET)
+ }
}
}
- }
- } ~
- pathPrefix("v2" / "self" / "termsOfServiceDetails") {
- get {
- requireUserInfo() { _ =>
- passthrough(samTosDetailsUrl, GET)
- }
- }
}
- }
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiService.scala
index e3972587a..5e515990b 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiService.scala
@@ -8,27 +8,28 @@ import org.broadinstitute.dsde.firecloud.utils.{EnabledUserDirectives, StandardU
import scala.concurrent.ExecutionContext
-trait ShareLogApiService extends FireCloudDirectives
- with StandardUserInfoDirectives with EnabledUserDirectives
- with SprayJsonSupport {
+trait ShareLogApiService
+ extends FireCloudDirectives
+ with StandardUserInfoDirectives
+ with EnabledUserDirectives
+ with SprayJsonSupport {
implicit val executionContext: ExecutionContext
val shareLogServiceConstructor: () => ShareLogService
- val shareLogServiceRoutes: Route = {
+ val shareLogServiceRoutes: Route =
pathPrefix("sharelog") {
- path("sharees" ) {
+ path("sharees") {
get {
parameter("shareType".?) { shareType =>
requireUserInfo() { userInfo =>
- requireEnabledUser(userInfo) {
- complete { shareLogServiceConstructor().getSharees(userInfo.id, shareType.map(ShareType.withName)) }
- }
+ requireEnabledUser(userInfo) {
+ complete(shareLogServiceConstructor().getSharees(userInfo.id, shareType.map(ShareType.withName)))
+ }
}
}
}
}
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StaticNotebooksApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StaticNotebooksApiService.scala
index d8662914c..16eb947cb 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StaticNotebooksApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StaticNotebooksApiService.scala
@@ -14,20 +14,18 @@ trait StaticNotebooksApiService extends FireCloudDirectives with StandardUserInf
val calhounStaticNotebooksRoot: String = FireCloudConfig.StaticNotebooks.baseUrl
val calhounStaticNotebooksURL: String = s"$calhounStaticNotebooksRoot/api/convert"
- val staticNotebooksRoutes: Route = {
+ val staticNotebooksRoutes: Route =
path("staticNotebooks" / "convert") {
requireUserInfo() { userInfo =>
- post {
- requestContext =>
- // call Calhoun and pass its response back to our own caller
- // can't use passthrough() here because that demands a JSON response
- // and we expect this to return text/html
- val extReq = Post(calhounStaticNotebooksURL, requestContext.request.entity)
- userAuthedRequest(extReq)(userInfo).flatMap { resp =>
- requestContext.complete(resp)
- }
+ post { requestContext =>
+ // call Calhoun and pass its response back to our own caller
+ // can't use passthrough() here because that demands a JSON response
+ // and we expect this to return text/html
+ val extReq = Post(calhounStaticNotebooksURL, requestContext.request.entity)
+ userAuthedRequest(extReq)(userInfo).flatMap { resp =>
+ requestContext.complete(resp)
+ }
}
}
}
- }
}
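// Aside (not part of the diff): the comment in StaticNotebooksApiService above explains why the
// Calhoun call bypasses passthrough() -- the upstream returns text/html rather than JSON. A
// minimal, hedged sketch of the same raw-forwarding idea in isolation; the URL and object name
// here are placeholders, not the project's real wiring.
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpMethods, HttpRequest}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route

object RawForwardingSketch {
  implicit val system: ActorSystem = ActorSystem("sketch")

  private val convertUrl = "https://converter.example.org/api/convert" // hypothetical endpoint

  // Forward the caller's entity verbatim and complete with whatever the upstream returns,
  // preserving its content type (e.g. text/html) instead of forcing a JSON unmarshal.
  val route: Route =
    path("convert") {
      post {
        extractRequestEntity { entity =>
          complete(Http().singleRequest(HttpRequest(HttpMethods.POST, convertUrl, entity = entity)))
        }
      }
    }
}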
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiService.scala
index 7b7172777..61fd3a5c3 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiService.scala
@@ -19,22 +19,21 @@ object BuildTimeVersion {
trait StatusApiService extends Directives with RequestBuilding with SprayJsonSupport {
- private final val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
+ final private val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
implicit val executionContext: ExecutionContext
val statusServiceConstructor: () => StatusService
- val statusRoutes: Route = {
+ val statusRoutes: Route =
path("status") {
get {
- complete { statusServiceConstructor().collectStatusInfo() }
+ complete(statusServiceConstructor().collectStatusInfo())
}
} ~
- path( "version") {
+ path("version") {
get { requestContext =>
requestContext.complete(StatusCodes.OK, BuildTimeVersion.versionJson)
}
}
- }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiService.scala
index a59b9e116..328f45a4a 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiService.scala
@@ -6,13 +6,12 @@ import org.broadinstitute.dsde.firecloud.service.FireCloudDirectives
import org.broadinstitute.dsde.firecloud.utils.StreamingPassthrough
trait SubmissionApiService extends FireCloudDirectives with StreamingPassthrough {
- val submissionServiceRoutes: Route = {
+ val submissionServiceRoutes: Route =
pathPrefix("submissions" / "queueStatus") {
streamingPassthrough(submissionQueueStatusUrl)
} ~
- pathPrefix("workspaces" / Segment / Segment / "submissions") { (namespace, name) =>
- // N.B. streamingPassthrough to ".../submissions" also handles ".../submissionsCount"
- streamingPassthrough(s"$workspacesUrl/${escapePathSegment(namespace)}/${escapePathSegment(name)}/submissions")
- }
- }
+ pathPrefix("workspaces" / Segment / Segment / "submissions") { (namespace, name) =>
+ // N.B. streamingPassthrough to ".../submissions" also handles ".../submissionsCount"
+ streamingPassthrough(s"$workspacesUrl/${escapePathSegment(namespace)}/${escapePathSegment(name)}/submissions")
+ }
}
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiService.scala
index ce6d76797..f36fb981f 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiService.scala
@@ -53,7 +53,7 @@ object UserApiService {
// TODO: this should use UserInfoDirectives, not StandardUserInfoDirectives. That would require a refactoring
// of how we create service actors, so I'm pushing that work out to later.
trait UserApiService
- extends FireCloudRequestBuilding
+ extends FireCloudRequestBuilding
with FireCloudDirectives
with StandardUserInfoDirectives
with EnabledUserDirectives
@@ -69,7 +69,6 @@ trait UserApiService
path("me") {
parameter("userDetailsOnly".?) { userDetailsOnly =>
get { requestContext =>
-
// inspect headers for a pre-existing Authorization: header
val authorizationHeader: Option[HttpCredentials] = (requestContext.request.headers collect {
case Authorization(h) => h
@@ -81,123 +80,147 @@ trait UserApiService
respondWithErrorReport(Unauthorized, "No authorization header in request.", requestContext)
// browser sent Authorization header; try to query Sam for user status
case Some(header) =>
-
val version1 = !userDetailsOnly.exists(_.equalsIgnoreCase("true"))
- userAuthedRequest(Get(UserApiService.samRegisterUserInfoURL))(AccessToken(header.token())).flatMap { response =>
- handleSamResponse(response, requestContext, version1)
+ userAuthedRequest(Get(UserApiService.samRegisterUserInfoURL))(AccessToken(header.token())).flatMap {
+ response =>
+ handleSamResponse(response, requestContext, version1)
} recoverWith {
// we couldn't reach Sam (within timeout period). Respond with a Service Unavailable error.
- case error: Throwable => respondWithErrorReport(ServiceUnavailable, "Identity service did not produce a timely response, please try again later.", error, requestContext)
+ case error: Throwable =>
+ respondWithErrorReport(ServiceUnavailable,
+ "Identity service did not produce a timely response, please try again later.",
+ error,
+ requestContext
+ )
}
}
}
}
} ~
- pathPrefix("api") {
- pathPrefix("profile" / "billing") {
- pathEnd {
- get {
- passthrough(UserApiService.billingUrl, HttpMethods.GET)
- }
+ pathPrefix("api") {
+ pathPrefix("profile" / "billing") {
+ pathEnd {
+ get {
+ passthrough(UserApiService.billingUrl, HttpMethods.GET)
+ }
+ } ~
+ path(Segment) { projectName =>
+ get {
+ passthrough(UserApiService.billingProjectUrl(projectName), HttpMethods.GET)
+ }
+ }
} ~
- path(Segment) { projectName =>
+ path("profile" / "billingAccounts") {
get {
- passthrough(UserApiService.billingProjectUrl(projectName), HttpMethods.GET)
+ passthrough(UserApiService.billingAccountsUrl, HttpMethods.GET)
}
- }
- } ~
- path("profile" / "billingAccounts") {
- get {
- passthrough(UserApiService.billingAccountsUrl, HttpMethods.GET)
- }
- } ~
- path("profile" / "importstatus") {
- get {
- requireUserInfo() { userInfo =>
- complete { userServiceConstructor(userInfo).importPermission() }
- }
- }
- } ~
- path("profile" / "terra") {
- requireUserInfo() { userInfo =>
- requireEnabledUser(userInfo) {
+ } ~
+ path("profile" / "importstatus") {
get {
- complete { userServiceConstructor(userInfo).getTerraPreference }
- } ~
- post {
- complete { userServiceConstructor(userInfo).setTerraPreference() }
- } ~
- delete {
- complete { userServiceConstructor(userInfo).deleteTerraPreference() }
+ requireUserInfo() { userInfo =>
+ complete(userServiceConstructor(userInfo).importPermission())
+ }
+ }
+ } ~
+ path("profile" / "terra") {
+ requireUserInfo() { userInfo =>
+ requireEnabledUser(userInfo) {
+ get {
+ complete(userServiceConstructor(userInfo).getTerraPreference)
+ } ~
+ post {
+ complete(userServiceConstructor(userInfo).setTerraPreference())
+ } ~
+ delete {
+ complete(userServiceConstructor(userInfo).deleteTerraPreference())
+ }
+ }
+ }
+ } ~
+ pathPrefix("proxyGroup") {
+ path(Segment) { email =>
+ passthrough(UserApiService.samUserProxyGroupURL(email), HttpMethods.GET)
}
}
- }
} ~
- pathPrefix("proxyGroup") {
- path(Segment) { email =>
- passthrough(UserApiService.samUserProxyGroupURL(email), HttpMethods.GET)
- }
- }
- } ~
- pathPrefix("register") {
- pathEnd {
- get {
- passthrough(UserApiService.samRegisterUserURL, HttpMethods.GET)
- }
- } ~
- path("userinfo") {
- requireUserInfo() { userInfo =>
- complete { userServiceConstructor(userInfo).getUserProfileGoogle }
- }
- } ~
- pathPrefix("profile") {
- // GET /profile - get all keys for current user
+ pathPrefix("register") {
pathEnd {
get {
+ passthrough(UserApiService.samRegisterUserURL, HttpMethods.GET)
+ }
+ } ~
+ path("userinfo") {
requireUserInfo() { userInfo =>
- complete {
- userServiceConstructor(userInfo).getAllUserKeys
+ complete(userServiceConstructor(userInfo).getUserProfileGoogle)
+ }
+ } ~
+ pathPrefix("profile") {
+ // GET /profile - get all keys for current user
+ pathEnd {
+ get {
+ requireUserInfo() { userInfo =>
+ complete {
+ userServiceConstructor(userInfo).getAllUserKeys
+ }
+ }
}
}
}
- }
}
- }
- private def respondWithErrorReport(statusCode: StatusCode, message: String, requestContext: RequestContext): Future[RouteResult] = {
- requestContext.complete(statusCode, ErrorReport(statusCode=statusCode, message=message))
- }
+ private def respondWithErrorReport(statusCode: StatusCode,
+ message: String,
+ requestContext: RequestContext
+ ): Future[RouteResult] =
+ requestContext.complete(statusCode, ErrorReport(statusCode = statusCode, message = message))
- private def respondWithErrorReport(statusCode: StatusCode, message: String, error: Throwable, requestContext: RequestContext): Future[RouteResult] = {
+ private def respondWithErrorReport(statusCode: StatusCode,
+ message: String,
+ error: Throwable,
+ requestContext: RequestContext
+ ): Future[RouteResult] =
requestContext.complete(statusCode, ErrorReport(statusCode = statusCode, message = message, throwable = error))
- }
- private def handleSamResponse(response: HttpResponse, requestContext: RequestContext, version1: Boolean): Future[RouteResult] = {
+ private def handleSamResponse(response: HttpResponse,
+ requestContext: RequestContext,
+ version1: Boolean
+ ): Future[RouteResult] =
response.status match {
// Sam rejected our request. User is either invalid or their token timed out; this is truly unauthorized
case Unauthorized =>
- respondWithErrorReport(Unauthorized, "Request rejected by identity service - invalid user or expired token.", requestContext)
+ respondWithErrorReport(Unauthorized,
+ "Request rejected by identity service - invalid user or expired token.",
+ requestContext
+ )
// Sam 404 means the user is not registered with FireCloud
case NotFound =>
respondWithErrorReport(NotFound, "FireCloud user registration not found.", requestContext)
// Sam error? boo. All we can do is respond with an error.
case InternalServerError =>
- respondWithErrorReport(InternalServerError, "Identity service encountered an unknown error, please try again.", requestContext)
+ respondWithErrorReport(InternalServerError,
+ "Identity service encountered an unknown error, please try again.",
+ requestContext
+ )
// Sam found the user; we'll try to parse the response and inspect it
case OK =>
Unmarshal(response).to[RegistrationInfoV2].flatMap { regInfo =>
handleOkResponse(regInfo, requestContext, version1)
- } recoverWith {
- case error: Throwable => respondWithErrorReport(InternalServerError, "Received unparseable response from identity service.", requestContext)
+ } recoverWith { case error: Throwable =>
+ respondWithErrorReport(InternalServerError,
+ "Received unparseable response from identity service.",
+ requestContext
+ )
}
case x =>
// if we get any other error from Sam, pass that error on
respondWithErrorReport(x.intValue, "Unexpected response validating registration: " + x.toString, requestContext)
}
- }
- private def handleOkResponse(regInfo: RegistrationInfoV2, requestContext: RequestContext, version1: Boolean): Future[RouteResult] = {
+ private def handleOkResponse(regInfo: RegistrationInfoV2,
+ requestContext: RequestContext,
+ version1: Boolean
+ ): Future[RouteResult] =
if (regInfo.enabled) {
if (version1) {
respondWithUserDiagnostics(regInfo, requestContext)
@@ -207,29 +230,40 @@ trait UserApiService
} else {
respondWithErrorReport(Forbidden, "FireCloud user not activated.", requestContext)
}
- }
- private def respondWithUserDiagnostics(regInfo: RegistrationInfoV2, requestContext: RequestContext): Future[RouteResult] = {
- val authorizationHeader: HttpCredentials = (requestContext.request.headers collect {
- case Authorization(h) => h
- }).head //if we've gotten here, the header already exists. Will instead pass it through since that's "safer", TODO
-
- userAuthedRequest(Get(UserApiService.samRegisterUserDiagnosticsURL))(AccessToken(authorizationHeader.token())).flatMap { response =>
- response.status match {
- case InternalServerError =>
- respondWithErrorReport(InternalServerError, "Identity service encountered an unknown error, please try again.", requestContext)
- case OK =>
- Unmarshal(response).to[WorkbenchEnabledV2].flatMap { diagnostics =>
- if (diagnostics.inAllUsersGroup && diagnostics.inGoogleProxyGroup) {
- val v1RegInfo = RegistrationInfo(WorkbenchUserInfo(regInfo.userSubjectId, regInfo.userEmail), WorkbenchEnabled(diagnostics.inGoogleProxyGroup, diagnostics.enabled, diagnostics.inAllUsersGroup))
- requestContext.complete(OK, v1RegInfo)
- } else {
- respondWithErrorReport(Forbidden, "FireCloud user not activated.", requestContext)
+ private def respondWithUserDiagnostics(regInfo: RegistrationInfoV2,
+ requestContext: RequestContext
+ ): Future[RouteResult] = {
+ val authorizationHeader: HttpCredentials = (requestContext.request.headers collect { case Authorization(h) =>
+ h
+ }).head // if we've gotten here, the header already exists. Will instead pass it through since that's "safer", TODO
+
+ userAuthedRequest(Get(UserApiService.samRegisterUserDiagnosticsURL))(AccessToken(authorizationHeader.token()))
+ .flatMap { response =>
+ response.status match {
+ case InternalServerError =>
+ respondWithErrorReport(InternalServerError,
+ "Identity service encountered an unknown error, please try again.",
+ requestContext
+ )
+ case OK =>
+ Unmarshal(response).to[WorkbenchEnabledV2].flatMap { diagnostics =>
+ if (diagnostics.inAllUsersGroup && diagnostics.inGoogleProxyGroup) {
+ val v1RegInfo = RegistrationInfo(
+ WorkbenchUserInfo(regInfo.userSubjectId, regInfo.userEmail),
+ WorkbenchEnabled(diagnostics.inGoogleProxyGroup, diagnostics.enabled, diagnostics.inAllUsersGroup)
+ )
+ requestContext.complete(OK, v1RegInfo)
+ } else {
+ respondWithErrorReport(Forbidden, "FireCloud user not activated.", requestContext)
+ }
}
- }
- case x =>
- respondWithErrorReport(x.intValue, "Unexpected response validating registration: " + x.toString, requestContext)
+ case x =>
+ respondWithErrorReport(x.intValue,
+ "Unexpected response validating registration: " + x.toString,
+ requestContext
+ )
+ }
}
- }
}
}
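// Aside (not part of the diff): handleSamResponse above maps Sam's HTTP status onto a
// user-facing message before wrapping it in an ErrorReport. The same mapping in isolation,
// assuming only the status code drives the choice; the object name is a placeholder.
import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.model.StatusCodes._

object SamStatusMessageSketch {
  def messageFor(status: StatusCode): String = status match {
    case Unauthorized        => "Request rejected by identity service - invalid user or expired token."
    case NotFound            => "FireCloud user registration not found."
    case InternalServerError => "Identity service encountered an unknown error, please try again."
    case OK                  => "User found; the response body is unmarshalled and inspected further."
    case other               => "Unexpected response validating registration: " + other.toString
  }
}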
diff --git a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiService.scala b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiService.scala
index cfa18f770..d283600ca 100644
--- a/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiService.scala
+++ b/src/main/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiService.scala
@@ -11,7 +11,12 @@ import org.broadinstitute.dsde.firecloud.dataaccess.LegacyFileTypes.FILETYPE_PFB
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.PerRequest.RequestComplete
-import org.broadinstitute.dsde.firecloud.service.{FireCloudDirectives, FireCloudRequestBuilding, PermissionReportService, WorkspaceService}
+import org.broadinstitute.dsde.firecloud.service.{
+ FireCloudDirectives,
+ FireCloudRequestBuilding,
+ PermissionReportService,
+ WorkspaceService
+}
import org.broadinstitute.dsde.firecloud.utils.StandardUserInfoDirectives
import org.broadinstitute.dsde.firecloud.{EntityService, FireCloudConfig}
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
@@ -27,7 +32,7 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
implicit val executionContext: ExecutionContext
- private final val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
+ final private val dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
lazy val log: Logger = LoggerFactory.getLogger(getClass)
lazy val rawlsWorkspacesRoot: String = FireCloudConfig.Rawls.workspacesUrl
@@ -40,13 +45,15 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
val workspaceRoutes: Route =
pathPrefix("cookie-authed") {
- path("workspaces" / Segment / Segment / "exportAttributesTSV") {
- (workspaceNamespace, workspaceName) =>
- cookie("FCtoken") { tokenCookie =>
- mapRequest(r => addCredentials(OAuth2BearerToken(tokenCookie.value)).apply(r)) {
- complete { workspaceServiceConstructor(new AccessToken(OAuth2BearerToken(tokenCookie.value))).exportWorkspaceAttributesTSV(workspaceNamespace, workspaceName, workspaceName + filename) }
+ path("workspaces" / Segment / Segment / "exportAttributesTSV") { (workspaceNamespace, workspaceName) =>
+ cookie("FCtoken") { tokenCookie =>
+ mapRequest(r => addCredentials(OAuth2BearerToken(tokenCookie.value)).apply(r)) {
+ complete {
+ workspaceServiceConstructor(new AccessToken(OAuth2BearerToken(tokenCookie.value)))
+ .exportWorkspaceAttributesTSV(workspaceNamespace, workspaceName, workspaceName + filename)
}
}
+ }
}
} ~
path("version" / "executionEngine") {
@@ -68,7 +75,7 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
val baseUri = Uri(rawlsWorkspacesRoot + "/tags")
val uri = queryString match {
case Some(query) => baseUri.withQuery(Query(("q", query)))
- case None => baseUri
+ case None => baseUri
}
passthrough(uri.toString, HttpMethods.GET)
}
@@ -87,7 +94,9 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
} ~
delete {
requireUserInfo() { userInfo =>
- complete { workspaceServiceConstructor(userInfo).deleteWorkspace(workspaceNamespace, workspaceName) }
+ complete {
+ workspaceServiceConstructor(userInfo).deleteWorkspace(workspaceNamespace, workspaceName)
+ }
}
}
} ~
@@ -102,11 +111,19 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
post {
requireUserInfo() { userInfo =>
entity(as[MethodConfiguration]) { methodConfig =>
- if (!methodConfig.outputs.exists { param => param._2.value.startsWith("this.library:") || param._2.value.startsWith("workspace.library:")}) {
+ if (
+ !methodConfig.outputs.exists { param =>
+ param._2.value
+ .startsWith("this.library:") || param._2.value.startsWith("workspace.library:")
+ }
+ ) {
val passthroughReq = Post(workspacePath + "/methodconfigs", methodConfig)
- complete { userAuthedRequest(passthroughReq)(userInfo) }
+ complete(userAuthedRequest(passthroughReq)(userInfo))
} else {
- complete(StatusCodes.Forbidden, ErrorReport("Methods and configurations can not create or modify library attributes"))
+ complete(
+ StatusCodes.Forbidden,
+ ErrorReport("Methods and configurations can not create or modify library attributes")
+ )
}
}
}
@@ -117,12 +134,18 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
requireUserInfo() { userInfo =>
parameter("async" ? "false") { asyncStr =>
parameter("deleteEmptyValues" ? "false") { deleteEmptyValuesStr =>
-
formFields(Symbol("entities")) { entitiesTSV =>
complete {
val isAsync = java.lang.Boolean.valueOf(asyncStr) // for lenient parsing
- val deleteEmptyValues = java.lang.Boolean.valueOf(deleteEmptyValuesStr) // for lenient parsing
- entityServiceConstructor(FlexibleModelSchema).importEntitiesFromTSV(workspaceNamespace, workspaceName, entitiesTSV, userInfo, isAsync, deleteEmptyValues)
+ val deleteEmptyValues =
+ java.lang.Boolean.valueOf(deleteEmptyValuesStr) // for lenient parsing
+ entityServiceConstructor(FlexibleModelSchema).importEntitiesFromTSV(workspaceNamespace,
+ workspaceName,
+ entitiesTSV,
+ userInfo,
+ isAsync,
+ deleteEmptyValues
+ )
}
}
@@ -137,8 +160,15 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
parameter("deleteEmptyValues" ? "false") { deleteEmptyValuesStr =>
formFields(Symbol("entities")) { entitiesTSV =>
complete {
- val deleteEmptyValues = java.lang.Boolean.valueOf(deleteEmptyValuesStr) // for lenient parsing
- entityServiceConstructor(FirecloudModelSchema).importEntitiesFromTSV(workspaceNamespace, workspaceName, entitiesTSV, userInfo, deleteEmptyValues = deleteEmptyValues)
+ val deleteEmptyValues =
+ java.lang.Boolean.valueOf(deleteEmptyValuesStr) // for lenient parsing
+ entityServiceConstructor(FirecloudModelSchema).importEntitiesFromTSV(workspaceNamespace,
+ workspaceName,
+ entitiesTSV,
+ userInfo,
+ deleteEmptyValues =
+ deleteEmptyValues
+ )
}
}
}
@@ -152,7 +182,13 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
// this endpoint does not accept a filetype. We hardcode the filetype to "pfb".
entity(as[PFBImportRequest]) { pfbRequest =>
val importRequest = AsyncImportRequest(pfbRequest.url, FILETYPE_PFB)
- complete { entityServiceConstructor(FlexibleModelSchema).importJob(workspaceNamespace, workspaceName, importRequest, userInfo) }
+ complete {
+ entityServiceConstructor(FlexibleModelSchema).importJob(workspaceNamespace,
+ workspaceName,
+ importRequest,
+ userInfo
+ )
+ }
}
}
}
@@ -161,18 +197,28 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
post {
requireUserInfo() { userInfo =>
entity(as[AsyncImportRequest]) { importRequest =>
- complete { entityServiceConstructor(FlexibleModelSchema).importJob(workspaceNamespace, workspaceName, importRequest, userInfo) }
+ complete {
+ entityServiceConstructor(FlexibleModelSchema).importJob(workspaceNamespace,
+ workspaceName,
+ importRequest,
+ userInfo
+ )
+ }
}
}
}
} ~
// GET importPFB is deprecated; use GET importJob instead
- path(("importPFB" | "importJob")) {
+ path("importPFB" | "importJob") {
get {
requireUserInfo() { userInfo =>
parameter(Symbol("running_only").as[Boolean].withDefault(false)) { runningOnly =>
complete {
- entityServiceConstructor(FlexibleModelSchema).listJobs(workspaceNamespace, workspaceName, runningOnly, userInfo) map { respBody =>
+ entityServiceConstructor(FlexibleModelSchema).listJobs(workspaceNamespace,
+ workspaceName,
+ runningOnly,
+ userInfo
+ ) map { respBody =>
RequestComplete(OK, respBody)
}
}
@@ -185,7 +231,11 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
get {
requireUserInfo() { userInfo =>
complete {
- entityServiceConstructor(FlexibleModelSchema).getJob(workspaceNamespace, workspaceName, jobId, userInfo) map { respBody =>
+ entityServiceConstructor(FlexibleModelSchema).getJob(workspaceNamespace,
+ workspaceName,
+ jobId,
+ userInfo
+ ) map { respBody =>
RequestComplete(OK, respBody)
}
}
@@ -196,7 +246,12 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
patch {
requireUserInfo() { userInfo: UserInfo =>
entity(as[Seq[AttributeUpdateOperation]]) { replacementAttributes =>
- complete { workspaceServiceConstructor(userInfo).updateWorkspaceAttributes(workspaceNamespace, workspaceName, replacementAttributes) }
+ complete {
+ workspaceServiceConstructor(userInfo).updateWorkspaceAttributes(workspaceNamespace,
+ workspaceName,
+ replacementAttributes
+ )
+ }
}
}
}
@@ -204,9 +259,15 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
path("setAttributes") {
patch {
requireUserInfo() { userInfo =>
- implicit val impAttributeFormat: AttributeFormat = new AttributeFormat with PlainArrayAttributeListSerializer
+ implicit val impAttributeFormat: AttributeFormat = new AttributeFormat
+ with PlainArrayAttributeListSerializer
entity(as[AttributeMap]) { newAttributes =>
- complete { workspaceServiceConstructor(userInfo).setWorkspaceAttributes(workspaceNamespace, workspaceName, newAttributes) }
+ complete {
+ workspaceServiceConstructor(userInfo).setWorkspaceAttributes(workspaceNamespace,
+ workspaceName,
+ newAttributes
+ )
+ }
}
}
}
@@ -214,7 +275,12 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
path("exportAttributesTSV") {
get {
requireUserInfo() { userInfo =>
- complete { workspaceServiceConstructor(userInfo).exportWorkspaceAttributesTSV(workspaceNamespace, workspaceName, workspaceName + filename) }
+ complete {
+ workspaceServiceConstructor(userInfo).exportWorkspaceAttributesTSV(workspaceNamespace,
+ workspaceName,
+ workspaceName + filename
+ )
+ }
}
}
} ~
@@ -222,7 +288,12 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
post {
requireUserInfo() { userInfo =>
formFields(Symbol("attributes")) { attributesTSV =>
- complete { workspaceServiceConstructor(userInfo).importAttributesFromTSV(workspaceNamespace, workspaceName, attributesTSV) }
+ complete {
+ workspaceServiceConstructor(userInfo).importAttributesFromTSV(workspaceNamespace,
+ workspaceName,
+ attributesTSV
+ )
+ }
}
}
}
@@ -232,7 +303,16 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
requireUserInfo() { userInfo =>
parameter(Symbol("inviteUsersNotFound").?) { inviteUsersNotFound =>
entity(as[List[WorkspaceACLUpdate]]) { aclUpdates =>
- complete { workspaceServiceConstructor(userInfo).updateWorkspaceACL(workspaceNamespace, workspaceName, aclUpdates, userInfo.userEmail, userInfo.id, inviteUsersNotFound.getOrElse("false").toBoolean) }
+ complete {
+ workspaceServiceConstructor(userInfo).updateWorkspaceACL(
+ workspaceNamespace,
+ workspaceName,
+ aclUpdates,
+ userInfo.userEmail,
+ userInfo.id,
+ inviteUsersNotFound.getOrElse("false").toBoolean
+ )
+ }
}
}
}
@@ -246,13 +326,21 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
path("catalog") {
get {
requireUserInfo() { userInfo =>
- complete { workspaceServiceConstructor(userInfo).getCatalog(workspaceNamespace, workspaceName, userInfo) }
+ complete {
+ workspaceServiceConstructor(userInfo).getCatalog(workspaceNamespace, workspaceName, userInfo)
+ }
}
} ~
patch {
requireUserInfo() { userInfo =>
entity(as[Seq[WorkspaceCatalog]]) { updates =>
- complete { workspaceServiceConstructor(userInfo).updateCatalog(workspaceNamespace, workspaceName, updates, userInfo) }
+ complete {
+ workspaceServiceConstructor(userInfo).updateCatalog(workspaceNamespace,
+ workspaceName,
+ updates,
+ userInfo
+ )
+ }
}
}
}
@@ -287,8 +375,17 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
requireUserInfo() { userInfo =>
entity(as[WorkspaceRequest]) { createRequest =>
// the only reason this is not a passthrough is because library needs to overwrite any publish and discoverableByGroups values
- val cloneRequest = createRequest.copy(attributes = createRequest.attributes + (AttributeName("library","published") -> AttributeBoolean(false)) + (AttributeName("library","discoverableByGroups") -> AttributeValueEmptyList))
- complete { workspaceServiceConstructor(userInfo).cloneWorkspace(workspaceNamespace, workspaceName, cloneRequest) }
+ val cloneRequest = createRequest.copy(attributes =
+ createRequest.attributes + (AttributeName("library", "published") -> AttributeBoolean(
+ false
+ )) + (AttributeName("library", "discoverableByGroups") -> AttributeValueEmptyList)
+ )
+ complete {
+ workspaceServiceConstructor(userInfo).cloneWorkspace(workspaceNamespace,
+ workspaceName,
+ cloneRequest
+ )
+ }
}
}
}
@@ -309,28 +406,36 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
path("storageCostEstimate") {
get {
requireUserInfo() { userInfo =>
- complete { workspaceServiceConstructor(userInfo).getStorageCostEstimate(workspaceNamespace, workspaceName) }
+ complete {
+ workspaceServiceConstructor(userInfo).getStorageCostEstimate(workspaceNamespace, workspaceName)
+ }
}
}
} ~
path("tags") {
requireUserInfo() { userInfo =>
get {
- complete { workspaceServiceConstructor(userInfo).getTags(workspaceNamespace, workspaceName) }
+ complete(workspaceServiceConstructor(userInfo).getTags(workspaceNamespace, workspaceName))
} ~
put {
entity(as[List[String]]) { tags =>
- complete { workspaceServiceConstructor(userInfo).putTags(workspaceNamespace, workspaceName, tags) }
+ complete {
+ workspaceServiceConstructor(userInfo).putTags(workspaceNamespace, workspaceName, tags)
+ }
}
} ~
patch {
entity(as[List[String]]) { tags =>
- complete { workspaceServiceConstructor(userInfo).patchTags(workspaceNamespace, workspaceName, tags) }
+ complete {
+ workspaceServiceConstructor(userInfo).patchTags(workspaceNamespace, workspaceName, tags)
+ }
}
} ~
delete {
entity(as[List[String]]) { tags =>
- complete { workspaceServiceConstructor(userInfo).deleteTags(workspaceNamespace, workspaceName, tags) }
+ complete {
+ workspaceServiceConstructor(userInfo).deleteTags(workspaceNamespace, workspaceName, tags)
+ }
}
}
}
@@ -339,7 +444,12 @@ trait WorkspaceApiService extends FireCloudRequestBuilding with FireCloudDirecti
requireUserInfo() { userInfo =>
post {
entity(as[PermissionReportRequest]) { reportInput =>
- complete { permissionReportServiceConstructor(userInfo).getPermissionReport(workspaceNamespace, workspaceName, reportInput) }
+ complete {
+ permissionReportServiceConstructor(userInfo).getPermissionReport(workspaceNamespace,
+ workspaceName,
+ reportInput
+ )
+ }
}
}
}
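// Aside (not part of the diff): the methodconfigs POST above rejects configurations whose
// outputs write to library attributes. A hedged sketch of that guard over a plain
// Map[String, String] of output expressions (the real route reads param._2.value from a
// MethodConfiguration); names here are placeholders.
object LibraryOutputGuardSketch {
  private val forbiddenPrefixes = Seq("this.library:", "workspace.library:")

  def writesLibraryAttributes(outputs: Map[String, String]): Boolean =
    outputs.values.exists(expr => forbiddenPrefixes.exists(expr.startsWith))

  // This configuration would be met with 403 Forbidden by the route above.
  val rejected: Boolean = writesLibraryAttributes(Map("out1" -> "this.library:reads"))
}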
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/EntityServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/EntityServiceSpec.scala
index 15a15b0bf..b447c7e53 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/EntityServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/EntityServiceSpec.scala
@@ -8,7 +8,19 @@ import org.broadinstitute.dsde.firecloud.dataaccess.LegacyFileTypes.FILETYPE_RAW
import org.broadinstitute.dsde.firecloud.dataaccess.{MockCwdsDAO, MockRawlsDAO}
import org.broadinstitute.dsde.firecloud.mock.MockGoogleServicesDAO
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
-import org.broadinstitute.dsde.firecloud.model.{AsyncImportRequest, AsyncImportResponse, EntityUpdateDefinition, FirecloudModelSchema, ImportOptions, CwdsListResponse, CwdsResponse, ModelSchema, RequestCompleteWithErrorReport, UserInfo, WithAccessToken}
+import org.broadinstitute.dsde.firecloud.model.{
+ AsyncImportRequest,
+ AsyncImportResponse,
+ CwdsListResponse,
+ CwdsResponse,
+ EntityUpdateDefinition,
+ FirecloudModelSchema,
+ ImportOptions,
+ ModelSchema,
+ RequestCompleteWithErrorReport,
+ UserInfo,
+ WithAccessToken
+}
import org.broadinstitute.dsde.firecloud.service.PerRequest.RequestComplete
import org.broadinstitute.dsde.firecloud.service.{BaseServiceSpec, PerRequest}
import org.broadinstitute.dsde.rawls.model.{ErrorReport, ErrorReportSource, WorkspaceName}
@@ -34,16 +46,14 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
implicit val errorReportSource: ErrorReportSource = ErrorReportSource("EntityServiceSpec")
- override def beforeEach(): Unit = {
+ override def beforeEach(): Unit =
searchDao.reset()
- }
- override def afterEach(): Unit = {
+ override def afterEach(): Unit =
searchDao.reset()
- }
private def dummyUserInfo(tokenStr: String) = UserInfo("dummy", OAuth2BearerToken(tokenStr), -1, "dummy")
-
+
"EntityService.importEntitiesFromTSV()" - {
val tsvParticipants = FileUtils.readAllTextFromResource("testfiles/tsv/ADD_PARTICIPANTS.txt")
val tsvMembership = FileUtils.readAllTextFromResource("testfiles/tsv/MEMBERSHIP_SAMPLE_SET.tsv")
@@ -53,116 +63,131 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val userToken: UserInfo = UserInfo("me@me.com", OAuth2BearerToken(""), 3600, "111")
// (tsvType, tsvData)
- val asyncTSVs = List(
- ("upsert", tsvParticipants),
- ("membership", tsvMembership),
- ("update", tsvUpdate))
-
- asyncTSVs foreach {
- case (tsvType, tsvData) =>
- s"should return Accepted with an import jobId for (async=true + $tsvType TSV)" in {
- val testCwdsDao = new SuccessfulCwdsDAO
- val entityService = getEntityService(cwdsDAO = testCwdsDao)
- val response =
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvData, userToken, isAsync = true).futureValue
-
- val rqResponse = response.asInstanceOf[RequestComplete[(StatusCode, AsyncImportResponse)]]
- val expectedJobId = testCwdsDao.successDefinition.getJobId.toString
- val expectedUriPrefix = "gs://cwds-testconf-bucketname/to-cwds/" + MockRawlsDAO.mockWorkspaceId + "/"
- rqResponse.response match {
- case (status, asyncImportResponse) =>
- status shouldBe StatusCodes.Accepted
- asyncImportResponse.jobId shouldNot be(empty)
- asyncImportResponse.url should startWith(expectedUriPrefix)
- asyncImportResponse.workspace shouldEqual WorkspaceName("workspaceNamespace", "workspaceName")
- }
+ val asyncTSVs = List(("upsert", tsvParticipants), ("membership", tsvMembership), ("update", tsvUpdate))
+
+ asyncTSVs foreach { case (tsvType, tsvData) =>
+ s"should return Accepted with an import jobId for (async=true + $tsvType TSV)" in {
+ val testCwdsDao = new SuccessfulCwdsDAO
+ val entityService = getEntityService(cwdsDAO = testCwdsDao)
+ val response =
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvData, userToken, isAsync = true)
+ .futureValue
+
+ val rqResponse = response.asInstanceOf[RequestComplete[(StatusCode, AsyncImportResponse)]]
+ val expectedJobId = testCwdsDao.successDefinition.getJobId.toString
+ val expectedUriPrefix = "gs://cwds-testconf-bucketname/to-cwds/" + MockRawlsDAO.mockWorkspaceId + "/"
+ rqResponse.response match {
+ case (status, asyncImportResponse) =>
+ status shouldBe StatusCodes.Accepted
+ asyncImportResponse.jobId shouldNot be(empty)
+ asyncImportResponse.url should startWith(expectedUriPrefix)
+ asyncImportResponse.workspace shouldEqual WorkspaceName("workspaceNamespace", "workspaceName")
}
+ }
}
// (tsvType, expectedEntityType, tsvData)
- val goodTSVs = List(
- ("upsert", "participant", tsvParticipants),
- ("membership", "sample_set", tsvMembership),
- ("update", "sample", tsvUpdate))
-
- goodTSVs foreach {
- case (tsvType, expectedEntityType, tsvData) =>
- s"should return OK with the entity type for (async=false + $tsvType TSV)" in {
- val entityService = getEntityService()
- val response =
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvData, userToken).futureValue // isAsync defaults to false, so we omit it here
- response shouldBe RequestComplete(StatusCodes.OK, expectedEntityType)
- }
-
- s"should call the appropriate upsert/update method for (async=false + $tsvType TSV)" in {
- val mockedRawlsDAO = mockito[MockRawlsDAO] // mocking the mock
- when(mockedRawlsDAO.batchUpdateEntities(any[String], any[String], any[String],
- any[Seq[EntityUpdateDefinition]])(any[UserInfo]))
- .thenReturn(Future.successful(HttpResponse(StatusCodes.NoContent)))
-
- when(mockedRawlsDAO.batchUpsertEntities(any[String], any[String], any[String],
- any[Seq[EntityUpdateDefinition]])(any[UserInfo]))
- .thenReturn(Future.successful(HttpResponse(StatusCodes.NoContent)))
-
- val entityService = getEntityService(rawlsDAO = mockedRawlsDAO)
- val _ =
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvData, userToken).futureValue // isAsync defaults to false, so we omit it here
+ val goodTSVs = List(("upsert", "participant", tsvParticipants),
+ ("membership", "sample_set", tsvMembership),
+ ("update", "sample", tsvUpdate)
+ )
- if (tsvType == "update") {
- verify(mockedRawlsDAO, times(1)).batchUpdateEntities(
- ArgumentMatchers.eq("workspaceNamespace"), ArgumentMatchers.eq("workspaceName"),
- ArgumentMatchers.eq(expectedEntityType), any[Seq[EntityUpdateDefinition]])(any[UserInfo])
- } else {
- verify(mockedRawlsDAO, times(1)).batchUpsertEntities(
- ArgumentMatchers.eq("workspaceNamespace"), ArgumentMatchers.eq("workspaceName"),
- ArgumentMatchers.eq(expectedEntityType), ArgumentMatchers.any[Seq[EntityUpdateDefinition]])(ArgumentMatchers.any[UserInfo])
+ goodTSVs foreach { case (tsvType, expectedEntityType, tsvData) =>
+ s"should return OK with the entity type for (async=false + $tsvType TSV)" in {
+ val entityService = getEntityService()
+ val response =
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvData, userToken)
+ .futureValue // isAsync defaults to false, so we omit it here
+ response shouldBe RequestComplete(StatusCodes.OK, expectedEntityType)
+ }
- }
+ s"should call the appropriate upsert/update method for (async=false + $tsvType TSV)" in {
+ val mockedRawlsDAO = mockito[MockRawlsDAO] // mocking the mock
+ when(
+ mockedRawlsDAO.batchUpdateEntities(any[String], any[String], any[String], any[Seq[EntityUpdateDefinition]])(
+ any[UserInfo]
+ )
+ )
+ .thenReturn(Future.successful(HttpResponse(StatusCodes.NoContent)))
+
+ when(
+ mockedRawlsDAO.batchUpsertEntities(any[String], any[String], any[String], any[Seq[EntityUpdateDefinition]])(
+ any[UserInfo]
+ )
+ )
+ .thenReturn(Future.successful(HttpResponse(StatusCodes.NoContent)))
+
+ val entityService = getEntityService(rawlsDAO = mockedRawlsDAO)
+ val _ =
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvData, userToken)
+ .futureValue // isAsync defaults to false, so we omit it here
+
+ if (tsvType == "update") {
+ verify(mockedRawlsDAO, times(1)).batchUpdateEntities(
+ ArgumentMatchers.eq("workspaceNamespace"),
+ ArgumentMatchers.eq("workspaceName"),
+ ArgumentMatchers.eq(expectedEntityType),
+ any[Seq[EntityUpdateDefinition]]
+ )(any[UserInfo])
+ } else {
+ verify(mockedRawlsDAO, times(1)).batchUpsertEntities(
+ ArgumentMatchers.eq("workspaceNamespace"),
+ ArgumentMatchers.eq("workspaceName"),
+ ArgumentMatchers.eq(expectedEntityType),
+ ArgumentMatchers.any[Seq[EntityUpdateDefinition]]
+ )(ArgumentMatchers.any[UserInfo])
}
- s"should send $expectedEntityType tsv to cWDS with appropriate options" in {
- // set up mocks
- val cwdsDAO = mockito[MockCwdsDAO]
- val rawlsDAO = mockito[MockRawlsDAO]
+ }
- // inject mocks to entity service
- val entityService = getEntityService(cwdsDAO = cwdsDAO, rawlsDAO = rawlsDAO)
+ s"should send $expectedEntityType tsv to cWDS with appropriate options" in {
+ // set up mocks
+ val cwdsDAO = mockito[MockCwdsDAO]
+ val rawlsDAO = mockito[MockRawlsDAO]
- // set up behaviors
- val genericJob: GenericJob = new GenericJob
- genericJob.setJobId(UUID.randomUUID())
- // the "new MockRawlsDAO()" here is only to get access to a pre-canned WorkspaceResponse object
- val workspaceResponse = new MockRawlsDAO().rawlsWorkspaceResponseWithAttributes
+ // inject mocks to entity service
+ val entityService = getEntityService(cwdsDAO = cwdsDAO, rawlsDAO = rawlsDAO)
- when(cwdsDAO.isEnabled).thenReturn(true)
- when(cwdsDAO.getSupportedFormats).thenReturn(List("pfb","tdrexport", "rawlsjson"))
- when(rawlsDAO.getWorkspace(any[String], any[String])(any[UserInfo]))
- .thenReturn(Future.successful(workspaceResponse))
+ // set up behaviors
+ val genericJob: GenericJob = new GenericJob
+ genericJob.setJobId(UUID.randomUUID())
+ // the "new MockRawlsDAO()" here is only to get access to a pre-canned WorkspaceResponse object
+ val workspaceResponse = new MockRawlsDAO().rawlsWorkspaceResponseWithAttributes
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvData, dummyUserInfo("token"), true).futureValue
+ when(cwdsDAO.isEnabled).thenReturn(true)
+ when(cwdsDAO.getSupportedFormats).thenReturn(List("pfb", "tdrexport", "rawlsjson"))
+ when(rawlsDAO.getWorkspace(any[String], any[String])(any[UserInfo]))
+ .thenReturn(Future.successful(workspaceResponse))
- val argumentCaptor = ArgumentCaptor.forClass(classTag[AsyncImportRequest].runtimeClass).asInstanceOf[ArgumentCaptor[AsyncImportRequest]]
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvData, dummyUserInfo("token"), true)
+ .futureValue
- verify(cwdsDAO, times(1))
- .importV1(any[String], argumentCaptor.capture())(any[UserInfo])
- val capturedRequest = argumentCaptor.getValue
- capturedRequest.options should be(Some(ImportOptions(None, Some(tsvType != "update"))))
- verify(rawlsDAO, times(1))
- .getWorkspace(any[String], any[String])(any[UserInfo])
+ val argumentCaptor = ArgumentCaptor
+ .forClass(classTag[AsyncImportRequest].runtimeClass)
+ .asInstanceOf[ArgumentCaptor[AsyncImportRequest]]
- }
+ verify(cwdsDAO, times(1))
+ .importV1(any[String], argumentCaptor.capture())(any[UserInfo])
+ val capturedRequest = argumentCaptor.getValue
+ capturedRequest.options should be(Some(ImportOptions(None, Some(tsvType != "update"))))
+ verify(rawlsDAO, times(1))
+ .getWorkspace(any[String], any[String])(any[UserInfo])
+
+ }
}
"should return error for (async=true) when failed to write to GCS" in {
val testGoogleDAO = new ErroringGoogleServicesDAO
val entityService = getEntityService(mockGoogleServicesDAO = testGoogleDAO)
val response =
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvParticipants, userToken, isAsync = true).futureValue
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvParticipants, userToken, isAsync = true)
+ .futureValue
val errorResponse = response.asInstanceOf[RequestComplete[(StatusCode, ErrorReport)]]
@@ -177,8 +202,9 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val testCwdsDao = new ErroringCwdsDao
val entityService = getEntityService(cwdsDAO = testCwdsDao)
val response =
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvParticipants, userToken, isAsync = true).futureValue
+ entityService
+ .importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvParticipants, userToken, isAsync = true)
+ .futureValue
val errorResponse = response.asInstanceOf[RequestComplete[(StatusCode, ErrorReport)]]
@@ -193,8 +219,7 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
s"should return error for (async=$async) when TSV is unparsable" in {
val entityService = getEntityService()
val caught = intercept[FireCloudExceptionWithErrorReport] {
- entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName",
- tsvInvalid, userToken, async)
+ entityService.importEntitiesFromTSV("workspaceNamespace", "workspaceName", tsvInvalid, userToken, async)
}
caught.errorReport.statusCode should contain(StatusCodes.BadRequest)
}
@@ -228,7 +253,9 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val entityService = getEntityService(cwdsDAO = cwdsDAO)
// list jobs via entity service
- val actual = entityService.listJobs("workspaceNamespace", "workspaceName", runningOnly = true, dummyUserInfo("mytoken")).futureValue
+ val actual = entityService
+ .listJobs("workspaceNamespace", "workspaceName", runningOnly = true, dummyUserInfo("mytoken"))
+ .futureValue
actual should contain theSameElementsAs cwdsResponse
}
@@ -268,7 +295,8 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
// create input
val input = AsyncImportRequest(url = "https://example.com", filetype = importFiletype)
- entityService.importJob("workspaceNamespace", "workspaceName", input, dummyUserInfo("token"))
+ entityService
+ .importJob("workspaceNamespace", "workspaceName", input, dummyUserInfo("token"))
.futureValue // futureValue waits for the Future to complete
verify(cwdsDAO, times(1))
@@ -296,7 +324,8 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val entityService = getEntityService(cwdsDAO = cwdsDAO)
// get job via entity service
- val actual = entityService.getJob("workspaceNamespace", "workspaceName", jobId, dummyUserInfo("mytoken")).futureValue
+ val actual =
+ entityService.getJob("workspaceNamespace", "workspaceName", jobId, dummyUserInfo("mytoken")).futureValue
actual shouldBe cwdsResponse
}
@@ -308,7 +337,8 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
when(cwdsDAO.isEnabled).thenReturn(true)
doThrow(new ApiException(404, "cWDS unit test intentional error"))
- .when(cwdsDAO).getJobV1(any[String], ArgumentMatchers.eq(jobId))(any[UserInfo])
+ .when(cwdsDAO)
+ .getJobV1(any[String], ArgumentMatchers.eq(jobId))(any[UserInfo])
// inject mocks to entity service
val entityService = getEntityService(cwdsDAO = cwdsDAO)
@@ -317,7 +347,7 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val getJobFuture = entityService.getJob("workspaceNamespace", "workspaceName", jobId, dummyUserInfo("mytoken"))
ScalaFutures.whenReady(getJobFuture.failed) { actual =>
- actual shouldBe a [FireCloudExceptionWithErrorReport]
+ actual shouldBe a[FireCloudExceptionWithErrorReport]
val apiEx = actual.asInstanceOf[FireCloudExceptionWithErrorReport]
apiEx.errorReport.statusCode should contain(StatusCodes.NotFound)
}
@@ -330,7 +360,8 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
when(cwdsDAO.isEnabled).thenReturn(true)
doThrow(new ApiException(StatusCodes.ImATeapot.intValue, "cWDS unit test intentional error"))
- .when(cwdsDAO).getJobV1(any[String], ArgumentMatchers.eq(jobId))(any[UserInfo])
+ .when(cwdsDAO)
+ .getJobV1(any[String], ArgumentMatchers.eq(jobId))(any[UserInfo])
// inject mocks to entity service
val entityService = getEntityService(cwdsDAO = cwdsDAO)
@@ -339,17 +370,18 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val getJobFuture = entityService.getJob("workspaceNamespace", "workspaceName", jobId, dummyUserInfo("mytoken"))
ScalaFutures.whenReady(getJobFuture.failed) { actual =>
- actual shouldBe a [FireCloudExceptionWithErrorReport]
+ actual shouldBe a[FireCloudExceptionWithErrorReport]
val apiEx = actual.asInstanceOf[FireCloudExceptionWithErrorReport]
- apiEx.errorReport.statusCode should contain (StatusCodes.ImATeapot)
- apiEx.getMessage should(include("cWDS unit test intentional error"))
+ apiEx.errorReport.statusCode should contain(StatusCodes.ImATeapot)
+ apiEx.getMessage should (include("cWDS unit test intentional error"))
}
}
}
private def getEntityService(mockGoogleServicesDAO: MockGoogleServicesDAO = new MockGoogleServicesDAO,
rawlsDAO: MockRawlsDAO = new MockRawlsDAO,
- cwdsDAO: MockCwdsDAO = new MockCwdsDAO(false)) = {
+ cwdsDAO: MockCwdsDAO = new MockCwdsDAO(false)
+ ) = {
val application = app.copy(googleServicesDAO = mockGoogleServicesDAO, rawlsDAO = rawlsDAO, cwdsDAO = cwdsDAO)
implicit val modelSchema: ModelSchema = FirecloudModelSchema
EntityService.constructor(application)(modelSchema)(global)
@@ -358,7 +390,10 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
class ErroringGoogleServicesDAO extends MockGoogleServicesDAO {
def errorDefinition: Exception = new StorageException(418, "intentional unit test failure")
- override def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, objectContents: Array[Byte]): GcsPath =
+ override def writeObjectAsRawlsSA(bucketName: GcsBucketName,
+ objectKey: GcsObjectName,
+ objectContents: Array[Byte]
+ ): GcsPath =
// throw a 418 so unit tests have an easy way to distinguish this error vs an error somewhere else in the stack
throw errorDefinition
}
@@ -371,25 +406,28 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
genericJob
}
- override def importV1(workspaceId: String, importRequest: AsyncImportRequest)(implicit userInfo: UserInfo): GenericJob = {
+ override def importV1(workspaceId: String, importRequest: AsyncImportRequest)(implicit
+ userInfo: UserInfo
+ ): GenericJob =
importRequest.filetype match {
case FILETYPE_RAWLS => successDefinition
- case _ => ???
+ case _ => ???
}
- }
}
class ErroringCwdsDao extends MockCwdsDAO {
// return a 429 so unit tests have an easy way to distinguish this error vs an error somewhere else in the stack
- def errorDefinition: GenericJob = throw new ApiException(StatusCodes.TooManyRequests.intValue, "intentional ErroringCwdsDao error")
+ def errorDefinition: GenericJob =
+ throw new ApiException(StatusCodes.TooManyRequests.intValue, "intentional ErroringCwdsDao error")
- override def importV1(workspaceId: String, importRequest: AsyncImportRequest)(implicit userInfo: UserInfo): GenericJob = {
+ override def importV1(workspaceId: String, importRequest: AsyncImportRequest)(implicit
+ userInfo: UserInfo
+ ): GenericJob =
importRequest.filetype match {
case FILETYPE_RAWLS => errorDefinition
- case _ => ???
+ case _ => ???
}
- }
}
-}
\ No newline at end of file
+}
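// Aside (not part of the diff): the cWDS test above uses an ArgumentCaptor to grab the
// AsyncImportRequest passed to importV1 and assert on its options. A hedged, self-contained
// sketch of that capture pattern with a toy trait; names are placeholders.
import org.mockito.ArgumentCaptor
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, times, verify}

object CaptorSketch {
  trait Sender { def send(destination: String, payload: String): Unit }

  def capturedPayload(): String = {
    val sender = mock(classOf[Sender])
    sender.send("cwds", "hello") // the code under test would normally make this call

    // Verify the interaction and capture the payload argument for later assertions.
    val captor: ArgumentCaptor[String] = ArgumentCaptor.forClass(classOf[String])
    verify(sender, times(1)).send(any[String](), captor.capture())
    captor.getValue // "hello"
  }
}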
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAOSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAOSpec.scala
index 33c7bcf84..bd9ad43b9 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAOSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpCwdsDAOSpec.scala
@@ -1,7 +1,7 @@
package org.broadinstitute.dsde.firecloud.dataaccess
import org.broadinstitute.dsde.firecloud.FireCloudException
-import org.broadinstitute.dsde.firecloud.model.{AsyncImportRequest, ImportOptions, CwdsListResponse}
+import org.broadinstitute.dsde.firecloud.model.{AsyncImportRequest, CwdsListResponse, ImportOptions}
import org.databiosphere.workspacedata.model.{GenericJob, ImportRequest}
import org.databiosphere.workspacedata.model.GenericJob.{JobTypeEnum, StatusEnum}
import org.databiosphere.workspacedata.model.GenericJob.StatusEnum._
@@ -115,7 +115,7 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
cwdsDao.toCwdsImportType("rawlsjson") shouldBe ImportRequest.TypeEnum.RAWLSJSON
}
"other input should throw" in {
- a [FireCloudException] should be thrownBy cwdsDao.toCwdsImportType("something-else")
+ a[FireCloudException] should be thrownBy cwdsDao.toCwdsImportType("something-else")
}
}
@@ -123,14 +123,12 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
"should translate an import request with no options" in {
val testURI: URI = URI.create("https://example.com/")
- val input = AsyncImportRequest(url = testURI.toString,
- filetype = "pfb",
- options = None)
+ val input = AsyncImportRequest(url = testURI.toString, filetype = "pfb", options = None)
val expected = new ImportRequest()
expected.setUrl(testURI)
expected.setType(ImportRequest.TypeEnum.PFB)
- expected.setOptions(Map.empty[String,Object].asJava)
+ expected.setOptions(Map.empty[String, Object].asJava)
cwdsDao.toCwdsImportRequest(input) shouldBe expected
}
@@ -139,13 +137,14 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
val testURI: URI = URI.create("https://example.com/")
val input = AsyncImportRequest(url = testURI.toString,
- filetype = "pfb",
- options = Some(ImportOptions(tdrSyncPermissions = None)))
+ filetype = "pfb",
+ options = Some(ImportOptions(tdrSyncPermissions = None))
+ )
val expected = new ImportRequest()
expected.setUrl(testURI)
expected.setType(ImportRequest.TypeEnum.PFB)
- expected.setOptions(Map.empty[String,Object].asJava)
+ expected.setOptions(Map.empty[String, Object].asJava)
cwdsDao.toCwdsImportRequest(input) shouldBe expected
}
@@ -154,13 +153,14 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
val testURI: URI = URI.create("https://example.com/")
val input = AsyncImportRequest(url = testURI.toString,
- filetype = "pfb",
- options = Some(ImportOptions(tdrSyncPermissions = Some(true))))
+ filetype = "pfb",
+ options = Some(ImportOptions(tdrSyncPermissions = Some(true)))
+ )
val expected = new ImportRequest()
expected.setUrl(testURI)
expected.setType(ImportRequest.TypeEnum.PFB)
- expected.setOptions(Map[String,Object]("tdrSyncPermissions" -> true.asInstanceOf[Object]).asJava)
+ expected.setOptions(Map[String, Object]("tdrSyncPermissions" -> true.asInstanceOf[Object]).asJava)
cwdsDao.toCwdsImportRequest(input) shouldBe expected
}
@@ -169,13 +169,14 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
val testURI: URI = URI.create("https://example.com/")
val input = AsyncImportRequest(url = testURI.toString,
- filetype = "pfb",
- options = Some(ImportOptions(tdrSyncPermissions = Some(false))))
+ filetype = "pfb",
+ options = Some(ImportOptions(tdrSyncPermissions = Some(false)))
+ )
val expected = new ImportRequest()
expected.setUrl(testURI)
expected.setType(ImportRequest.TypeEnum.PFB)
- expected.setOptions(Map[String,Object]("tdrSyncPermissions" -> false.asInstanceOf[Object]).asJava)
+ expected.setOptions(Map[String, Object]("tdrSyncPermissions" -> false.asInstanceOf[Object]).asJava)
cwdsDao.toCwdsImportRequest(input) shouldBe expected
}
@@ -184,20 +185,19 @@ class HttpCwdsDAOSpec extends AnyFreeSpec with Matchers {
val testURI: URI = URI.create("https://example.com/")
val input = AsyncImportRequest(url = testURI.toString,
- filetype = "rawlsjson",
- options = Some(ImportOptions(isUpsert = Some(true))))
+ filetype = "rawlsjson",
+ options = Some(ImportOptions(isUpsert = Some(true)))
+ )
val expected = new ImportRequest()
expected.setUrl(testURI)
expected.setType(ImportRequest.TypeEnum.RAWLSJSON)
- expected.setOptions(Map[String,Object]("isUpsert" -> true.asInstanceOf[Object]).asJava)
+ expected.setOptions(Map[String, Object]("isUpsert" -> true.asInstanceOf[Object]).asJava)
cwdsDao.toCwdsImportRequest(input) shouldBe expected
}
}
-
}
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAOSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAOSpec.scala
index cf0ebde7e..77d5e7bfb 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAOSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/HttpGoogleServicesDAOSpec.scala
@@ -35,7 +35,11 @@ class HttpGoogleServicesDAOSpec extends AnyFlatSpec with Matchers with PrivateMe
val testProject = "broad-dsde-dev"
val priceListUrl = ConfigFactory.load().getString("googlecloud.priceListUrl")
- val defaultPriceList = GooglePriceList(GooglePrices(Map("us" -> BigDecimal(-0.11)), UsTieredPriceItem(Map(1L -> BigDecimal(-0.22)))), "v1", "1")
+ val defaultPriceList = GooglePriceList(
+ GooglePrices(Map("us" -> BigDecimal(-0.11)), UsTieredPriceItem(Map(1L -> BigDecimal(-0.22)))),
+ "v1",
+ "1"
+ )
implicit val system: ActorSystem = ActorSystem("HttpGoogleCloudStorageDAOSpec")
import system.dispatcher
val gcsDAO = new HttpGoogleServicesDAO(priceListUrl, defaultPriceList)
@@ -47,7 +51,7 @@ class HttpGoogleServicesDAOSpec extends AnyFlatSpec with Matchers with PrivateMe
val priceList: GooglePriceList = Await.result(errorGcsDAO.fetchPriceList, Duration.Inf)
- priceList.version should startWith ("v")
+ priceList.version should startWith("v")
priceList.updated should not be empty
priceList.prices.cpBigstoreStorage("us") shouldBe BigDecimal(-0.11)
priceList.prices.cpComputeengineInternetEgressNA.tiers.size shouldBe 1
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockAgoraDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockAgoraDAO.scala
index 0fad8582c..95b5f77d6 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockAgoraDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockAgoraDAO.scala
@@ -2,7 +2,12 @@ package org.broadinstitute.dsde.firecloud.dataaccess
import akka.http.scaladsl.model.Uri
import org.broadinstitute.dsde.firecloud.mock.MockAgoraACLData
-import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{ACLNames, AgoraPermission, EntityAccessControlAgora, Method}
+import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{
+ ACLNames,
+ AgoraPermission,
+ EntityAccessControlAgora,
+ Method
+}
import org.broadinstitute.dsde.firecloud.model.UserInfo
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
@@ -18,24 +23,28 @@ object MockAgoraDAO {
class MockAgoraDAO extends AgoraDAO {
-
- override def getNamespacePermissions(ns: String, entity: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
+ override def getNamespacePermissions(ns: String, entity: String)(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] =
Future(List(MockAgoraDAO.agoraPermission))
- }
- override def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
+ override def postNamespacePermissions(ns: String, entity: String, perms: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] =
Future(List(MockAgoraDAO.agoraPermission))
- }
- override def getMultiEntityPermissions(entityType: _root_.org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.AgoraEntityType.Value, entities: List[Method])(implicit userInfo: UserInfo) = {
+ override def getMultiEntityPermissions(
+ entityType: _root_.org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.AgoraEntityType.Value,
+ entities: List[Method]
+ )(implicit userInfo: UserInfo) =
Future(List.empty[EntityAccessControlAgora])
- }
- def status: Future[SubsystemStatus] = {
+ def status: Future[SubsystemStatus] =
Future(SubsystemStatus(ok = true, None))
- }
- override def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit userInfo: UserInfo): Future[List[EntityAccessControlAgora]] =
+ override def batchCreatePermissions(inputs: List[EntityAccessControlAgora])(implicit
+ userInfo: UserInfo
+ ): Future[List[EntityAccessControlAgora]] =
Future.successful(MockAgoraACLData.multiUpsertResponse)
override def getPermission(url: String)(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
@@ -58,7 +67,9 @@ class MockAgoraDAO extends AgoraDAO {
}
- override def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit userInfo: UserInfo): Future[List[AgoraPermission]] = {
+ override def createPermission(url: String, agoraPermissions: List[AgoraPermission])(implicit
+ userInfo: UserInfo
+ ): Future[List[AgoraPermission]] = {
val pathString = Uri(url).path.toString()
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockCwdsDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockCwdsDAO.scala
index 412ee4fc9..523522b4a 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockCwdsDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockCwdsDAO.scala
@@ -12,54 +12,54 @@ import java.time.OffsetDateTime
import java.util.UUID
class MockCwdsDAO(
- enabled: Boolean = true,
- supportedFormats: List[String] = List("pfb", "tdrexport", "rawlsjson")
- ) extends HttpCwdsDAO(enabled, supportedFormats) {
+ enabled: Boolean = true,
+ supportedFormats: List[String] = List("pfb", "tdrexport", "rawlsjson")
+) extends HttpCwdsDAO(enabled, supportedFormats) {
implicit val errorReportSource: ErrorReportSource = ErrorReportSource(
"MockCWDS"
)
override def listJobsV1(workspaceId: String, runningOnly: Boolean)(implicit
- userInfo: UserInfo
+ userInfo: UserInfo
): List[CwdsListResponse] = List()
override def getJobV1(workspaceId: String, jobId: String)(implicit
- userInfo: UserInfo
+ userInfo: UserInfo
): CwdsListResponse =
CwdsListResponse(jobId, "ReadyForUpsert", "pfb", None)
override def importV1(
- workspaceId: String,
- importRequest: AsyncImportRequest
- )(implicit userInfo: UserInfo): GenericJob = {
- importRequest.filetype match { case FILETYPE_PFB | FILETYPE_TDR | FILETYPE_RAWLS =>
- if (importRequest.url.contains("forbidden"))
- throw new ApiException(
- Forbidden.intValue,
- "Missing Authorization: Bearer token in header"
- )
- else if (importRequest.url.contains("bad.request"))
- throw new ApiException(
- BadRequest.intValue,
- "Bad request as reported by cwds"
- )
- else if (importRequest.url.contains("its.lawsuit.time"))
- throw new ApiException(
- UnavailableForLegalReasons.intValue,
- "cwds message"
- )
- else if (importRequest.url.contains("good")) makeJob(workspaceId)
- else
- throw new ApiException(
- EnhanceYourCalm.intValue,
- "enhance your calm"
- )
- case _ => ???
+ workspaceId: String,
+ importRequest: AsyncImportRequest
+ )(implicit userInfo: UserInfo): GenericJob =
+ importRequest.filetype match {
+ case FILETYPE_PFB | FILETYPE_TDR | FILETYPE_RAWLS =>
+ if (importRequest.url.contains("forbidden"))
+ throw new ApiException(
+ Forbidden.intValue,
+ "Missing Authorization: Bearer token in header"
+ )
+ else if (importRequest.url.contains("bad.request"))
+ throw new ApiException(
+ BadRequest.intValue,
+ "Bad request as reported by cwds"
+ )
+ else if (importRequest.url.contains("its.lawsuit.time"))
+ throw new ApiException(
+ UnavailableForLegalReasons.intValue,
+ "cwds message"
+ )
+ else if (importRequest.url.contains("good")) makeJob(workspaceId)
+ else
+ throw new ApiException(
+ EnhanceYourCalm.intValue,
+ "enhance your calm"
+ )
+ case _ => ???
}
- }
private def makeJob(
- workspaceId: String
- ) = {
+ workspaceId: String
+ ) = {
val genericJob: GenericJob = new GenericJob
genericJob.setJobId(UUID.randomUUID())
genericJob.setStatus(StatusEnum.RUNNING)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockOntologyDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockOntologyDAO.scala
index 9192f3918..2299eddc4 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockOntologyDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockOntologyDAO.scala
@@ -5,233 +5,315 @@ import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
-
class MockOntologyDAO extends OntologyDAO {
val data = Map(
// central sleep apnea
- "http://purl.obolibrary.org/obo/DOID_9220" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_9220",
- ontology="Disease",
- usable=true,
- label="central sleep apnea",
- definition=Some("A sleep apnea that is characterized by a malfunction of the basic neurological controls for breathing rate and the failure to give the signal to inhale, causing the individual to miss one or more cycles of breathing."),
- synonyms=Some(List("primary central sleep apnea")),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0050847",
- order=1,
- label=Some("sleep apnea"),
- definition=Some("A sleep disorder characterized by repeated cessation and commencing of breathing that repeatedly disrupts sleep.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_535",
- order=2,
- label=Some("sleep disorder"),
- definition=Some("A disease of mental health that involves disruption of sleep patterns."),
- synonyms=Some(List("Non-organic sleep disorder"))
+ "http://purl.obolibrary.org/obo/DOID_9220" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_9220",
+ ontology = "Disease",
+ usable = true,
+ label = "central sleep apnea",
+ definition = Some(
+ "A sleep apnea that is characterized by a malfunction of the basic neurological controls for breathing rate and the failure to give the signal to inhale, causing the individual to miss one or more cycles of breathing."
),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_150",
- order=3,
- label=Some("disease of mental health"),
- definition=Some("A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=4,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ synonyms = Some(List("primary central sleep apnea")),
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0050847",
+ order = 1,
+ label = Some("sleep apnea"),
+ definition = Some(
+ "A sleep disorder characterized by repeated cessation and commencing of breathing that repeatedly disrupts sleep."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_535",
+ order = 2,
+ label = Some("sleep disorder"),
+ definition = Some("A disease of mental health that involves disruption of sleep patterns."),
+ synonyms = Some(List("Non-organic sleep disorder"))
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_150",
+ order = 3,
+ label = Some("disease of mental health"),
+ definition = Some(
+ "A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 4,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
     // sleep disorder (a parent of central sleep apnea)
- "http://purl.obolibrary.org/obo/DOID_535" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_535",
- ontology="Disease",
- usable=true,
- label="sleep disorder",
- definition=Some("A disease of mental health that involves disruption of sleep patterns."),
- synonyms=Some(List("Non-organic sleep disorder")),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_150",
- order=1,
- label=Some("disease of mental health"),
- definition=Some("A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=2,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ "http://purl.obolibrary.org/obo/DOID_535" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_535",
+ ontology = "Disease",
+ usable = true,
+ label = "sleep disorder",
+ definition = Some("A disease of mental health that involves disruption of sleep patterns."),
+ synonyms = Some(List("Non-organic sleep disorder")),
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_150",
+ order = 1,
+ label = Some("disease of mental health"),
+ definition = Some(
+ "A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 2,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
// ebola
- "http://purl.obolibrary.org/obo/DOID_4325" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_4325",
- ontology="Disease",
- usable=true,
- label="Ebola hemorrhagic fever",
- definition=Some("A viral infectious disease that is a hemorrhagic fever, has_material_basis_in Zaire ebolavirus, has_material_basis_in Sudan ebolavirus, has_material_basis_in Cote d'Ivoire ebolavirus, or has_material_basis_in Bundibugyo ebolavirus, which are transmitted_by contact with the body fluids of an infected animal or person, transmitted_by contaminated fomites, or transmitted_by infected medical equipment. The infection has_symptom fever, has_symptom headache, has_symptom joint pain, has_symptom muscle aches, has_symptom sore throat, has_symptom weakness, has_symptom diarrhea, has_symptom vomiting, has_symptom stomach pain, has_symptom rash, has_symptom red eyes, has_symptom hiccups, and has_symptom internal and external bleeding."),
- synonyms=Some(List("Ebola virus disease")),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_934",
- order=1,
- label=Some("viral infectious disease"),
- definition=Some("A disease by infectious agent that results_in infection, has_material_basis_in Viruses.")
+ "http://purl.obolibrary.org/obo/DOID_4325" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_4325",
+ ontology = "Disease",
+ usable = true,
+ label = "Ebola hemorrhagic fever",
+ definition = Some(
+ "A viral infectious disease that is a hemorrhagic fever, has_material_basis_in Zaire ebolavirus, has_material_basis_in Sudan ebolavirus, has_material_basis_in Cote d'Ivoire ebolavirus, or has_material_basis_in Bundibugyo ebolavirus, which are transmitted_by contact with the body fluids of an infected animal or person, transmitted_by contaminated fomites, or transmitted_by infected medical equipment. The infection has_symptom fever, has_symptom headache, has_symptom joint pain, has_symptom muscle aches, has_symptom sore throat, has_symptom weakness, has_symptom diarrhea, has_symptom vomiting, has_symptom stomach pain, has_symptom rash, has_symptom red eyes, has_symptom hiccups, and has_symptom internal and external bleeding."
),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0050117",
- order=2,
- label=Some("disease by infectious agent"),
- definition=Some("A disease that is the consequence of the presence of pathogenic microbial agents, including pathogenic viruses, pathogenic bacteria, fungi, protozoa, multicellular parasites, and aberrant proteins known as prions.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=3,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ synonyms = Some(List("Ebola virus disease")),
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_934",
+ order = 1,
+ label = Some("viral infectious disease"),
+ definition =
+ Some("A disease by infectious agent that results_in infection, has_material_basis_in Viruses.")
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0050117",
+ order = 2,
+ label = Some("disease by infectious agent"),
+ definition = Some(
+ "A disease that is the consequence of the presence of pathogenic microbial agents, including pathogenic viruses, pathogenic bacteria, fungi, protozoa, multicellular parasites, and aberrant proteins known as prions."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 3,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
// leukemia
- "http://purl.obolibrary.org/obo/DOID_1240" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_1240",
- ontology="Disease",
- usable=true,
- label="leukemia",
- definition=Some("A cancer that affects the blood or bone marrow characterized by an abnormal proliferation of blood cells."),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_2531",
- order=1,
- label=Some("hematologic cancer"),
- definition=Some("An immune system cancer located_in the hematological system that is characterized by uncontrolled cellular proliferation in blood, bone marrow and lymph nodes.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0060083",
- order=2,
- label=Some("immune system cancer"),
- definition=Some("An organ system cancer located_in the immune system that is characterized by uncontrolled cellular proliferation in organs of the immune system.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0050686",
- order=3,
- label=Some("organ system cancer"),
- definition=Some("A cancer that is classified based on the organ it starts in.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_162",
- order=4,
- label=Some("cancer"),
- definition=Some("A disease of cellular proliferation that is malignant and primary, characterized by uncontrolled cellular proliferation, local cell invasion and metastasis."),
- synonyms=Some(List("primary cancer","malignant tumor ","malignant neoplasm"))
+ "http://purl.obolibrary.org/obo/DOID_1240" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_1240",
+ ontology = "Disease",
+ usable = true,
+ label = "leukemia",
+ definition = Some(
+ "A cancer that affects the blood or bone marrow characterized by an abnormal proliferation of blood cells."
),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_14566",
- order=5,
- label=Some("disease of cellular proliferation"),
- definition=Some("A disease that is characterized by abnormally rapid cell division.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=6,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_2531",
+ order = 1,
+ label = Some("hematologic cancer"),
+ definition = Some(
+ "An immune system cancer located_in the hematological system that is characterized by uncontrolled cellular proliferation in blood, bone marrow and lymph nodes."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0060083",
+ order = 2,
+ label = Some("immune system cancer"),
+ definition = Some(
+ "An organ system cancer located_in the immune system that is characterized by uncontrolled cellular proliferation in organs of the immune system."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0050686",
+ order = 3,
+ label = Some("organ system cancer"),
+ definition = Some("A cancer that is classified based on the organ it starts in.")
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_162",
+ order = 4,
+ label = Some("cancer"),
+ definition = Some(
+ "A disease of cellular proliferation that is malignant and primary, characterized by uncontrolled cellular proliferation, local cell invasion and metastasis."
+ ),
+ synonyms = Some(List("primary cancer", "malignant tumor ", "malignant neoplasm"))
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_14566",
+ order = 5,
+ label = Some("disease of cellular proliferation"),
+ definition = Some("A disease that is characterized by abnormally rapid cell division.")
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 6,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
// hematologic cancer (first parent of leukemia)
- "http://purl.obolibrary.org/obo/DOID_2531" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_2531",
- ontology="Disease",
- usable=true,
- label="hematologic cancer",
- definition=Some("An immune system cancer located_in the hematological system that is characterized by uncontrolled cellular proliferation in blood, bone marrow and lymph nodes."),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0060083",
- order=1,
- label=Some("immune system cancer"),
- definition=Some("An organ system cancer located_in the immune system that is characterized by uncontrolled cellular proliferation in organs of the immune system.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0050686",
- order=2,
- label=Some("organ system cancer"),
- definition=Some("A cancer that is classified based on the organ it starts in.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_162",
- order=3,
- label=Some("cancer"),
- definition=Some("A disease of cellular proliferation that is malignant and primary, characterized by uncontrolled cellular proliferation, local cell invasion and metastasis."),
- synonyms=Some(List("primary cancer","malignant tumor ","malignant neoplasm"))
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_14566",
- order=4,
- label=Some("disease of cellular proliferation"),
- definition=Some("A disease that is characterized by abnormally rapid cell division.")
+ "http://purl.obolibrary.org/obo/DOID_2531" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_2531",
+ ontology = "Disease",
+ usable = true,
+ label = "hematologic cancer",
+ definition = Some(
+ "An immune system cancer located_in the hematological system that is characterized by uncontrolled cellular proliferation in blood, bone marrow and lymph nodes."
),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=5,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0060083",
+ order = 1,
+ label = Some("immune system cancer"),
+ definition = Some(
+ "An organ system cancer located_in the immune system that is characterized by uncontrolled cellular proliferation in organs of the immune system."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0050686",
+ order = 2,
+ label = Some("organ system cancer"),
+ definition = Some("A cancer that is classified based on the organ it starts in.")
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_162",
+ order = 3,
+ label = Some("cancer"),
+ definition = Some(
+ "A disease of cellular proliferation that is malignant and primary, characterized by uncontrolled cellular proliferation, local cell invasion and metastasis."
+ ),
+ synonyms = Some(List("primary cancer", "malignant tumor ", "malignant neoplasm"))
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_14566",
+ order = 4,
+ label = Some("disease of cellular proliferation"),
+ definition = Some("A disease that is characterized by abnormally rapid cell division.")
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 5,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
// fetal alcohol spectrum disorder (has multiple parents at the same level)
- "http://purl.obolibrary.org/obo/DOID_0050696" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_0050696",
- ontology="Disease",
- usable=true,
- label="fetal alcohol spectrum disorder",
- definition=Some("A specific developmental disorder and physical disorder that is characterized by physical, behavioral and learning birth defects resulting from maternal ingestion of alcohol during pregnancy."),
- parents=Some(List(
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0080015",
- order=1,
- label=Some("physical disorder"),
- definition=Some("A disease that has_material_basis_in a genetic abnormality, error with embryonic development, infection or compromised intrauterine environment.")
+ "http://purl.obolibrary.org/obo/DOID_0050696" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_0050696",
+ ontology = "Disease",
+ usable = true,
+ label = "fetal alcohol spectrum disorder",
+ definition = Some(
+ "A specific developmental disorder and physical disorder that is characterized by physical, behavioral and learning birth defects resulting from maternal ingestion of alcohol during pregnancy."
),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0080015",
- order=1,
- label=Some("specific developmental disorder"),
- definition=Some("A developmental disorder of mental health that categorizes specific learning disabilities and developmental disorders affecting coordination.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_0060083",
- order=2,
- label=Some("developmental disorder of mental health"),
- definition=Some("A disease of mental health that occur during a child's developmental period between birth and age 18 resulting in retarding of the child's psychological or physical development.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_150",
- order=3,
- label=Some("disease of mental health"),
- definition=Some("A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture.")
- ),
- TermParent(
- id="http://purl.obolibrary.org/obo/DOID_4",
- order=4,
- label=Some("disease"),
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
+ parents = Some(
+ List(
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0080015",
+ order = 1,
+ label = Some("physical disorder"),
+ definition = Some(
+ "A disease that has_material_basis_in a genetic abnormality, error with embryonic development, infection or compromised intrauterine environment."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0080015",
+ order = 1,
+ label = Some("specific developmental disorder"),
+ definition = Some(
+ "A developmental disorder of mental health that categorizes specific learning disabilities and developmental disorders affecting coordination."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_0060083",
+ order = 2,
+ label = Some("developmental disorder of mental health"),
+ definition = Some(
+ "A disease of mental health that occur during a child's developmental period between birth and age 18 resulting in retarding of the child's psychological or physical development."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_150",
+ order = 3,
+ label = Some("disease of mental health"),
+ definition = Some(
+ "A disease that involves a psychological or behavioral pattern generally associated with subjective distress or disability that occurs in an individual, and which are not a part of normal development or culture."
+ )
+ ),
+ TermParent(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ order = 4,
+ label = Some("disease"),
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
- )))),
+ )
+ ),
// disease, the root of the ontology tree
- "http://purl.obolibrary.org/obo/DOID_4" -> List(TermResource(
- id="http://purl.obolibrary.org/obo/DOID_4",
- ontology="Disease",
- usable=true,
- label="disease",
- definition=Some("A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism.")
- ))
+ "http://purl.obolibrary.org/obo/DOID_4" -> List(
+ TermResource(
+ id = "http://purl.obolibrary.org/obo/DOID_4",
+ ontology = "Disease",
+ usable = true,
+ label = "disease",
+ definition = Some(
+ "A disease is a disposition (i) to undergo pathological processes that (ii) exists in an organism because of one or more disorders in that organism."
+ )
+ )
+ )
)
override def search(term: String): List[TermResource] = data.getOrElse(term, List.empty[TermResource])
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockRawlsDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockRawlsDAO.scala
index 2b473cbe7..c3801c4b3 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockRawlsDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockRawlsDAO.scala
@@ -22,14 +22,13 @@ import scala.concurrent.Future
// Common things that can be accessed from tests
object MockRawlsDAO {
val mockWorkspaceId = UUID.randomUUID().toString
- val sampleAtts: Map[AttributeName, AttributeListElementable with Product with Serializable] = {
+ val sampleAtts: Map[AttributeName, AttributeListElementable with Product with Serializable] =
Map(
AttributeName.withDefaultNS("sample_type") -> AttributeString("Blood"),
AttributeName.withDefaultNS("header_1") -> AttributeString(MockUtils.randomAlpha()),
AttributeName.withDefaultNS("header_2") -> AttributeString(MockUtils.randomAlpha()),
AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference("participant", "participant_name")
)
- }
val validSampleEntities = List(
Entity("sample_01", "sample", sampleAtts),
@@ -39,8 +38,14 @@ object MockRawlsDAO {
)
val validEntitiesMetadata = Map(
- "participant" -> EntityTypeMetadata(count = 1, idName = "participant_id", attributeNames = List("age", "gender", "cohort")),
- "sample" -> EntityTypeMetadata(count = validSampleEntities.size, idName = "sample_id", attributeNames = sampleAtts.map(_._1.name).toList),
+ "participant" -> EntityTypeMetadata(count = 1,
+ idName = "participant_id",
+ attributeNames = List("age", "gender", "cohort")
+ ),
+ "sample" -> EntityTypeMetadata(count = validSampleEntities.size,
+ idName = "sample_id",
+ attributeNames = sampleAtts.map(_._1.name).toList
+ ),
"sample_set" -> EntityTypeMetadata(count = 1, idName = "sample_set_id", attributeNames = List("samples"))
)
@@ -48,20 +53,21 @@ object MockRawlsDAO {
val largeSampleSize = 20000
- val largeSampleHeaders: Seq[AttributeName] = (1 to 150).map { h => AttributeName.withDefaultNS(s"prop_$h") }
+ val largeSampleHeaders: Seq[AttributeName] = (1 to 150).map(h => AttributeName.withDefaultNS(s"prop_$h"))
- val largeSampleAttributes: Map[AttributeName, AttributeString] = {
- largeSampleHeaders.map { h => Map(h -> AttributeString(MockUtils.randomAlpha()))}.reduce(_ ++ _)
- }
+ val largeSampleAttributes: Map[AttributeName, AttributeString] =
+ largeSampleHeaders.map(h => Map(h -> AttributeString(MockUtils.randomAlpha()))).reduce(_ ++ _)
val paginatedEntityRangeLimit = FireCloudConfig.Rawls.defaultPageSize - 1
- def generateSamplesInRange(from: Int): List[Entity] = (from to from + paginatedEntityRangeLimit).map { pos => Entity(s"sample_0$pos", "sample", largeSampleAttributes) }.toList
+ def generateSamplesInRange(from: Int): List[Entity] = (from to from + paginatedEntityRangeLimit).map { pos =>
+ Entity(s"sample_0$pos", "sample", largeSampleAttributes)
+ }.toList
val largeSampleMetadata = Map(
- "sample" -> EntityTypeMetadata(
- count = largeSampleSize,
- idName = "sample_id",
- attributeNames = largeSampleHeaders.map(_.name))
+ "sample" -> EntityTypeMetadata(count = largeSampleSize,
+ idName = "sample_id",
+ attributeNames = largeSampleHeaders.map(_.name)
+ )
)
// Large Sample Set Data
@@ -73,78 +79,116 @@ object MockRawlsDAO {
// Give each sample set a set of 100 samples. That gives us 500K entities to process.
val largeSampleSetSamples = AttributeEntityReferenceList(
- (1 to 100).map { i => AttributeEntityReference(entityType = "sample", entityName = s"sample_0$i") }
+ (1 to 100).map(i => AttributeEntityReference(entityType = "sample", entityName = s"sample_0$i"))
)
- val largeSampleSetAttributes: Map[AttributeName, Attribute] = {
+ val largeSampleSetAttributes: Map[AttributeName, Attribute] =
Map(AttributeName.withDefaultNS("samples") -> largeSampleSetSamples) ++
- largeSampleSetHeaders.map { h => Map(h -> AttributeString(MockUtils.randomAlpha()))}.reduce(_ ++ _)
- }
+ largeSampleSetHeaders.map(h => Map(h -> AttributeString(MockUtils.randomAlpha()))).reduce(_ ++ _)
- def generateSampleSetsInRange(from: Int): List[Entity] = (from to from + paginatedEntityRangeLimit).map { pos => Entity(s"sample_set_0$pos", "sample_set", largeSampleSetAttributes) }.toList
+ def generateSampleSetsInRange(from: Int): List[Entity] = (from to from + paginatedEntityRangeLimit).map { pos =>
+ Entity(s"sample_set_0$pos", "sample_set", largeSampleSetAttributes)
+ }.toList
val largeSampleSetMetadata = Map(
- "sample_set" -> EntityTypeMetadata(
- count = largeSampleSetSize,
- idName = "sample_set_id",
- attributeNames = largeSampleSetAttributes.map(_._1.name).toSeq))
-
+ "sample_set" -> EntityTypeMetadata(count = largeSampleSetSize,
+ idName = "sample_set_id",
+ attributeNames = largeSampleSetAttributes.map(_._1.name).toSeq
+ )
+ )
val validBigQueryEntities = List(
- Entity("shakespeare", "bigQuery", Map(AttributeName.withDefaultNS("query_str") -> AttributeString("SELECT * FROM [bigquery-public-data:samples.shakespeare] LIMIT 1000"))),
- Entity("king", "bigQuery", Map(AttributeName.withDefaultNS("query_str") -> AttributeString("SELECT * FROM [bigquery-public-data:samples.king] LIMIT 1000")))
+ Entity(
+ "shakespeare",
+ "bigQuery",
+ Map(
+ AttributeName.withDefaultNS("query_str") -> AttributeString(
+ "SELECT * FROM [bigquery-public-data:samples.shakespeare] LIMIT 1000"
+ )
+ )
+ ),
+ Entity("king",
+ "bigQuery",
+ Map(
+ AttributeName.withDefaultNS("query_str") -> AttributeString(
+ "SELECT * FROM [bigquery-public-data:samples.king] LIMIT 1000"
+ )
+ )
+ )
)
val validBigQuerySetEntities = List(
- Entity("settest", "bigQuery_set", Map(AttributeName.withDefaultNS("bigQuerys") -> AttributeEntityReferenceList(Seq(
- AttributeEntityReference("bigQuery", "shakespeare"),
- AttributeEntityReference("bigQuery", "king")))))
+ Entity(
+ "settest",
+ "bigQuery_set",
+ Map(
+ AttributeName.withDefaultNS("bigQuerys") -> AttributeEntityReferenceList(
+ Seq(AttributeEntityReference("bigQuery", "shakespeare"), AttributeEntityReference("bigQuery", "king"))
+ )
+ )
+ )
)
val nonModelPairEntities = List(
- Entity("RomeoAndJuliet", "pair", Map(AttributeName.withDefaultNS("names") -> AttributeValueList(Seq(AttributeString("Romeo"), AttributeString("Juliet"))))),
- Entity("PB&J", "pair", Map(AttributeName.withDefaultNS("names") -> AttributeValueList(Seq(AttributeString("PeanutButter"), AttributeString("Jelly")))))
+ Entity("RomeoAndJuliet",
+ "pair",
+ Map(
+ AttributeName.withDefaultNS("names") -> AttributeValueList(
+ Seq(AttributeString("Romeo"), AttributeString("Juliet"))
+ )
+ )
+ ),
+ Entity("PB&J",
+ "pair",
+ Map(
+ AttributeName.withDefaultNS("names") -> AttributeValueList(
+ Seq(AttributeString("PeanutButter"), AttributeString("Jelly"))
+ )
+ )
+ )
)
val namespacedEntities = List(
- Entity("first", "study", Map(
- AttributeName.withDefaultNS("foo") -> AttributeString("default-foovalue"),
- AttributeName.fromDelimitedName("tag:study_id") -> AttributeString("first-id"),
- AttributeName.fromDelimitedName("tag:foo") -> AttributeString("namespaced-foovalue")
- )),
- Entity("second", "study", Map(
- AttributeName.withDefaultNS("foo") -> AttributeString("default-bar"),
- AttributeName.fromDelimitedName("tag:study_id") -> AttributeString("second-id"),
- AttributeName.fromDelimitedName("tag:foo") -> AttributeString("namespaced-bar")
- ))
+ Entity(
+ "first",
+ "study",
+ Map(
+ AttributeName.withDefaultNS("foo") -> AttributeString("default-foovalue"),
+ AttributeName.fromDelimitedName("tag:study_id") -> AttributeString("first-id"),
+ AttributeName.fromDelimitedName("tag:foo") -> AttributeString("namespaced-foovalue")
+ )
+ ),
+ Entity(
+ "second",
+ "study",
+ Map(
+ AttributeName.withDefaultNS("foo") -> AttributeString("default-bar"),
+ AttributeName.fromDelimitedName("tag:study_id") -> AttributeString("second-id"),
+ AttributeName.fromDelimitedName("tag:foo") -> AttributeString("namespaced-bar")
+ )
+ )
)
val nonModelBigQueryMetadata = Map(
- "bigQuery" -> EntityTypeMetadata(
- count = 2,
- idName = "bigQuery_id",
- attributeNames = Seq("query_str")))
+ "bigQuery" -> EntityTypeMetadata(count = 2, idName = "bigQuery_id", attributeNames = Seq("query_str"))
+ )
val nonModelBigQuerySetMetadata = Map(
- "bigQuery_set" -> EntityTypeMetadata(
- count = 1,
- idName = "bigQuery_set_id",
- attributeNames = Seq("bigQuerys")))
+ "bigQuery_set" -> EntityTypeMetadata(count = 1, idName = "bigQuery_set_id", attributeNames = Seq("bigQuerys"))
+ )
val nonModelPairMetadata = Map(
- "pair" -> EntityTypeMetadata(
- count = 2,
- idName = "pair_id",
- attributeNames = Seq("names")))
+ "pair" -> EntityTypeMetadata(count = 2, idName = "pair_id", attributeNames = Seq("names"))
+ )
val namespacedMetadata = Map(
- "study" -> EntityTypeMetadata(
- count = 2,
- idName = "study_id",
- attributeNames = Seq("foo", "tag:foo", "tag:study_id")))
+ "study" -> EntityTypeMetadata(count = 2,
+ idName = "study_id",
+ attributeNames = Seq("foo", "tag:foo", "tag:study_id")
+ )
+ )
}
-
/**
* Created by davidan on 9/28/16.
*
@@ -155,23 +199,29 @@ class MockRawlsDAO extends RawlsDAO {
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"ansingh",
- Some(Map(AttributeName("default", "a") -> AttributeBoolean(true),
- AttributeName("default", "b") -> AttributeNumber(1.23),
- AttributeName("default", "c") -> AttributeString(""),
- AttributeName("default", "d") -> AttributeString("escape quo\"te"),
- AttributeName("default", "e") -> AttributeString("this\thas\ttabs\tin\tit"),
- AttributeName("default", "f") -> AttributeValueList(Seq(
- AttributeString("v6"),
- AttributeNumber(999),
- AttributeBoolean(true)
- )))),
+ Some(
+ Map(
+ AttributeName("default", "a") -> AttributeBoolean(true),
+ AttributeName("default", "b") -> AttributeNumber(1.23),
+ AttributeName("default", "c") -> AttributeString(""),
+ AttributeName("default", "d") -> AttributeString("escape quo\"te"),
+ AttributeName("default", "e") -> AttributeString("this\thas\ttabs\tin\tit"),
+ AttributeName("default", "f") -> AttributeValueList(
+ Seq(
+ AttributeString("v6"),
+ AttributeNumber(999),
+ AttributeBoolean(true)
+ )
+ )
+ )
+ ),
false,
- Some(Set.empty), //authdomain
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -188,25 +238,31 @@ class MockRawlsDAO extends RawlsDAO {
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"ansingh",
- Some(Map(AttributeName("default", "a") -> AttributeBoolean(true),
- AttributeName("default", "b") -> AttributeNumber(1.23),
- AttributeName("default", "c") -> AttributeString(""),
- AttributeName("library", "published") -> AttributeBoolean(true),
- AttributeName("library", "projectName") -> AttributeString("testing"),
- AttributeName("default", "d") -> AttributeString("escape quo\"te"),
- AttributeName("default", "e") -> AttributeString("v1"),
- AttributeName("default", "f") -> AttributeValueList(Seq(
- AttributeString("v6"),
- AttributeNumber(999),
- AttributeBoolean(true)
- )))),
+ Some(
+ Map(
+ AttributeName("default", "a") -> AttributeBoolean(true),
+ AttributeName("default", "b") -> AttributeNumber(1.23),
+ AttributeName("default", "c") -> AttributeString(""),
+ AttributeName("library", "published") -> AttributeBoolean(true),
+ AttributeName("library", "projectName") -> AttributeString("testing"),
+ AttributeName("default", "d") -> AttributeString("escape quo\"te"),
+ AttributeName("default", "e") -> AttributeString("v1"),
+ AttributeName("default", "f") -> AttributeValueList(
+ Seq(
+ AttributeString("v6"),
+ AttributeNumber(999),
+ AttributeBoolean(true)
+ )
+ )
+ )
+ ),
false,
- Some(Set.empty), //authdomain
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -223,36 +279,48 @@ class MockRawlsDAO extends RawlsDAO {
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"ansingh",
- Some(Map(
- AttributeName.withLibraryNS("datasetName") -> AttributeString("name"),
- AttributeName.withLibraryNS("datasetVersion") -> AttributeString("v1.0"),
- AttributeName.withLibraryNS("datasetDescription") -> AttributeString("desc"),
- AttributeName.withLibraryNS("datasetCustodian") -> AttributeString("cust"),
- AttributeName.withLibraryNS("datasetDepositor") -> AttributeString("depo"),
- AttributeName.withLibraryNS("contactEmail") -> AttributeString("name@example.com"),
- AttributeName.withLibraryNS("datasetOwner") -> AttributeString("owner"),
- AttributeName.withLibraryNS("institute") -> AttributeValueList(Seq( AttributeString("inst"),AttributeString("it"),AttributeString("ute") )),
- AttributeName.withLibraryNS("indication") -> AttributeString("indic"),
- AttributeName.withLibraryNS("numSubjects") -> AttributeNumber(123),
- AttributeName.withLibraryNS("projectName") -> AttributeString("proj"),
- AttributeName.withLibraryNS("datatype") -> AttributeValueList(Seq( AttributeString("data"),AttributeString("type") )),
- AttributeName.withLibraryNS("dataCategory") -> AttributeValueList(Seq( AttributeString("data"),AttributeString("category") )),
- AttributeName.withLibraryNS("dataUseRestriction") -> AttributeString("dur"),
- AttributeName.withLibraryNS("studyDesign") -> AttributeString("study"),
- AttributeName.withLibraryNS("cellType") -> AttributeString("cell"),
- AttributeName.withLibraryNS("requiresExternalApproval") -> AttributeBoolean(false),
- AttributeName.withLibraryNS("useLimitationOption") -> AttributeString("orsp"),
- AttributeName.withLibraryNS("technology") -> AttributeValueList(Seq( AttributeString("is an optional"),AttributeString("array attribute") )),
- AttributeName.withLibraryNS("orsp") -> AttributeString("some orsp"),
- LibraryService.discoverableWSAttribute -> AttributeValueList(Seq( AttributeString("group1"),AttributeString("group2") ))
- )),
+ Some(
+ Map(
+ AttributeName.withLibraryNS("datasetName") -> AttributeString("name"),
+ AttributeName.withLibraryNS("datasetVersion") -> AttributeString("v1.0"),
+ AttributeName.withLibraryNS("datasetDescription") -> AttributeString("desc"),
+ AttributeName.withLibraryNS("datasetCustodian") -> AttributeString("cust"),
+ AttributeName.withLibraryNS("datasetDepositor") -> AttributeString("depo"),
+ AttributeName.withLibraryNS("contactEmail") -> AttributeString("name@example.com"),
+ AttributeName.withLibraryNS("datasetOwner") -> AttributeString("owner"),
+ AttributeName.withLibraryNS("institute") -> AttributeValueList(
+ Seq(AttributeString("inst"), AttributeString("it"), AttributeString("ute"))
+ ),
+ AttributeName.withLibraryNS("indication") -> AttributeString("indic"),
+ AttributeName.withLibraryNS("numSubjects") -> AttributeNumber(123),
+ AttributeName.withLibraryNS("projectName") -> AttributeString("proj"),
+ AttributeName.withLibraryNS("datatype") -> AttributeValueList(
+ Seq(AttributeString("data"), AttributeString("type"))
+ ),
+ AttributeName.withLibraryNS("dataCategory") -> AttributeValueList(
+ Seq(AttributeString("data"), AttributeString("category"))
+ ),
+ AttributeName.withLibraryNS("dataUseRestriction") -> AttributeString("dur"),
+ AttributeName.withLibraryNS("studyDesign") -> AttributeString("study"),
+ AttributeName.withLibraryNS("cellType") -> AttributeString("cell"),
+ AttributeName.withLibraryNS("requiresExternalApproval") -> AttributeBoolean(false),
+ AttributeName.withLibraryNS("useLimitationOption") -> AttributeString("orsp"),
+ AttributeName.withLibraryNS("technology") -> AttributeValueList(
+ Seq(AttributeString("is an optional"), AttributeString("array attribute"))
+ ),
+ AttributeName.withLibraryNS("orsp") -> AttributeString("some orsp"),
+ LibraryService.discoverableWSAttribute -> AttributeValueList(
+ Seq(AttributeString("group1"), AttributeString("group2"))
+ )
+ )
+ ),
false,
- Some(Set.empty), //authdomain
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -265,10 +333,30 @@ class MockRawlsDAO extends RawlsDAO {
WorkspaceState.Ready
)
- val rawlsWorkspaceResponseWithAttributes = WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(false), canCompute = Some(true), catalog = Some(false), rawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- val publishedRawlsWorkspaceResponseWithAttributes = WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(false), canCompute = Some(true), catalog = Some(false), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
+ val rawlsWorkspaceResponseWithAttributes = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ rawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ val publishedRawlsWorkspaceResponseWithAttributes = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
- def newWorkspace: WorkspaceDetails = {
+ def newWorkspace: WorkspaceDetails =
WorkspaceDetails(
namespace = "namespace",
name = "name",
@@ -290,65 +378,249 @@ class MockRawlsDAO extends RawlsDAO {
cloudPlatform = None,
state = WorkspaceState.Ready
)
- }
override def isAdmin(userInfo: UserInfo): Future[Boolean] = Future.successful(false)
- override def isLibraryCurator(userInfo: UserInfo): Future[Boolean] = {
+ override def isLibraryCurator(userInfo: UserInfo): Future[Boolean] =
Future.successful(userInfo.id == "curator")
- }
- override def getBucketUsage(ns: String, name: String)(implicit userInfo: WithAccessToken): Future[BucketUsageResponse] = {
+ override def getBucketUsage(ns: String, name: String)(implicit
+ userInfo: WithAccessToken
+ ): Future[BucketUsageResponse] =
Future.successful(BucketUsageResponse(BigInt("256000000000"), Option(new DateTime(0))))
- }
- override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] = {
+ override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] =
ns match {
- case "projectowner" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.ProjectOwner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "reader" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Read), canShare = Some(false), canCompute = Some(true), catalog = Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
+ case "projectowner" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.ProjectOwner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "reader" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Read),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
case "attributes" => Future(rawlsWorkspaceResponseWithAttributes)
- case "publishedreader" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Read), canShare = Some(false), canCompute = Some(true), catalog = Some(false), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "publishedreadercatalog" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Read), canShare = Some(false), canCompute = Some(true), catalog = Some(true), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "publishedwriter" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Write), canShare = Some(false), canCompute = Some(true), catalog = Some(false), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "unpublishedwriter" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Write), canShare = Some(false), canCompute = Some(true), catalog = Some(false), rawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "publishedowner" => Future.successful(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "libraryValid" => Future.successful(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), unpublishedRawlsWorkspaceLibraryValid, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "usBucketWorkspace" => Future.successful(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace.copy(bucketName = "usBucket"), Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "europeWest1BucketWorkspace" => Future.successful(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace.copy(bucketName = "europeWest1Bucket"), Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case _ => Future.successful(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
+ case "publishedreader" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Read),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "publishedreadercatalog" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Read),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(true),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "publishedwriter" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Write),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "unpublishedwriter" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Write),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ rawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "publishedowner" =>
+ Future.successful(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "libraryValid" =>
+ Future.successful(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ unpublishedRawlsWorkspaceLibraryValid,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "usBucketWorkspace" =>
+ Future.successful(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace.copy(bucketName = "usBucket"),
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "europeWest1BucketWorkspace" =>
+ Future.successful(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace.copy(bucketName = "europeWest1Bucket"),
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case _ =>
+ Future.successful(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
}
- }
- override def getWorkspaces(implicit userInfo: WithAccessToken): Future[Seq[WorkspaceListResponse]] = {
- Future.successful(Seq(WorkspaceListResponse(WorkspaceAccessLevels.ProjectOwner, Some(true), Some(true), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Read, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Owner, Some(true), Some(true), rawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Owner, Some(true), Some(true), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Owner, Some(true), Some(true), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false)))
- }
+ override def getWorkspaces(implicit userInfo: WithAccessToken): Future[Seq[WorkspaceListResponse]] =
+ Future.successful(
+ Seq(
+ WorkspaceListResponse(WorkspaceAccessLevels.ProjectOwner,
+ Some(true),
+ Some(true),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.Read,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(
+ WorkspaceAccessLevels.Owner,
+ Some(true),
+ Some(true),
+ rawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(
+ WorkspaceAccessLevels.Owner,
+ Some(true),
+ Some(true),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.Owner,
+ Some(true),
+ Some(true),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ )
+ )
+ )
- override def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
+ override def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
if (name == WorkspaceApiServiceSpec.publishedWorkspace.name) {
Future.successful(publishedRawlsWorkspaceWithAttributes)
} else {
Future.successful(newWorkspace)
}
- }
- override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
- Future.successful((newWorkspace))
- }
+ override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
+ Future.successful(newWorkspace)
- override def getAllLibraryPublishedWorkspaces(implicit userToken: WithAccessToken): Future[Seq[WorkspaceDetails]] = Future.successful(Seq.empty[WorkspaceDetails])
+ override def getAllLibraryPublishedWorkspaces(implicit userToken: WithAccessToken): Future[Seq[WorkspaceDetails]] =
+ Future.successful(Seq.empty[WorkspaceDetails])
override def getWorkspaceACL(ns: String, name: String)(implicit userToken: WithAccessToken) =
Future.successful(WorkspaceACL(Map.empty[String, AccessEntry]))
- override def patchWorkspaceACL(ns: String, name: String, aclUpdates: Seq[WorkspaceACLUpdate], inviteUsersNotFound: Boolean)(implicit userToken: WithAccessToken): Future[WorkspaceACLUpdateResponseList] = {
+ override def patchWorkspaceACL(ns: String,
+ name: String,
+ aclUpdates: Seq[WorkspaceACLUpdate],
+ inviteUsersNotFound: Boolean
+ )(implicit userToken: WithAccessToken): Future[WorkspaceACLUpdateResponseList] =
Future.successful(WorkspaceACLUpdateResponseList(aclUpdates.toSet, aclUpdates.toSet, aclUpdates.toSet))
- }
- override def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit userInfo: UserInfo): Future[Seq[Entity]] = {
+ override def fetchAllEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String)(implicit
+ userInfo: UserInfo
+ ): Future[Seq[Entity]] =
if (workspaceName == "invalid") {
Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.NotFound, "Workspace not found")))
} else {
@@ -356,15 +628,21 @@ class MockRawlsDAO extends RawlsDAO {
case "sample" =>
val sampleAtts = Map(
AttributeName.withDefaultNS("sample_type") -> AttributeString("Blood"),
- AttributeName.withDefaultNS("ref_fasta") -> AttributeString("gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"),
- AttributeName.withDefaultNS("ref_dict") -> AttributeString("gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.dict"),
+ AttributeName.withDefaultNS("ref_fasta") -> AttributeString(
+ "gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"
+ ),
+ AttributeName.withDefaultNS("ref_dict") -> AttributeString(
+ "gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.dict"
+ ),
AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference("participant", "subject_HCC1143")
)
Future.successful(List(Entity("sample_01", "sample", sampleAtts)))
case "participant" =>
val participantAtts = Map(
AttributeName.withDefaultNS("tumor_platform") -> AttributeString("illumina"),
- AttributeName.withDefaultNS("ref_fasta") -> AttributeString("gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"),
+ AttributeName.withDefaultNS("ref_fasta") -> AttributeString(
+ "gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"
+ ),
AttributeName.withDefaultNS("tumor_strip_unpaired") -> AttributeString("TRUE")
)
Future.successful(List(Entity("subject_HCC1143", "participant", participantAtts)))
@@ -372,20 +650,34 @@ class MockRawlsDAO extends RawlsDAO {
Future.successful(Seq.empty)
}
}
- }
- override def queryEntitiesOfType(workspaceNamespace: String, workspaceName: String, entityType: String, query: EntityQuery)(implicit userToken: UserInfo): Future[EntityQueryResponse] = {
+ override def queryEntitiesOfType(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ query: EntityQuery
+ )(implicit userToken: UserInfo): Future[EntityQueryResponse] =
if (workspaceName == "exception") {
- Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Exception getting workspace")))
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Exception getting workspace")
+ )
+ )
} else if (workspaceName == "page3exception" && query.page == 3) {
- Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.NotFound, s"Exception querying for entities on page ${query.page}")))
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.NotFound, s"Exception querying for entities on page ${query.page}")
+ )
+ )
} else if (workspaceName == "invalid") {
Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.NotFound, "Workspace not found")))
} else if (workspaceName == "largeSampleSet") {
val sampleSetRange = generateSampleSetsInRange(query.page * query.pageSize)
val queryResponse: EntityQueryResponse = EntityQueryResponse(
parameters = query,
- resultMetadata = EntityQueryResultMetadata(unfilteredCount = largeSampleSetSize, filteredCount = largeSampleSetSize, filteredPageCount = largeSampleSetSize/query.pageSize),
+ resultMetadata = EntityQueryResultMetadata(unfilteredCount = largeSampleSetSize,
+ filteredCount = largeSampleSetSize,
+ filteredPageCount = largeSampleSetSize / query.pageSize
+ ),
results = sampleSetRange
)
Future.successful(queryResponse)
@@ -393,7 +685,10 @@ class MockRawlsDAO extends RawlsDAO {
val sampleRange = generateSamplesInRange(query.page * query.pageSize)
val queryResponse: EntityQueryResponse = EntityQueryResponse(
parameters = query,
- resultMetadata = EntityQueryResultMetadata(unfilteredCount = largeSampleSize, filteredCount = largeSampleSize, filteredPageCount = largeSampleSize/query.pageSize),
+ resultMetadata = EntityQueryResultMetadata(unfilteredCount = largeSampleSize,
+ filteredCount = largeSampleSize,
+ filteredPageCount = largeSampleSize / query.pageSize
+ ),
results = sampleRange
)
Future.successful(queryResponse)
@@ -428,16 +723,24 @@ class MockRawlsDAO extends RawlsDAO {
} else {
val queryResponse: EntityQueryResponse = EntityQueryResponse(
parameters = query,
- resultMetadata = EntityQueryResultMetadata(unfilteredCount = validSampleEntities.size, filteredCount = validSampleEntities.size, filteredPageCount = 1),
+ resultMetadata = EntityQueryResultMetadata(unfilteredCount = validSampleEntities.size,
+ filteredCount = validSampleEntities.size,
+ filteredPageCount = 1
+ ),
results = validSampleEntities
)
Future.successful(queryResponse)
}
- }
- override def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit userToken: UserInfo): Future[Map[String, EntityTypeMetadata]] = {
+ override def getEntityTypes(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: UserInfo
+ ): Future[Map[String, EntityTypeMetadata]] =
if (workspaceName == "exception") {
- Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.InternalServerError, "Exception getting workspace")))
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(StatusCodes.InternalServerError, "Exception getting workspace")
+ )
+ )
} else if (workspaceName == "invalid") {
Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(StatusCodes.NotFound, "Workspace not found")))
} else if (workspaceName == "largeSampleSet") {
@@ -455,39 +758,83 @@ class MockRawlsDAO extends RawlsDAO {
} else {
Future.successful(validEntitiesMetadata)
}
- }
- override def getCatalog(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken) = {
+ override def getCatalog(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken) =
Future.successful(Seq(WorkspaceCatalog("user@gmail.com", true)))
- }
- override def patchCatalog(workspaceNamespace: String, workspaceName: String, updates: Seq[WorkspaceCatalog])(implicit userToken: WithAccessToken) = {
- val responses = updates.map(cat => WorkspaceCatalogResponse(cat.email.substring(0, cat.email.indexOf("@"))+"id", cat.catalog))
+ override def patchCatalog(workspaceNamespace: String, workspaceName: String, updates: Seq[WorkspaceCatalog])(implicit
+ userToken: WithAccessToken
+ ) = {
+ val responses =
+ updates.map(cat => WorkspaceCatalogResponse(cat.email.substring(0, cat.email.indexOf("@")) + "id", cat.catalog))
Future.successful(WorkspaceCatalogUpdateResponseList(responses, Seq.empty))
}
-
- override def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken) =
+ override def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ) =
Future.successful(Seq.empty[AgoraConfigurationShort])
def status: Future[SubsystemStatus] = Future(SubsystemStatus(true, None))
- def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Option[String]] = {
+ def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Option[String]] =
Future.successful(Some("Your Google bucket 'bucketId' will be deleted within 24h."))
- }
- override def getProjects(implicit userToken: WithAccessToken): Future[Seq[Project.RawlsBillingProjectMembership]] = Future(Seq.empty[Project.RawlsBillingProjectMembership])
+ override def getProjects(implicit userToken: WithAccessToken): Future[Seq[Project.RawlsBillingProjectMembership]] =
+ Future(Seq.empty[Project.RawlsBillingProjectMembership])
- override def getProjectMembers(projectId: String)(implicit userToken: WithAccessToken): Future[Seq[RawlsBillingProjectMember]] =
+ override def getProjectMembers(projectId: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[RawlsBillingProjectMember]] =
Future(Seq.empty)
- override def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean] = Future(true)
-
- override def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit userToken: WithAccessToken): Future[Boolean] = Future(true)
-
- override def batchUpsertEntities(workspaceNamespace: String, workspaceName: String, entityType: String, upserts: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse] = Future.successful(HttpResponse(StatusCodes.NoContent))
-
- override def batchUpdateEntities(workspaceNamespace: String, workspaceName: String, entityType: String, updates: Seq[EntityUpdateDefinition])(implicit userToken: UserInfo): Future[HttpResponse] = Future.successful(HttpResponse(StatusCodes.NoContent))
-
- override def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = Future.successful(WorkspaceDetails(cloneRequest.namespace, cloneRequest.name, "id", "bucket", Some("workflow-collection-id"), DateTime.now(), DateTime.now(), "test-user", Some(cloneRequest.attributes), false, cloneRequest.authorizationDomain, WorkspaceVersions.V2, GoogleProjectId("googleProject"), Some(GoogleProjectNumber("googleProjectNumber")), Some(RawlsBillingAccountName("billingAccount")), None, None, Option(DateTime.now()), None, None, WorkspaceState.Ready))
+ override def addUserToBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean] = Future(true)
+
+ override def removeUserFromBillingProject(projectId: String, role: ProjectRole, email: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Boolean] = Future(true)
+
+ override def batchUpsertEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ upserts: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse] = Future.successful(HttpResponse(StatusCodes.NoContent))
+
+ override def batchUpdateEntities(workspaceNamespace: String,
+ workspaceName: String,
+ entityType: String,
+ updates: Seq[EntityUpdateDefinition]
+ )(implicit userToken: UserInfo): Future[HttpResponse] = Future.successful(HttpResponse(StatusCodes.NoContent))
+
+ override def cloneWorkspace(workspaceNamespace: String, workspaceName: String, cloneRequest: WorkspaceRequest)(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] = Future.successful(
+ WorkspaceDetails(
+ cloneRequest.namespace,
+ cloneRequest.name,
+ "id",
+ "bucket",
+ Some("workflow-collection-id"),
+ DateTime.now(),
+ DateTime.now(),
+ "test-user",
+ Some(cloneRequest.attributes),
+ false,
+ cloneRequest.authorizationDomain,
+ WorkspaceVersions.V2,
+ GoogleProjectId("googleProject"),
+ Some(GoogleProjectNumber("googleProjectNumber")),
+ Some(RawlsBillingAccountName("billingAccount")),
+ None,
+ None,
+ Option(DateTime.now()),
+ None,
+ None,
+ WorkspaceState.Ready
+ )
+ )
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockResearchPurposeSupport.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockResearchPurposeSupport.scala
index 9a5966229..a4a9880ad 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockResearchPurposeSupport.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockResearchPurposeSupport.scala
@@ -5,7 +5,9 @@ import org.elasticsearch.index.query.BoolQueryBuilder
import org.elasticsearch.index.query.QueryBuilders.{boolQuery, termQuery}
class MockResearchPurposeSupport extends ResearchPurposeSupport {
- override def researchPurposeFilters(researchPurpose: ResearchPurpose, makeAttributeName: String => String): BoolQueryBuilder = {
+ override def researchPurposeFilters(researchPurpose: ResearchPurpose,
+ makeAttributeName: String => String
+ ): BoolQueryBuilder = {
val query = boolQuery
researchPurpose.DS.foreach { id =>
query.should(termQuery("structuredUseRestriction.DS", id.numericId))
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSamDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSamDAO.scala
index 9945a5740..6b33fafb6 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSamDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSamDAO.scala
@@ -4,11 +4,29 @@ import akka.http.scaladsl.model.StatusCodes
import org.broadinstitute.dsde.firecloud.{FireCloudException, FireCloudExceptionWithErrorReport}
import org.broadinstitute.dsde.firecloud.HealthChecks.termsOfServiceUrl
import org.broadinstitute.dsde.firecloud.model.ManagedGroupRoles.ManagedGroupRole
-import org.broadinstitute.dsde.firecloud.model.{AccessToken, FireCloudManagedGroupMembership, RegistrationInfo, RegistrationInfoV2, SamResource, SamUserResponse, UserIdInfo, UserInfo, WithAccessToken, WorkbenchEnabled, WorkbenchUserInfo}
+import org.broadinstitute.dsde.firecloud.model.{
+ AccessToken,
+ FireCloudManagedGroupMembership,
+ RegistrationInfo,
+ RegistrationInfoV2,
+ SamResource,
+ SamUserResponse,
+ UserIdInfo,
+ UserInfo,
+ WithAccessToken,
+ WorkbenchEnabled,
+ WorkbenchUserInfo
+}
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
import org.broadinstitute.dsde.rawls.model.{ErrorReport, RawlsUserEmail}
import org.broadinstitute.dsde.workbench.model.google.GoogleProject
-import org.broadinstitute.dsde.workbench.model.{AzureB2CId, GoogleSubjectId, WorkbenchEmail, WorkbenchGroupName, WorkbenchUserId}
+import org.broadinstitute.dsde.workbench.model.{
+ AzureB2CId,
+ GoogleSubjectId,
+ WorkbenchEmail,
+ WorkbenchGroupName,
+ WorkbenchUserId
+}
import java.time.Instant
import scala.concurrent.Future
@@ -18,27 +36,48 @@ import scala.concurrent.Future
*/
class MockSamDAO extends SamDAO {
var groups: Map[WorkbenchGroupName, Set[WorkbenchEmail]] = Map(
- WorkbenchGroupName("TCGA-dbGaP-Authorized") -> Set(WorkbenchEmail("tcga-linked"), WorkbenchEmail("tcga-linked-no-expire-date"), WorkbenchEmail("tcga-linked-expired"), WorkbenchEmail("tcga-linked-user-invalid-expire-date"), WorkbenchEmail("tcga-and-target-linked"), WorkbenchEmail("tcga-and-target-linked-expired")),
- WorkbenchGroupName("TARGET-dbGaP-Authorized") -> Set(WorkbenchEmail("target-linked"), WorkbenchEmail("target-linked-expired"), WorkbenchEmail("tcga-and-target-linked"), WorkbenchEmail("tcga-and-target-linked-expired"))
+ WorkbenchGroupName("TCGA-dbGaP-Authorized") -> Set(
+ WorkbenchEmail("tcga-linked"),
+ WorkbenchEmail("tcga-linked-no-expire-date"),
+ WorkbenchEmail("tcga-linked-expired"),
+ WorkbenchEmail("tcga-linked-user-invalid-expire-date"),
+ WorkbenchEmail("tcga-and-target-linked"),
+ WorkbenchEmail("tcga-and-target-linked-expired")
+ ),
+ WorkbenchGroupName("TARGET-dbGaP-Authorized") -> Set(
+ WorkbenchEmail("target-linked"),
+ WorkbenchEmail("target-linked-expired"),
+ WorkbenchEmail("tcga-and-target-linked"),
+ WorkbenchEmail("tcga-and-target-linked-expired")
+ )
)
- override def registerUser(termsOfService: Option[String])(implicit userInfo: WithAccessToken): Future[RegistrationInfo] =
+ override def registerUser(
+ termsOfService: Option[String]
+ )(implicit userInfo: WithAccessToken): Future[RegistrationInfo] =
if (termsOfService.contains(termsOfServiceUrl)) enabledUserInfo
- else Future.failed(new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.Forbidden,
- s"You must accept the Terms of Service in order to register.")))
+ else
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(errorReport =
+ ErrorReport(StatusCodes.Forbidden, s"You must accept the Terms of Service in order to register.")
+ )
+ )
override def getRegistrationStatus(implicit userInfo: WithAccessToken): Future[RegistrationInfo] =
if (userInfo.accessToken.token.equals("new")) unknownUserInfo
else enabledUserInfo
- override def getUserIds(email: RawlsUserEmail)(implicit userInfo: WithAccessToken): Future[UserIdInfo] = customUserId(email.value)
+ override def getUserIds(email: RawlsUserEmail)(implicit userInfo: WithAccessToken): Future[UserIdInfo] = customUserId(
+ email.value
+ )
- override def listWorkspaceResources(implicit userInfo: WithAccessToken): Future[Seq[SamResource.UserPolicy]] = Future.successful(Seq.empty)
+ override def listWorkspaceResources(implicit userInfo: WithAccessToken): Future[Seq[SamResource.UserPolicy]] =
+ Future.successful(Seq.empty)
override def status: Future[SubsystemStatus] = Future.successful(SubsystemStatus(ok = true, messages = None))
override def createGroup(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] = {
- val groupWithMembers = groupName -> Set(WorkbenchEmail(userInfo.accessToken.token)) //ugh
+    // hack: the caller's access token doubles as the group's sole member email
+    val groupWithMembers = groupName -> Set(WorkbenchEmail(userInfo.accessToken.token))
this.synchronized { groups = groups + groupWithMembers }
Future.successful(())
@@ -56,27 +95,37 @@ class MockSamDAO extends SamDAO {
Future.successful(groupMemberships.toList)
}
- override def getGroupEmail(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[WorkbenchEmail] = {
+ override def getGroupEmail(groupName: WorkbenchGroupName)(implicit
+ userInfo: WithAccessToken
+ ): Future[WorkbenchEmail] =
Future.successful(WorkbenchEmail(groupName.value))
- }
- override def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit userInfo: WithAccessToken): Future[List[WorkbenchEmail]] = Future.successful(groups(groupName).toList)
+ override def listGroupPolicyEmails(groupName: WorkbenchGroupName, policyName: ManagedGroupRole)(implicit
+ userInfo: WithAccessToken
+ ): Future[List[WorkbenchEmail]] = Future.successful(groups(groupName).toList)
- override def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def addGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit] = {
val groupWithNewMembers = groupName -> (groups(groupName).filterNot(_.equals(email)) ++ Set(email))
this.synchronized { groups = groups + groupWithNewMembers }
Future.successful(())
}
- override def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def removeGroupMember(groupName: WorkbenchGroupName, role: ManagedGroupRole, email: WorkbenchEmail)(implicit
+ userInfo: WithAccessToken
+ ): Future[Unit] = {
val groupWithNewMembers = groupName -> (groups(groupName).filterNot(_.equals(email)) -- Set(email))
this.synchronized { groups = groups + groupWithNewMembers }
Future.successful(())
}
- override def overwriteGroupMembers(groupName: WorkbenchGroupName, role: ManagedGroupRole, memberList: List[WorkbenchEmail])(implicit userInfo: WithAccessToken): Future[Unit] = {
+ override def overwriteGroupMembers(groupName: WorkbenchGroupName,
+ role: ManagedGroupRole,
+ memberList: List[WorkbenchEmail]
+ )(implicit userInfo: WithAccessToken): Future[Unit] = {
val groupWithNewMembers = groupName -> memberList.toSet
this.synchronized { groups = groups + groupWithNewMembers }
@@ -97,51 +146,61 @@ class MockSamDAO extends SamDAO {
}
private val enabledUserInfo = Future.successful {
- RegistrationInfo(
- WorkbenchUserInfo(userSubjectId = "foo", userEmail = "bar"),
- WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true))
+ RegistrationInfo(WorkbenchUserInfo(userSubjectId = "foo", userEmail = "bar"),
+ WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true)
+ )
}
private val unknownUserInfo = Future.successful {
- RegistrationInfo(
- WorkbenchUserInfo(userSubjectId = "foo", userEmail = "bar"),
- WorkbenchEnabled(google = false, ldap = false, allUsersGroup = false))
+ RegistrationInfo(WorkbenchUserInfo(userSubjectId = "foo", userEmail = "bar"),
+ WorkbenchEnabled(google = false, ldap = false, allUsersGroup = false)
+ )
}
private def customUserInfo(email: String) = Future.successful {
- RegistrationInfo(
- WorkbenchUserInfo(email, email),
- WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true))
+ RegistrationInfo(WorkbenchUserInfo(email, email),
+ WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true)
+ )
}
private def customUserId(email: String) = Future.successful {
UserIdInfo(email, email, email)
}
- override def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean] = {
+ override def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean] =
Future.successful(groups.getOrElse(groupName, Set.empty).contains(WorkbenchEmail(userInfo.userEmail)))
- }
- override def requestGroupAccess(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] = {
- Future.successful(()) //not really a good way to mock this at the moment, TODO
- }
+ override def requestGroupAccess(groupName: WorkbenchGroupName)(implicit userInfo: WithAccessToken): Future[Unit] =
+    Future.successful(()) // TODO: no meaningful way to mock group access requests at the moment
- override def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(implicit userInfo: WithAccessToken) = Future.successful(()) //todo
+ override def addPolicyMember(resourceTypeName: String, resourceId: String, policyName: String, email: WorkbenchEmail)(
+ implicit userInfo: WithAccessToken
+  ) = Future.successful(()) // TODO: policy membership is not tracked by this mock
override def getPetServiceAccountTokenForUser(user: WithAccessToken, scopes: Seq[String]): Future[AccessToken] =
Future.failed(new FireCloudException("mock not implemented"))
- override def getPetServiceAccountKeyForUser(user: WithAccessToken, project: GoogleProject): Future[String] = Future.successful("""{"fake":"key""}""")
+ override def getPetServiceAccountKeyForUser(user: WithAccessToken, project: GoogleProject): Future[String] =
+ Future.successful("""{"fake":"key""}""")
- override def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(implicit userInfo: WithAccessToken): Future[Unit] = Future.successful(())
+ override def setPolicyPublic(resourceTypeName: String, resourceId: String, policyName: String, public: Boolean)(
+ implicit userInfo: WithAccessToken
+ ): Future[Unit] = Future.successful(())
- override def registerUserSelf(acceptsTermsOfService: Boolean)(implicit userInfo: WithAccessToken): Future[SamUserResponse] = {
+ override def registerUserSelf(
+ acceptsTermsOfService: Boolean
+ )(implicit userInfo: WithAccessToken): Future[SamUserResponse] =
if (acceptsTermsOfService) {
registeredUser
} else {
- Future.failed(new FireCloudExceptionWithErrorReport(new ErrorReport("sam", "invalid", Some(StatusCodes.BadRequest), Seq.empty, Seq.empty, None)))
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ new ErrorReport("sam", "invalid", Some(StatusCodes.BadRequest), Seq.empty, Seq.empty, None)
+ )
+ )
}
- }
- override def getUsersForIds(samUserIds: Seq[WorkbenchUserId])(implicit userInfo: WithAccessToken): Future[Seq[WorkbenchUserInfo]] = Future.successful(Seq())
+ override def getUsersForIds(samUserIds: Seq[WorkbenchUserId])(implicit
+ userInfo: WithAccessToken
+ ): Future[Seq[WorkbenchUserInfo]] = Future.successful(Seq())
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSearchDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSearchDAO.scala
index 765293fe8..f1b5176cd 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSearchDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockSearchDAO.scala
@@ -26,22 +26,26 @@ class MockSearchDAO extends SearchDAO {
var autocompleteInvoked = new AtomicBoolean(false)
var populateSuggestInvoked = new AtomicBoolean(false)
- override def bulkIndex(docs: Seq[Document], refresh:Boolean = false) = LibraryBulkIndexResponse(0, false, Map.empty)
+ override def bulkIndex(docs: Seq[Document], refresh: Boolean = false) = LibraryBulkIndexResponse(0, false, Map.empty)
- override def indexDocument(doc: Document) = {
+ override def indexDocument(doc: Document) =
indexDocumentInvoked.set(true)
- }
- override def deleteDocument(id: String) = {
+ override def deleteDocument(id: String) =
deleteDocumentInvoked.set(true)
- }
- override def findDocuments(librarySearchParams: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse] = {
+ override def findDocuments(librarySearchParams: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse] = {
findDocumentsInvoked.set(true)
Future(LibrarySearchResponse(librarySearchParams, 0, Seq[JsValue](), Seq[LibraryAggregationResponse]()))
}
- override def suggestionsFromAll(librarySearchParams: LibrarySearchParams, groups: Seq[String], workspacePolicyMap: Map[String, UserPolicy]): Future[LibrarySearchResponse] = {
+ override def suggestionsFromAll(librarySearchParams: LibrarySearchParams,
+ groups: Seq[String],
+ workspacePolicyMap: Map[String, UserPolicy]
+ ): Future[LibrarySearchResponse] = {
autocompleteInvoked.set(true)
Future(LibrarySearchResponse(librarySearchParams, 0, Seq[JsValue](), Seq[LibraryAggregationResponse]()))
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShareLogDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShareLogDAO.scala
index d87e071a2..0e8c81c2c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShareLogDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShareLogDAO.scala
@@ -10,25 +10,28 @@ class MockShareLogDAO extends ShareLogDAO {
private val errorMessage = "unit test exception: override in a class specific to test"
- override def logShare(userId: String, sharee: String, shareType: ShareType.Value): Share = throw new Exception(errorMessage)
+ override def logShare(userId: String, sharee: String, shareType: ShareType.Value): Share = throw new Exception(
+ errorMessage
+ )
- override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] = throw new Exception(errorMessage)
+ override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] =
+ throw new Exception(errorMessage)
override def getShare(share: Share): Share = throw new Exception(errorMessage)
- override def getShares(userId: String, shareType: Option[ShareType.Value] = None): Seq[Share] = throw new Exception(errorMessage)
+ override def getShares(userId: String, shareType: Option[ShareType.Value] = None): Seq[Share] = throw new Exception(
+ errorMessage
+ )
}
class ShareLogApiServiceSpecShareLogDAO extends MockShareLogDAO {
- override def getShares(userId: String, shareType: Option[ShareType.Value]): Seq[Share] = {
+ override def getShares(userId: String, shareType: Option[ShareType.Value]): Seq[Share] =
ElasticSearchShareLogDAOSpecFixtures.fixtureShares
- }
}
class WorkspaceApiServiceSpecShareLogDAO extends MockShareLogDAO {
- override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] = {
- sharees map{ sharee => Share(userId, sharee, shareType)}
- }
-}
\ No newline at end of file
+ override def logShares(userId: String, sharees: Seq[String], shareType: ShareType.Value): Seq[Share] =
+ sharees map { sharee => Share(userId, sharee, shareType) }
+}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShibbolethDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShibbolethDAO.scala
index 2dfb9b99e..212179731 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShibbolethDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockShibbolethDAO.scala
@@ -3,8 +3,8 @@ package org.broadinstitute.dsde.firecloud.dataaccess
import scala.concurrent.Future
class MockShibbolethDAO extends ShibbolethDAO {
- val publicKey = "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsDPAkAwpWiO2659gPsIj\nzx9IypuiInn2F4IaCCJSxtjqRNw5g6QPJeMVjmnn3jT8CCMzvoOIOq8n7rmyog/p\npjJpq4AcVA0GjV8Nz7cWF/VwR+e/mN5CGvY4OfnCTBi5PUmywGLZMcJNhcbnka69\nexL18WwnM0d6/A/LYcmCQcI+YuakDksGAdrOn74WOrKQFa78SVOnB6Mfpf65rmu7\nTMQ66JBUuM2vIW+P1p4//+9MBSKUoGyXkbOsykBc1XYn/lLRoDCf2onYDTGjdILh\n7eSXdi6+VzgQ7j3hdkSRSj+mN2Vmq/AEWHd1lc/OQDMcRcEnRPyhwny9VW0gehyt\nWwIDAQAB\n-----END PUBLIC KEY-----"
- override def getPublicKey(): Future[String] = {
+ val publicKey =
+ "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsDPAkAwpWiO2659gPsIj\nzx9IypuiInn2F4IaCCJSxtjqRNw5g6QPJeMVjmnn3jT8CCMzvoOIOq8n7rmyog/p\npjJpq4AcVA0GjV8Nz7cWF/VwR+e/mN5CGvY4OfnCTBi5PUmywGLZMcJNhcbnka69\nexL18WwnM0d6/A/LYcmCQcI+YuakDksGAdrOn74WOrKQFa78SVOnB6Mfpf65rmu7\nTMQ66JBUuM2vIW+P1p4//+9MBSKUoGyXkbOsykBc1XYn/lLRoDCf2onYDTGjdILh\n7eSXdi6+VzgQ7j3hdkSRSj+mN2Vmq/AEWHd1lc/OQDMcRcEnRPyhwny9VW0gehyt\nWwIDAQAB\n-----END PUBLIC KEY-----"
+ override def getPublicKey(): Future[String] =
Future.successful(publicKey)
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockThurloeDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockThurloeDAO.scala
index 4ed3af2be..898849b00 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockThurloeDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/dataaccess/MockThurloeDAO.scala
@@ -3,7 +3,13 @@ package org.broadinstitute.dsde.firecloud.dataaccess
import java.util.NoSuchElementException
import org.broadinstitute.dsde.firecloud.dataaccess.MockThurloeDAO._
-import org.broadinstitute.dsde.firecloud.model.{BasicProfile, FireCloudKeyValue, ProfileWrapper, UserInfo, WithAccessToken}
+import org.broadinstitute.dsde.firecloud.model.{
+ BasicProfile,
+ FireCloudKeyValue,
+ ProfileWrapper,
+ UserInfo,
+ WithAccessToken
+}
import org.broadinstitute.dsde.firecloud.utils.DateUtils
import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus
@@ -57,96 +63,104 @@ class MockThurloeDAO extends ThurloeDAO {
Map(
NORMAL_USER -> baseProfile,
"foo" -> baseProfile, // to match registeredUser, enabledUserInfo, unknownUserInfo from MockSamDao
- //TCGA users
- TCGA_LINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_LINKED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("tcga-user")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString)))
- ),
- TCGA_LINKED_NO_EXPIRE_DATE -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_NO_EXPIRE_DATE)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user")))
- ),
- TCGA_LINKED_EXPIRED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_EXPIRED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString)))
+ // TCGA users
+ TCGA_LINKED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TCGA_LINKED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("tcga-user")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString))
+ )
),
- TCGA_LINKED_INVALID_EXPIRE_DATE -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_INVALID_EXPIRE_DATE)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-dev")),
- FireCloudKeyValue(Some("linkExpireTime"), Some("expiration-dates-cant-be-words!")))
+ TCGA_LINKED_NO_EXPIRE_DATE -> baseProfile.++(
+ Set(FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_NO_EXPIRE_DATE)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user"))
+ )
),
- TCGA_UNLINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_UNLINKED)))
+ TCGA_LINKED_EXPIRED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_EXPIRED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString))
+ )
),
- //TARGET users
- TARGET_LINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TARGET_LINKED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("target-user")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString)))
+ TCGA_LINKED_INVALID_EXPIRE_DATE -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TCGA_LINKED_INVALID_EXPIRE_DATE)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-dev")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some("expiration-dates-cant-be-words!"))
+ )
),
- TARGET_LINKED_EXPIRED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TARGET_LINKED_EXPIRED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString)))
+ TCGA_UNLINKED -> baseProfile.++(Set(FireCloudKeyValue(Some("email"), Some(TCGA_UNLINKED)))),
+ // TARGET users
+ TARGET_LINKED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TARGET_LINKED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("target-user")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString))
+ )
),
- TARGET_UNLINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TARGET_UNLINKED)))
+ TARGET_LINKED_EXPIRED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TARGET_LINKED_EXPIRED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString))
+ )
),
- //TCGA and TARGET users
- TCGA_AND_TARGET_LINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_LINKED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-dev")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString)))
+ TARGET_UNLINKED -> baseProfile.++(Set(FireCloudKeyValue(Some("email"), Some(TARGET_UNLINKED)))),
+ // TCGA and TARGET users
+ TCGA_AND_TARGET_LINKED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_LINKED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-dev")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowPlus30Days.toString))
+ )
),
- TCGA_AND_TARGET_LINKED_EXPIRED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_LINKED_EXPIRED)),
- FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
- FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString)))
+ TCGA_AND_TARGET_LINKED_EXPIRED -> baseProfile.++(
+ Set(
+ FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_LINKED_EXPIRED)),
+ FireCloudKeyValue(Some("linkedNihUsername"), Some("firecloud-user2")),
+ FireCloudKeyValue(Some("linkExpireTime"), Some(DateUtils.nowMinus1Hour.toString))
+ )
),
- TCGA_AND_TARGET_UNLINKED -> baseProfile.++(Set(
- FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_UNLINKED)))
+ TCGA_AND_TARGET_UNLINKED -> baseProfile.++(Set(FireCloudKeyValue(Some("email"), Some(TCGA_AND_TARGET_UNLINKED)))),
+ // Anonymized google groups
+ HAVE_GOOGLE_GROUP -> baseProfile.++(
+ Set(FireCloudKeyValue(Some("anonymousGroup"), Some("existing-google-group@support.something.firecloud.org")))
),
- //Anonymized google groups
- HAVE_GOOGLE_GROUP -> baseProfile.++(Set(
- FireCloudKeyValue(Some("anonymousGroup"), Some("existing-google-group@support.something.firecloud.org")))
- ),
- HAVE_EMPTY_GOOGLE_GROUP -> baseProfile.++(Set(
- FireCloudKeyValue(Some("anonymousGroup"), Some("")))
- ),
- NO_CONTACT_EMAIL -> baseProfile.++(Set(
- FireCloudKeyValue(Some("contactEmail"), Some("")))
- )
+ HAVE_EMPTY_GOOGLE_GROUP -> baseProfile.++(Set(FireCloudKeyValue(Some("anonymousGroup"), Some("")))),
+ NO_CONTACT_EMAIL -> baseProfile.++(Set(FireCloudKeyValue(Some("contactEmail"), Some(""))))
)
-
override def getAllKVPs(forUserId: String, callerToken: WithAccessToken): Future[Option[ProfileWrapper]] = {
- val profileWrapper = try {
- Option(ProfileWrapper(forUserId, mockKeyValues(forUserId).toList))
- } catch {
- case e:NoSuchElementException => None
- }
+ val profileWrapper =
+ try
+ Option(ProfileWrapper(forUserId, mockKeyValues(forUserId).toList))
+ catch {
+ case e: NoSuchElementException => None
+ }
Future.successful(profileWrapper)
}
- override def saveProfile(userInfo: UserInfo, profile: BasicProfile): Future[Unit] = {
+ override def saveProfile(userInfo: UserInfo, profile: BasicProfile): Future[Unit] =
saveKeyValues(userInfo, profile.propertyValueMap).map(_ => ())
- }
override def saveKeyValues(userInfo: UserInfo, keyValues: Map[String, String]): Future[Try[Unit]] = {
val newKVsForUser = userInfo.id -> (mockKeyValues(userInfo.id).filter {
case FireCloudKeyValue(Some(key), _) => !keyValues.contains(key)
- case FireCloudKeyValue(_, _) => false
- } ++ keyValues.map { case (key, value) => FireCloudKeyValue(Option(key), Option(value))})
+ case FireCloudKeyValue(_, _) => false
+ } ++ keyValues.map { case (key, value) => FireCloudKeyValue(Option(key), Option(value)) })
mockKeyValues = mockKeyValues + newKVsForUser
Future.successful(Success(()))
}
-
- override def saveKeyValues(forUserId: String, callerToken: WithAccessToken, keyValues: Map[String, String]): Future[Try[Unit]] = {
- val newKVsForUser = (forUserId -> (mockKeyValues(forUserId) ++ keyValues.map { case (key, value) => FireCloudKeyValue(Option(key), Option(value))}))
+ override def saveKeyValues(forUserId: String,
+ callerToken: WithAccessToken,
+ keyValues: Map[String, String]
+ ): Future[Try[Unit]] = {
+ val newKVsForUser = forUserId -> (mockKeyValues(forUserId) ++ keyValues.map { case (key, value) =>
+ FireCloudKeyValue(Option(key), Option(value))
+ })
mockKeyValues = mockKeyValues + newKVsForUser
Future.successful(Success(()))
}
@@ -158,14 +172,15 @@ class MockThurloeDAO extends ThurloeDAO {
}
override def getAllUserValuesForKey(key: String): Future[Map[String, String]] = {
- val userValuesForKey = mockKeyValues.map{ case (userId, keyValues) =>
- userId -> keyValues.filter(_.key.equals(Option(key)))
- }.flatMap { case (userId, kvPair) =>
- if(kvPair.nonEmpty) {
- Some((userId -> kvPair.head.value.get))
+ val userValuesForKey = mockKeyValues
+ .map { case (userId, keyValues) =>
+ userId -> keyValues.filter(_.key.equals(Option(key)))
+ }
+ .flatMap { case (userId, kvPair) =>
+ if (kvPair.nonEmpty) {
+ Some(userId -> kvPair.head.value.get)
+ } else None
}
- else None
- }
Future.successful(userValuesForKey)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/AutoSuggestSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/AutoSuggestSpec.scala
index 6a911ba5b..e09341358 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/AutoSuggestSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/AutoSuggestSpec.scala
@@ -11,7 +11,12 @@ import spray.json.{JsObject, JsString}
import scala.concurrent.Await
import scala.concurrent.duration.{Duration, MINUTES}
-class AutoSuggestSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with LazyLogging with ElasticSearchDAOQuerySupport {
+class AutoSuggestSpec
+ extends AnyFreeSpec
+ with Matchers
+ with BeforeAndAfterAll
+ with LazyLogging
+ with ElasticSearchDAOQuerySupport {
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
@@ -23,9 +28,8 @@ class AutoSuggestSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll w
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
"Library integration" - {
"Elastic Search" - {
@@ -37,21 +41,23 @@ class AutoSuggestSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll w
"Text search autocomplete suggestions correctness" - {
val testCases: Seq[(String, Seq[String])] = Seq(
- ("brca open", Seq("TCGA_BRCA_OpenAccess")),
- ("brca o", Seq("TCGA_BRCA_OpenAccess")),
- ("BRCA_Cont", Seq("TCGA_BRCA_ControlledAccess")),
- ("glio", Seq("Glioblastoma multiforme")),
- ("thy", Seq("Thyroid carcinoma", "Thymoma", "TCGA_THYM_ControlledAccess")),
- ("test", Seq("testing123","test indication")),
- ("kidn", Seq("Kidney Chromophobe","Kidney Renal Clear Cell Carcinoma","Kidney Renal Papillary Cell Carcinoma")),
+ ("brca open", Seq("TCGA_BRCA_OpenAccess")),
+ ("brca o", Seq("TCGA_BRCA_OpenAccess")),
+ ("BRCA_Cont", Seq("TCGA_BRCA_ControlledAccess")),
+ ("glio", Seq("Glioblastoma multiforme")),
+ ("thy", Seq("Thyroid carcinoma", "Thymoma", "TCGA_THYM_ControlledAccess")),
+ ("test", Seq("testing123", "test indication")),
+ ("kidn",
+ Seq("Kidney Chromophobe", "Kidney Renal Clear Cell Carcinoma", "Kidney Renal Papillary Cell Carcinoma")
+ ),
("Mesothelioma", Seq("Mesothelioma")),
("encodingtest", Seq("ZZZ &foo")), // make sure the result is not encoded
- ("xyz", Seq.empty[String]),
- ("idney", Seq.empty[String]), // we only do leading-edge ngrams
- ("access", Seq.empty[String]) // we only do leading-edge ngrams
+ ("xyz", Seq.empty[String]),
+ ("idney", Seq.empty[String]), // we only do leading-edge ngrams
+ ("access", Seq.empty[String]) // we only do leading-edge ngrams
)
- testCases foreach { x:(String, Seq[String]) =>
+ testCases foreach { x: (String, Seq[String]) =>
val (crit, expected) = x
s"suggestions for '$crit'" in {
@@ -61,20 +67,20 @@ class AutoSuggestSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll w
}
// remove highlighting from the suggestion so we can match it easier
val suggestions = (searchResponse.results map {
- case jso:JsObject =>
+ case jso: JsObject =>
val flds = jso.fields
assert(flds.contains("suggestion"), "suggestion result should have a `suggestion` key")
val sugg = flds("suggestion")
sugg match {
- case js:JsString =>
+ case js: JsString =>
val suggStr = js.value
// if ES returns a highlight, ensure the highlight is valid for the source
val hlt = flds.get("highlight")
hlt match {
- case Some(js:JsString) =>
+ case Some(js: JsString) =>
assert(suggStr.contains(js.value), "if highlight exists, suggestion should contain highlight")
case None => // nothing to validate here
- case _ => fail("if highlight key exists, should be a JsString")
+ case _ => fail("if highlight key exists, should be a JsString")
}
suggStr
case _ => fail("suggestion key should be a JsString")
@@ -96,16 +102,16 @@ class AutoSuggestSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll w
"should not return duplicates" in {
// "Pancreatic adenocarcinoma" exists twice, but should be de-duplicated
val results = Await.result(searchDAO.suggestionsForFieldPopulate("library:datasetOwner", "pan"), dur)
- assertResult(Seq("Pancreatic adenocarcinoma")) { results }
+ assertResult(Seq("Pancreatic adenocarcinoma"))(results)
}
"should return multiple distinct suggestions" in {
val results = Await.result(searchDAO.suggestionsForFieldPopulate("library:datasetOwner", "thy"), dur)
- assertResult(Set("Thyroid carcinoma", "Thymoma")) { results.toSet }
+ assertResult(Set("Thyroid carcinoma", "Thymoma"))(results.toSet)
}
}
val dur = Duration(2, MINUTES)
- private def suggestionsFor(txt:String) = {
+ private def suggestionsFor(txt: String) = {
val criteria = emptyCriteria.copy(searchString = Some(txt))
Await.result(searchDAO.suggestionsFromAll(criteria, Seq.empty[String], Map.empty), dur)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/DataUseRestrictionSearchSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/DataUseRestrictionSearchSpec.scala
index c2d1dd97f..abb2b7404 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/DataUseRestrictionSearchSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/DataUseRestrictionSearchSpec.scala
@@ -16,7 +16,12 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.postfixOps
import spray.json.DefaultJsonProtocol._
-class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidation with BeforeAndAfterAll with Matchers with LibraryServiceSupport {
+class DataUseRestrictionSearchSpec
+ extends AnyFreeSpec
+ with SearchResultValidation
+ with BeforeAndAfterAll
+ with Matchers
+ with LibraryServiceSupport {
val datasets: Seq[WorkspaceDetails] = DataUseRestrictionTestFixtures.allDatasets
@@ -33,9 +38,8 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
logger.info("... fixtures indexed.")
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
searchDAO.deleteIndex()
- }
"Library Data Use Restriction Indexing" - {
@@ -112,7 +116,10 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
"RS-G:Female should be indexed as RS-G:true, RS-FM:true" in {
val searchResponse = searchFor("RSGFemale")
- assertDataUseRestrictions(searchResponse, DataUseRestriction(`RS-G` = true, `RS-FM` = true), Seq("RS-G", "RS-FM"))
+ assertDataUseRestrictions(searchResponse,
+ DataUseRestriction(`RS-G` = true, `RS-FM` = true),
+ Seq("RS-G", "RS-FM")
+ )
}
"RS-G:Male should be indexed as RS-G:true, RS-M:true" in {
@@ -127,7 +134,10 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
"RS-FM dataset should be indexed as true" in {
val searchResponse = searchFor("RSGFemale")
- assertDataUseRestrictions(searchResponse, DataUseRestriction(`RS-G` = true, `RS-FM` = true), Seq("RS-G", "RS-FM"))
+ assertDataUseRestrictions(searchResponse,
+ DataUseRestriction(`RS-G` = true, `RS-FM` = true),
+ Seq("RS-G", "RS-FM")
+ )
}
"RS-M dataset should be indexed as true" in {
@@ -137,7 +147,11 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
"DS:non-empty list dataset should have values" in {
val searchResponse = searchFor("DS-unique")
- assertDataUseRestrictions(searchResponse, DataUseRestriction(DS = DataUseRestrictionTestFixtures.diseaseValuesInts), DataUseRestrictionTestFixtures.diseaseValuesLabels.map("DS:" + _))
+ assertDataUseRestrictions(
+ searchResponse,
+ DataUseRestriction(DS = DataUseRestrictionTestFixtures.diseaseValuesInts),
+ DataUseRestrictionTestFixtures.diseaseValuesLabels.map("DS:" + _)
+ )
}
"IRB dataset should be indexed as true" in {
@@ -147,7 +161,8 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
"'EVERYTHING' dataset should have a mix of values" in {
val searchResponse = searchFor("EVERYTHING")
- assertDataUseRestrictions(searchResponse,
+ assertDataUseRestrictions(
+ searchResponse,
DataUseRestriction(
GRU = true,
HMB = true,
@@ -169,7 +184,8 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
"'TOP_THREE' dataset should have a mix of values" in {
val searchResponse = searchFor("TOP_THREE")
- assertDataUseRestrictions(searchResponse,
+ assertDataUseRestrictions(
+ searchResponse,
DataUseRestriction(
GRU = true,
HMB = true,
@@ -184,41 +200,38 @@ class DataUseRestrictionSearchSpec extends AnyFreeSpec with SearchResultValidati
}
-
//////////////////
// Utility methods
//////////////////
-
override def searchFor(text: String): LibrarySearchResponse = {
- val criteria = emptyCriteria.copy(
- searchString = Some(text),
- size = datasets.size)
+ val criteria = emptyCriteria.copy(searchString = Some(text), size = datasets.size)
Await.result(searchDAO.findDocuments(criteria, Seq.empty[String], Map.empty), dur)
}
- private def getDataUseRestrictions(searchResponse: LibrarySearchResponse): Seq[DataUseRestriction] = {
+ private def getDataUseRestrictions(searchResponse: LibrarySearchResponse): Seq[DataUseRestriction] =
searchResponse.results.map { hit =>
val sdur = hit.asJsObject.fields(AttributeName.toDelimitedName(structuredUseRestrictionAttributeName)).asJsObject
sdur.convertTo[DataUseRestriction]
}
- }
- private def getConsentCodes(searchResponse: LibrarySearchResponse): Seq[String] = {
+ private def getConsentCodes(searchResponse: LibrarySearchResponse): Seq[String] =
searchResponse.results.flatMap { hit =>
val jsObj = hit.asJsObject
if (jsObj.getFields(AttributeName.toDelimitedName(consentCodesAttributeName)).nonEmpty) {
jsObj.fields(AttributeName.toDelimitedName(consentCodesAttributeName)).convertTo[Seq[String]]
- } else { Seq.empty}
+ } else { Seq.empty }
}
- }
- private def assertDataUseRestrictions(searchResponse: LibrarySearchResponse, expected: DataUseRestriction, expectedCodes: Seq[String] = Seq.empty[String]): Unit = {
+ private def assertDataUseRestrictions(searchResponse: LibrarySearchResponse,
+ expected: DataUseRestriction,
+ expectedCodes: Seq[String] = Seq.empty[String]
+ ): Unit = {
searchResponse shouldNot be(null)
if (searchResponse.results.size != 1) {
logger.error(s"Size: ${searchResponse.results.size}")
- searchResponse.results.map { sr => logger.error(s"${sr.toString}")}
+ searchResponse.results.map(sr => logger.error(s"${sr.toString}"))
}
searchResponse.results.size should be(1)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ESIntegrationSupport.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ESIntegrationSupport.scala
index 97cecda1a..0dfbc5131 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ESIntegrationSupport.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ESIntegrationSupport.scala
@@ -24,11 +24,11 @@ object ESIntegrationSupport extends IntegrationTestConfig {
val timeStr = new SimpleDateFormat("yyyyMMdd't'HH:mm:ss").format(Calendar.getInstance.getTime)
val username = Try(System.getProperty("user.name")) match {
case Success(str) => str.toLowerCase
- case Failure(ex) => "unknownuser"
+ case Failure(ex) => "unknownuser"
}
val hostname = Try(java.net.InetAddress.getLocalHost.getHostName) match {
case Success(str) => str.toLowerCase
- case Failure(ex) => "unknownhostname"
+ case Failure(ex) => "unknownhostname"
}
Seq(tag, username, hostname, timeStr).mkString("_")
@@ -37,23 +37,21 @@ object ESIntegrationSupport extends IntegrationTestConfig {
// construct a client, using IntegrationTestConfig's server names (which should be the runtime server names)
lazy val client: TransportClient = ElasticUtils.buildClient(ITElasticSearch.servers, ITElasticSearch.clusterName)
- lazy val mockOntologyDAO:OntologyDAO = new MockOntologyDAO
- lazy val researchPurposeSupport:ResearchPurposeSupport = new ESResearchPurposeSupport(mockOntologyDAO)
+ lazy val mockOntologyDAO: OntologyDAO = new MockOntologyDAO
+ lazy val researchPurposeSupport: ResearchPurposeSupport = new ESResearchPurposeSupport(mockOntologyDAO)
- lazy val searchDAO:SearchDAO = {
+ lazy val searchDAO: SearchDAO =
// use the temporary index name defined above
new ElasticSearchDAO(client, itTestIndexName, researchPurposeSupport)
- }
- lazy val ontologyDAO:OntologyDAO = {
+ lazy val ontologyDAO: OntologyDAO =
// use the index name defined in reference.conf, since we execute read-only
new ElasticSearchOntologyDAO(client, FireCloudConfig.ElasticSearch.ontologyIndexName)
- }
- lazy val shareLogDAO:ShareLogDAO = {
+ lazy val shareLogDAO: ShareLogDAO =
new ElasticSearchShareLogDAO(client, itTestIndexName, RefreshPolicy.IMMEDIATE)
- }
- lazy val emptyCriteria = LibrarySearchParams(None,Map.empty[String,Seq[String]],None,Map.empty[String,Int],None,None,None,None)
+ lazy val emptyCriteria =
+ LibrarySearchParams(None, Map.empty[String, Seq[String]], None, Map.empty[String, Int], None, None, None, None)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ElasticSearchShareLogDAOSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ElasticSearchShareLogDAOSpec.scala
index 816946704..e5ed80414 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ElasticSearchShareLogDAOSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ElasticSearchShareLogDAOSpec.scala
@@ -19,10 +19,9 @@ class ElasticSearchShareLogDAOSpec extends AnyFreeSpec with Matchers with Before
}
}
- override def afterAll() = {
+ override def afterAll() =
// using the delete from search dao because we don't have recreate in sharelog dao
searchDAO.deleteIndex()
- }
private def scrubShares(in: Seq[Share]) = in.map(_.copy(timestamp = None))
@@ -34,7 +33,7 @@ class ElasticSearchShareLogDAOSpec extends AnyFreeSpec with Matchers with Before
val checkFake2 = shareLogDAO.getShares("fake2")
val check = checkFake1 ++ checkFake2
- assertResult(expected.size) { check.size }
+ assertResult(expected.size)(check.size)
scrubShares(check) should contain theSameElementsAs scrubShares(expected)
}
"should get shares of a specific type and none others" in {
@@ -44,7 +43,7 @@ class ElasticSearchShareLogDAOSpec extends AnyFreeSpec with Matchers with Before
.sortBy(_.sharee)
val check = shareLogDAO.getShares("fake1", Some(ShareType.GROUP)).sortBy(_.sharee)
- assertResult(expected.size) { check.size }
+ assertResult(expected.size)(check.size)
scrubShares(check) should contain theSameElementsAs scrubShares(expected)
}
}
@@ -53,7 +52,7 @@ class ElasticSearchShareLogDAOSpec extends AnyFreeSpec with Matchers with Before
val share = Share("roger", "syd@gmail.com", ShareType.WORKSPACE)
val loggedShare = shareLogDAO.logShare(share.userId, share.sharee, share.shareType)
val check = shareLogDAO.getShare(share)
- assertResult(loggedShare) { check }
+ assertResult(loggedShare)(check)
}
"should successfully log a record of a user sharing a workspace with the same user twice" in {
val loggedShare = shareLogDAO.logShare("fake4", "fake3@gmail.com", ShareType.WORKSPACE)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/FilterLimitsSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/FilterLimitsSpec.scala
index 30c6bcb8a..aa210bb2e 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/FilterLimitsSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/FilterLimitsSpec.scala
@@ -11,7 +11,12 @@ import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration.{Duration, MINUTES}
-class FilterLimitsSpec extends AnyFreeSpec with Matchers with SearchResultValidation with BeforeAndAfterAll with LazyLogging {
+class FilterLimitsSpec
+ extends AnyFreeSpec
+ with Matchers
+ with SearchResultValidation
+ with BeforeAndAfterAll
+ with LazyLogging {
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
@@ -23,17 +28,31 @@ class FilterLimitsSpec extends AnyFreeSpec with Matchers with SearchResultValida
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
"Library integration" - {
"search with 100000 filter criteria" - {
"returns 1 result without error " in {
- val wsMatchesMap = Map("testing123" -> UserPolicy(ResourceId("testing123"), false, AccessPolicyName(WorkspaceAccessLevels.Read.toString), Seq.empty.toSet, Seq.empty.toSet))
- val wsMap = 0.to(100000).map { num =>
- (num.toString -> UserPolicy(ResourceId(num.toString), false, AccessPolicyName(WorkspaceAccessLevels.Read.toString), Seq.empty.toSet, Seq.empty.toSet))
- }.toMap
+ val wsMatchesMap = Map(
+ "testing123" -> UserPolicy(ResourceId("testing123"),
+ false,
+ AccessPolicyName(WorkspaceAccessLevels.Read.toString),
+ Seq.empty.toSet,
+ Seq.empty.toSet
+ )
+ )
+ val wsMap = 0
+ .to(100000)
+ .map { num =>
+ num.toString -> UserPolicy(ResourceId(num.toString),
+ false,
+ AccessPolicyName(WorkspaceAccessLevels.Read.toString),
+ Seq.empty.toSet,
+ Seq.empty.toSet
+ )
+ }
+ .toMap
val searchResponse = searchWithFilter(wsMap ++ wsMatchesMap)
assertResult(wsMatchesMap.size) {
searchResponse.total
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/IntegrationTestFixtures.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/IntegrationTestFixtures.scala
index 880794533..a98113538 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/IntegrationTestFixtures.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/IntegrationTestFixtures.scala
@@ -8,7 +8,7 @@ import org.broadinstitute.dsde.rawls.model.{AttributeName, AttributeNumber, Attr
*/
object IntegrationTestFixtures {
- val datasetTuples:Seq[(String,String,Int)] = Seq(
+ val datasetTuples: Seq[(String, String, Int)] = Seq(
("TCGA_DLBC_ControlledAccess", "Lymphoid Neoplasm Diffuse Large B-cell Lymphoma", 111),
("TCGA_DLBC_OpenAccess", "Lymphoid Neoplasm Diffuse Large B-cell Lymphoma", 111),
("TCGA_GBM_OpenAccess", "Glioblastoma multiforme", 222),
@@ -30,7 +30,7 @@ object IntegrationTestFixtures {
("TCGA_THCA_ControlledAccess", "Thyroid carcinoma", 9999),
("TCGA_THYM_ControlledAccess", "Thymoma", 998),
("testing123", "test indication", 321),
- ("TCGA_ACC_ControlledAccess", "Adrenocortical carcinoma", 123),
+ ("TCGA_ACC_ControlledAccess", "Adrenocortical carcinoma", 123),
("TCGA_BRCA_ControlledAccess", "Breast Invasive Carcinoma", 2),
("TCGA_BRCA_OpenAccess", "Breast Invasive Carcinoma", 2),
("TCGA_CHOL_ControlledAccess", "Cholangiocarcinoma", 3),
@@ -39,26 +39,30 @@ object IntegrationTestFixtures {
("ZZZ &foo", "results won't be escaped", 1)
)
- val fixtureDocs:Seq[Document] = datasetTuples map {
- case (name, phenotype, count) =>
- Document(name, Map(
- AttributeName("library","datasetName") -> AttributeString(name),
- AttributeName("library","indication") -> AttributeString(phenotype),
- AttributeName("library","datasetOwner") -> AttributeString(phenotype),
- AttributeName("library","numSubjects") -> AttributeNumber(count)
- ))
+ val fixtureDocs: Seq[Document] = datasetTuples map { case (name, phenotype, count) =>
+ Document(
+ name,
+ Map(
+ AttributeName("library", "datasetName") -> AttributeString(name),
+ AttributeName("library", "indication") -> AttributeString(phenotype),
+ AttributeName("library", "datasetOwner") -> AttributeString(phenotype),
+ AttributeName("library", "numSubjects") -> AttributeNumber(count)
+ )
+ )
}
- val fixtureRestrictedDocs:Seq[Document] = datasetTuples map {
- case (name, phenotype, count) =>
- Document(name, Map(
- AttributeName("library","datasetName") -> AttributeString(name),
- AttributeName("library","indication") -> AttributeString(phenotype),
- AttributeName("library","datasetOwner") -> AttributeString(phenotype),
- AttributeName("library","numSubjects") -> AttributeNumber(count),
+ val fixtureRestrictedDocs: Seq[Document] = datasetTuples map { case (name, phenotype, count) =>
+ Document(
+ name,
+ Map(
+ AttributeName("library", "datasetName") -> AttributeString(name),
+ AttributeName("library", "indication") -> AttributeString(phenotype),
+ AttributeName("library", "datasetOwner") -> AttributeString(phenotype),
+ AttributeName("library", "numSubjects") -> AttributeNumber(count),
AttributeName.withDefaultNS("workspaceId") -> AttributeString(name),
AttributeName.withDefaultNS("_discoverableByGroups") -> AttributeValueList(Seq(AttributeString("no_one")))
- ))
+ )
+ )
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyAutocompleteSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyAutocompleteSpec.scala
index 07f3ba47c..9e2f3cd7c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyAutocompleteSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyAutocompleteSpec.scala
@@ -7,26 +7,25 @@ import org.scalatest.freespec.AnyFreeSpec
class OntologyAutocompleteSpec extends AnyFreeSpec {
-
"Ontology Autocompete" - {
"should match prefixes" in {
- val terms:List[TermResource] = ontologyDAO.autocomplete("bipo")
+ val terms: List[TermResource] = ontologyDAO.autocomplete("bipo")
val labels = terms.map(_.label)
// NB: yes, what's in the index is actually "ll", not "II"
- assertResult(Set("bipolar disorder", "bipolar I disorder", "bipolar ll disorder")) { labels.toSet }
+ assertResult(Set("bipolar disorder", "bipolar I disorder", "bipolar ll disorder"))(labels.toSet)
}
"should return empty list for unknown prefix" in {
- val terms:List[TermResource] = ontologyDAO.autocomplete("Mxyzptlk")
- assertResult(List.empty[TermResource]) { terms }
+ val terms: List[TermResource] = ontologyDAO.autocomplete("Mxyzptlk")
+ assertResult(List.empty[TermResource])(terms)
}
"should limit results to 20" in {
// search for a common prefix
- val terms:List[TermResource] = ontologyDAO.autocomplete("dis")
- assertResult(20) { terms.size }
+ val terms: List[TermResource] = ontologyDAO.autocomplete("dis")
+ assertResult(20)(terms.size)
}
"should search in both synonyms and labels" in {
// search for a common prefix
- val terms:List[TermResource] = ontologyDAO.autocomplete("leukoe")
+ val terms: List[TermResource] = ontologyDAO.autocomplete("leukoe")
val labels = terms.map(_.label)
val expected = Set(
// matches in label:
@@ -40,11 +39,9 @@ class OntologyAutocompleteSpec extends AnyFreeSpec {
"myelophthisic anemia",
"subacute sclerosing panencephalitis"
)
- assertResult(expected) { labels.toSet }
+ assertResult(expected)(labels.toSet)
}
}
-
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchSpec.scala
index 0da2b838c..efb763801 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchSpec.scala
@@ -7,7 +7,12 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
-class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with LazyLogging with SearchResultValidation {
+class OntologySearchSpec
+ extends AnyFreeSpec
+ with Matchers
+ with BeforeAndAfterAll
+ with LazyLogging
+ with SearchResultValidation {
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
@@ -19,9 +24,8 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
/*
OntologySearchTextFixtures has five datasets, with these ontology nodes:
@@ -44,9 +48,8 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"search for 'disease'" - {
"should find all datasets with an ontology node" in {
val searchResponse = searchFor("disease")
- assertResult(6) {searchResponse.total}
- assert(searchResponse.results.forall(js =>
- js.asJsObject.fields.contains("library:diseaseOntologyID")))
+ assertResult(6)(searchResponse.total)
+ assert(searchResponse.results.forall(js => js.asJsObject.fields.contains("library:diseaseOntologyID")))
}
}
"search for 'disease of mental health'" - {
@@ -54,9 +57,9 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
// leukemia has a parent of "disease of cellular proliferation". We won't match
// that text because 1) "of" is a stop word, and 2) we need to match 3<75% tokens
val searchResponse = searchFor("disease of mental health")
- assertResult(2) {searchResponse.total}
+ assertResult(2)(searchResponse.total)
validateResultNames(
- Set("CSA_9220","FASD_0050696"),
+ Set("CSA_9220", "FASD_0050696"),
searchResponse
)
}
@@ -64,7 +67,7 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"search for 'ebola fever'" - {
"should find a dataset tagged directly to ebola" in {
val searchResponse = searchFor("ebola fever")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("E_4325"),
searchResponse
@@ -74,7 +77,7 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"search for 'hematologic cancer'" - {
"should find datasets to hematologic cancer or its children" in {
val searchResponse = searchFor("hematologic cancer")
- assertResult(2) {searchResponse.total}
+ assertResult(2)(searchResponse.total)
validateResultNames(
Set("L_1240", "HC_2531"),
searchResponse
@@ -84,7 +87,7 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"search for 'leukemia'" - {
"should find datasets to leukemia but not its parents" in {
val searchResponse = searchFor("leukemia")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("L_1240"),
searchResponse
@@ -94,7 +97,7 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"searching against an ontology node that has multiple branches in its DAG" - {
"should match against the leaf node" in {
val searchResponse = searchFor("fetal alcohol spectrum disorder")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("FASD_0050696"),
searchResponse
@@ -102,13 +105,13 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
}
"should match against either branch" in {
val searchResponse = searchFor("physical disorder")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("FASD_0050696"),
searchResponse
)
val searchResponse2 = searchFor("specific developmental disorder")
- assertResult(1) {searchResponse2.total}
+ assertResult(1)(searchResponse2.total)
validateResultNames(
Set("FASD_0050696"),
searchResponse2
@@ -116,7 +119,7 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
}
"should match against parents above the branch" in {
val searchResponse = searchFor("developmental disorder of mental health")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("FASD_0050696"),
searchResponse
@@ -126,19 +129,19 @@ class OntologySearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAl
"searches that include parents" - {
"should match minimum of 3<75% terms" in {
val searchResponse = searchFor("disease cellular proliferation single origin coffee")
- assertResult(0) {searchResponse.total}
+ assertResult(0)(searchResponse.total)
}
"should not span multiple parent nodes" in {
val searchResponse = searchFor("hematologic immune organ proliferation")
- assertResult(0) {searchResponse.total}
+ assertResult(0)(searchResponse.total)
}
"should not span leaf and parents" in {
val searchResponse = searchFor("ebola virus disease")
- assertResult(0) {searchResponse.total}
+ assertResult(0)(searchResponse.total)
}
"should not match on parent descriptions (only labels)" in {
val searchResponse = searchFor("undergo pathological processes")
- assertResult(0) {searchResponse.total}
+ assertResult(0)(searchResponse.total)
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchTestFixtures.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchTestFixtures.scala
index 2888e9f53..45b23374f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchTestFixtures.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologySearchTestFixtures.scala
@@ -4,31 +4,51 @@ import org.broadinstitute.dsde.firecloud.dataaccess.MockOntologyDAO
import org.broadinstitute.dsde.firecloud.model.Document
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
import org.broadinstitute.dsde.rawls.model.{AttributeName, AttributeValueRawJson, _}
-import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.{impOntologyESTermParent, impOntologyTermParent, impOntologyTermResource}
+import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol.{
+ impOntologyESTermParent,
+ impOntologyTermParent,
+ impOntologyTermResource
+}
import spray.json._
import spray.json.DefaultJsonProtocol._
object OntologySearchTestFixtures {
- val datasetTuples:Seq[(String,String,Int,Option[String])] = Seq(
- ("CSA_9220", "Kombucha vaporware flexitarian chambray bespoke", 111, Some("http://purl.obolibrary.org/obo/DOID_9220")), // central sleep apnea
+ val datasetTuples: Seq[(String, String, Int, Option[String])] = Seq(
+ ("CSA_9220",
+ "Kombucha vaporware flexitarian chambray bespoke",
+ 111,
+ Some("http://purl.obolibrary.org/obo/DOID_9220")
+ ), // central sleep apnea
("E_4325", "Af iceland squid cold-pressed", 111, Some("http://purl.obolibrary.org/obo/DOID_4325")), // ebola
- ("L_1240", "3 wolf moon vape try-hard knausgaard", 222, Some("http://purl.obolibrary.org/obo/DOID_1240")), // leukemia
+ ("L_1240",
+ "3 wolf moon vape try-hard knausgaard",
+ 222,
+ Some("http://purl.obolibrary.org/obo/DOID_1240")
+ ), // leukemia
("HC_2531", "wolf freegan irony lomo", 222, Some("http://purl.obolibrary.org/obo/DOID_2531")), // hematologic cancer
- ("FASD_0050696", "gastropub tattooed hammock mustache", 333, Some("http://purl.obolibrary.org/obo/DOID_0050696")), // fetal alcohol spectrum disorder
- ("D_4", "Neutra selvage chicharrones, prism taxidermy cray squid", 222, Some("http://purl.obolibrary.org/obo/DOID_4")), // disease
+ ("FASD_0050696",
+ "gastropub tattooed hammock mustache",
+ 333,
+ Some("http://purl.obolibrary.org/obo/DOID_0050696")
+ ), // fetal alcohol spectrum disorder
+ ("D_4",
+ "Neutra selvage chicharrones, prism taxidermy cray squid",
+ 222,
+ Some("http://purl.obolibrary.org/obo/DOID_4")
+ ), // disease
("None", "Health of activated charcoal portland", 222, None) // no doid
)
val ontologyDAO = new MockOntologyDAO
- val fixtureDocs:Seq[Document] = datasetTuples map {x:(String,String,Int,Option[String]) =>
- val phenoAttrs:AttributeMap = x._4 match {
+ val fixtureDocs: Seq[Document] = datasetTuples map { x: (String, String, Int, Option[String]) =>
+ val phenoAttrs: AttributeMap = x._4 match {
case Some(doid) =>
val term = ontologyDAO.data(doid).head
val diseaseAttrs = Map(
- AttributeName("library","diseaseOntologyID") -> AttributeString(doid),
- AttributeName("library","diseaseOntologyLabel") -> AttributeString(term.label)
+ AttributeName("library", "diseaseOntologyID") -> AttributeString(doid),
+ AttributeName("library", "diseaseOntologyLabel") -> AttributeString(term.label)
)
val parentAttr = term.parents match {
case Some(parents) =>
@@ -40,11 +60,14 @@ object OntologySearchTestFixtures {
case _ => Map.empty[AttributeName, Attribute]
}
- Document(x._1, Map(
- AttributeName("library","datasetName") -> AttributeString(x._1),
- AttributeName("library","indication") -> AttributeString(x._2),
- AttributeName("library","numSubjects") -> AttributeNumber(x._3)
- ) ++ phenoAttrs)
+ Document(
+ x._1,
+ Map(
+ AttributeName("library", "datasetName") -> AttributeString(x._1),
+ AttributeName("library", "indication") -> AttributeString(x._2),
+ AttributeName("library", "numSubjects") -> AttributeNumber(x._3)
+ ) ++ phenoAttrs
+ )
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyTermResourceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyTermResourceSpec.scala
index cc9616023..9f7f589d9 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyTermResourceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/OntologyTermResourceSpec.scala
@@ -14,34 +14,34 @@ class OntologyTermResourceSpec extends AnyFreeSpec {
"should find central sleep apnea" in {
val id = "http://purl.obolibrary.org/obo/DOID_9220"
val terms = ontologyDAO.search(id)
- assertResult(getExpected(id)) { Some(terms) }
+ assertResult(getExpected(id))(Some(terms))
}
"should find sleep apnea" in {
val id = "http://purl.obolibrary.org/obo/DOID_535"
val terms = ontologyDAO.search(id)
- assertResult(getExpected(id)) { Some(terms) }
+ assertResult(getExpected(id))(Some(terms))
}
"should find ebola" in {
val id = "http://purl.obolibrary.org/obo/DOID_4325"
val terms = ontologyDAO.search(id)
- assertResult(getExpected(id)) { Some(terms) }
+ assertResult(getExpected(id))(Some(terms))
}
}
// the term search returns a slimmer version of parents than are stored
// in our mock data; strip out the mock data.
- private def getExpected(id: String) = {
+ private def getExpected(id: String) =
mockdata.get(id) map { terms =>
terms.map { term =>
- val slimParents:Option[List[TermParent]] = term.parents match {
+ val slimParents: Option[List[TermParent]] = term.parents match {
case None => None
- case Some(ps) => Some(ps.map { p =>
- p.copy(definition = None, synonyms = None)
- })
+ case Some(ps) =>
+ Some(ps.map { p =>
+ p.copy(definition = None, synonyms = None)
+ })
}
term.copy(parents = slimParents)
}
}
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchSpec.scala
index 6d3fe7504..be7602a63 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchSpec.scala
@@ -8,7 +8,12 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
-class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation with Matchers with BeforeAndAfterAll with LazyLogging {
+class ResearchPurposeSearchSpec
+ extends AnyFreeSpec
+ with SearchResultValidation
+ with Matchers
+ with BeforeAndAfterAll
+ with LazyLogging {
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
@@ -20,9 +25,8 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
// we don't have to test any use cases of the user omitting a research purpose - all the
// other tests do that.
@@ -147,8 +151,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
"Disease focused research (DS)" - {
"should return any dataset where the disease matches exactly" - {
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
- val expected = Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "eighteen") // GRU, HMB, and sleep disorder
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val expected =
+ Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "eighteen") // GRU, HMB, and sleep disorder
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(expected, searchResponse)
@@ -159,8 +165,19 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should return any dataset where the RP's disease is a child of the dataset's disease" - {
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
- val expected = Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "seventeen", "eighteen", "twenty") // GRU, HMB, central sleep apnea, and sleep disorder
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
+ val expected = Set("one",
+ "two",
+ "six",
+ "seven",
+ "eleven",
+ "twelve",
+ "sixteen",
+ "seventeen",
+ "eighteen",
+ "twenty"
+ ) // GRU, HMB, central sleep apnea, and sleep disorder
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(expected, searchResponse)
@@ -172,7 +189,8 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
"should return any GRU or HMB dataset" - {
// disease search for leukemia, which is not in our test fixtures
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_1240")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_1240")))
val expected = Set("one", "two", "six", "seven", "eleven", "twelve")
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
@@ -184,7 +202,8 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should intersect with a standard facet filter" in {
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
val filter = Map("library:projectName" -> Seq("beryllium"))
val searchResponse = searchWithPurpose(researchPurpose, filter)
validateResultNames(
@@ -193,7 +212,8 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should intersect with a text search" in {
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
val searchResponse = searchWithPurpose(researchPurpose, "lazy")
validateResultNames(
Set("eleven", "twelve"),
@@ -201,11 +221,20 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should affect search suggestions" in {
- val researchPurpose = ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
val searchResponse = suggestWithPurpose(researchPurpose, "anti")
validateSuggestions(
// Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "eighteen"), // GRU, HMB, and sleep disorder
- Set("antiaging", "antialias", "antibody", "antic", "anticoagulant", "anticorruption", "antiegalitarian", "antielitism"),
+ Set("antiaging",
+ "antialias",
+ "antibody",
+ "antic",
+ "anticoagulant",
+ "anticorruption",
+ "antiegalitarian",
+ "antielitism"
+ ),
searchResponse
)
}
@@ -213,8 +242,19 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
"Methods development/Validation study (NMDS)" - {
"should return any dataset where NMDS is true and the disease matches exactly" - {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
- val expected = Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "eighteen") // NMDS=false or (NMDS=true and disease-match logic)
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
+ val expected = Set("one",
+ "two",
+ "six",
+ "seven",
+ "eleven",
+ "twelve",
+ "sixteen",
+ "eighteen"
+ ) // NMDS=false or (NMDS=true and disease-match logic)
// NB: this doesn't match fixture "twenty" because even though the NMDS clauses are satisfied, the DS
// clause is not. In other words, if you made this search without specifying NMDS=true, you wouldn't
// match on "twenty".
@@ -228,8 +268,12 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should return any dataset where NMDS is true and the RP's disease is a child of the dataset's disease" - {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
- val expected = Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "seventeen", "eighteen", "twenty")
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220"))
+ )
+ val expected =
+ Set("one", "two", "six", "seven", "eleven", "twelve", "sixteen", "seventeen", "eighteen", "twenty")
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(expected, searchResponse)
@@ -252,7 +296,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should intersect with a standard facet filter" in {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
val filter = Map("library:projectName" -> Seq("beryllium"))
val searchResponse = searchWithPurpose(researchPurpose, filter)
validateResultNames(
@@ -261,7 +308,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should intersect with a text search" in {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220"))
+ )
val searchResponse = searchWithPurpose(researchPurpose, "lazy")
validateResultNames(
Set("eleven", "twelve"),
@@ -269,10 +319,23 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should affect search suggestions" in {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220"))
+ )
val searchResponse = suggestWithPurpose(researchPurpose, "anti")
validateSuggestions(
- Set("antiaging", "antialias", "antibody", "antic", "anticoagulant", "anticorruption", "antiegalitarian", "antielectron", "antielitism", "antifashion"),
+ Set("antiaging",
+ "antialias",
+ "antibody",
+ "antic",
+ "anticoagulant",
+ "anticorruption",
+ "antiegalitarian",
+ "antielectron",
+ "antielitism",
+ "antifashion"
+ ),
searchResponse
)
}
@@ -280,7 +343,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
"Control set (NCTRL)" - {
"should return any dataset where the disease matches exactly" - {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL = true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
val expected = Set("two", "six", "twelve", "sixteen", "eighteen")
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
@@ -292,7 +358,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should return any dataset where the RP's disease is a child of the dataset's disease" - {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL = true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9220"))
+ )
val expected = Set("two", "six", "twelve", "sixteen", "seventeen", "eighteen", "twenty")
"monolithic search" in {
val searchResponse = searchWithPurpose(researchPurpose)
@@ -316,7 +385,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
"should intersect with a standard facet filter" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL = true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
val filter = Map("library:projectName" -> Seq("beryllium"))
val searchResponse = searchWithPurpose(researchPurpose, filter)
validateResultNames(
@@ -325,7 +397,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should intersect with a text search" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL = true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
val searchResponse = searchWithPurpose(researchPurpose, "lazy")
validateResultNames(
Set("twelve"),
@@ -333,7 +408,10 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
)
}
"should affect search suggestions" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL = true, DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_535"))
+ )
val searchResponse = suggestWithPurpose(researchPurpose, "anti")
validateSuggestions(
Set("antialias", "antibody", "anticorruption", "antiegalitarian", "antielitism"),
@@ -342,7 +420,6 @@ class ResearchPurposeSearchSpec extends AnyFreeSpec with SearchResultValidation
}
}
-
"Research purpose with multiple restrictions enabled" - {
"should intersect each restriction" - {
val researchPurpose = ResearchPurpose.default.copy(NAGR = true, NCU = true)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchTestFixtures.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchTestFixtures.scala
index 2f4923088..06a8f954f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchTestFixtures.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchTestFixtures.scala
@@ -15,30 +15,67 @@ object ResearchPurposeSearchTestFixtures extends DataUseRestrictionSupport {
// indication: used to test suggestions
// datasetCustodian: used to test text search
// restriction: used to test research purpose matching
- val datasetTuples:Seq[(String,String,String,String,DataUseRestriction)] = Seq(
- ("one", "hydrogen", "antiaging", "the quick brown fox", DataUseRestriction(GRU=true, NCU=true, NCTRL = true)),
- ("two", "hydrogen", "antialias", "the quick brown fox", DataUseRestriction(HMB=true, NMDS=true)),
- ("three", "hydrogen", "antianxiety", "the quick brown fox", DataUseRestriction(NCU=true, NMDS=true, NCTRL = true)),
- ("four", "hydrogen", "antibacterial", "the quick brown fox", DataUseRestriction(NAGR=true, NMDS=true, NCTRL = true)),
- ("five", "hydrogen", "antibiotic", "the quick brown fox", DataUseRestriction(NPU=true, NMDS=true)),
-
- ("six", "helium", "antibody", "jumped over the", DataUseRestriction(GRU=true, NCU=true)),
- ("seven", "helium", "antic", "jumped over the", DataUseRestriction(HMB=true, NMDS=true, NCTRL = true)),
- ("eight", "helium", "anticavity", "jumped over the", DataUseRestriction(NCU=true, NMDS=true, NCTRL = true)),
- ("nine", "helium", "anticipate", "jumped over the", DataUseRestriction(NAGR=true, NMDS=true, NCTRL = true)),
- ("ten", "helium", "anticlimactic", "jumped over the", DataUseRestriction(NPU=true, NMDS=true)),
-
- ("eleven", "lithium", "anticoagulant", "lazy dog", DataUseRestriction(GRU=true, NCU=true, NCTRL = true)),
- ("twelve", "lithium", "anticorruption", "lazy dog", DataUseRestriction(HMB=true, NMDS=true)),
- ("thirteen", "lithium", "antidepressant", "lazy dog", DataUseRestriction(NCU=true, NMDS=true, NCTRL = true)),
- ("fourteen", "lithium", "antidisestablishmentarianism", "lazy dog", DataUseRestriction(NAGR=true, NMDS=true, NCTRL = true)),
- ("fifteen", "lithium", "antidote", "lazy dog", DataUseRestriction(NPU=true, NMDS=true)),
-
- ("sixteen", "beryllium", "antiegalitarian", "sphinx of", DataUseRestriction(NCU=true, DS=Seq(535))), // sleep disorder, parent of central sleep apnea
- ("seventeen", "beryllium", "antielectron", "sphinx of", DataUseRestriction(NCU=true, NMDS=true, DS=Seq(9220))), // central sleep apnea
- ("eighteen", "beryllium", "antielitism", "sphinx of", DataUseRestriction(NCU=true, NMDS=true, DS=Seq(535,4325))), // sleep disorder and Ebola hemorrhagic fever
- ("nineteen", "beryllium", "antiepileptic", "sphinx of", DataUseRestriction(NCU=true, NMDS=true, DS=Seq(4325))), // Ebola hemorrhagic fever
- ("twenty", "beryllium", "antifashion", "sphinx of", DataUseRestriction(NCU=true, DS=Seq(9220,4325))) // central sleep apnea and Ebola hemorrhagic fever
+ val datasetTuples: Seq[(String, String, String, String, DataUseRestriction)] = Seq(
+ ("one", "hydrogen", "antiaging", "the quick brown fox", DataUseRestriction(GRU = true, NCU = true, NCTRL = true)),
+ ("two", "hydrogen", "antialias", "the quick brown fox", DataUseRestriction(HMB = true, NMDS = true)),
+ ("three",
+ "hydrogen",
+ "antianxiety",
+ "the quick brown fox",
+ DataUseRestriction(NCU = true, NMDS = true, NCTRL = true)
+ ),
+ ("four",
+ "hydrogen",
+ "antibacterial",
+ "the quick brown fox",
+ DataUseRestriction(NAGR = true, NMDS = true, NCTRL = true)
+ ),
+ ("five", "hydrogen", "antibiotic", "the quick brown fox", DataUseRestriction(NPU = true, NMDS = true)),
+ ("six", "helium", "antibody", "jumped over the", DataUseRestriction(GRU = true, NCU = true)),
+ ("seven", "helium", "antic", "jumped over the", DataUseRestriction(HMB = true, NMDS = true, NCTRL = true)),
+ ("eight", "helium", "anticavity", "jumped over the", DataUseRestriction(NCU = true, NMDS = true, NCTRL = true)),
+ ("nine", "helium", "anticipate", "jumped over the", DataUseRestriction(NAGR = true, NMDS = true, NCTRL = true)),
+ ("ten", "helium", "anticlimactic", "jumped over the", DataUseRestriction(NPU = true, NMDS = true)),
+ ("eleven", "lithium", "anticoagulant", "lazy dog", DataUseRestriction(GRU = true, NCU = true, NCTRL = true)),
+ ("twelve", "lithium", "anticorruption", "lazy dog", DataUseRestriction(HMB = true, NMDS = true)),
+ ("thirteen", "lithium", "antidepressant", "lazy dog", DataUseRestriction(NCU = true, NMDS = true, NCTRL = true)),
+ ("fourteen",
+ "lithium",
+ "antidisestablishmentarianism",
+ "lazy dog",
+ DataUseRestriction(NAGR = true, NMDS = true, NCTRL = true)
+ ),
+ ("fifteen", "lithium", "antidote", "lazy dog", DataUseRestriction(NPU = true, NMDS = true)),
+ ("sixteen",
+ "beryllium",
+ "antiegalitarian",
+ "sphinx of",
+ DataUseRestriction(NCU = true, DS = Seq(535))
+ ), // sleep disorder, parent of central sleep apnea
+ ("seventeen",
+ "beryllium",
+ "antielectron",
+ "sphinx of",
+ DataUseRestriction(NCU = true, NMDS = true, DS = Seq(9220))
+ ), // central sleep apnea
+ ("eighteen",
+ "beryllium",
+ "antielitism",
+ "sphinx of",
+ DataUseRestriction(NCU = true, NMDS = true, DS = Seq(535, 4325))
+ ), // sleep disorder and Ebola hemorrhagic fever
+ ("nineteen",
+ "beryllium",
+ "antiepileptic",
+ "sphinx of",
+ DataUseRestriction(NCU = true, NMDS = true, DS = Seq(4325))
+ ), // Ebola hemorrhagic fever
+ ("twenty",
+ "beryllium",
+ "antifashion",
+ "sphinx of",
+ DataUseRestriction(NCU = true, DS = Seq(9220, 4325))
+ ) // central sleep apnea and Ebola hemorrhagic fever
)
/*
@@ -48,14 +85,17 @@ object ResearchPurposeSearchTestFixtures extends DataUseRestrictionSupport {
Jinxed wizards pluck ivy from the big quilt
*/
- val fixtureDocs:Seq[Document] = datasetTuples map {x:(String,String,String,String,DataUseRestriction) =>
- Document(x._1, Map(
- AttributeName("library","datasetName") -> AttributeString(x._1),
- AttributeName("library","projectName") -> AttributeString(x._2),
- AttributeName("library","indication") -> AttributeString(x._3),
- AttributeName("library","datasetCustodian") -> AttributeString(x._4),
- structuredUseRestrictionAttributeName -> AttributeValueRawJson(x._5.toJson)
- ))
+ val fixtureDocs: Seq[Document] = datasetTuples map { x: (String, String, String, String, DataUseRestriction) =>
+ Document(
+ x._1,
+ Map(
+ AttributeName("library", "datasetName") -> AttributeString(x._1),
+ AttributeName("library", "projectName") -> AttributeString(x._2),
+ AttributeName("library", "indication") -> AttributeString(x._3),
+ AttributeName("library", "datasetCustodian") -> AttributeString(x._4),
+ structuredUseRestrictionAttributeName -> AttributeValueRawJson(x._5.toJson)
+ )
+ )
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCaseFixtures.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCaseFixtures.scala
index 59ee11197..b118440b0 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCaseFixtures.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCaseFixtures.scala
@@ -15,43 +15,44 @@ object ResearchPurposeSearchUseCaseFixtures extends DataUseRestrictionSupport {
// dataset name: used to validate results, reflects the row of the above spreadsheet
// project name: used to make this code readable
// restriction: used to test research purpose matching
- val datasetTuples:Seq[(Int,String,DataUseRestriction)] = Seq(
- (7, "GRU", DataUseRestriction(GRU=true)),
- (8, "HMB", DataUseRestriction(HMB=true)),
- (9, "DS-CANCER (DOID:162)", DataUseRestriction(DS=Seq(162))),
- (10, "DS-BREAST_CANCER", DataUseRestriction(DS=Seq(1612))),
- (11, "DS-DIABETES, NPU", DataUseRestriction(DS=Seq(9351), NPU=true)),
- (12, "DS-CANCER (DOID:162), NMDS", DataUseRestriction(DS=Seq(162), NMDS=true)),
- (13, "GRU, NMDS", DataUseRestriction(GRU=true, NMDS=true)),
- (14, "HMB, NMDS", DataUseRestriction(HMB=true, NMDS=true)),
- (15, "GRU, NPU", DataUseRestriction(GRU=true, NPU=true)),
- (16, "HMB, NPU", DataUseRestriction(HMB=true, NPU=true)),
- (17, "MESA", DataUseRestriction(HMB=true, NPU=true)),
- (18, "WHI", DataUseRestriction(HMB=true, NPU=true, IRB=true)),
- (19, "GeneSTAR", DataUseRestriction(DS=Seq(1287), IRB=true, NMDS=true, NPU=true)),
- (20, "Diabetes Heart HMB", DataUseRestriction(HMB=true)),
- (21, "Diabetes Heart DS", DataUseRestriction(DS=Seq(1287))),
- (22, "GENOA", DataUseRestriction(DS=Seq(2349), NPU=true)),
- (23, "COPDGENE", DataUseRestriction(HMB=true)),
- (24, "EO-COPD", DataUseRestriction(DS=Seq(3083))),
- (25, "Mitchell Amish", DataUseRestriction(HMB=true, IRB=true, NMDS=true)),
- (26, "FHS", DataUseRestriction(HMB=true, IRB=true, NPU=true, NMDS=true)),
- (27, "MGH HMB", DataUseRestriction(HMB=true, IRB=true)),
- (28, "MGH DS", DataUseRestriction(DS=Seq(60224), IRB=true)),
- (29, "VU - Dawood", DataUseRestriction(GRU=true, IRB=true)),
- (30, "VU - Ben", DataUseRestriction(HMB=true, IRB=true)),
- (31, "HVH HMB", DataUseRestriction(HMB=true, IRB=true, NMDS=true)),
- (32, "HVH DS", DataUseRestriction(DS=Seq(1287), IRB=true, NMDS=true))
+ val datasetTuples: Seq[(Int, String, DataUseRestriction)] = Seq(
+ (7, "GRU", DataUseRestriction(GRU = true)),
+ (8, "HMB", DataUseRestriction(HMB = true)),
+ (9, "DS-CANCER (DOID:162)", DataUseRestriction(DS = Seq(162))),
+ (10, "DS-BREAST_CANCER", DataUseRestriction(DS = Seq(1612))),
+ (11, "DS-DIABETES, NPU", DataUseRestriction(DS = Seq(9351), NPU = true)),
+ (12, "DS-CANCER (DOID:162), NMDS", DataUseRestriction(DS = Seq(162), NMDS = true)),
+ (13, "GRU, NMDS", DataUseRestriction(GRU = true, NMDS = true)),
+ (14, "HMB, NMDS", DataUseRestriction(HMB = true, NMDS = true)),
+ (15, "GRU, NPU", DataUseRestriction(GRU = true, NPU = true)),
+ (16, "HMB, NPU", DataUseRestriction(HMB = true, NPU = true)),
+ (17, "MESA", DataUseRestriction(HMB = true, NPU = true)),
+ (18, "WHI", DataUseRestriction(HMB = true, NPU = true, IRB = true)),
+ (19, "GeneSTAR", DataUseRestriction(DS = Seq(1287), IRB = true, NMDS = true, NPU = true)),
+ (20, "Diabetes Heart HMB", DataUseRestriction(HMB = true)),
+ (21, "Diabetes Heart DS", DataUseRestriction(DS = Seq(1287))),
+ (22, "GENOA", DataUseRestriction(DS = Seq(2349), NPU = true)),
+ (23, "COPDGENE", DataUseRestriction(HMB = true)),
+ (24, "EO-COPD", DataUseRestriction(DS = Seq(3083))),
+ (25, "Mitchell Amish", DataUseRestriction(HMB = true, IRB = true, NMDS = true)),
+ (26, "FHS", DataUseRestriction(HMB = true, IRB = true, NPU = true, NMDS = true)),
+ (27, "MGH HMB", DataUseRestriction(HMB = true, IRB = true)),
+ (28, "MGH DS", DataUseRestriction(DS = Seq(60224), IRB = true)),
+ (29, "VU - Dawood", DataUseRestriction(GRU = true, IRB = true)),
+ (30, "VU - Ben", DataUseRestriction(HMB = true, IRB = true)),
+ (31, "HVH HMB", DataUseRestriction(HMB = true, IRB = true, NMDS = true)),
+ (32, "HVH DS", DataUseRestriction(DS = Seq(1287), IRB = true, NMDS = true))
)
- val fixtureDocs:Seq[Document] = datasetTuples map {x:(Int,String,DataUseRestriction) =>
- Document(x._1.toString, Map(
- AttributeName("library","datasetName") -> AttributeString(x._1.toString),
- AttributeName("library","projectName") -> AttributeString(x._2),
- structuredUseRestrictionAttributeName -> AttributeValueRawJson(x._3.toJson)
- ))
+ val fixtureDocs: Seq[Document] = datasetTuples map { x: (Int, String, DataUseRestriction) =>
+ Document(
+ x._1.toString,
+ Map(
+ AttributeName("library", "datasetName") -> AttributeString(x._1.toString),
+ AttributeName("library", "projectName") -> AttributeString(x._2),
+ structuredUseRestrictionAttributeName -> AttributeValueRawJson(x._3.toJson)
+ )
+ )
}
-
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCasesSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCasesSpec.scala
index e1f091011..e7e66fa6f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCasesSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ResearchPurposeSearchUseCasesSpec.scala
@@ -9,7 +9,12 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
-class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultValidation with Matchers with BeforeAndAfterAll with LazyLogging {
+class ResearchPurposeSearchUseCasesSpec
+ extends AnyFreeSpec
+ with SearchResultValidation
+ with Matchers
+ with BeforeAndAfterAll
+ with LazyLogging {
// cases as defined in the doc at
// https://docs.google.com/a/broadinstitute.org/spreadsheets/d/16XzKpOFCyqRTNy9XHFFPx-Vf4PWFydsWAS7exzx26WM/edit?usp=sharing
@@ -24,9 +29,8 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
"Library research purpose PO use cases" - {
@@ -38,7 +42,8 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose C: Cancer (DOID:162)" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(DS=Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162")))
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 20, 23, 25, 26, 27, 29, 30, 31),
@@ -49,7 +54,10 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose D: Cancer, Methods" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NMDS=true, DS=Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NMDS = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162"))
+ )
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 20, 23, 25, 26, 27, 29, 30, 31),
@@ -60,7 +68,7 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose E: Controls" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL=true)
+ val researchPurpose = ResearchPurpose.default.copy(NCTRL = true)
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 13, 14, 15, 16, 17, 18, 20, 23, 25, 26, 27, 29, 30, 31),
@@ -71,7 +79,10 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose F: Cancer, controls" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL=true, DS=Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_162"))
+ )
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 20, 23, 25, 26, 27, 29, 30, 31),
@@ -82,7 +93,10 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose G: Diabetes, controls" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NCTRL=true, DS=Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9351")))
+ val researchPurpose =
+ ResearchPurpose.default.copy(NCTRL = true,
+ DS = Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_9351"))
+ )
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 11, 13, 14, 15, 16, 17, 18, 20, 23, 25, 26, 27, 29, 30, 31),
@@ -93,7 +107,7 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose H: Commercial use" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NCU=true)
+ val researchPurpose = ResearchPurpose.default.copy(NCU = true)
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 9, 10, 12, 13, 14, 20, 21, 23, 24, 25, 27, 28, 29, 30, 31, 32),
@@ -104,7 +118,7 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
"Research purpose I: methods, commercial use" - {
"should return PO-defined results" in {
- val researchPurpose = ResearchPurpose.default.copy(NCU=true, NMDS=true)
+ val researchPurpose = ResearchPurpose.default.copy(NCU = true, NMDS = true)
val searchResponse = searchWithPurpose(researchPurpose)
validateResultNames(
Set(7, 8, 9, 10, 20, 21, 23, 24, 27, 28, 29, 30),
@@ -115,11 +129,9 @@ class ResearchPurposeSearchUseCasesSpec extends AnyFreeSpec with SearchResultVal
}
- private def validateResultNames(expectedNames:Set[Int], response:LibrarySearchResponse): Unit = {
+ private def validateResultNames(expectedNames: Set[Int], response: LibrarySearchResponse): Unit = {
val stringNames = expectedNames.map(_.toString)
super.validateResultNames(stringNames, response)
}
-
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SearchResultValidation.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SearchResultValidation.scala
index cb3e5e3e1..3c7180a41 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SearchResultValidation.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SearchResultValidation.scala
@@ -18,19 +18,22 @@ trait SearchResultValidation {
val dur = Duration(2, MINUTES)
- def searchFor(txt:String) = {
+ def searchFor(txt: String) = {
val criteria = emptyCriteria.copy(searchString = Some(txt))
Await.result(searchDAO.findDocuments(criteria, Seq.empty[String], Map.empty), dur)
}
- def searchWithPurpose(researchPurpose: Option[ResearchPurpose], term:Option[String], filters:Option[Map[String, Seq[String]]]): LibrarySearchResponse = {
+ def searchWithPurpose(researchPurpose: Option[ResearchPurpose],
+ term: Option[String],
+ filters: Option[Map[String, Seq[String]]]
+ ): LibrarySearchResponse = {
val criteria = emptyCriteria.copy(
searchString = term,
researchPurpose = researchPurpose,
filters = filters.getOrElse(Map.empty[String, Seq[String]])
)
// set size to 100 to make sure we return all results for testing comparisons
- Await.result(searchDAO.findDocuments(criteria.copy(size=100), Seq.empty[String], Map.empty), dur)
+ Await.result(searchDAO.findDocuments(criteria.copy(size = 100), Seq.empty[String], Map.empty), dur)
}
def searchWithPurpose(researchPurpose: ResearchPurpose): LibrarySearchResponse =
@@ -43,11 +46,9 @@ trait SearchResultValidation {
searchWithPurpose(Some(researchPurpose), None, Some(filters))
def suggestWithPurpose(researchPurpose: ResearchPurpose, term: String) = {
- val criteria = emptyCriteria.copy(
- searchString = Some(term),
- researchPurpose = Some(researchPurpose))
+ val criteria = emptyCriteria.copy(searchString = Some(term), researchPurpose = Some(researchPurpose))
// set size to 100 to make sure we return all results for testing comparisons
- Await.result(searchDAO.suggestionsFromAll(criteria.copy(size=100), Seq.empty[String], Map.empty), dur)
+ Await.result(searchDAO.suggestionsFromAll(criteria.copy(size = 100), Seq.empty[String], Map.empty), dur)
}
/**
@@ -57,7 +58,8 @@ trait SearchResultValidation {
* would.
*/
def searchWithResearchPurposeQuery(researchPurpose: ResearchPurpose): SearchResponse = {
- val boolQuery: BoolQueryBuilder = researchPurposeSupport.researchPurposeFilters(researchPurpose, name => "library:" + name)
+ val boolQuery: BoolQueryBuilder =
+ researchPurposeSupport.researchPurposeFilters(researchPurpose, name => "library:" + name)
// Use a MockResearchPurposeSupport here to prove that it's using the query created above
val elasticSearchDAO = new ElasticSearchDAO(client, itTestIndexName, new MockResearchPurposeSupport)
@@ -65,25 +67,21 @@ trait SearchResultValidation {
elasticSearchDAO.executeESRequest[SearchRequest, SearchResponse, SearchRequestBuilder](searchRequest)
}
- def searchWithFilter(workspacePolicyMap: Map[String, UserPolicy]) = {
+ def searchWithFilter(workspacePolicyMap: Map[String, UserPolicy]) =
Await.result(searchDAO.findDocuments(emptyCriteria, Seq.empty[String], workspacePolicyMap), dur)
- }
- def validateResultNames(expectedNames:Set[String], response:LibrarySearchResponse) = {
+ def validateResultNames(expectedNames: Set[String], response: LibrarySearchResponse) =
validateResultField("library:datasetName", expectedNames, response)
- }
- def validateResultIndications(expectedIndications:Set[String], response:LibrarySearchResponse) = {
+ def validateResultIndications(expectedIndications: Set[String], response: LibrarySearchResponse) =
validateResultField("library:indication", expectedIndications, response)
- }
- def validateSuggestions(expectedSuggestions:Set[String], response:LibrarySearchResponse) = {
+ def validateSuggestions(expectedSuggestions: Set[String], response: LibrarySearchResponse) =
validateResultField("suggestion", expectedSuggestions, response)
- }
- def validateResultField(attrName:String, expectedValues:Set[String], response:LibrarySearchResponse) = {
- val actualValues:Set[String] = getResultField(attrName, response)
- assertResult(expectedValues) {actualValues}
+ def validateResultField(attrName: String, expectedValues: Set[String], response: LibrarySearchResponse) = {
+ val actualValues: Set[String] = getResultField(attrName, response)
+ assertResult(expectedValues)(actualValues)
}
def validateResultNames(expectedNames: Set[String], response: SearchResponse): Unit = {
@@ -91,16 +89,14 @@ trait SearchResultValidation {
_.getSourceAsString.parseJson
}
val names = getResultField("library:datasetName", results)
- assertResult(expectedNames) {names}
+ assertResult(expectedNames)(names)
}
- def getResultField(attrName:String, response:LibrarySearchResponse):Set[String] = {
+ def getResultField(attrName: String, response: LibrarySearchResponse): Set[String] =
getResultField(attrName, response.results)
- }
- def getResultField(attrName: String, results: Seq[JsValue]) = {
- (results map {jsval:JsValue =>
+ def getResultField(attrName: String, results: Seq[JsValue]) =
+ (results map { jsval: JsValue =>
jsval.asJsObject.fields(attrName).convertTo[String]
}).toSet
- }
}
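
Note on the assertResult rewrites in these specs: ScalaTest's assertResult takes the actual value as the sole argument of a second parameter list, so replacing assertResult(expected) {actual} with assertResult(expected)(actual) is purely syntactic. A minimal standalone sketch (not part of this PR) for anyone unsure why the two forms are interchangeable:

    import org.scalatest.freespec.AnyFreeSpec

    // Both calls below compile to the same invocation: a brace block containing a
    // single expression and a parenthesized argument are the same application.
    class AssertResultSyntaxSketch extends AnyFreeSpec {
      "assertResult" - {
        "accepts braces or parentheses for its second argument list" in {
          val total = 2
          assertResult(2) { total } // style used before this reformat
          assertResult(2)(total)    // style produced by the reformat
        }
      }
    }
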
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SortSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SortSpec.scala
index 88b4bc59a..3ba861f7c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SortSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/SortSpec.scala
@@ -17,7 +17,6 @@ class SortSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with Laz
val dur = Duration(2, MINUTES)
-
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
searchDAO.recreateIndex()
@@ -28,9 +27,8 @@ class SortSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with Laz
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
"Library integration" - {
"Elastic Search" - {
@@ -41,35 +39,35 @@ class SortSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with Laz
"search with no sort (or filter) criteria" - {
"returns all results in engine-defined order" in {
val searchResponse = sortBy(None, None)
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
// results are sorted by relevancy/native index order, which we won't test here
}
}
"search with empty string as sort criteria" - {
"returns all results in engine-defined order" in {
val searchResponse = sortBy(Some(""), None)
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
// results are sorted by relevancy/native index order, which we won't test here
}
}
"sort by datasetName asc" - {
"finds the correct first result" in {
val searchResponse = sortBy("library:datasetName", "asc")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:datasetName", "TCGA_ACC_ControlledAccess", searchResponse)
}
}
"sort by datasetName desc" - {
"finds the correct first result" in {
val searchResponse = sortBy("library:datasetName", "desc")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:datasetName", "ZZZ &foo", searchResponse)
}
}
"sort by datasetName with no sort order" - {
"implicitly applies asc sort order" in {
val searchResponse = sortBy("library:datasetName")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:datasetName", "TCGA_ACC_ControlledAccess", searchResponse)
}
}
@@ -77,72 +75,66 @@ class SortSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with Laz
"properly sorts and pages" in {
val criteria = emptyCriteria.copy(sortField = Some("library:datasetName"), from = 2)
val searchResponse = searchFor(criteria)
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:datasetName", "TCGA_BRCA_OpenAccess", searchResponse)
}
}
"sort by numSubjects asc" - {
"finds the correct first result" in {
val searchResponse = sortBy("library:numSubjects", "asc")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:numSubjects", 1, searchResponse)
}
}
"sort by numSubjects desc" - {
"finds the correct first result" in {
val searchResponse = sortBy("library:numSubjects", "desc")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:numSubjects", 4455667, searchResponse)
}
}
"sort by numSubjects desc with no sort order" - {
"implicitly applies asc sort order" in {
val searchResponse = sortBy("library:numSubjects")
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:numSubjects", 1, searchResponse)
}
}
"sort by numSubjects with pagination" - {
"properly sorts and pages" in {
- val criteria = emptyCriteria.copy(sortField = Some("library:numSubjects"), sortDirection = Some("desc"), from = 3, size = 2)
+ val criteria =
+ emptyCriteria.copy(sortField = Some("library:numSubjects"), sortDirection = Some("desc"), from = 3, size = 2)
val searchResponse = searchFor(criteria)
- assertResult(IntegrationTestFixtures.datasetTuples.size) {searchResponse.total}
+ assertResult(IntegrationTestFixtures.datasetTuples.size)(searchResponse.total)
validateFirstResult("library:numSubjects", 444, searchResponse)
}
}
}
-
- private def sortBy(sortField: String): LibrarySearchResponse = {
+ private def sortBy(sortField: String): LibrarySearchResponse =
sortBy(Some(sortField), None)
- }
- private def sortBy(sortField: String, sortDirection: String): LibrarySearchResponse = {
+ private def sortBy(sortField: String, sortDirection: String): LibrarySearchResponse =
sortBy(Some(sortField), Some(sortDirection))
- }
private def sortBy(sortField: Option[String] = None, sortDirection: Option[String] = None): LibrarySearchResponse = {
- val criteria = emptyCriteria.copy(sortField=sortField, sortDirection=sortDirection)
+ val criteria = emptyCriteria.copy(sortField = sortField, sortDirection = sortDirection)
searchFor(criteria)
}
- private def searchFor(criteria: LibrarySearchParams) = {
+ private def searchFor(criteria: LibrarySearchParams) =
Await.result(searchDAO.findDocuments(criteria, Seq.empty[String], Map.empty), dur)
- }
- private def validateFirstResult(field: String, expectedValue: String, response: LibrarySearchResponse): Unit = {
+ private def validateFirstResult(field: String, expectedValue: String, response: LibrarySearchResponse): Unit =
validateFirstResult(field, JsString(expectedValue), response)
- }
- private def validateFirstResult(field: String, expectedValue: Int, response: LibrarySearchResponse): Unit = {
+ private def validateFirstResult(field: String, expectedValue: Int, response: LibrarySearchResponse): Unit =
validateFirstResult(field, JsNumber(expectedValue), response)
- }
- private def validateFirstResult(field:String, expectedValue: JsValue, response:LibrarySearchResponse): Unit = {
+ private def validateFirstResult(field: String, expectedValue: JsValue, response: LibrarySearchResponse): Unit = {
val res = getFirstResult(response)
- val actualValue = res.fields.getOrElse(field, fail(s"field $field does not exist in results") )
- assertResult(expectedValue) {actualValue}
+ val actualValue = res.fields.getOrElse(field, fail(s"field $field does not exist in results"))
+ assertResult(expectedValue)(actualValue)
}
- private def getFirstResult(response:LibrarySearchResponse): JsObject = {
+ private def getFirstResult(response: LibrarySearchResponse): JsObject =
response.results.head.asJsObject
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/TextSearchSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/TextSearchSpec.scala
index 3688e7d7c..9a7f7ceed 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/TextSearchSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/TextSearchSpec.scala
@@ -7,7 +7,12 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
-class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with LazyLogging with SearchResultValidation {
+class TextSearchSpec
+ extends AnyFreeSpec
+ with Matchers
+ with BeforeAndAfterAll
+ with LazyLogging
+ with SearchResultValidation {
override def beforeAll() = {
// use re-create here, since instantiating the DAO will create it in the first place
@@ -19,9 +24,8 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
logger.info("... fixtures indexed.")
}
- override def afterAll() = {
+ override def afterAll() =
searchDAO.deleteIndex()
- }
"Library integration" - {
"Elastic Search" - {
@@ -32,7 +36,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'brca'" - {
"should find just the two BRCA datasets" in {
val searchResponse = searchFor("brca")
- assertResult(2) {searchResponse.total}
+ assertResult(2)(searchResponse.total)
validateResultNames(
Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
searchResponse
@@ -42,7 +46,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'tcga_brca'" - {
"should find just the two BRCA datasets" in {
val searchResponse = searchFor("tcga_brca")
- assertResult(2) {searchResponse.total}
+ assertResult(2)(searchResponse.total)
validateResultNames(
Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
searchResponse
@@ -52,7 +56,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'tcga brca'" - {
"should find just the two BRCA datasets" in {
val searchResponse = searchFor("tcga brca")
- assertResult(2) {searchResponse.total}
+ assertResult(2)(searchResponse.total)
validateResultNames(
Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
searchResponse
@@ -62,7 +66,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'tcga_brca_openaccess'" - {
"should find just the single BRCA open-access dataset" in {
val searchResponse = searchFor("tcga_brca_openaccess")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("TCGA_BRCA_OpenAccess"),
searchResponse
@@ -73,7 +77,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"should find all openaccess datasets, plus the BRCA controlled access" in {
// we'll match on 2 of the 3 tokens, so we find "tcga openaccess" as well as "tcga brca" and "brca openaccess"
val searchResponse = searchFor("tcga brca openaccess")
- assertResult(13) {searchResponse.total}
+ assertResult(13)(searchResponse.total)
val actualNames = getResultField("library:datasetName", searchResponse)
assert(
actualNames.forall(name => name.equals("TCGA_BRCA_ControlledAccess") || name.endsWith("_OpenAccess"))
@@ -83,9 +87,9 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'kidney renal papillary cell carcinoma'" - {
"should find four datasets with two types of kidney carcinomas" in {
val searchResponse = searchFor("kidney renal papillary cell carcinoma")
- assertResult(4) {searchResponse.total}
+ assertResult(4)(searchResponse.total)
validateResultIndications(
- Set("Kidney Renal Clear Cell Carcinoma","Kidney Renal Papillary Cell Carcinoma"),
+ Set("Kidney Renal Clear Cell Carcinoma", "Kidney Renal Papillary Cell Carcinoma"),
searchResponse
)
}
@@ -93,7 +97,7 @@ class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll wi
"search for 'testing123'" - {
"should find the single dataset named 'testing123'" in {
val searchResponse = searchFor("testing123")
- assertResult(1) {searchResponse.total}
+ assertResult(1)(searchResponse.total)
validateResultNames(
Set("testing123"),
searchResponse
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockAgoraACLData.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockAgoraACLData.scala
index 8aad454f6..e9d60308f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockAgoraACLData.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockAgoraACLData.scala
@@ -1,7 +1,12 @@
package org.broadinstitute.dsde.firecloud.mock
import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.ACLNames._
-import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{AgoraPermission, EntityAccessControlAgora, FireCloudPermission, Method}
+import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository.{
+ AgoraPermission,
+ EntityAccessControlAgora,
+ FireCloudPermission,
+ Method
+}
/**
* Created by davidan on 10/29/15.
@@ -23,8 +28,9 @@ object MockAgoraACLData {
private val readerAgora = AgoraPermission(Some("reader@broadinstitute.org"), Some(ListReader))
private val noAccessAgora = AgoraPermission(Some("noaccess@broadinstitute.org"), Some(ListNoAccess))
// AGORA EDGE CASES
- private val partialsAgora = AgoraPermission(Some("agora-partial@broadinstitute.org"), Some(List("Read","Write")))
- private val extrasAgora = AgoraPermission(Some("agora-extras@broadinstitute.org"), Some(ListOwner ++ List("Extra","Permissions")))
+ private val partialsAgora = AgoraPermission(Some("agora-partial@broadinstitute.org"), Some(List("Read", "Write")))
+ private val extrasAgora =
+ AgoraPermission(Some("agora-extras@broadinstitute.org"), Some(ListOwner ++ List("Extra", "Permissions")))
private val emptyAgora = AgoraPermission(Some("agora-empty@broadinstitute.org"), Some(List("")))
private val noneAgora = AgoraPermission(Some("agora-none@broadinstitute.org"), None)
private val emptyUserAgora = AgoraPermission(Some(""), Some(ListOwner))
@@ -37,14 +43,23 @@ object MockAgoraACLData {
val translatedStandardAgora = List(ownerAgora, readerAgora, ownerAgora, noAccessAgora)
- val edgesAgora = standardAgora ++ List(partialsAgora, extrasAgora, emptyAgora, noneAgora, emptyUserAgora, noneUserAgora)
+ val edgesAgora =
+ standardAgora ++ List(partialsAgora, extrasAgora, emptyAgora, noneAgora, emptyUserAgora, noneUserAgora)
// multi-permissions endpoint response
val multiUpsertResponse: List[EntityAccessControlAgora] = List(
- EntityAccessControlAgora(Method(Some("ns1"),Some("n1"),Some(1)), Seq(AgoraPermission(Some("user1@example.com"), Some(ListAll))), None),
- EntityAccessControlAgora(Method(Some("ns2"),Some("n2"),Some(2)), Seq(AgoraPermission(Some("user2@example.com"), Some(ListReader))), None),
- EntityAccessControlAgora(Method(Some("ns3"),Some("n3"),Some(3)), Seq.empty[AgoraPermission], Some("this is an error message"))
+ EntityAccessControlAgora(Method(Some("ns1"), Some("n1"), Some(1)),
+ Seq(AgoraPermission(Some("user1@example.com"), Some(ListAll))),
+ None
+ ),
+ EntityAccessControlAgora(Method(Some("ns2"), Some("n2"), Some(2)),
+ Seq(AgoraPermission(Some("user2@example.com"), Some(ListReader))),
+ None
+ ),
+ EntityAccessControlAgora(Method(Some("ns3"), Some("n3"), Some(3)),
+ Seq.empty[AgoraPermission],
+ Some("this is an error message")
+ )
)
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockGoogleServicesDAO.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockGoogleServicesDAO.scala
index fd4b051b2..4c1cd1ee2 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockGoogleServicesDAO.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockGoogleServicesDAO.scala
@@ -16,7 +16,7 @@ import scala.concurrent.{ExecutionContext, Future}
class MockGoogleServicesDAO extends GoogleServicesDAO {
- private final val spreadsheetJson = """{
+ final private val spreadsheetJson = """{
| "properties": {
| "autoRecalc": "ON_CHANGE",
| "defaultFormat": {
@@ -65,38 +65,51 @@ class MockGoogleServicesDAO extends GoogleServicesDAO {
| "spreadsheetUrl": "https://docs.google.com/spreadsheets/d/randomId/edit"
|}
|""".stripMargin.parseJson.asJsObject
- final val spreadsheetUpdateJson = """{"spreadsheetId":"randomId","updatedRange":"Sheet1!A1:F45","updatedRows":45,"updatedCells":270,"updatedColumns":6}""".parseJson.asJsObject
+ final val spreadsheetUpdateJson =
+ """{"spreadsheetId":"randomId","updatedRange":"Sheet1!A1:F45","updatedRows":45,"updatedCells":270,"updatedColumns":6}""".parseJson.asJsObject
val pubsubMessages = new LinkedBlockingQueue[String]()
override def getAdminUserAccessToken: String = "adminUserAccessToken"
- override def getBucketObjectAsInputStream(bucketName: String, objectKey: String): InputStream = {
+ override def getBucketObjectAsInputStream(bucketName: String, objectKey: String): InputStream =
objectKey match {
case "target-whitelist.txt" => new ByteArrayInputStream("firecloud-dev\ntarget-user".getBytes("UTF-8"))
- case "tcga-whitelist.txt" => new ByteArrayInputStream("firecloud-dev\ntcga-user".getBytes("UTF-8"))
- case _ => new ByteArrayInputStream(" ".getBytes("UTF-8"))
+ case "tcga-whitelist.txt" => new ByteArrayInputStream("firecloud-dev\ntcga-user".getBytes("UTF-8"))
+ case _ => new ByteArrayInputStream(" ".getBytes("UTF-8"))
}
- }
override def getObjectResourceUrl(bucketName: String, objectKey: String): String = ""
- override def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, objectContents: Array[Byte]): GcsPath = GcsPath(bucketName, objectKey)
- override def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, tempFile: File): GcsPath = GcsPath(bucketName, objectKey)
+ override def writeObjectAsRawlsSA(bucketName: GcsBucketName,
+ objectKey: GcsObjectName,
+ objectContents: Array[Byte]
+ ): GcsPath = GcsPath(bucketName, objectKey)
+ override def writeObjectAsRawlsSA(bucketName: GcsBucketName, objectKey: GcsObjectName, tempFile: File): GcsPath =
+ GcsPath(bucketName, objectKey)
- override def getUserProfile(accessToken: WithAccessToken)
- (implicit executionContext: ExecutionContext): Future[HttpResponse] = Future.failed(new UnsupportedOperationException)
- override val fetchPriceList: Future[GooglePriceList] = {
- Future.successful(GooglePriceList(GooglePrices(Map("us" -> 0.01, "europe-west1" -> 0.02), UsTieredPriceItem(Map(1024L -> BigDecimal(0.12)))), "v0", "18-November-2016"))
- }
+ override def getUserProfile(accessToken: WithAccessToken)(implicit
+ executionContext: ExecutionContext
+ ): Future[HttpResponse] = Future.failed(new UnsupportedOperationException)
+ override val fetchPriceList: Future[GooglePriceList] =
+ Future.successful(
+ GooglePriceList(
+ GooglePrices(Map("us" -> 0.01, "europe-west1" -> 0.02), UsTieredPriceItem(Map(1024L -> BigDecimal(0.12)))),
+ "v0",
+ "18-November-2016"
+ )
+ )
override def deleteGoogleGroup(groupEmail: String): Unit = ()
- override def createGoogleGroup(groupName: String): Option[String] = Option("new-google-group@support.something.firecloud.org")
- override def addMemberToAnonymizedGoogleGroup(groupName: String, targetUserEmail: String): Option[String] = Option("user-email@something.com")
- override def getBucket(bucketName: String, petKey: String): Option[Bucket] = {
+ override def createGoogleGroup(groupName: String): Option[String] = Option(
+ "new-google-group@support.something.firecloud.org"
+ )
+ override def addMemberToAnonymizedGoogleGroup(groupName: String, targetUserEmail: String): Option[String] = Option(
+ "user-email@something.com"
+ )
+ override def getBucket(bucketName: String, petKey: String): Option[Bucket] =
bucketName match {
- case "usBucket" => Option(new Bucket().setName("usBucket").setLocation("US"))
- case "europeWest1Bucket"=> Option(new Bucket().setName("europeWest1").setLocation("EUROPE-WEST1"))
+ case "usBucket" => Option(new Bucket().setName("usBucket").setLocation("US"))
+ case "europeWest1Bucket" => Option(new Bucket().setName("europeWest1").setLocation("EUROPE-WEST1"))
}
- }
def status: Future[SubsystemStatus] = Future(SubsystemStatus(ok = true, messages = None))
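
Two rewrites recur in the MockGoogleServicesDAO hunk above: modifier reordering (private final becomes final private, private implicit becomes implicit private) and wrapping a trailing implicit parameter list onto its own lines. Both are purely syntactic. A small sketch under assumed names (GreetingDao and fetchGreeting are illustrative, not from this PR):

    import scala.concurrent.{ExecutionContext, Future}

    class GreetingDao {
      // modifier order is normalized; either order declares the same member
      final private val greeting = "hello"

      // trailing implicit parameter list wrapped in the style used above
      def fetchGreeting(name: String)(implicit
        ec: ExecutionContext
      ): Future[String] = Future(s"$greeting, $name")
    }
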
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockTSV.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockTSV.scala
index 7a213e01a..b5f9cd4ff 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockTSV.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockTSV.scala
@@ -8,13 +8,13 @@ object MockTSVStrings {
/*
* Utilities for generating test data.
*/
- private implicit class TSVListSupport(elems: List[String]) {
+ implicit private class TSVListSupport(elems: List[String]) {
def tabbed: String = elems.mkString("\t")
def newlineSeparated: String = elems.mkString("\n")
def windowsNewlineSeparated: String = elems.mkString("\r\n")
}
- private implicit class TSVStringSupport(str: String) {
+ implicit private class TSVStringSupport(str: String) {
def quoted: String = s""""$str""""
}
@@ -26,10 +26,10 @@ object MockTSVStrings {
val onlyNewlines = "\n\n\n\n\n\n"
val rowTooLong = List(
- List("foo", "bar", "baz").tabbed,
- List("this", "line's", "fine").tabbed,
- List("this", "line's", "too", "long").tabbed
- ).newlineSeparated
+ List("foo", "bar", "baz").tabbed,
+ List("this", "line's", "fine").tabbed,
+ List("this", "line's", "too", "long").tabbed
+ ).newlineSeparated
val rowTooShort = List(
List("foo", "bar", "baz").tabbed,
@@ -44,9 +44,9 @@ object MockTSVStrings {
).newlineSeparated
val validOneLine = List(
- List("foo", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed
- ).newlineSeparated
+ List("foo", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed
+ ).newlineSeparated
val trailingNewlines = validOneLine + "\n\n\n\n"
@@ -59,46 +59,47 @@ object MockTSVStrings {
/*
* TSVs for testing the TSV import code.
*/
- val missingTSVType = List(
- List("sample_id", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ val missingTSVType = List(List("sample_id", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- val nonexistentTSVType = List(
- List("wobble:sample_id", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ val nonexistentTSVType = List(List("wobble:sample_id", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- val malformedEntityType = List(
- List("entity:sampleid", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ val malformedEntityType = List(List("entity:sampleid", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- //membership TSVs
- val membershipUnknownFirstColumnHeader = List(
- List("membership:sampel_id", "bar").tabbed,
- List("woop", "de").tabbed,
- List("hip", "hip").tabbed).newlineSeparated
+ // membership TSVs
+ val membershipUnknownFirstColumnHeader = List(List("membership:sampel_id", "bar").tabbed,
+ List("woop", "de").tabbed,
+ List("hip", "hip").tabbed
+ ).newlineSeparated
- val membershipNotCollectionType = List(
- List("membership:sample_id", "bar").tabbed,
- List("woop", "de").tabbed,
- List("hip", "hip").tabbed).newlineSeparated
+ val membershipNotCollectionType = List(List("membership:sample_id", "bar").tabbed,
+ List("woop", "de").tabbed,
+ List("hip", "hip").tabbed
+ ).newlineSeparated
- val membershipMissingMembersHeader = List( //missing sample_id
+ val membershipMissingMembersHeader = List( // missing sample_id
List("membership:sample_set_id").tabbed,
List("sset_1").tabbed,
- List("sset_2").tabbed).newlineSeparated
+ List("sset_2").tabbed
+ ).newlineSeparated
- val membershipExtraAttributes = List(
- List("membership:sample_set_id", "sample", "other_attribute").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ val membershipExtraAttributes = List(List("membership:sample_set_id", "sample", "other_attribute").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- val membershipValid = List(
- List("membership:sample_set_id", "sample").tabbed,
- List("sset_01", "sample_01").tabbed,
- List("sset_01", "sample_02").tabbed).newlineSeparated
+ val membershipValid = List(List("membership:sample_set_id", "sample").tabbed,
+ List("sset_01", "sample_01").tabbed,
+ List("sset_01", "sample_02").tabbed
+ ).newlineSeparated
val membershipValidWithMultipleNewlines = List(
List("membership:sample_set_id", "sample").tabbed,
@@ -106,7 +107,8 @@ object MockTSVStrings {
List("sset_01", "sample_02").tabbed,
List().tabbed,
List().tabbed,
- List().tabbed).newlineSeparated
+ List().tabbed
+ ).newlineSeparated
val membershipValidWithMultipleDelimiterOnlylines = List(
List("membership:sample_set_id", "sample").tabbed,
@@ -114,28 +116,29 @@ object MockTSVStrings {
List("sset_01", "sample_02").tabbed,
List("", "").tabbed,
List("", "").tabbed,
- List("", "").tabbed).newlineSeparated
+ List("", "").tabbed
+ ).newlineSeparated
- val defaultMembershipValid = List(
- List("sample_set_id", "sample").tabbed,
- List("sset_01", "sample_01").tabbed,
- List("sset_01", "sample_02").tabbed).newlineSeparated
+ val defaultMembershipValid = List(List("sample_set_id", "sample").tabbed,
+ List("sset_01", "sample_01").tabbed,
+ List("sset_01", "sample_02").tabbed
+ ).newlineSeparated
- //entity TSVs
- val entityNonModelFirstColumnHeader = List(
- List("entity:bigQueyr_id", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ // entity TSVs
+ val entityNonModelFirstColumnHeader = List(List("entity:bigQueyr_id", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- val entityHasDupes = List(
- List("entity:participant_id", "some_attribute").tabbed,
- List("part_01", "de").tabbed,
- List("part_01", "hip").tabbed).newlineSeparated
+ val entityHasDupes = List(List("entity:participant_id", "some_attribute").tabbed,
+ List("part_01", "de").tabbed,
+ List("part_01", "hip").tabbed
+ ).newlineSeparated
- val entityHasCollectionMembers = List(
- List("entity:sample_set_id", "sample").tabbed,
- List("sset_01", "sample_01").tabbed,
- List("sset_01", "sample_02").tabbed).newlineSeparated
+ val entityHasCollectionMembers = List(List("entity:sample_set_id", "sample").tabbed,
+ List("sset_01", "sample_01").tabbed,
+ List("sset_01", "sample_02").tabbed
+ ).newlineSeparated
val entityHasNoRows = List(
List("entity:sample_set_id", "sample").tabbed
@@ -145,15 +148,16 @@ object MockTSVStrings {
List("sample_set_id", "sample").tabbed
).newlineSeparated
- val entityUpdateMissingRequiredAttrs = List( //missing participant
+ val entityUpdateMissingRequiredAttrs = List( // missing participant
List("entity:sample_id", "some_attribute").tabbed,
List("sample_01", "de").tabbed,
- List("sample_02", "hip").tabbed).newlineSeparated
+ List("sample_02", "hip").tabbed
+ ).newlineSeparated
- val entityUpdateWithRequiredAttrs = List(
- List("entity:sample_id", "participant").tabbed,
- List("sample_01", "part_01").tabbed,
- List("sample_02", "part_02").tabbed).newlineSeparated
+ val entityUpdateWithRequiredAttrs = List(List("entity:sample_id", "participant").tabbed,
+ List("sample_01", "part_01").tabbed,
+ List("sample_02", "part_02").tabbed
+ ).newlineSeparated
val entityUpdateWithMultipleNewlines = List(
List("entity:sample_id", "participant").tabbed,
@@ -161,90 +165,86 @@ object MockTSVStrings {
List("sample_02", "part_02").tabbed,
List().tabbed,
List().tabbed,
- List().tabbed).newlineSeparated
+ List().tabbed
+ ).newlineSeparated
val entityUpdateWithMultipleDelimiterOnlylines = List(
List("entity:sample_id", "participant").tabbed,
List("sample_01", "part_01").tabbed,
List("sample_02", "part_02").tabbed,
- List("","").tabbed,
- List("","").tabbed,
- List("","").tabbed).newlineSeparated
+ List("", "").tabbed,
+ List("", "").tabbed,
+ List("", "").tabbed
+ ).newlineSeparated
- val defaultUpdateWithRequiredAttrs = List(
- List("sample_id", "participant").tabbed,
- List("sample_01", "part_01").tabbed,
- List("sample_02", "part_02").tabbed).newlineSeparated
+ val defaultUpdateWithRequiredAttrs = List(List("sample_id", "participant").tabbed,
+ List("sample_01", "part_01").tabbed,
+ List("sample_02", "part_02").tabbed
+ ).newlineSeparated
- val entityUpdateWithRequiredAndOptionalAttrs = List(
- List("entity:sample_id", "participant", "some_attribute").tabbed,
- List("sample_01", "part_01", "foo").tabbed,
- List("sample_02", "part_02", "bar").tabbed).newlineSeparated
+ val entityUpdateWithRequiredAndOptionalAttrs = List(List("entity:sample_id", "participant", "some_attribute").tabbed,
+ List("sample_01", "part_01", "foo").tabbed,
+ List("sample_02", "part_02", "bar").tabbed
+ ).newlineSeparated
- val defaultUpdateWithRequiredAndOptionalAttrs = List(
- List("sample_id", "participant", "some_attribute").tabbed,
- List("sample_01", "part_01", "foo").tabbed,
- List("sample_02", "part_02", "bar").tabbed).newlineSeparated
+ val defaultUpdateWithRequiredAndOptionalAttrs = List(List("sample_id", "participant", "some_attribute").tabbed,
+ List("sample_01", "part_01", "foo").tabbed,
+ List("sample_02", "part_02", "bar").tabbed
+ ).newlineSeparated
- //update TSVs
- val updateNonModelFirstColumnHeader = List(
- List("update:bigQuery_id", "bar", "baz").tabbed,
- List("woop", "de", "doo").tabbed,
- List("hip", "hip", "hooray").tabbed).newlineSeparated
+ // update TSVs
+ val updateNonModelFirstColumnHeader = List(List("update:bigQuery_id", "bar", "baz").tabbed,
+ List("woop", "de", "doo").tabbed,
+ List("hip", "hip", "hooray").tabbed
+ ).newlineSeparated
- val updateHasDupes = List(
- List("update:participant_id", "some_attribute").tabbed,
- List("part_01", "de").tabbed,
- List("part_01", "hip").tabbed).newlineSeparated
+ val updateHasDupes = List(List("update:participant_id", "some_attribute").tabbed,
+ List("part_01", "de").tabbed,
+ List("part_01", "hip").tabbed
+ ).newlineSeparated
- val updateHasCollectionMembers = List(
- List("update:sample_set_id", "sample").tabbed,
- List("sset_01", "sample_01").tabbed,
- List("sset_01", "sample_02").tabbed).newlineSeparated
+ val updateHasCollectionMembers = List(List("update:sample_set_id", "sample").tabbed,
+ List("sset_01", "sample_01").tabbed,
+ List("sset_01", "sample_02").tabbed
+ ).newlineSeparated
- val updateMissingRequiredAttrs = List( //missing participant
+ val updateMissingRequiredAttrs = List( // missing participant
List("update:sample_id", "some_attribute").tabbed,
List("sample_01", "de").tabbed,
- List("sample_02", "hip").tabbed).newlineSeparated
-
- val updateWithRequiredAttrs = List(
- List("update:sample_id", "participant").tabbed,
- List("sample_01", "part_01").tabbed,
- List("sample_02", "part_02").tabbed).newlineSeparated
+ List("sample_02", "hip").tabbed
+ ).newlineSeparated
- val updateWithRequiredAndOptionalAttrs = List(
- List("update:sample_id", "participant", "some_attribute").tabbed,
- List("sample_01", "part_01", "foo").tabbed,
- List("sample_02", "part_02", "bar").tabbed).newlineSeparated
+ val updateWithRequiredAttrs = List(List("update:sample_id", "participant").tabbed,
+ List("sample_01", "part_01").tabbed,
+ List("sample_02", "part_02").tabbed
+ ).newlineSeparated
+ val updateWithRequiredAndOptionalAttrs = List(List("update:sample_id", "participant", "some_attribute").tabbed,
+ List("sample_01", "part_01", "foo").tabbed,
+ List("sample_02", "part_02", "bar").tabbed
+ ).newlineSeparated
- val addNewWorkspaceAttributes = List(
- List("workspace:attributeName1", "attributeName2", "attributeName3").tabbed,
- List("\"attributeValue1\"", "true", "800").tabbed).newlineSeparated
+ val addNewWorkspaceAttributes = List(List("workspace:attributeName1", "attributeName2", "attributeName3").tabbed,
+ List("\"attributeValue1\"", "true", "800").tabbed
+ ).newlineSeparated
- val duplicateKeysWorkspaceAttributes = List(
- List("workspace:a1", "a1").tabbed,
- List("v1", "v2").tabbed).newlineSeparated
+ val duplicateKeysWorkspaceAttributes =
+ List(List("workspace:a1", "a1").tabbed, List("v1", "v2").tabbed).newlineSeparated
- val wrongHeaderWorkspaceAttributes = List(
- List("a3", "a4").tabbed,
- List("v3", "v4").tabbed).newlineSeparated
+ val wrongHeaderWorkspaceAttributes = List(List("a3", "a4").tabbed, List("v3", "v4").tabbed).newlineSeparated
- val tooManyNamesWorkspaceAttributes = List(
- List("workspace:a5", "a6", "a7").tabbed,
- List("v5", "v6").tabbed).newlineSeparated
+ val tooManyNamesWorkspaceAttributes =
+ List(List("workspace:a5", "a6", "a7").tabbed, List("v5", "v6").tabbed).newlineSeparated
- val tooManyValuesWorkspaceAttributes = List(
- List("workspace:a5", "a6").tabbed,
- List("v5", "v6", "v7").tabbed).newlineSeparated
+ val tooManyValuesWorkspaceAttributes =
+ List(List("workspace:a5", "a6").tabbed, List("v5", "v6", "v7").tabbed).newlineSeparated
- val tooManyRowsWorkspaceAttributes = List(
- List("workspace:a5", "a6").tabbed,
- List("v5", "v6", "v7").tabbed,
- List("v8", "v9", "v10").tabbed).newlineSeparated
+ val tooManyRowsWorkspaceAttributes = List(List("workspace:a5", "a6").tabbed,
+ List("v5", "v6", "v7").tabbed,
+ List("v8", "v9", "v10").tabbed
+ ).newlineSeparated
- val tooFewRowsWorkspaceAttributes = List(
- List("workspace:a5", "a6").tabbed).newlineSeparated
+ val tooFewRowsWorkspaceAttributes = List(List("workspace:a5", "a6").tabbed).newlineSeparated
val quotedValues = List(
List("foo".quoted, "bar".quoted).tabbed,
@@ -286,71 +286,86 @@ object MockTSVStrings {
val validHugeFile = List(
(1 to 1000).map(num => s"header$num").toList.tabbed,
- (1 to 1000).map(row => (1 to 1000).map(column => s"row${row}column$column").toList.tabbed ).toList.newlineSeparated
+ (1 to 1000).map(row => (1 to 1000).map(column => s"row${row}column$column").toList.tabbed).toList.newlineSeparated
).newlineSeparated
}
object MockTSVLoadFiles {
- //DON'T replace these with TSVParser.parse their corresponding MockTSVStrings objects...
- //these are used to test the TSVParser!
- val validOneLine = TSVLoadFile("foo",
- Seq("foo", "bar", "baz"),
- Seq(Seq("woop", "de", "doo")))
-
- val validMultiLine = TSVLoadFile("foo",
- Seq("foo", "bar", "baz"),
- Seq(
- Seq("woop", "de", "doo"),
- Seq("hip", "hip", "hooray")))
+  // DON'T replace these by calling TSVParser.parse on their corresponding MockTSVStrings objects...
+ // these are used to test the TSVParser!
+ val validOneLine = TSVLoadFile("foo", Seq("foo", "bar", "baz"), Seq(Seq("woop", "de", "doo")))
+
+ val validMultiLine =
+ TSVLoadFile("foo", Seq("foo", "bar", "baz"), Seq(Seq("woop", "de", "doo"), Seq("hip", "hip", "hooray")))
- val validWithBlanks = TSVLoadFile("foo",
- Seq("foo", "bar", "baz"),
- Seq(Seq("woop", "", "doo")))
+ val validWithBlanks = TSVLoadFile("foo", Seq("foo", "bar", "baz"), Seq(Seq("woop", "", "doo")))
- val validWorkspaceAttributes = TSVLoadFile("workspace", Seq("a1", "a2", "a3", "a4"), Seq(Seq("v1", "2", "[1,2,3]","""{"tables":{"sample":{"save":["participant",false,"sample",true]}}}""")))
+ val validWorkspaceAttributes = TSVLoadFile(
+ "workspace",
+ Seq("a1", "a2", "a3", "a4"),
+ Seq(Seq("v1", "2", "[1,2,3]", """{"tables":{"sample":{"save":["participant",false,"sample",true]}}}"""))
+ )
val validOneWorkspaceAttribute = TSVLoadFile("workspace", Seq("a1"), Seq(Seq("v1")))
val validEmptyStrWSAttribute = TSVLoadFile("workspace", Seq("a1"), Seq(Seq("")))
val validRemoveWSAttribute = TSVLoadFile("workspace", Seq("a1"), Seq(Seq("__DELETE__")))
val validRemoveAddAttribute = TSVLoadFile("workspace", Seq("a1", "a2"), Seq(Seq("__DELETE__", "v2")))
val validQuotedValues = TSVLoadFile("foo", Seq("foo", "bar"), Seq(Seq("baz", "biz")))
val validQuotedValuesWithTabs = TSVLoadFile("foo", Seq("foo", "bar"), Seq(Seq("baz", "this\thas\ttabs")))
- val validNamespacedAttributes = TSVLoadFile("foo", Seq("foo", "tag:foo", "bar", "tag:bar"), Seq(Seq("1","2","3","4"), Seq("5","6","7","8")))
+ val validNamespacedAttributes =
+ TSVLoadFile("foo", Seq("foo", "tag:foo", "bar", "tag:bar"), Seq(Seq("1", "2", "3", "4"), Seq("5", "6", "7", "8")))
val missingFields1 = TSVLoadFile("foo", Seq("foo", "bar", "baz"), Seq(Seq("biz", "", "buz")))
val missingFields2 = TSVLoadFile("foo", Seq("foo", "bar", "baz"), Seq(Seq("", "", "buz"), Seq("abc", "123", "")))
val entityWithAttributeBooleanArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[false,true,true]""")))
val entityWithAttributeNumberArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[1,2,3]""")))
val entityWithAttributeStringArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """["foo","bar","baz"]""")))
- val entityWithAttributeEntityReferenceArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[{"entityType":"sample","entityName":"HCC1143"},{"entityType":"sample","entityName":"HCC1143_10"},{"entityType":"sample","entityName":"HCC1143_100"}]""")))
+ val entityWithAttributeEntityReferenceArray = TSVLoadFile(
+ "array",
+ Seq("array"),
+ Seq(
+ Seq(
+ "bla",
+ """[{"entityType":"sample","entityName":"HCC1143"},{"entityType":"sample","entityName":"HCC1143_10"},{"entityType":"sample","entityName":"HCC1143_100"}]"""
+ )
+ )
+ )
val entityWithAttributeMixedArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[false,"foo",1]""")))
- val entityWithAttributeArrayOfObjects = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[{"one":"two"},{"three":"four"},{"five":"six"}]""")))
+ val entityWithAttributeArrayOfObjects =
+ TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[{"one":"two"},{"three":"four"},{"five":"six"}]""")))
val entityWithEmptyAttributeArray = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[]""")))
- val entityWithBooleanAndNumberAttributes = TSVLoadFile("foo", Seq("foo", "booleans", "numbers", "strings"), Seq(Seq("e1", "true", "0", "string"), Seq("e2", "false", "3.14", ",")))
- val entityWithNestedArrays = TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[["one","two"],["three","four"],["five","six"]]""")))
+ val entityWithBooleanAndNumberAttributes = TSVLoadFile(
+ "foo",
+ Seq("foo", "booleans", "numbers", "strings"),
+ Seq(Seq("e1", "true", "0", "string"), Seq("e2", "false", "3.14", ","))
+ )
+ val entityWithNestedArrays =
+ TSVLoadFile("array", Seq("array"), Seq(Seq("bla", """[["one","two"],["three","four"],["five","six"]]""")))
val validHugeFile = TSVLoadFile("header1",
- (1 to 1000).map(num => s"header$num"),
- (1 to 1000).map(row => (1 to 1000).map(column => s"row${row}column$column") )
+ (1 to 1000).map(num => s"header$num"),
+ (1 to 1000).map(row => (1 to 1000).map(column => s"row${row}column$column"))
)
}
object MockTSVFormData {
- def wrapInMultipart( fieldName: String, data: String ): Multipart.FormData = {
- Multipart.FormData(Seq(BodyPart(fieldName, data)):_*)
- }
+ def wrapInMultipart(fieldName: String, data: String): Multipart.FormData =
+ Multipart.FormData(Seq(BodyPart(fieldName, data)): _*)
val missingTSVType = wrapInMultipart("entities", MockTSVStrings.missingTSVType)
val nonexistentTSVType = wrapInMultipart("entities", MockTSVStrings.nonexistentTSVType)
val malformedEntityType = wrapInMultipart("entities", MockTSVStrings.malformedEntityType)
- val membershipUnknownFirstColumnHeader = wrapInMultipart("entities", MockTSVStrings.membershipUnknownFirstColumnHeader)
+ val membershipUnknownFirstColumnHeader =
+ wrapInMultipart("entities", MockTSVStrings.membershipUnknownFirstColumnHeader)
val membershipNotCollectionType = wrapInMultipart("entities", MockTSVStrings.membershipNotCollectionType)
val membershipMissingMembersHeader = wrapInMultipart("entities", MockTSVStrings.membershipMissingMembersHeader)
val membershipExtraAttributes = wrapInMultipart("entities", MockTSVStrings.membershipExtraAttributes)
val membershipValid = wrapInMultipart("entities", MockTSVStrings.membershipValid)
- val membershipValidWithMultipleNewlines = wrapInMultipart("entities", MockTSVStrings.membershipValidWithMultipleNewlines)
- val membershipValidWithMultipleDelimiterOnlylines = wrapInMultipart("entities", MockTSVStrings.membershipValidWithMultipleDelimiterOnlylines)
+ val membershipValidWithMultipleNewlines =
+ wrapInMultipart("entities", MockTSVStrings.membershipValidWithMultipleNewlines)
+ val membershipValidWithMultipleDelimiterOnlylines =
+ wrapInMultipart("entities", MockTSVStrings.membershipValidWithMultipleDelimiterOnlylines)
val entityUnknownFirstColumnHeader = wrapInMultipart("entities", MockTSVStrings.entityNonModelFirstColumnHeader)
val entityHasDupes = wrapInMultipart("entities", MockTSVStrings.entityHasDupes)
@@ -359,19 +374,23 @@ object MockTSVFormData {
val entityUpdateMissingRequiredAttrs = wrapInMultipart("entities", MockTSVStrings.entityUpdateMissingRequiredAttrs)
val entityUpdateWithRequiredAttrs = wrapInMultipart("entities", MockTSVStrings.entityUpdateWithRequiredAttrs)
val entityUpdateWithMultipleNewlines = wrapInMultipart("entities", MockTSVStrings.entityUpdateWithMultipleNewlines)
- val entityUpdateWithMultipleDelimiterOnlylines = wrapInMultipart("entities", MockTSVStrings.entityUpdateWithMultipleDelimiterOnlylines)
- val entityUpdateWithRequiredAndOptionalAttrs = wrapInMultipart("entities", MockTSVStrings.entityUpdateWithRequiredAndOptionalAttrs)
+ val entityUpdateWithMultipleDelimiterOnlylines =
+ wrapInMultipart("entities", MockTSVStrings.entityUpdateWithMultipleDelimiterOnlylines)
+ val entityUpdateWithRequiredAndOptionalAttrs =
+ wrapInMultipart("entities", MockTSVStrings.entityUpdateWithRequiredAndOptionalAttrs)
val updateNonModelFirstColumnHeader = wrapInMultipart("entities", MockTSVStrings.updateNonModelFirstColumnHeader)
val updateHasDupes = wrapInMultipart("entities", MockTSVStrings.updateHasDupes)
val updateHasCollectionMembers = wrapInMultipart("entities", MockTSVStrings.updateHasCollectionMembers)
val updateMissingRequiredAttrs = wrapInMultipart("entities", MockTSVStrings.updateMissingRequiredAttrs)
val updateWithRequiredAttrs = wrapInMultipart("entities", MockTSVStrings.updateWithRequiredAttrs)
- val updateWithRequiredAndOptionalAttrs = wrapInMultipart("entities", MockTSVStrings.updateWithRequiredAndOptionalAttrs)
+ val updateWithRequiredAndOptionalAttrs =
+ wrapInMultipart("entities", MockTSVStrings.updateWithRequiredAndOptionalAttrs)
val defaultHasNoRows = wrapInMultipart("entities", MockTSVStrings.defaultEntityHasNoRows)
val defaultUpdateWithRequiredAttrs = wrapInMultipart("entities", MockTSVStrings.defaultUpdateWithRequiredAttrs)
- val defaultUpdateWithRequiredAndOptionalAttrs = wrapInMultipart("entities", MockTSVStrings.defaultUpdateWithRequiredAndOptionalAttrs)
+ val defaultUpdateWithRequiredAndOptionalAttrs =
+ wrapInMultipart("entities", MockTSVStrings.defaultUpdateWithRequiredAndOptionalAttrs)
val defaultMembershipValid = wrapInMultipart("entities", MockTSVStrings.defaultMembershipValid)
val addNewWorkspaceAttributes = wrapInMultipart("attributes", MockTSVStrings.addNewWorkspaceAttributes)
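
The long fixture literals in MockTSV.scala are all built from the small implicit classes at the top of MockTSVStrings (tabbed, newlineSeparated, quoted); the reformat only realigns their arguments. A self-contained sketch of the same idiom (TsvFixtureSketch and its sample values are illustrative, not from this PR):

    object TsvFixtureSketch {
      implicit private class TSVListSupport(elems: List[String]) {
        def tabbed: String = elems.mkString("\t")
        def newlineSeparated: String = elems.mkString("\n")
      }

      // two tab-delimited rows joined by a newline, mirroring the fixtures above
      val sample: String = List(
        List("entity:sample_id", "participant").tabbed,
        List("sample_01", "part_01").tabbed
      ).newlineSeparated

      def main(args: Array[String]): Unit = println(sample)
    }
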
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockUtils.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockUtils.scala
index f19579ca5..99798c98e 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockUtils.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockUtils.scala
@@ -20,37 +20,33 @@ object MockUtils {
val samServerPort = 8994
val cromiamServerPort = 8995
- def randomPositiveInt(): Int = {
- scala.util.Random.nextInt(9) + 1
- }
+ def randomPositiveInt(): Int =
+ scala.util.Random.nextInt(9) + 1
- def randomAlpha(): String = {
- val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')
- randomStringFromCharList(randomPositiveInt(), chars)
- }
+ def randomAlpha(): String = {
+ val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')
+ randomStringFromCharList(randomPositiveInt(), chars)
+ }
- def randomBoolean(): Boolean = {
+ def randomBoolean(): Boolean =
scala.util.Random.nextBoolean()
- }
- def randomStringFromCharList(length: Int, chars: Seq[Char]): String = {
- val sb = new StringBuilder
- for (i <- 1 to length) {
- val randomNum = util.Random.nextInt(chars.length)
- sb.append(chars(randomNum))
- }
- sb.toString()
- }
+ def randomStringFromCharList(length: Int, chars: Seq[Char]): String = {
+ val sb = new StringBuilder
+ for (i <- 1 to length) {
+ val randomNum = util.Random.nextInt(chars.length)
+ sb.append(chars(randomNum))
+ }
+ sb.toString()
+ }
- def isoDate(): String = {
- isoDateFormat.format(new Date())
- }
+ def isoDate(): String =
+ isoDateFormat.format(new Date())
def rawlsErrorReport(statusCode: StatusCode) =
ErrorReport("Rawls", "dummy text", Option(statusCode), Seq(), Seq(), None)
- def randomElement[A](list: List[A]): A = {
+ def randomElement[A](list: List[A]): A =
list(scala.util.Random.nextInt(list.length))
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockWorkspaceServer.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockWorkspaceServer.scala
index d08582943..e2a22e7f0 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockWorkspaceServer.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/MockWorkspaceServer.scala
@@ -4,7 +4,14 @@ import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.mock.MockUtils._
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
-import org.broadinstitute.dsde.rawls.model.{GoogleProjectId, GoogleProjectNumber, RawlsBillingAccountName, WorkspaceDetails, WorkspaceState, WorkspaceVersions}
+import org.broadinstitute.dsde.rawls.model.{
+ GoogleProjectId,
+ GoogleProjectNumber,
+ RawlsBillingAccountName,
+ WorkspaceDetails,
+ WorkspaceState,
+ WorkspaceVersions
+}
import org.joda.time.DateTime
import org.mockserver.integration.ClientAndServer
import org.mockserver.integration.ClientAndServer._
@@ -30,9 +37,9 @@ object MockWorkspaceServer {
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authdomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -54,9 +61,9 @@ object MockWorkspaceServer {
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authdomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -113,9 +120,8 @@ object MockWorkspaceServer {
var workspaceServer: ClientAndServer = _
- def stopWorkspaceServer(): Unit = {
+ def stopWorkspaceServer(): Unit =
workspaceServer.stop()
- }
def startWorkspaceServer(): Unit = {
workspaceServer = startClientAndServer(MockUtils.workspaceServerPort)
@@ -126,7 +132,8 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath("/api/submissions/queueStatus"))
+ .withPath("/api/submissions/queueStatus")
+ )
.respond(
response()
.withHeaders(header)
@@ -137,8 +144,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissionsCount"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissionsCount"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -151,30 +161,37 @@ object MockWorkspaceServer {
.withMethod("POST")
// presence of auth header will differentiate this mock response from the one at line 137
.withHeader(authHeader)
- .withPath(s"${workspaceBasePath}/%s/%s/submissions"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name)
+ )
+ )
.respond(
- callback().
- withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidSubmissionCallback")
+ callback().withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidSubmissionCallback")
)
MockWorkspaceServer.workspaceServer
.when(
request()
.withMethod("POST")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/validate"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/validate"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name)
+ )
+ )
.respond(
- callback().
- withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidSubmissionCallback")
+ callback().withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidSubmissionCallback")
)
MockWorkspaceServer.workspaceServer
.when(
request()
.withMethod("POST")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -185,8 +202,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -197,8 +217,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -210,8 +233,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("DELETE")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -223,8 +249,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("PATCH")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, id)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, id)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -237,8 +266,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -250,8 +282,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("DELETE")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -263,8 +298,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId, mockValidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId, mockValidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -276,8 +314,15 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
- .format(mockSpacedWorkspace.namespace, UrlEscapers.urlPathSegmentEscaper().escape(mockSpacedWorkspace.name), mockValidId, mockValidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
+ .format(mockSpacedWorkspace.namespace,
+ UrlEscapers.urlPathSegmentEscaper().escape(mockSpacedWorkspace.name),
+ mockValidId,
+ mockValidId
+ )
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -289,8 +334,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId, mockInvalidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId, mockInvalidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -302,8 +350,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s/outputs"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId, mockValidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s/outputs"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockValidId, mockValidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -315,8 +366,11 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s/outputs"
- .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId, mockInvalidId)))
+ .withPath(
+ s"${workspaceBasePath}/%s/%s/submissions/%s/workflows/%s/outputs"
+ .format(mockValidWorkspace.namespace, mockValidWorkspace.name, mockInvalidId, mockInvalidId)
+ )
+ )
.respond(
response()
.withHeaders(header)
@@ -328,7 +382,8 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"$notificationsBasePath/workspace/${mockValidWorkspace.namespace}/${mockValidWorkspace.name}"))
+ .withPath(s"$notificationsBasePath/workspace/${mockValidWorkspace.namespace}/${mockValidWorkspace.name}")
+ )
.respond(
response()
.withHeaders(header)
@@ -339,7 +394,8 @@ object MockWorkspaceServer {
.when(
request()
.withMethod("GET")
- .withPath(s"$notificationsBasePath/general"))
+ .withPath(s"$notificationsBasePath/general")
+ )
.respond(
response()
.withHeaders(header)
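
Each MockWorkspaceServer change above moves a long withPath argument into its own parenthesized block; the when/respond expectation shape is untouched. A minimal sketch of one expectation in the reformatted layout (the path is the queueStatus route from the hunk above; the port and response body are placeholders, not taken from this PR):

    import org.mockserver.integration.ClientAndServer.startClientAndServer
    import org.mockserver.model.HttpRequest.request
    import org.mockserver.model.HttpResponse.response

    object WorkspaceMockSketch {
      def main(args: Array[String]): Unit = {
        val server = startClientAndServer(8990) // placeholder port
        server
          .when(
            request()
              .withMethod("GET")
              .withPath("/api/submissions/queueStatus")
          )
          .respond(
            response()
              .withStatusCode(200)
              .withBody("""{"workflowCountsByStatus":{}}""") // placeholder body
          )
        server.stop()
      }
    }
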
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/SamMockserverUtils.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/SamMockserverUtils.scala
index 1602ef243..7982a424c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/SamMockserverUtils.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/SamMockserverUtils.scala
@@ -13,19 +13,23 @@ trait SamMockserverUtils {
*
* @param samMockserver the Sam mockserver to configure
*/
- def returnEnabledUser(samMockserver: ClientAndServer): Unit = {
+ def returnEnabledUser(samMockserver: ClientAndServer): Unit =
samMockserver
- .when(request
- .withMethod("GET")
- .withPath("/register/user/v2/self/info"))
- .respond(response()
- .withHeaders(MockUtils.header).withBody(
- """{
- | "adminEnabled": true,
- | "enabled": true,
- | "userEmail": "enabled@nowhere.com",
- | "userSubjectId": "enabled-id"
- |}""".stripMargin).withStatusCode(OK.intValue))
- }
+ .when(
+ request
+ .withMethod("GET")
+ .withPath("/register/user/v2/self/info")
+ )
+ .respond(
+ response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "adminEnabled": true,
+ | "enabled": true,
+ | "userEmail": "enabled@nowhere.com",
+ | "userSubjectId": "enabled-id"
+ |}""".stripMargin)
+ .withStatusCode(OK.intValue)
+ )
}
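// --- Editor's illustrative sketch (not part of the patch) ---
// The returnEnabledUser helper above stubs Sam's GET /register/user/v2/self/info so that callers
// see an enabled, registered user. Below is a minimal sketch of how a suite might wire it up; the
// spec name and port are hypothetical, chosen only for illustration.
import org.broadinstitute.dsde.firecloud.mock.SamMockserverUtils
import org.mockserver.integration.ClientAndServer
import org.mockserver.integration.ClientAndServer.startClientAndServer
import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec

class ExampleSamBackedSpec extends AnyFreeSpec with SamMockserverUtils with BeforeAndAfterAll {
  private var samMockserver: ClientAndServer = _ // started fresh for this suite

  override def beforeAll(): Unit = {
    samMockserver = startClientAndServer(9933) // hypothetical port
    returnEnabledUser(samMockserver)           // register the enabled-user expectation once
  }

  override def afterAll(): Unit = samMockserver.stop()
}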
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityCopyCallback.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityCopyCallback.scala
index 61bdf8c00..3d55313b3 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityCopyCallback.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityCopyCallback.scala
@@ -16,7 +16,7 @@ class ValidEntityCopyCallback extends ExpectationResponseCallback {
val copyRequest = httpRequest.getBodyAsString.parseJson.convertTo[EntityCopyDefinition]
(copyRequest.sourceWorkspace.namespace, copyRequest.destinationWorkspace.name) match {
- case (x:String, y:String) if x == "broad-dsde-dev" && y == "valid" =>
+ case (x: String, y: String) if x == "broad-dsde-dev" && y == "valid" =>
response()
.withHeaders(header)
.withStatusCode(Created.intValue)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityDeleteCallback.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityDeleteCallback.scala
index 979200739..b7d9c0ffc 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityDeleteCallback.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidEntityDeleteCallback.scala
@@ -23,8 +23,7 @@ class ValidEntityDeleteCallback extends ExpectationResponseCallback {
response()
.withHeaders(header)
.withStatusCode(NoContent.intValue)
- }
- else {
+ } else {
response()
.withHeaders(header)
.withStatusCode(BadRequest.intValue)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidSubmissionCallback.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidSubmissionCallback.scala
index 570282151..2bfb10216 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidSubmissionCallback.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/mock/ValidSubmissionCallback.scala
@@ -16,14 +16,15 @@ class ValidSubmissionCallback extends ExpectationResponseCallback {
val jsonAst = httpRequest.getBodyAsString.parseJson
val submission = jsonAst.convertTo[OrchSubmissionRequest]
submission match {
- case x if x.entityName.isDefined &&
- x.entityType.isDefined &&
- x.expression.isDefined &&
- x.useCallCache.isDefined &&
- x.deleteIntermediateOutputFiles.isDefined &&
- x.workflowFailureMode.isDefined &&
- x.methodConfigurationName.isDefined &&
- x.methodConfigurationNamespace.isDefined =>
+ case x
+ if x.entityName.isDefined &&
+ x.entityType.isDefined &&
+ x.expression.isDefined &&
+ x.useCallCache.isDefined &&
+ x.deleteIntermediateOutputFiles.isDefined &&
+ x.workflowFailureMode.isDefined &&
+ x.methodConfigurationName.isDefined &&
+ x.methodConfigurationNamespace.isDefined =>
response()
.withHeaders(header)
.withStatusCode(OK.intValue)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearchSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearchSpec.scala
index 8b3000ff6..3a6c44f0d 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearchSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/model/ElasticSearchSpec.scala
@@ -11,31 +11,31 @@ import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
/**
* Created by ahaessly on 1/19/17.
*/
-class ElasticSearchSpec extends AnyFreeSpec with Assertions {
+class ElasticSearchSpec extends AnyFreeSpec with Assertions {
"LibrarySearchParams model" - {
"when unmarshalling from json" - {
"should handle filters" in {
val testData = """ {"filters": {"library:datatype":["cancer"]},"fieldAggregations":{"library:indication":5}} """
val item = testData.parseJson.convertTo[LibrarySearchParams]
- assertResult(Seq("cancer")) {item.filters.getOrElse("library:datatype", Seq.empty)}
+ assertResult(Seq("cancer"))(item.filters.getOrElse("library:datatype", Seq.empty))
}
"should handle sort field and direction" in {
val testData = """ {"sortField" : "field", "sortDirection" : "direction"} """
val item = testData.parseJson.convertTo[LibrarySearchParams]
- assertResult(Some("field")) {item.sortField}
- assertResult(Some("direction")) {item.sortDirection}
+ assertResult(Some("field"))(item.sortField)
+ assertResult(Some("direction"))(item.sortDirection)
}
"should handle missing parameters" in {
val testData = """ {} """
val params = testData.parseJson.convertTo[LibrarySearchParams]
- assertResult(0) {params.from}
- assertResult(10) {params.size}
- assertResult(None) {params.searchString}
- assertResult(Map.empty) {params.filters}
- assertResult(Map.empty) {params.fieldAggregations}
- assertResult(None) {params.sortField}
- assertResult(None) {params.sortDirection}
+ assertResult(0)(params.from)
+ assertResult(10)(params.size)
+ assertResult(None)(params.searchString)
+ assertResult(Map.empty)(params.filters)
+ assertResult(Map.empty)(params.fieldAggregations)
+ assertResult(None)(params.sortField)
+ assertResult(None)(params.sortDirection)
}
"should create params with missing research purpose if none specified" in {
val testData = """ {"filters": {"library:datatype":["cancer"]},"fieldAggregations":{"library:indication":5}} """
@@ -50,10 +50,16 @@ class ElasticSearchSpec extends AnyFreeSpec with Assertions {
| } """.stripMargin
val item = testData.parseJson.convertTo[LibrarySearchParams]
val expectedResearchPurpose = ResearchPurpose(
- Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_123"), DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_456")),
- NMDS=true, NCTRL=false, NAGR=true, POA=false, NCU=true
+ Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_123"),
+ DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_456")
+ ),
+ NMDS = true,
+ NCTRL = false,
+ NAGR = true,
+ POA = false,
+ NCU = true
)
- assertResult(Some(expectedResearchPurpose)) {item.researchPurpose}
+ assertResult(Some(expectedResearchPurpose))(item.researchPurpose)
}
"should reject an incomplete research purpose" in {
// testData is missing NMDS
@@ -81,36 +87,53 @@ class ElasticSearchSpec extends AnyFreeSpec with Assertions {
"when marshalling to json" - {
"should handle arbitrary namespace" in {
val testData = LibrarySearchParams(None, Map.empty, None, Map.empty)
- assertResult("""{"filters":{},"fieldAggregations":{},"from":0,"size":10}""".parseJson) {testData.toJson.toString.parseJson}
+ assertResult("""{"filters":{},"fieldAggregations":{},"from":0,"size":10}""".parseJson) {
+ testData.toJson.toString.parseJson
+ }
}
"should handle sort field and direction" in {
- val testData = LibrarySearchParams(None, Map.empty, None, Map.empty, sortField=Some("field"), sortDirection=Some("direction"))
- assertResult("""{"filters":{},"fieldAggregations":{},"from":0,"size":10,"sortField":"field","sortDirection":"direction"}""".parseJson) {testData.toJson.toString.parseJson}
+ val testData = LibrarySearchParams(None,
+ Map.empty,
+ None,
+ Map.empty,
+ sortField = Some("field"),
+ sortDirection = Some("direction")
+ )
+ assertResult(
+ """{"filters":{},"fieldAggregations":{},"from":0,"size":10,"sortField":"field","sortDirection":"direction"}""".parseJson
+ )(testData.toJson.toString.parseJson)
}
"should properly serialize research purpose" in {
val researchPurpose = ResearchPurpose(
- Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_123"), DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_456")),
- NMDS=true, NCTRL=false, NAGR=true, POA=false, NCU=true
+ Seq(DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_123"),
+ DiseaseOntologyNodeId("http://purl.obolibrary.org/obo/DOID_456")
+ ),
+ NMDS = true,
+ NCTRL = false,
+ NAGR = true,
+ POA = false,
+ NCU = true
)
val testData = LibrarySearchParams(None, Map.empty, Some(researchPurpose), Map.empty)
- assertResult(
- """{"filters":{},
- | "researchPurpose":{"POA":false,"NCU":true,"NAGR":true,"NMDS":true,"NCTRL":false,
- | "DS":["http://purl.obolibrary.org/obo/DOID_123","http://purl.obolibrary.org/obo/DOID_456"]},
- | "fieldAggregations":{},"from":0,"size":10}""".stripMargin.parseJson) {testData.toJson.toString.parseJson}
+ assertResult("""{"filters":{},
+ | "researchPurpose":{"POA":false,"NCU":true,"NAGR":true,"NMDS":true,"NCTRL":false,
+ | "DS":["http://purl.obolibrary.org/obo/DOID_123","http://purl.obolibrary.org/obo/DOID_456"]},
+ | "fieldAggregations":{},"from":0,"size":10}""".stripMargin.parseJson) {
+ testData.toJson.toString.parseJson
+ }
}
}
}
"ESInternalType model" - {
- val modelObject = ESInternalType("string",index="not_analyzed",include_in_all=false)
+ val modelObject = ESInternalType("string", index = "not_analyzed", include_in_all = false)
val modelJsonStr = """{"include_in_all":false,"index":"not_analyzed","type":"string"}"""
"when unmarshalling from json" - {
"using parseJson" in {
val item = modelJsonStr.parseJson.convertTo[ESPropertyFields]
assert(item.isInstanceOf[ESInternalType])
- assertResult(modelObject) {item.asInstanceOf[ESInternalType]}
+ assertResult(modelObject)(item.asInstanceOf[ESInternalType])
}
"using impESPropertyFields" in {
val item = impESPropertyFields.read(modelJsonStr.parseJson)
@@ -136,11 +159,14 @@ class ElasticSearchSpec extends AnyFreeSpec with Assertions {
}
"ESNestedType model" - {
- val modelObject = ESNestedType(Map(
- "foo" -> ESInnerField("string"),
- "bar" -> ESInnerField("integer", include_in_all=Some(false))
- ))
- val modelJsonStr = """{"properties":{"foo":{"type":"string"},"bar":{"include_in_all":false,"type":"integer"}},"type":"nested"}"""
+ val modelObject = ESNestedType(
+ Map(
+ "foo" -> ESInnerField("string"),
+ "bar" -> ESInnerField("integer", include_in_all = Some(false))
+ )
+ )
+ val modelJsonStr =
+ """{"properties":{"foo":{"type":"string"},"bar":{"include_in_all":false,"type":"integer"}},"type":"nested"}"""
"when unmarshalling from json" - {
"using parseJson" in {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/model/FlexibleModelSchemaSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/model/FlexibleModelSchemaSpec.scala
index 95e4bcc16..377e56153 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/model/FlexibleModelSchemaSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/model/FlexibleModelSchemaSpec.scala
@@ -9,17 +9,25 @@ class FlexibleModelSchemaSpec extends AnyFreeSpec {
"ModelSchema.isAttributeArray" - {
"should be false for various scalars" in {
- List("", "-1", "0", "1", "hello", "{}", "true", "false",
- "null", "123.45", "[", "]", "][", "[-]") foreach { input =>
- withClue(s"for input '$input', should return false") {
- assert(!schema.isAttributeArray(input))
- }
+ List("", "-1", "0", "1", "hello", "{}", "true", "false", "null", "123.45", "[", "]", "][", "[-]") foreach {
+ input =>
+ withClue(s"for input '$input', should return false") {
+ assert(!schema.isAttributeArray(input))
+ }
}
}
"should be true for various arrays" in {
- List("[]", "[1]", "[1,2,3]", "[true]", "[true,false]", "[true,1,null]",
- """["foo"]""", """["foo","bar"]""", """["white", "space"]""",
+ List(
+ "[]",
+ "[1]",
+ "[1,2,3]",
+ "[true]",
+ "[true,false]",
+ "[true,1,null]",
+ """["foo"]""",
+ """["foo","bar"]""",
+ """["white", "space"]""",
"""["foo",1,true,null]""",
"""[{}, [{},{},{"a":"b"},true], "foo"]"""
) foreach { input =>
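// --- Editor's illustrative sketch (not part of the patch) ---
// The cases above exercise ModelSchema.isAttributeArray, which these expectations treat as "does
// this raw attribute string parse as a JSON array?". One way such a check could be written with
// spray-json is sketched below; looksLikeJsonArray is a hypothetical stand-in for illustration,
// not the actual ModelSchema implementation.
import spray.json._
import scala.util.Try

def looksLikeJsonArray(raw: String): Boolean =
  Try(raw.parseJson).toOption.exists {
    case _: JsArray => true
    case _          => false
  }
// looksLikeJsonArray("""["foo","bar"]""") == true
// looksLikeJsonArray("123.45")            == false
// looksLikeJsonArray("[-]")               == false (not valid JSON)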
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepositorySpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepositorySpec.scala
index 8ecfb28e4..1cdea6657 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepositorySpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/model/OrchMethodRepositorySpec.scala
@@ -9,41 +9,31 @@ class OrchMethodRepositorySpec extends AnyFreeSpec with Matchers {
"FireCloudPermission" - {
"Correctly formed permissions should validate" - {
"Valid email user" in {
- val permission = FireCloudPermission(
- user = "test@broadinstitute.org",
- role = ACLNames.Owner)
- permission shouldNot be (null)
+ val permission = FireCloudPermission(user = "test@broadinstitute.org", role = ACLNames.Owner)
+ permission shouldNot be(null)
}
"Public user" in {
- val permission = FireCloudPermission(
- user = "public",
- role = ACLNames.Owner)
- permission shouldNot be (null)
+ val permission = FireCloudPermission(user = "public", role = ACLNames.Owner)
+ permission shouldNot be(null)
}
}
"Incorrectly formed permissions should not validate" - {
"Empty email" in {
- val ex = intercept[IllegalArgumentException]{
- val permission = FireCloudPermission(
- user = "",
- role = ACLNames.Owner)
+ val ex = intercept[IllegalArgumentException] {
+ val permission = FireCloudPermission(user = "", role = ACLNames.Owner)
}
ex shouldNot be(null)
}
"Invalid email" in {
- val ex = intercept[IllegalArgumentException]{
- val permission = FireCloudPermission(
- user = "in valid at email.com",
- role = ACLNames.Owner)
+ val ex = intercept[IllegalArgumentException] {
+ val permission = FireCloudPermission(user = "in valid at email.com", role = ACLNames.Owner)
}
ex shouldNot be(null)
}
"Invalid role" in {
- val ex = intercept[IllegalArgumentException]{
- val permission = FireCloudPermission(
- user = "test@broadinstitute.org",
- role = ACLNames.ListNoAccess.head)
+ val ex = intercept[IllegalArgumentException] {
+ val permission = FireCloudPermission(user = "test@broadinstitute.org", role = ACLNames.ListNoAccess.head)
}
ex shouldNot be(null)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/model/ProfileSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/model/ProfileSpec.scala
index 44ab8f14e..ea04e4223 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/model/ProfileSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/model/ProfileSpec.scala
@@ -96,8 +96,10 @@ class ProfileSpec extends AnyFreeSpec with Matchers {
}
"Profile instantiated with ProfileWrapper is valid" in {
- val pw = ProfileWrapper("123", List(
- FireCloudKeyValue(Some("firstName"), Some("test-firstName")),
+ val pw = ProfileWrapper(
+ "123",
+ List(
+ FireCloudKeyValue(Some("firstName"), Some("test-firstName")),
FireCloudKeyValue(Some("lastName"), Some("test-lastName")),
FireCloudKeyValue(Some("title"), Some("test-title")),
FireCloudKeyValue(Some("institute"), Some("test-institute")),
@@ -106,7 +108,8 @@ class ProfileSpec extends AnyFreeSpec with Matchers {
FireCloudKeyValue(Some("programLocationState"), Some("test-programLocationState")),
FireCloudKeyValue(Some("programLocationCountry"), Some("test-programLocationCountry")),
FireCloudKeyValue(Some("contactEmail"), Some("test-contactEmail@noreply.com"))
- ))
+ )
+ )
val profile = Profile(pw)
profile shouldNot be(null)
}
@@ -114,7 +117,7 @@ class ProfileSpec extends AnyFreeSpec with Matchers {
"Incorrectly formed profiles" - {
"BasicProfile with blank required info is invalid" in {
- val ex = intercept[IllegalArgumentException]{
+ val ex = intercept[IllegalArgumentException] {
BasicProfile(
firstName = "",
lastName = "",
@@ -133,7 +136,7 @@ class ProfileSpec extends AnyFreeSpec with Matchers {
ex shouldNot be(null)
}
"Profile with invalid contact email is invalid" in {
- val ex = intercept[IllegalArgumentException]{
+ val ex = intercept[IllegalArgumentException] {
Profile(
firstName = randomString,
lastName = randomString,
@@ -156,12 +159,15 @@ class ProfileSpec extends AnyFreeSpec with Matchers {
"ProfileUtils" - {
- val pw = ProfileWrapper("123", List(
- FireCloudKeyValue(Some("imastring"), Some("hello")),
- FireCloudKeyValue(Some("imalong"), Some("1556724034")),
- FireCloudKeyValue(Some("imnotalong"), Some("not-a-long")),
- FireCloudKeyValue(Some("imnothing"), None)
- ))
+ val pw = ProfileWrapper(
+ "123",
+ List(
+ FireCloudKeyValue(Some("imastring"), Some("hello")),
+ FireCloudKeyValue(Some("imalong"), Some("1556724034")),
+ FireCloudKeyValue(Some("imnotalong"), Some("not-a-long")),
+ FireCloudKeyValue(Some("imnothing"), None)
+ )
+ )
"getString" - {
"returns None if key doesn't exist" in {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/model/SamResourceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/model/SamResourceSpec.scala
index 5fe4dcbb7..35c24e708 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/model/SamResourceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/model/SamResourceSpec.scala
@@ -7,42 +7,41 @@ import spray.json._
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import spray.json.DefaultJsonProtocol._
-
class SamResourceSpec extends AnyFreeSpec with Matchers {
- val userPolicyJSON = """
- | {
- | "resourceId": "8011932d-d76e-4c5d-9f66-1538d86a683b",
- | "public": true,
- | "accessPolicyName": "reader",
- | "missingAuthDomainGroups": [],
- | "authDomainGroups": []
- | }
+ val userPolicyJSON = """
+ | {
+ | "resourceId": "8011932d-d76e-4c5d-9f66-1538d86a683b",
+ | "public": true,
+ | "accessPolicyName": "reader",
+ | "missingAuthDomainGroups": [],
+ | "authDomainGroups": []
+ | }
""".stripMargin
val userPolicyListJSON =
"""
- | [{
- | "resourceId": "8011932d-d76e-4c5d-9f66-1538d86a683b",
- | "public": true,
- | "accessPolicyName": "reader",
- | "missingAuthDomainGroups": [],
- | "authDomainGroups": []
- | },
- | {
- | "resourceId": "195feff3-d4b0-43df-9d0d-d49eda2036eb",
- | "public": false,
- | "accessPolicyName": "owner",
- | "missingAuthDomainGroups": [],
- | "authDomainGroups": []
- | },
- | {
- | "resourceId": "a2e2a933-76ed-4679-a3c1-fcec146441b5",
- | "public": false,
- | "accessPolicyName": "owner",
- | "missingAuthDomainGroups": [],
- | "authDomainGroups": []
- | }]
+ | [{
+ | "resourceId": "8011932d-d76e-4c5d-9f66-1538d86a683b",
+ | "public": true,
+ | "accessPolicyName": "reader",
+ | "missingAuthDomainGroups": [],
+ | "authDomainGroups": []
+ | },
+ | {
+ | "resourceId": "195feff3-d4b0-43df-9d0d-d49eda2036eb",
+ | "public": false,
+ | "accessPolicyName": "owner",
+ | "missingAuthDomainGroups": [],
+ | "authDomainGroups": []
+ | },
+ | {
+ | "resourceId": "a2e2a933-76ed-4679-a3c1-fcec146441b5",
+ | "public": false,
+ | "accessPolicyName": "owner",
+ | "missingAuthDomainGroups": [],
+ | "authDomainGroups": []
+ | }]
""".stripMargin
"UserPolicy JSON" - {
@@ -51,8 +50,8 @@ class SamResourceSpec extends AnyFreeSpec with Matchers {
val jsobj: JsValue = JsonParser(userPolicyJSON)
val userPolicy: UserPolicy = jsobj.convertTo[UserPolicy]
assert(userPolicy.public)
- assertResult("reader"){ userPolicy.accessPolicyName.value }
- }
+ assertResult("reader")(userPolicy.accessPolicyName.value)
}
-
}
+
+}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/AgoraACLTranslationSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/AgoraACLTranslationSpec.scala
index 29ccb8b6d..fffc05333 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/AgoraACLTranslationSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/AgoraACLTranslationSpec.scala
@@ -14,16 +14,16 @@ class AgoraACLTranslationSpec extends AnyFreeSpec {
"should equal ListOwner" in {
val objFC = FireCloudPermission(email.get, Owner)
val objAgora = objFC.toAgoraPermission
- assertResult(email) { objAgora.user }
- assertResult(ListOwner) { objAgora.roles.get }
+ assertResult(email)(objAgora.user)
+ assertResult(ListOwner)(objAgora.roles.get)
}
}
"when translating ListOwner Agora->FC" - {
"should equal Owner" in {
val objAgora = AgoraPermission(email, Some(ListOwner))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(Owner) { objFC.role}
+ assertResult(email.get)(objFC.user)
+ assertResult(Owner)(objFC.role)
}
}
// READER
@@ -31,16 +31,16 @@ class AgoraACLTranslationSpec extends AnyFreeSpec {
"should equal ListReader" in {
val objFC = FireCloudPermission(email.get, Reader)
val objAgora = objFC.toAgoraPermission
- assertResult(email) { objAgora.user }
- assertResult(ListReader) { objAgora.roles.get }
+ assertResult(email)(objAgora.user)
+ assertResult(ListReader)(objAgora.roles.get)
}
}
"when translating ListReader Agora->FC" - {
"should equal Reader" in {
val objAgora = AgoraPermission(email, Some(ListReader))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(Reader) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(Reader)(objFC.role)
}
}
// NO ACCESS
@@ -48,16 +48,16 @@ class AgoraACLTranslationSpec extends AnyFreeSpec {
"should equal ListNoAccess" in {
val objFC = FireCloudPermission(email.get, NoAccess)
val objAgora = objFC.toAgoraPermission
- assertResult(email) { objAgora.user }
- assertResult(ListNoAccess) { objAgora.roles.get }
+ assertResult(email)(objAgora.user)
+ assertResult(ListNoAccess)(objAgora.roles.get)
}
}
"when translating ListNoAccess Agora->FC" - {
"should equal NoAccess" in {
val objAgora = AgoraPermission(email, Some(ListNoAccess))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
// ALL
@@ -65,49 +65,49 @@ class AgoraACLTranslationSpec extends AnyFreeSpec {
"should equal Owner" in {
val objAgora = AgoraPermission(email, Some(ListAll))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(Owner) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(Owner)(objFC.role)
}
}
// EDGE CASES, AGORA->FC
"when translating partial list Agora->FC" - {
"should equal NoAccess" in {
- val objAgora = AgoraPermission(email, Some(List("Read","Write")))
+ val objAgora = AgoraPermission(email, Some(List("Read", "Write")))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
"when translating superset list Agora->FC" - {
"should equal NoAccess" in {
- val objAgora = AgoraPermission(email, Some(ListOwner ++ List("Extra","Permissions")))
+ val objAgora = AgoraPermission(email, Some(ListOwner ++ List("Extra", "Permissions")))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
"when translating empty list Agora->FC" - {
"should equal NoAccess" in {
val objAgora = AgoraPermission(email, Some(List.empty))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
"when translating whitespace list Agora->FC" - {
"should equal NoAccess" in {
val objAgora = AgoraPermission(email, Some(List("")))
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
"when translating None Agora->FC" - {
"should equal NoAccess" in {
val objAgora = AgoraPermission(email, None)
val objFC = objAgora.toFireCloudPermission
- assertResult(email.get) { objFC.user }
- assertResult(NoAccess) { objFC.role }
+ assertResult(email.get)(objFC.user)
+ assertResult(NoAccess)(objFC.role)
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/BaseServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/BaseServiceSpec.scala
index 6eed89476..100cfc834 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/BaseServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/BaseServiceSpec.scala
@@ -11,20 +11,32 @@ class BaseServiceSpec extends ServiceSpec with BeforeAndAfter {
// this gets fed into sealRoute so that exceptions are handled the same in tests as in real life
implicit val exceptionHandler: ExceptionHandler = FireCloudApiService.exceptionHandler
- val agoraDao:MockAgoraDAO = new MockAgoraDAO
- val googleServicesDao:MockGoogleServicesDAO = new MockGoogleServicesDAO
- val ontologyDao:MockOntologyDAO = new MockOntologyDAO
- val rawlsDao:MockRawlsDAO = new MockRawlsDAO
- val samDao:MockSamDAO = new MockSamDAO
- val searchDao:MockSearchDAO = new MockSearchDAO
- val researchPurposeSupport:MockResearchPurposeSupport = new MockResearchPurposeSupport
- val thurloeDao:MockThurloeDAO = new MockThurloeDAO
- val shareLogDao:MockShareLogDAO = new MockShareLogDAO
- val shibbolethDao:MockShibbolethDAO = new MockShibbolethDAO
- val cwdsDao:CwdsDAO = new MockCwdsDAO
- val ecmDao:ExternalCredsDAO = new DisabledExternalCredsDAO
+ val agoraDao: MockAgoraDAO = new MockAgoraDAO
+ val googleServicesDao: MockGoogleServicesDAO = new MockGoogleServicesDAO
+ val ontologyDao: MockOntologyDAO = new MockOntologyDAO
+ val rawlsDao: MockRawlsDAO = new MockRawlsDAO
+ val samDao: MockSamDAO = new MockSamDAO
+ val searchDao: MockSearchDAO = new MockSearchDAO
+ val researchPurposeSupport: MockResearchPurposeSupport = new MockResearchPurposeSupport
+ val thurloeDao: MockThurloeDAO = new MockThurloeDAO
+ val shareLogDao: MockShareLogDAO = new MockShareLogDAO
+ val shibbolethDao: MockShibbolethDAO = new MockShibbolethDAO
+ val cwdsDao: CwdsDAO = new MockCwdsDAO
+ val ecmDao: ExternalCredsDAO = new DisabledExternalCredsDAO
- val app:Application =
- new Application(agoraDao, googleServicesDao, ontologyDao, rawlsDao, samDao, searchDao, researchPurposeSupport, thurloeDao, shareLogDao, shibbolethDao, cwdsDao, ecmDao)
+ val app: Application =
+ new Application(agoraDao,
+ googleServicesDao,
+ ontologyDao,
+ rawlsDao,
+ samDao,
+ searchDao,
+ researchPurposeSupport,
+ thurloeDao,
+ shareLogDao,
+ shibbolethDao,
+ cwdsDao,
+ ecmDao
+ )
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupportSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupportSpec.scala
index 27148e2d1..53478f656 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupportSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionSupportSpec.scala
@@ -24,9 +24,11 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"and all consent codes are true or filled in" in {
val ontologyDAO = new MockOntologyDAO
- val request = StructuredDataRequest(generalResearchUse = true,
+ val request = StructuredDataRequest(
+ generalResearchUse = true,
healthMedicalBiomedicalUseRequired = true,
- diseaseUseRequired = Array("http://purl.obolibrary.org/obo/DOID_4325","http://purl.obolibrary.org/obo/DOID_2531"),
+ diseaseUseRequired =
+ Array("http://purl.obolibrary.org/obo/DOID_4325", "http://purl.obolibrary.org/obo/DOID_2531"),
commercialUseProhibited = true,
forProfitUseProhibited = true,
methodsResearchProhibited = true,
@@ -35,9 +37,24 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
genderUseRequired = "female",
pediatricResearchRequired = true,
irbRequired = true,
- prefix = Some("blah"))
+ prefix = Some("blah")
+ )
- val expected = Map("blahconsentCodes" -> Array("NAGR","NMDS","NCTRL","RS-G","GRU","RS-PD","NCU","RS-FM","NPU","HMB","IRB","DS:Ebola hemorrhagic fever","DS:hematologic cancer").toJson,
+ val expected = Map(
+ "blahconsentCodes" -> Array("NAGR",
+ "NMDS",
+ "NCTRL",
+ "RS-G",
+ "GRU",
+ "RS-PD",
+ "NCU",
+ "RS-FM",
+ "NPU",
+ "HMB",
+ "IRB",
+ "DS:Ebola hemorrhagic fever",
+ "DS:hematologic cancer"
+ ).toJson,
"blahdulvn" -> FireCloudConfig.Duos.dulvn.toJson,
"blahstructuredUseRestriction" -> Map(
"NPU" -> true.toJson,
@@ -48,19 +65,22 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"NAGR" -> true.toJson,
"RS-FM" -> true.toJson,
"RS-M" -> false.toJson,
- "NMDS"-> true.toJson,
+ "NMDS" -> true.toJson,
"NCTRL" -> true.toJson,
- "GRU" ->true.toJson,
+ "GRU" -> true.toJson,
"HMB" -> true.toJson,
- "DS" -> Array(4325,2531).toJson).toJson)
+ "DS" -> Array(4325, 2531).toJson
+ ).toJson
+ )
val result = generateStructuredUseRestrictionAttribute(request, ontologyDAO)
- result should be (expected)
+ result should be(expected)
}
"and all consent codes are false or empty" in {
val ontologyDAO = new MockOntologyDAO
- val request = StructuredDataRequest(generalResearchUse = false,
+ val request = StructuredDataRequest(
+ generalResearchUse = false,
healthMedicalBiomedicalUseRequired = false,
diseaseUseRequired = Array(),
commercialUseProhibited = false,
@@ -71,9 +91,11 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
genderUseRequired = "",
pediatricResearchRequired = false,
irbRequired = false,
- prefix = None)
+ prefix = None
+ )
- val expected = Map("consentCodes" -> Array.empty[String].toJson,
+ val expected = Map(
+ "consentCodes" -> Array.empty[String].toJson,
"dulvn" -> FireCloudConfig.Duos.dulvn.toJson,
"structuredUseRestriction" -> Map(
"NPU" -> false.toJson,
@@ -84,19 +106,22 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"NAGR" -> false.toJson,
"RS-FM" -> false.toJson,
"RS-M" -> false.toJson,
- "NMDS"-> false.toJson,
+ "NMDS" -> false.toJson,
"NCTRL" -> false.toJson,
"GRU" -> false.toJson,
"HMB" -> false.toJson,
- "DS" -> Array.empty[String].toJson).toJson)
+ "DS" -> Array.empty[String].toJson
+ ).toJson
+ )
val result = generateStructuredUseRestrictionAttribute(request, ontologyDAO)
- result should be (expected)
+ result should be(expected)
}
"and consent codes are a mixture of true and false" in {
val ontologyDAO = new MockOntologyDAO
- val request = StructuredDataRequest(generalResearchUse = false,
+ val request = StructuredDataRequest(
+ generalResearchUse = false,
healthMedicalBiomedicalUseRequired = true,
diseaseUseRequired = Array("http://purl.obolibrary.org/obo/DOID_1240"),
commercialUseProhibited = false,
@@ -107,9 +132,11 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
genderUseRequired = "Male",
pediatricResearchRequired = false,
irbRequired = true,
- prefix = Some("library"))
+ prefix = Some("library")
+ )
- val expected = Map("libraryconsentCodes" -> Array("NCTRL","RS-G","RS-M","NPU","HMB","IRB","DS:leukemia").toJson,
+ val expected = Map(
+ "libraryconsentCodes" -> Array("NCTRL", "RS-G", "RS-M", "NPU", "HMB", "IRB", "DS:leukemia").toJson,
"librarydulvn" -> FireCloudConfig.Duos.dulvn.toJson,
"librarystructuredUseRestriction" -> Map(
"NPU" -> true.toJson,
@@ -120,14 +147,16 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"NAGR" -> false.toJson,
"RS-FM" -> false.toJson,
"RS-M" -> true.toJson,
- "NMDS"-> false.toJson,
+ "NMDS" -> false.toJson,
"NCTRL" -> true.toJson,
"GRU" -> false.toJson,
"HMB" -> true.toJson,
- "DS" -> Array(1240).toJson).toJson)
+ "DS" -> Array(1240).toJson
+ ).toJson
+ )
val result = generateStructuredUseRestrictionAttribute(request, ontologyDAO)
- result should be (expected)
+ result should be(expected)
}
}
@@ -136,7 +165,8 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"dataset should have a fully populated data use restriction attribute" in {
allDatasets.map { ds =>
val ontologyDAO = new MockOntologyDAO
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(ds, ontologyDAO).structured
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(ds, ontologyDAO).structured
val durAtt: Attribute = attrs.getOrElse(structuredUseRestrictionAttributeName, AttributeNull)
durAtt shouldNot be(AttributeNull)
val dur = makeDurFromWorkspace(ds, ontologyDAO)
@@ -207,7 +237,8 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"dataset should not have any data use restriction for empty attributes" in {
val workspace: WorkspaceDetails = mkWorkspace(Map.empty[AttributeName, Attribute], "empty", "empty")
val ontologyDAO = new MockOntologyDAO
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(workspace, ontologyDAO).structured
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(workspace, ontologyDAO).structured
attrs should be(empty)
}
@@ -217,10 +248,13 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
AttributeName.withDefaultNS("name") -> AttributeString("one"),
AttributeName.withDefaultNS("namespace") -> AttributeString("two"),
AttributeName.withDefaultNS("workspaceId") -> AttributeString("three"),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString("one"), AttributeString("two"), AttributeString("three")))
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"), AttributeString("three"))
+ )
)
val workspace: WorkspaceDetails = mkWorkspace(nonLibraryAttributes, "non-library", "non-library")
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(workspace, ontologyDAO).structured
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(workspace, ontologyDAO).structured
attrs should be(empty)
}
@@ -261,7 +295,8 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"'TOP_THREE' dataset should have the right codes" in {
val ontologyDAO = new MockOntologyDAO
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(topThreeDataset.head, ontologyDAO).display
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(topThreeDataset.head, ontologyDAO).display
val codes: Seq[String] = getValuesFromAttributeValueListAsAttribute(attrs.get(consentCodesAttributeName))
val expected = Seq("GRU", "HMB") ++ diseaseValuesLabels.map(s => s"DS:$s")
codes should contain theSameElementsAs expected
@@ -273,7 +308,8 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
"dataset should not have any data use display codes for empty attributes" in {
val ontologyDAO = new MockOntologyDAO
val workspace: WorkspaceDetails = mkWorkspace(Map.empty[AttributeName, Attribute], "empty", "empty")
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(workspace, ontologyDAO).display
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(workspace, ontologyDAO).display
attrs should be(empty)
}
@@ -283,10 +319,13 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
AttributeName.withDefaultNS("name") -> AttributeString("one"),
AttributeName.withDefaultNS("namespace") -> AttributeString("two"),
AttributeName.withDefaultNS("workspaceId") -> AttributeString("three"),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString("one"), AttributeString("two"), AttributeString("three")))
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"), AttributeString("three"))
+ )
)
val workspace: WorkspaceDetails = mkWorkspace(nonLibraryAttributes, "non-library", "non-library")
- val attrs: Map[AttributeName, Attribute] = generateStructuredAndDisplayAttributes(workspace, ontologyDAO).display
+ val attrs: Map[AttributeName, Attribute] =
+ generateStructuredAndDisplayAttributes(workspace, ontologyDAO).display
attrs should be(empty)
}
@@ -295,19 +334,16 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
}
}
-
//////////////////
// Utility methods
//////////////////
-
- private def getValuesFromAttributeValueListAsAttribute(attrs: Option[Attribute]): Seq[String] = {
- (attrs collect {
- case x: AttributeValueList => x.list.collect {
- case a: AttributeString => a.value
+ private def getValuesFromAttributeValueListAsAttribute(attrs: Option[Attribute]): Seq[String] =
+ (attrs collect { case x: AttributeValueList =>
+ x.list.collect { case a: AttributeString =>
+ a.value
}
}).getOrElse(Seq.empty[String])
- }
private def makeDurFromWorkspace(ds: WorkspaceDetails, ontologyDAO: MockOntologyDAO): DataUseRestriction = {
val attrs = generateStructuredAndDisplayAttributes(ds, ontologyDAO).structured
@@ -315,9 +351,8 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
durAtt.toJson.convertTo[DataUseRestriction]
}
- private def checkBooleanTrue(dur: DataUseRestriction, fieldName: String): Boolean = {
+ private def checkBooleanTrue(dur: DataUseRestriction, fieldName: String): Boolean =
getFieldMap(dur).getOrElse(fieldName, false).asInstanceOf[Boolean]
- }
private def checkListValues(dur: DataUseRestriction, fieldName: String): Unit = {
val fieldValue: Seq[String] = getFieldMap(dur).getOrElse(fieldName, Seq.empty[String]).asInstanceOf[Seq[String]]
@@ -329,17 +364,15 @@ class DataUseRestrictionSupportSpec extends AnyFreeSpec with Matchers with DataU
diseaseValuesInts should contain theSameElementsAs fieldValue
}
- private def getFieldMap(dur: DataUseRestriction): Map[String, Object] = {
+ private def getFieldMap(dur: DataUseRestriction): Map[String, Object] =
dur.getClass.getDeclaredFields map { f =>
f.setAccessible(true)
getFieldName(f) -> f.get(dur)
} toMap
- }
// Since we have dashes in DUR field names, the value that comes back from Field.getName
// looks like "RS$minusPOP" instead of "RS-POP"
- private def getFieldName(f: Field): String = {
+ private def getFieldName(f: Field): String =
f.getName.replace("$minus", "-")
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionTestFixtures.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionTestFixtures.scala
index e6c27b200..2cce29ff3 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionTestFixtures.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/DataUseRestrictionTestFixtures.scala
@@ -7,7 +7,6 @@ import org.joda.time.DateTime
import spray.json.DefaultJsonProtocol._
import spray.json.RootJsonFormat
-
object DataUseRestrictionTestFixtures {
case class DataUseRestriction(
@@ -26,20 +25,22 @@ object DataUseRestrictionTestFixtures {
IRB: Boolean = false
)
- implicit val impAttributeFormat: AttributeFormat with PlainArrayAttributeListSerializer = new AttributeFormat with PlainArrayAttributeListSerializer
+ implicit val impAttributeFormat: AttributeFormat with PlainArrayAttributeListSerializer = new AttributeFormat
+ with PlainArrayAttributeListSerializer
implicit val impDataUseRestriction: RootJsonFormat[DataUseRestriction] = jsonFormat13(DataUseRestriction)
// Datasets are named by the code for easier identification in tests
val booleanCodes: Seq[String] = Seq("GRU", "HMB", "NCU", "NPU", "NMDS", "NCTRL", "RS-PD", "IRB")
val booleanDatasets: Seq[WorkspaceDetails] = booleanCodes.map { code =>
val attributes = Map(AttributeName.withLibraryNS(code) -> AttributeBoolean(true))
- mkWorkspace(attributes, code, s"{${code.replace("-","")}}-unique")
+ mkWorkspace(attributes, code, s"{${code.replace("-", "")}}-unique")
}
val listValues: Seq[String] = Seq("TERM-1", "TERM-2")
val diseaseCodes: Seq[String] = Seq("DS_URL")
- val diseaseURLs: Seq[String] = Seq("http://purl.obolibrary.org/obo/DOID_9220", "http://purl.obolibrary.org/obo/DOID_535")
+ val diseaseURLs: Seq[String] =
+ Seq("http://purl.obolibrary.org/obo/DOID_9220", "http://purl.obolibrary.org/obo/DOID_535")
val diseaseValuesLabels: Seq[String] = Seq("central sleep apnea", "sleep disorder")
val diseaseValuesInts: Seq[Int] = Seq(9220, 535)
val diseaseDatasets: Seq[WorkspaceDetails] = diseaseCodes.map { code =>
@@ -47,14 +48,14 @@ object DataUseRestrictionTestFixtures {
AttributeName.withLibraryNS(code) -> AttributeValueList(diseaseURLs.map(AttributeString)),
AttributeName.withLibraryNS("DS") -> AttributeValueList(diseaseValuesLabels.map(AttributeString))
)
- mkWorkspace(attributes, "DS", s"{${code.replace("-","")}}-unique")
+ mkWorkspace(attributes, "DS", s"{${code.replace("-", "")}}-unique")
}
// Gender datasets are named by the gender value for easier identification in tests
val genderVals: Seq[(String, String)] = Seq(("Female", "RS-FM"), ("Male", "RS-M"), ("N/A", "N/A"))
val genderDatasets: Seq[WorkspaceDetails] = genderVals.flatMap { case (gender: String, code: String) =>
val attributes = Map(AttributeName.withLibraryNS("RS-G") -> AttributeString(gender))
- Seq(mkWorkspace(attributes, gender, code), mkWorkspace(attributes, gender, s"""RSG${gender.replace("/","")}"""))
+ Seq(mkWorkspace(attributes, gender, code), mkWorkspace(attributes, gender, s"""RSG${gender.replace("/", "")}"""))
}
// Both gender and 'NAGR' codes are saved as string values in workspace attributes
@@ -64,29 +65,41 @@ object DataUseRestrictionTestFixtures {
mkWorkspace(attributes, value, s"NAGR$value")
}
- val everythingDataset = Seq(mkWorkspace(
- booleanCodes.map(AttributeName.withLibraryNS(_) -> AttributeBoolean(true)).toMap ++
- diseaseCodes.map(AttributeName.withLibraryNS(_) -> AttributeValueList(diseaseURLs.map(AttributeString))).toMap ++
- Map(AttributeName.withLibraryNS("DS") -> AttributeValueList(diseaseValuesLabels.map(AttributeString))) ++
- Map(AttributeName.withLibraryNS("NAGR") -> AttributeString("Yes")) ++
- Map(AttributeName.withLibraryNS("RS-G") -> AttributeString("Female")),
- "EVERYTHING",
- "EVERYTHING")
+ val everythingDataset = Seq(
+ mkWorkspace(
+ booleanCodes.map(AttributeName.withLibraryNS(_) -> AttributeBoolean(true)).toMap ++
+ diseaseCodes
+ .map(AttributeName.withLibraryNS(_) -> AttributeValueList(diseaseURLs.map(AttributeString)))
+ .toMap ++
+ Map(AttributeName.withLibraryNS("DS") -> AttributeValueList(diseaseValuesLabels.map(AttributeString))) ++
+ Map(AttributeName.withLibraryNS("NAGR") -> AttributeString("Yes")) ++
+ Map(AttributeName.withLibraryNS("RS-G") -> AttributeString("Female")),
+ "EVERYTHING",
+ "EVERYTHING"
+ )
)
- val topThreeDataset = Seq(mkWorkspace(
- Seq("GRU", "HMB").map(AttributeName.withLibraryNS(_) -> AttributeBoolean(true)).toMap ++
- diseaseCodes.map(AttributeName.withLibraryNS(_) -> AttributeValueList(diseaseURLs.map(AttributeString))).toMap ++
- Map(AttributeName.withLibraryNS("DS") -> AttributeValueList(diseaseValuesLabels.map(AttributeString))),
- "TOP_THREE",
- "TOP_THREE")
+ val topThreeDataset = Seq(
+ mkWorkspace(
+ Seq("GRU", "HMB").map(AttributeName.withLibraryNS(_) -> AttributeBoolean(true)).toMap ++
+ diseaseCodes
+ .map(AttributeName.withLibraryNS(_) -> AttributeValueList(diseaseURLs.map(AttributeString)))
+ .toMap ++
+ Map(AttributeName.withLibraryNS("DS") -> AttributeValueList(diseaseValuesLabels.map(AttributeString))),
+ "TOP_THREE",
+ "TOP_THREE"
+ )
)
- val allDatasets: Seq[WorkspaceDetails] = booleanDatasets ++ diseaseDatasets ++ genderDatasets ++ nagrDatasets ++ everythingDataset ++ topThreeDataset
+ val allDatasets: Seq[WorkspaceDetails] =
+ booleanDatasets ++ diseaseDatasets ++ genderDatasets ++ nagrDatasets ++ everythingDataset ++ topThreeDataset
val validDisplayDatasets: Seq[WorkspaceDetails] = booleanDatasets ++ everythingDataset ++ topThreeDataset
- def mkWorkspace(attributes: Map[AttributeName, Attribute], wsName: String, wsDescription: String): WorkspaceDetails = {
+ def mkWorkspace(attributes: Map[AttributeName, Attribute],
+ wsName: String,
+ wsDescription: String
+ ): WorkspaceDetails = {
val testUUID: UUID = UUID.randomUUID()
val defaultAttributes = attributes ++ Map(
AttributeName.withDefaultNS("description") -> AttributeString(wsDescription),
@@ -99,33 +112,43 @@ object DataUseRestrictionTestFixtures {
AttributeName.withLibraryNS("datasetDepositor") -> AttributeString("depo"),
AttributeName.withLibraryNS("contactEmail") -> AttributeString("name@example.com"),
AttributeName.withLibraryNS("datasetOwner") -> AttributeString("owner"),
- AttributeName.withLibraryNS("institute") -> AttributeValueList(Seq(AttributeString("one"),AttributeString("two"))),
+ AttributeName.withLibraryNS("institute") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"))
+ ),
AttributeName.withLibraryNS("indication") -> AttributeString("indication"),
AttributeName.withLibraryNS("numSubjects") -> AttributeNumber(123),
AttributeName.withLibraryNS("projectName") -> AttributeString("projectName"),
- AttributeName.withLibraryNS("datatype") -> AttributeValueList(Seq(AttributeString("one"),AttributeString("two"))),
- AttributeName.withLibraryNS("dataCategory") -> AttributeValueList(Seq(AttributeString("one"),AttributeString("two"))),
+ AttributeName.withLibraryNS("datatype") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"))
+ ),
+ AttributeName.withLibraryNS("dataCategory") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"))
+ ),
AttributeName.withLibraryNS("dataUseRestriction") -> AttributeString("dur"),
AttributeName.withLibraryNS("studyDesign") -> AttributeString("study"),
AttributeName.withLibraryNS("cellType") -> AttributeString("cellType"),
AttributeName.withLibraryNS("requiresExternalApproval") -> AttributeBoolean(false),
- AttributeName.withLibraryNS("technology") -> AttributeValueList(Seq(AttributeString("one"),AttributeString("two"))),
+ AttributeName.withLibraryNS("technology") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"))
+ ),
AttributeName.withLibraryNS("useLimitationOption") -> AttributeString("questionnaire"),
- AttributeName.withDefaultNS("_discoverableByGroups") -> AttributeValueList(Seq(AttributeString("one"),AttributeString("two")))
+ AttributeName.withDefaultNS("_discoverableByGroups") -> AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"))
+ )
)
WorkspaceDetails(
- workspaceId=testUUID.toString,
- namespace="testWorkspaceNamespace",
- name=wsName,
- isLocked=false,
- createdBy="createdBy",
- createdDate=DateTime.now(),
- lastModified=DateTime.now(),
- attributes=Some(defaultAttributes),
- bucketName="bucketName",
- workflowCollectionName=Some("wf-collection"),
- authorizationDomain=Some(Set.empty[ManagedGroupRef]),
- workspaceVersion=WorkspaceVersions.V2,
+ workspaceId = testUUID.toString,
+ namespace = "testWorkspaceNamespace",
+ name = wsName,
+ isLocked = false,
+ createdBy = "createdBy",
+ createdDate = DateTime.now(),
+ lastModified = DateTime.now(),
+ attributes = Some(defaultAttributes),
+ bucketName = "bucketName",
+ workflowCollectionName = Some("wf-collection"),
+ authorizationDomain = Some(Set.empty[ManagedGroupRef]),
+ workspaceVersion = WorkspaceVersions.V2,
googleProject = GoogleProjectId("googleProject"),
googleProjectNumber = Some(GoogleProjectNumber("googleProjectNumber")),
billingAccount = Some(RawlsBillingAccountName("billingAccount")),
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ElasticSearchDAOQuerySupportSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ElasticSearchDAOQuerySupportSpec.scala
index 3716b0526..128f95d67 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ElasticSearchDAOQuerySupportSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ElasticSearchDAOQuerySupportSpec.scala
@@ -1,6 +1,5 @@
package org.broadinstitute.dsde.firecloud.service
-
import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.dataaccess._
import org.broadinstitute.dsde.firecloud.elastic.ElasticUtils
@@ -20,14 +19,17 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
val indexname = "ElasticSearchSpec"
val criteria = LibrarySearchParams(Some("searchString"),
- Map.empty[String, Seq[String]],
- None,
- Map.empty[String, Int],
- from = 0, size=10)
+ Map.empty[String, Seq[String]],
+ None,
+ Map.empty[String, Int],
+ from = 0,
+ size = 10
+ )
// create an ElasticSearch client. Client requires legal urls for its servers argument, but those
// urls don't have to point to an actual ES instance.
- val client: TransportClient = ElasticUtils.buildClient(FireCloudConfig.ElasticSearch.servers, FireCloudConfig.ElasticSearch.clusterName)
+ val client: TransportClient =
+ ElasticUtils.buildClient(FireCloudConfig.ElasticSearch.servers, FireCloudConfig.ElasticSearch.clusterName)
// create a mock research purpose support
val researchPurposeSupport: ResearchPurposeSupport = new MockResearchPurposeSupport
@@ -37,21 +39,24 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"discoverability" - {
"when createQuery is given a group for the current user" - {
"should include group in the filter" in {
- val baseRequest = buildSearchQuery(client, indexname, criteria, Seq("whitelistedgroup"), Seq.empty, researchPurposeSupport)
+ val baseRequest =
+ buildSearchQuery(client, indexname, criteria, Seq("whitelistedgroup"), Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateGroupTerms(jsonRequest, Some("whitelistedgroup"), None)
}
}
"when createQuery is given no groups for the current user" - {
"should not have groups in the filter" in {
- val baseRequest = buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val baseRequest =
+ buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateGroupTerms(jsonRequest, None, None)
}
}
"when createQuery is given a workspace for the current user" - {
"should have workspaceId in the filter" in {
- val baseRequest = buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq("workspaceId"), researchPurposeSupport)
+ val baseRequest =
+ buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq("workspaceId"), researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateGroupTerms(jsonRequest, None, Some("workspaceId"))
}
@@ -64,8 +69,9 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
val sortField = Some("library:datasetName")
val sortDirection = Some("asc")
- val sortCriteria = criteria.copy(sortField=sortField,sortDirection=sortDirection)
- val baseRequest = buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val sortCriteria = criteria.copy(sortField = sortField, sortDirection = sortDirection)
+ val baseRequest =
+ buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSortField(jsonRequest, sortField)
validateSortDirection(jsonRequest, sortDirection)
@@ -75,8 +81,9 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"sort field is present in query and sort direction is defaulted to asc" in {
val sortField = Some("library:datasetName")
- val sortCriteria = criteria.copy(sortField=sortField,sortDirection=None)
- val baseRequest = buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val sortCriteria = criteria.copy(sortField = sortField, sortDirection = None)
+ val baseRequest =
+ buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSortField(jsonRequest, sortField)
validateSortDirection(jsonRequest, Some("asc"))
@@ -86,8 +93,9 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"sort field is present in query and sort direction is defaulted to asc" in {
val sortField = Some("library:datasetName")
- val sortCriteria = criteria.copy(sortField=sortField,sortDirection=Some("unknown"))
- val baseRequest = buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val sortCriteria = criteria.copy(sortField = sortField, sortDirection = Some("unknown"))
+ val baseRequest =
+ buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSortField(jsonRequest, sortField)
validateSortDirection(jsonRequest, Some("asc"))
@@ -95,8 +103,9 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
}
"when specifying a sort order but no sort key" - {
"neither sort order nor sort key is present in query" in {
- val sortCriteria = criteria.copy(sortField=None,sortDirection=Some("asc"))
- val baseRequest = buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val sortCriteria = criteria.copy(sortField = None, sortDirection = Some("asc"))
+ val baseRequest =
+ buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSortField(jsonRequest, None)
validateSortDirection(jsonRequest, None)
@@ -104,8 +113,9 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
}
"when specifying neither sort order nor sort key" - {
"neither sort order nor sort key is present in query" in {
- val sortCriteria = criteria.copy(sortField=None,sortDirection=None)
- val baseRequest = buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val sortCriteria = criteria.copy(sortField = None, sortDirection = None)
+ val baseRequest =
+ buildSearchQuery(client, indexname, sortCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSortField(jsonRequest, None)
validateSortDirection(jsonRequest, None)
@@ -117,48 +127,53 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"when specifying a page offset" - {
"page offset is present in query" in {
val offset = 23
- val searchCriteria = criteria.copy(from=offset)
- val baseRequest = buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val searchCriteria = criteria.copy(from = offset)
+ val baseRequest =
+ buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
- assertResult(Some(offset)) {getFromValue(jsonRequest)}
- assertResult(Some(10)) {getSizeValue(jsonRequest)}
+ assertResult(Some(offset))(getFromValue(jsonRequest))
+ assertResult(Some(10))(getSizeValue(jsonRequest))
}
}
"when omitting a page offset" - {
"page offset defaults to 0" in {
- val baseRequest = buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val baseRequest =
+ buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
- assertResult(Some(0)) {getFromValue(jsonRequest)}
- assertResult(Some(10)) {getSizeValue(jsonRequest)}
+ assertResult(Some(0))(getFromValue(jsonRequest))
+ assertResult(Some(10))(getSizeValue(jsonRequest))
}
}
"when specifying a page size" - {
"page size is present in query" in {
val pageSize = 46
- val searchCriteria = criteria.copy(size=pageSize)
- val baseRequest = buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val searchCriteria = criteria.copy(size = pageSize)
+ val baseRequest =
+ buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
- assertResult(Some(0)) {getFromValue(jsonRequest)}
- assertResult(Some(pageSize)) {getSizeValue(jsonRequest)}
+ assertResult(Some(0))(getFromValue(jsonRequest))
+ assertResult(Some(pageSize))(getSizeValue(jsonRequest))
}
}
"when omitting a page size" - {
"page size defaults to 10" in {
- val baseRequest = buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val baseRequest =
+ buildSearchQuery(client, indexname, criteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
- assertResult(Some(0)) {getFromValue(jsonRequest)}
- assertResult(Some(10)) {getSizeValue(jsonRequest)}
+ assertResult(Some(0))(getFromValue(jsonRequest))
+ assertResult(Some(10))(getSizeValue(jsonRequest))
}
}
"when specifying both page offset and page size" - {
"both page offset and page size are present in query" in {
val offset = 23
val pageSize = 46
- val searchCriteria = criteria.copy(from=offset,size=pageSize)
- val baseRequest = buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val searchCriteria = criteria.copy(from = offset, size = pageSize)
+ val baseRequest =
+ buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
- assertResult(Some(offset)) {getFromValue(jsonRequest)}
- assertResult(Some(pageSize)) {getSizeValue(jsonRequest)}
+ assertResult(Some(offset))(getFromValue(jsonRequest))
+ assertResult(Some(pageSize))(getSizeValue(jsonRequest))
}
}
}
@@ -167,22 +182,26 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"when specifying text search" - {
"user criteria is present, searching against _all" in {
val searchTerm = "normcore kitsch mustache bespoke semiotics"
- val searchCriteria = criteria.copy(searchString=Some(searchTerm))
- val baseRequest = buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val searchCriteria = criteria.copy(searchString = Some(searchTerm))
+ val baseRequest =
+ buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
validateSearchTermAll(jsonRequest, searchTerm)
}
}
"when omitting text search" - {
"no search is present in query" in {
- val searchCriteria = criteria.copy(searchString=None)
- val baseRequest = buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
+ val searchCriteria = criteria.copy(searchString = None)
+ val baseRequest =
+ buildSearchQuery(client, indexname, searchCriteria, Seq.empty[String], Seq.empty, researchPurposeSupport)
val jsonRequest = getSearchRequestAsJson(baseRequest)
// when omitting search term, we have an empty "match_all" and the "bool" for discover mode
val arr = getQueryArray(jsonRequest)
val matchAllClause = arr.elements.head.asJsObject
- assertResult(Set("match_all"), "first element of must clause should be a match " + jsonRequest.prettyPrint) {matchAllClause.fields.keySet}
- assertResult(JsObject(("boost",JsNumber(1.0)))) {matchAllClause.fields("match_all").asJsObject}
+ assertResult(Set("match_all"), "first element of must clause should be a match " + jsonRequest.prettyPrint) {
+ matchAllClause.fields.keySet
+ }
+ assertResult(JsObject(("boost", JsNumber(1.0))))(matchAllClause.fields("match_all").asJsObject)
// calling getMustBoolObject will validate it down to that level
// getMustBoolObject(jsonRequest)
}
@@ -192,19 +211,28 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
"setting access level" - {
val params = LibrarySearchParams(Some("test"), Map(), None, Map())
"to No Access if workspace is not returned from workspace list" in {
- val result: JsValue = LibraryServiceSpec.testLibraryMetadataJsObject.copy(LibraryServiceSpec.testLibraryMetadataJsObject.fields.updated("workspaceId", JsString("no.access.to.workspace.id")))
- val expectedResult: JsValue = JsObject(result.asJsObject.fields.updated("workspaceAccess", JsString(WorkspaceAccessLevels.NoAccess.toString)))
+ val result: JsValue = LibraryServiceSpec.testLibraryMetadataJsObject.copy(
+ LibraryServiceSpec.testLibraryMetadataJsObject.fields.updated("workspaceId",
+ JsString("no.access.to.workspace.id")
+ )
+ )
+ val expectedResult: JsValue = JsObject(
+ result.asJsObject.fields.updated("workspaceAccess", JsString(WorkspaceAccessLevels.NoAccess.toString))
+ )
assertResult(expectedResult.asJsObject) {
addAccessLevel(result.asJsObject, Map.empty)
}
}
"to has access if workspace is returned from workspace list" in {
val wsId = "owner.access.workspace"
- val result: JsValue = LibraryServiceSpec.testLibraryMetadataJsObject.copy(LibraryServiceSpec.testLibraryMetadataJsObject.fields.updated("workspaceId", JsString(wsId)))
- val expectedResult: JsValue = JsObject(result.asJsObject.fields.updated("workspaceAccess", JsString(WorkspaceAccessLevels.Owner.toString)))
+ val result: JsValue = LibraryServiceSpec.testLibraryMetadataJsObject.copy(
+ LibraryServiceSpec.testLibraryMetadataJsObject.fields.updated("workspaceId", JsString(wsId))
+ )
+ val expectedResult: JsValue =
+ JsObject(result.asJsObject.fields.updated("workspaceAccess", JsString(WorkspaceAccessLevels.Owner.toString)))
val userPol = UserPolicy(ResourceId(wsId), false, AccessPolicyName("OWNER"), Seq.empty.toSet, Seq.empty.toSet)
assertResult(expectedResult.asJsObject) {
- addAccessLevel(result.asJsObject, Map(wsId->userPol))
+ addAccessLevel(result.asJsObject, Map(wsId -> userPol))
}
}
}
@@ -214,133 +242,136 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
// TODO: do facet requests properly become aggregations?
// TODO: does an expanded facet properly expand?
- def getSearchRequestAsJson(baseQuery:SearchRequestBuilder): JsObject = {
+ def getSearchRequestAsJson(baseQuery: SearchRequestBuilder): JsObject =
baseQuery.toString.parseJson.asJsObject
- }
- def getFromValue(json:JsObject): Option[Int] = {
+ def getFromValue(json: JsObject): Option[Int] =
json.fields.get("from") match {
- case Some(x:JsNumber) => Some(x.value.toInt)
- case _ => None
+ case Some(x: JsNumber) => Some(x.value.toInt)
+ case _ => None
}
- }
- def getSizeValue(json:JsObject): Option[Int] = {
+ def getSizeValue(json: JsObject): Option[Int] =
json.fields.get("size") match {
- case Some(x:JsNumber) => Some(x.value.toInt)
- case _ => None
+ case Some(x: JsNumber) => Some(x.value.toInt)
+ case _ => None
}
- }
- def getSortField(json:JsObject): Option[String] = {
+ def getSortField(json: JsObject): Option[String] =
getSortObject(json) match {
- case Some(sortObj:JsObject) =>
- assertResult(1) {sortObj.fields.size}
+ case Some(sortObj: JsObject) =>
+ assertResult(1)(sortObj.fields.size)
Some(sortObj.fields.keys.head)
case _ => None
}
- }
- def getSortOrder(json:JsObject): Option[String] = {
+ def getSortOrder(json: JsObject): Option[String] =
getSortObject(json) match {
- case Some(sortObj:JsObject) =>
- assertResult(1) {sortObj.fields.size}
+ case Some(sortObj: JsObject) =>
+ assertResult(1)(sortObj.fields.size)
sortObj.fields.values.head.asJsObject.fields.get("order") match {
- case Some(x:JsString) => Some(x.value)
- case _ => None
+ case Some(x: JsString) => Some(x.value)
+ case _ => None
}
case _ => None
}
- }
- def getSortObject(json:JsObject): Option[JsObject] = {
+ def getSortObject(json: JsObject): Option[JsObject] =
json.fields.get("sort") match {
- case Some(arr:JsArray) =>
- assertResult(1) {arr.elements.size} // app code only support sorting on a single field for now
+ case Some(arr: JsArray) =>
+      assertResult(1)(arr.elements.size) // app code only supports sorting on a single field for now
Some(arr.elements.head.asJsObject)
case _ => None
}
- }
- def getQuery(json:JsObject): Option[JsValue] = {
+ def getQuery(json: JsObject): Option[JsValue] =
json.fields.get("query")
- }
- def validateSortField(json:JsObject, expectedSortField:Option[String]): Unit = {
+ def validateSortField(json: JsObject, expectedSortField: Option[String]): Unit =
// the ES DAO actually sorts on the inner field with a suffix of ".sort", so add that here.
expectedSortField match {
- case Some(x) => assertResult(Some(x + ".sort")) {getSortField(json)}
- case None => assertResult(None) {getSortField(json)}
+ case Some(x) => assertResult(Some(x + ".sort"))(getSortField(json))
+ case None => assertResult(None)(getSortField(json))
}
- }
- def validateSortDirection(json:JsObject, expectedSortDirection:Option[String]): Unit = {
- assertResult(expectedSortDirection) {getSortOrder(json)}
- }
+ def validateSortDirection(json: JsObject, expectedSortDirection: Option[String]): Unit =
+ assertResult(expectedSortDirection)(getSortOrder(json))
- def validateGroupTerms(json:JsObject, expectedGroup:Option[String], expectedWorkspace:Option[String]): Unit = {
+ def validateGroupTerms(json: JsObject, expectedGroup: Option[String], expectedWorkspace: Option[String]): Unit = {
val groupShouldClause = getFilterBoolShouldArray(json)
groupShouldClause.elements foreach {
case subObj: JsObject =>
subObj.fields foreach {
- case ("bool", b:JsObject) =>
+ case ("bool", b: JsObject) =>
val mustNotField = b
- .fields("must_not").asInstanceOf[JsArray].elements(0).asJsObject
- .fields("exists").asJsObject
+ .fields("must_not")
+ .asInstanceOf[JsArray]
+ .elements(0)
+ .asJsObject
+ .fields("exists")
+ .asJsObject
.fields("field")
- assertResult(ElasticSearch.fieldDiscoverableByGroups) {mustNotField.asInstanceOf[JsString].value}
- // assertResult(expectedDiscoverableGroup(group), "group criteria should include expected group name") {groupbool}
- case ("terms", t:JsObject) =>
+ assertResult(ElasticSearch.fieldDiscoverableByGroups)(mustNotField.asInstanceOf[JsString].value)
+ // assertResult(expectedDiscoverableGroup(group), "group criteria should include expected group name") {groupbool}
+ case ("terms", t: JsObject) =>
t.fields.keySet foreach {
case ElasticSearch.fieldDiscoverableByGroups =>
expectedGroup foreach { grp =>
val actualGroups = t.fields(ElasticSearch.fieldDiscoverableByGroups)
- assertResult(Set(JsString(grp))) {actualGroups.asInstanceOf[JsArray].elements.toSet}
+ assertResult(Set(JsString(grp)))(actualGroups.asInstanceOf[JsArray].elements.toSet)
}
case "workspaceId.keyword" =>
expectedWorkspace foreach { wksp =>
val actualWorkspaces = t.fields("workspaceId.keyword")
- assertResult(Set(JsString(wksp))) {actualWorkspaces.asInstanceOf[JsArray].elements.toSet}
+ assertResult(Set(JsString(wksp)))(actualWorkspaces.asInstanceOf[JsArray].elements.toSet)
}
case _ => ()
}
- // assertResult(expectedNoDiscoverableGroups, "group criteria should be just the must-not-exists") {groupbool}
+ // assertResult(expectedNoDiscoverableGroups, "group criteria should be just the must-not-exists") {groupbool}
case x => throw new Exception(s"unmatched case for ${x.getClass.getName}: ${x.toString()}")
- }
+ }
case x => throw new Exception(s"unmatched case for ${x.getClass.getName}: ${x.toString()}")
}
}
- def validateSearchTermAll(json:JsObject, expectedTerm:String): Unit = {
+ def validateSearchTermAll(json: JsObject, expectedTerm: String): Unit =
validateSearchTerm(json, expectedTerm, "_all")
- }
- def validateSearchTerm(json:JsObject, expectedTerm:String, expectedField:String) = {
+ def validateSearchTerm(json: JsObject, expectedTerm: String, expectedField: String) = {
val shouldArray = getTextSearchShouldArray(json)
- assertResult(2) {shouldArray.elements.size}
+ assertResult(2)(shouldArray.elements.size)
val allSearchMatch = shouldArray.elements.head.asJsObject
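+    // note: "2<67%" and "3<75%" below are Elasticsearch minimum_should_match expressions: with that many
+    // optional clauses or fewer, all must match; with more, at least the given percentage must match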
validateSearchCriteria(allSearchMatch, expectedTerm, expectedField, "2<67%")
val parentSearchNested = shouldArray.elements.tail.head.asJsObject
- assertResult(Set("nested"), s"search on parents should be a nested query") {parentSearchNested.fields.keySet}
+ assertResult(Set("nested"), s"search on parents should be a nested query")(parentSearchNested.fields.keySet)
val parentSearchNestedQuery = parentSearchNested.fields("nested").asJsObject
assert(parentSearchNestedQuery.fields.keySet.contains("query"), "nested parents query should contain a query")
assert(parentSearchNestedQuery.fields.keySet.contains("path"), "nested parents query should contain a path")
- assertResult("parents", "nested parents query should have a path of 'parents'") {parentSearchNestedQuery.fields("path").asInstanceOf[JsString].value}
+ assertResult("parents", "nested parents query should have a path of 'parents'") {
+ parentSearchNestedQuery.fields("path").asInstanceOf[JsString].value
+ }
val parentSearchMatch = parentSearchNestedQuery.fields("query").asJsObject
validateSearchCriteria(parentSearchMatch, expectedTerm, "parents.label", "3<75%")
}
- private def validateSearchCriteria(json:JsObject, expectedTerm:String, expectedField:String, expectedMinMatch:String) = {
+ private def validateSearchCriteria(json: JsObject,
+ expectedTerm: String,
+ expectedField: String,
+ expectedMinMatch: String
+ ) = {
- assertResult(Set("match"), s"search on $expectedField should be a match clause") {json.fields.keySet}
+ assertResult(Set("match"), s"search on $expectedField should be a match clause")(json.fields.keySet)
val search = json.fields("match").asJsObject
- assertResult(Set(expectedField), s"search clause should execute against only $expectedField") {search.fields.keySet}
+ assertResult(Set(expectedField), s"search clause should execute against only $expectedField") {
+ search.fields.keySet
+ }
val searchCriteria = search.fields(expectedField).asJsObject
assert(searchCriteria.fields.keySet.contains("query"), s"search criteria should contain 'query'")
- assert(searchCriteria.fields.keySet.contains("minimum_should_match"), s"search criteria should contain 'minimum_should_match'")
- assertResult(expectedTerm) {searchCriteria.fields("query").asInstanceOf[JsString].value}
- assertResult(expectedMinMatch) {searchCriteria.fields("minimum_should_match").asInstanceOf[JsString].value}
+ assert(searchCriteria.fields.keySet.contains("minimum_should_match"),
+ s"search criteria should contain 'minimum_should_match'"
+ )
+ assertResult(expectedTerm)(searchCriteria.fields("query").asInstanceOf[JsString].value)
+ assertResult(expectedMinMatch)(searchCriteria.fields("minimum_should_match").asInstanceOf[JsString].value)
}
-
- private def getOuterBool(json:JsObject):JsObject = {
+ private def getOuterBool(json: JsObject): JsObject =
getQuery(json) match {
case Some(a: JsObject) =>
assertResult(Set("bool"), "json should have an outer bool clause") {
@@ -349,31 +380,30 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
a.fields("bool").asJsObject
case _ => fail("query was not a JsObject")
}
- }
- private def getQueryArray(json:JsObject):JsArray = {
+ private def getQueryArray(json: JsObject): JsArray = {
val outerbool = getOuterBool(json)
assert(outerbool.fields.keySet.contains("must"), "outer bool clause should include a must clause")
outerbool.fields("must") match {
- case arr:JsArray =>
- assertResult(1, "must clause should have one element") {arr.elements.size}
+ case arr: JsArray =>
+ assertResult(1, "must clause should have one element")(arr.elements.size)
arr
case _ => fail("must clause should be a JsArray")
}
}
- private def getFilterBoolShouldArray(json:JsObject):JsArray = {
+ private def getFilterBoolShouldArray(json: JsObject): JsArray = {
val outerbool = getOuterBool(json)
assert(outerbool.fields.keySet.contains("filter"), "outer bool should include a filter clause")
outerbool.fields("filter") match {
- case arr:JsArray =>
- assertResult(1, "filter clause should have one element") {arr.elements.size}
+ case arr: JsArray =>
+ assertResult(1, "filter clause should have one element")(arr.elements.size)
assert(arr.elements.head.asJsObject.fields.keySet.contains("bool"), "filter should include a bool clause")
arr.elements.head.asJsObject.fields("bool") match {
- case boolMap:JsObject =>
+ case boolMap: JsObject =>
assert(boolMap.fields.keySet.contains("should"), "filter, bool clause should contain a should clause")
boolMap.fields("should") match {
- case shouldArray:JsArray =>
+ case shouldArray: JsArray =>
shouldArray
case x => throw new Exception(s"unmatched case for ${x.getClass.getName}: ${x.toString()}")
}
@@ -383,43 +413,53 @@ class ElasticSearchDAOQuerySupportSpec extends AnyFreeSpec with ElasticSearchDAO
}
}
-
- private def getTextSearchShouldArray(json:JsObject):JsArray = {
+ private def getTextSearchShouldArray(json: JsObject): JsArray = {
val query = getQueryArray(json)
val searchClause = query.elements.head.asJsObject
- assertResult(Set("bool"), "first element of text search clause should be a bool") {searchClause.fields.keySet}
+ assertResult(Set("bool"), "first element of text search clause should be a bool")(searchClause.fields.keySet)
val boolClause = searchClause.fields("bool").asJsObject
- assert(boolClause.fields.keySet.contains("should"), "first element of text search bool clause should inculde a should")
+    assert(boolClause.fields.keySet.contains("should"),
+           "first element of text search bool clause should include a should"
+    )
val shouldArray = boolClause.fields("should") match {
- case arr:JsArray => arr
- case _ => fail("text search should clause should be an array")
+ case arr: JsArray => arr
+ case _ => fail("text search should clause should be an array")
}
shouldArray
}
- private def assertDiscoverableGroups(json:JsObject, expectedGroup: Option[String]) = {
+ private def assertDiscoverableGroups(json: JsObject, expectedGroup: Option[String]) = {
val should = json.fields.get("should")
should match {
- case Some(ja:JsArray) =>
+ case Some(ja: JsArray) =>
val expectedLength = if (expectedGroup.isEmpty) 1 else 2
- assertResult(expectedLength, s"should clause should have $expectedLength item(s)") {ja.elements.length}
+ assertResult(expectedLength, s"should clause should have $expectedLength item(s)")(ja.elements.length)
// don't bother asserting the types and keys below; will throw exception and fail test if
// there's a problem.
- val mustNotField = ja.elements(0).asJsObject
- .fields("bool").asJsObject
- .fields("must_not").asInstanceOf[JsArray].elements(0).asJsObject
- .fields("exists").asJsObject
- .fields("field")
- assertResult(ElasticSearch.fieldDiscoverableByGroups) {mustNotField.asInstanceOf[JsString].value}
+ val mustNotField = ja
+ .elements(0)
+ .asJsObject
+ .fields("bool")
+ .asJsObject
+ .fields("must_not")
+ .asInstanceOf[JsArray]
+ .elements(0)
+ .asJsObject
+ .fields("exists")
+ .asJsObject
+ .fields("field")
+ assertResult(ElasticSearch.fieldDiscoverableByGroups)(mustNotField.asInstanceOf[JsString].value)
expectedGroup foreach { grp =>
- val actualGroups = ja.elements(1).asJsObject
- .fields("terms").asJsObject
- .fields(ElasticSearch.fieldDiscoverableByGroups)
- assertResult(Set(JsString(grp))) {actualGroups.asInstanceOf[JsArray].elements.toSet}
+ val actualGroups = ja
+ .elements(1)
+ .asJsObject
+ .fields("terms")
+ .asJsObject
+ .fields(ElasticSearch.fieldDiscoverableByGroups)
+ assertResult(Set(JsString(grp)))(actualGroups.asInstanceOf[JsArray].elements.toSet)
}
case _ => fail("should clause should exist and be a JsArray")
}
}
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/EntitiesWithTypeServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/EntitiesWithTypeServiceSpec.scala
index b5f22dac0..1db5fe909 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/EntitiesWithTypeServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/EntitiesWithTypeServiceSpec.scala
@@ -18,8 +18,10 @@ class EntitiesWithTypeServiceSpec extends BaseServiceSpec with EntityApiService
val entityServiceConstructor: (ModelSchema) => EntityService = EntityService.constructor(app)
- val validFireCloudPath = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath + "/broad-dsde-dev/valid/"
- val invalidFireCloudPath = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath + "/broad-dsde-dev/invalid/"
+ val validFireCloudPath =
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath + "/broad-dsde-dev/valid/"
+ val invalidFireCloudPath =
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath + "/broad-dsde-dev/invalid/"
"EntityService-EntitiesWithType" - {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeServiceSpec.scala
index 9a55fe2ca..cdcff12ea 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ExportEntitiesByTypeServiceSpec.scala
@@ -4,7 +4,7 @@ import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpEntity.ChunkStreamPart
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.Uri.Query
-import akka.http.scaladsl.model.headers.{Connection, ContentDispositionTypes, `Content-Disposition`}
+import akka.http.scaladsl.model.headers.{`Content-Disposition`, Connection, ContentDispositionTypes}
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route.{seal => sealRoute}
import akka.http.scaladsl.testkit.RouteTestTimeout
@@ -22,12 +22,16 @@ import org.scalatest.BeforeAndAfterEach
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
-class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitiesApiService with CookieAuthedApiService with BeforeAndAfterEach {
+class ExportEntitiesByTypeServiceSpec
+ extends BaseServiceSpec
+ with ExportEntitiesApiService
+ with CookieAuthedApiService
+ with BeforeAndAfterEach {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
// On travis, slow processing causes the route to timeout and complete too quickly for the large content checks.
- override implicit val routeTestTimeout: RouteTestTimeout = RouteTestTimeout(30.seconds)
+ implicit override val routeTestTimeout: RouteTestTimeout = RouteTestTimeout(30.seconds)
def actorRefFactory: ActorSystem = system
@@ -43,7 +47,8 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
val validFireCloudEntitiesSampleSetTSVPath = "/api/workspaces/broad-dsde-dev/valid/entities/sample_set/tsv"
val validFireCloudEntitiesSampleTSVPath = "/api/workspaces/broad-dsde-dev/valid/entities/sample/tsv"
val invalidFireCloudEntitiesSampleTSVPath = "/api/workspaces/broad-dsde-dev/invalid/entities/sample/tsv"
- val invalidFireCloudEntitiesParticipantSetTSVPath = "/api/workspaces/broad-dsde-dev/invalid/entities/participant_set/tsv"
+ val invalidFireCloudEntitiesParticipantSetTSVPath =
+ "/api/workspaces/broad-dsde-dev/invalid/entities/participant_set/tsv"
val exceptionFireCloudEntitiesSampleTSVPath = "/api/workspaces/broad-dsde-dev/exception/entities/sample/tsv"
val page3ExceptionFireCloudEntitiesSampleTSVPath = "/api/workspaces/broad-dsde-dev/page3exception/entities/sample/tsv"
val nonModelEntitiesBigQueryTSVPath = "/api/workspaces/broad-dsde-dev/nonModel/entities/bigQuery/tsv"
@@ -65,10 +70,12 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when an exception occurs in a paged query response, the response should be handled appropriately" - {
"FireCloudException is contained in response chunks" in {
// Exception case is generated from the entity query call which is inside of the akka stream code.
- Get(page3ExceptionFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(page3ExceptionFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
val strResp = responseAs[String]
- strResp should include ("FireCloudException")
+ strResp should include("FireCloudException")
}
}
}
@@ -76,7 +83,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when an exception occurs, the response should be handled appropriately" - {
"InternalServerError is returned" in {
// Exception case is generated from the entity query call which is inside of the akka stream code.
- Get(exceptionFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(exceptionFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(InternalServerError)
errorReportCheck("Rawls", InternalServerError)
@@ -86,12 +95,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a valid entity type with filtered attributes" - {
"OK response is returned and attributes are filtered" in {
- val uri = Uri(largeFireCloudEntitiesSampleTSVPath).withQuery(Query(("attributeNames", filterProps.mkString(","))))
+ val uri =
+ Uri(largeFireCloudEntitiesSampleTSVPath).withQuery(Query(("attributeNames", filterProps.mkString(","))))
Get(uri) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
responseAs[String].startsWith("update:") should be(true)
validateProps(response.entity)
@@ -101,17 +113,23 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a non-FC model entity type with all attributes" - {
"OK response is returned and attributes are included and model is flexible" in {
- Get(nonModelEntitiesBigQueryTSVPath+"?model=flexible") ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(nonModelEntitiesBigQueryTSVPath + "?model=flexible") ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "bigQuery.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "bigQuery.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
responseAs[String].contains("query_str") should be(true)
}
}
"400 response is returned is model is firecloud" in {
- Get(nonModelEntitiesBigQueryTSVPath+"?model=firecloud") ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(nonModelEntitiesBigQueryTSVPath + "?model=firecloud") ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(BadRequest)
}
@@ -120,11 +138,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a non-FC model entity type with selected attributes" - {
"OK response is returned and file is entity type when model is flexible" in {
- Get(nonModelEntitiesPairTSVPath + "?attributeNames=names&model=flexible") ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(nonModelEntitiesPairTSVPath + "?attributeNames=names&model=flexible") ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(exportEntitiesRoutes) ~> check {
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "pair.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "pair.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
responseAs[String].startsWith("entity:") should be(true)
responseAs[String].contains("names") should be(true)
@@ -134,7 +156,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a non-FC model entity set type with all attributes" - {
"400 response is returned when model defaults to firecloud" in {
- Get(nonModelEntitiesBigQuerySetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(nonModelEntitiesBigQuerySetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(BadRequest)
}
@@ -143,11 +167,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting LARGE (20K) sample TSV" - {
"OK response is returned" in {
- Get(largeFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(largeFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
}
}
@@ -155,24 +183,34 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting LARGE (5K) sample set file" - {
"OK response is returned" in {
- Get(largeFireCloudEntitiesSampleSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(largeFireCloudEntitiesSampleSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
- response.entity.isKnownEmpty() shouldNot be(true) // Entity is the first line of content as output by StreamingActor
+ response.entity.isKnownEmpty() shouldNot be(
+ true
+ ) // Entity is the first line of content as output by StreamingActor
headers.contains(Connection("Keep-Alive")) should be(true)
- headers.contains(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip"))) should be(true)
+ headers.contains(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip"))
+ ) should be(true)
}
}
}
"when calling GET on exporting a valid collection type" - {
"OK response is returned" in {
- Get(validFireCloudEntitiesSampleSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(validFireCloudEntitiesSampleSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
response.entity.isKnownEmpty() shouldNot be(true)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip"))
+ )
contentType shouldEqual ContentTypes.`application/octet-stream`
}
}
@@ -180,12 +218,16 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a valid entity type" - {
"OK response is returned" in {
- Get(validFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(validFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
response.entity.isKnownEmpty() shouldNot be(true)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
}
}
@@ -193,7 +235,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting an invalid collection type" - {
"NotFound response is returned" in {
- Get(invalidFireCloudEntitiesParticipantSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(invalidFireCloudEntitiesParticipantSetTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(NotFound)
}
@@ -202,7 +246,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting an invalid entity type" - {
"NotFound response is returned" in {
- Get(invalidFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Get(invalidFireCloudEntitiesSampleTSVPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(NotFound)
errorReportCheck("Rawls", NotFound)
@@ -219,10 +265,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a valid collection type" - {
"OK response is returned" in {
- Post("/api/workspaces/broad-dsde-dev/valid/entities/sample_set/tsv") ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Post("/api/workspaces/broad-dsde-dev/valid/entities/sample_set/tsv") ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
- verify(mockitoGoogleServicesDao, times(1)).writeObjectAsRawlsSA(any[GcsBucketName], any[GcsObjectName], any[File])
+ verify(mockitoGoogleServicesDao, times(1)).writeObjectAsRawlsSA(any[GcsBucketName],
+ any[GcsObjectName],
+ any[File]
+ )
val result = Await.result(Unmarshal(response.entity).to[String], Duration.Inf)
// gs://bucketName/tsvexport/sample_set/sample_set-1727724455587.zip
result should fullyMatch regex """gs:\/\/bucketName\/tsvexport\/sample_set\/sample_set-[0-9]{13}.zip"""
@@ -232,10 +283,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling GET on exporting a valid entity type" - {
"OK response is returned" in {
- Post("/api/workspaces/broad-dsde-dev/valid/entities/sample/tsv") ~> dummyUserIdHeaders("1234") ~> sealRoute(exportEntitiesRoutes) ~> check {
+ Post("/api/workspaces/broad-dsde-dev/valid/entities/sample/tsv") ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ exportEntitiesRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
- verify(mockitoGoogleServicesDao, times(1)).writeObjectAsRawlsSA(any[GcsBucketName], any[GcsObjectName], any[File])
+ verify(mockitoGoogleServicesDao, times(1)).writeObjectAsRawlsSA(any[GcsBucketName],
+ any[GcsObjectName],
+ any[File]
+ )
val result = Await.result(Unmarshal(response.entity).to[String], Duration.Inf)
// gs://bucketName/tsvexport/sample/sample-1727724455587.tsv
result should fullyMatch regex """gs:\/\/bucketName\/tsvexport\/sample\/sample-[0-9]{13}.tsv"""
@@ -245,23 +301,31 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
}
- val validCookieFireCloudEntitiesLargeSampleTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/large/entities/sample/tsv"
- val validCookieFireCloudEntitiesSampleSetTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/valid/entities/sample_set/tsv"
+ val validCookieFireCloudEntitiesLargeSampleTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/large/entities/sample/tsv"
+ val validCookieFireCloudEntitiesSampleSetTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/valid/entities/sample_set/tsv"
val validCookieFireCloudEntitiesSampleTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/valid/entities/sample/tsv"
- val invalidCookieFireCloudEntitiesSampleTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/invalid/entities/sample/tsv"
- val invalidCookieFireCloudEntitiesParticipantSetTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/invalid/entities/participant_set/tsv"
- val exceptionCookieFireCloudEntitiesSampleTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/exception/entities/sample/tsv"
- val page3ExceptionCookieFireCloudEntitiesSampleTSVPath = "/cookie-authed/workspaces/broad-dsde-dev/page3exception/entities/sample/tsv"
+ val invalidCookieFireCloudEntitiesSampleTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/invalid/entities/sample/tsv"
+ val invalidCookieFireCloudEntitiesParticipantSetTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/invalid/entities/participant_set/tsv"
+ val exceptionCookieFireCloudEntitiesSampleTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/exception/entities/sample/tsv"
+ val page3ExceptionCookieFireCloudEntitiesSampleTSVPath =
+ "/cookie-authed/workspaces/broad-dsde-dev/page3exception/entities/sample/tsv"
"CookieAuthedApiService-ExportEntitiesByType" - {
"when an exception occurs in a paged query response, the response should be handled appropriately" - {
"FireCloudException is contained in response chunks" in {
// Exception case is generated from the entity query call which is inside of the akka stream code.
- Post(page3ExceptionCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken"->"token"))) ~> dummyUserIdHeaders("1234") ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(page3ExceptionCookieFireCloudEntitiesSampleTSVPath,
+ FormData(Map("FCtoken" -> "token"))
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(cookieAuthedRoutes) ~> check {
handled should be(true)
val strResp = responseAs[String]
- strResp should include ("FireCloudException")
+ strResp should include("FireCloudException")
}
}
}
@@ -269,7 +333,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when an exception occurs, the response should be handled appropriately" - {
"InternalServerError is returned" in {
// Exception case is generated from the entity query call which is inside of the akka stream code.
- Post(exceptionCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken"->"token"))) ~> dummyUserIdHeaders("1234") ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(exceptionCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken" -> "token"))) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(cookieAuthedRoutes) ~> check {
handled should be(true)
status should be(InternalServerError)
errorReportCheck("Rawls", InternalServerError)
@@ -279,11 +345,15 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling POST on exporting a valid entity type with filtered attributes" - {
"OK response is returned and attributes are filtered" in {
- Post(validCookieFireCloudEntitiesLargeSampleTSVPath, FormData(Map("FCtoken"->"token", "attributeNames"->filterProps.mkString(",")))) ~> dummyUserIdHeaders("1234") ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(validCookieFireCloudEntitiesLargeSampleTSVPath,
+ FormData(Map("FCtoken" -> "token", "attributeNames" -> filterProps.mkString(",")))
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(cookieAuthedRoutes) ~> check {
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
validateProps(response.entity)
}
@@ -292,12 +362,16 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling POST on exporting LARGE (20K) sample TSV" - {
"OK response is returned" in {
- Post(validCookieFireCloudEntitiesLargeSampleTSVPath, FormData(Map("FCtoken"->"token"))) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(validCookieFireCloudEntitiesLargeSampleTSVPath, FormData(Map("FCtoken" -> "token"))) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
response.entity.isKnownEmpty() shouldNot be(true)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
}
}
@@ -305,24 +379,32 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling POST on exporting a valid collection type" - {
"OK response is returned" in {
- Post(validCookieFireCloudEntitiesSampleSetTSVPath, FormData(Map("FCtoken"->"token"))) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(validCookieFireCloudEntitiesSampleSetTSVPath, FormData(Map("FCtoken" -> "token"))) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
response.entity.isKnownEmpty() shouldNot be(true)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers.contains(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip"))) should be(true)
+ headers.contains(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample_set.zip"))
+ ) should be(true)
}
}
}
"when calling POST on exporting a valid entity type" - {
"OK response is returned" in {
- Post(validCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken"->"token"))) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(validCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken" -> "token"))) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
status should be(OK)
response.entity.isKnownEmpty() shouldNot be(true)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
}
}
@@ -330,7 +412,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling POST on exporting an invalid collection type" - {
"NotFound response is returned" in {
- Post(invalidCookieFireCloudEntitiesParticipantSetTSVPath, FormData(Map("FCtoken"->"token"))) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(invalidCookieFireCloudEntitiesParticipantSetTSVPath, FormData(Map("FCtoken" -> "token"))) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
status should be(NotFound)
}
@@ -339,7 +423,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling POST on exporting an invalid entity type" - {
"NotFound response is returned" in {
- Post(invalidCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken"->"token"))) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ Post(invalidCookieFireCloudEntitiesSampleTSVPath, FormData(Map("FCtoken" -> "token"))) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
status should be(NotFound)
errorReportCheck("Rawls", NotFound)
@@ -350,7 +436,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
"when calling PUT, PATCH, DELETE on export path" - {
"MethodNotAllowed response is returned" in {
List(HttpMethods.PUT, HttpMethods.DELETE, HttpMethods.PATCH) foreach { method =>
- new RequestBuilder(method)(invalidCookieFireCloudEntitiesParticipantSetTSVPath) ~> sealRoute(cookieAuthedRoutes) ~> check {
+ new RequestBuilder(method)(invalidCookieFireCloudEntitiesParticipantSetTSVPath) ~> sealRoute(
+ cookieAuthedRoutes
+ ) ~> check {
handled should be(true)
withClue(s"Method $method:") {
status should equal(MethodNotAllowed)
@@ -369,7 +457,9 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
handled should be(true)
status should be(OK)
headers.contains(Connection("Keep-Alive")) should be(true)
- headers should contain(`Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv")))
+ headers should contain(
+ `Content-Disposition`.apply(ContentDispositionTypes.attachment, Map("filename" -> "sample.tsv"))
+ )
contentType shouldEqual ContentType(MediaTypes.`text/tab-separated-values`, HttpCharsets.`UTF-8`)
validateProps(response.entity)
}
@@ -379,12 +469,11 @@ class ExportEntitiesByTypeServiceSpec extends BaseServiceSpec with ExportEntitie
private def validateProps(entity: HttpEntity): Unit = {
val entityHeaderString = Await.result(entity.toStrict(1.second).map(_.data.utf8String), Duration.Inf)
- filterProps.foreach { h => entityHeaderString.contains(h) should be(true) }
- missingProps.foreach { h => entityHeaderString.contains(h) should be(false) }
+ filterProps.foreach(h => entityHeaderString.contains(h) should be(true))
+ missingProps.foreach(h => entityHeaderString.contains(h) should be(false))
}
- private def validateErrorInLastChunk(chunks: Seq[ChunkStreamPart], message: String): Unit = {
- chunks.reverse.head.data.utf8String should include (message)
- }
+ private def validateErrorInLastChunk(chunks: Seq[ChunkStreamPart], message: String): Unit =
+ chunks.reverse.head.data.utf8String should include(message)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSpec.scala
index 7c2234cf1..a5623e67d 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/LibraryServiceSpec.scala
@@ -54,23 +54,42 @@ object LibraryServiceSpec {
val testLibraryMetadataJsObject = testLibraryMetadata.parseJson.asJsObject
}
-class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with LibraryServiceSupport with AttributeSupport with ElasticSearchDAOSupport {
- def toName(s:String) = AttributeName.fromDelimitedName(s)
+class LibraryServiceSpec
+ extends BaseServiceSpec
+ with AnyFreeSpecLike
+ with LibraryServiceSupport
+ with AttributeSupport
+ with ElasticSearchDAOSupport {
+ def toName(s: String) = AttributeName.fromDelimitedName(s)
implicit val userToken: WithAccessToken = AccessToken("LibraryServiceSpec")
- val libraryAttributePredicate = (k: AttributeName) => k.namespace == AttributeName.libraryNamespace && k.name != LibraryService.publishedFlag.name
+ val libraryAttributePredicate = (k: AttributeName) =>
+ k.namespace == AttributeName.libraryNamespace && k.name != LibraryService.publishedFlag.name
- val existingLibraryAttrs = Map("library:keyone"->"valone", "library:keytwo"->"valtwo", "library:keythree"->"valthree", "library:keyfour"->"valfour").toJson.convertTo[AttributeMap]
- val existingMixedAttrs = Map("library:keyone"->"valone", "library:keytwo"->"valtwo", "keythree"->"valthree", "keyfour"->"valfour").toJson.convertTo[AttributeMap]
- val existingPublishedAttrs = Map("library:published"->"true", "library:keytwo"->"valtwo", "keythree"->"valthree", "keyfour"->"valfour").toJson.convertTo[AttributeMap]
+ val existingLibraryAttrs = Map("library:keyone" -> "valone",
+ "library:keytwo" -> "valtwo",
+ "library:keythree" -> "valthree",
+ "library:keyfour" -> "valfour"
+ ).toJson.convertTo[AttributeMap]
+ val existingMixedAttrs = Map("library:keyone" -> "valone",
+ "library:keytwo" -> "valtwo",
+ "keythree" -> "valthree",
+ "keyfour" -> "valfour"
+ ).toJson.convertTo[AttributeMap]
+ val existingPublishedAttrs = Map("library:published" -> "true",
+ "library:keytwo" -> "valtwo",
+ "keythree" -> "valthree",
+ "keyfour" -> "valfour"
+ ).toJson.convertTo[AttributeMap]
val testUUID = UUID.randomUUID()
val testGroup1Ref = ManagedGroupRef(RawlsGroupName("test-group1"))
val testGroup2Ref = ManagedGroupRef(RawlsGroupName("test-group2"))
- val testWorkspace = new WorkspaceDetails(workspaceId = testUUID.toString,
+ val testWorkspace = new WorkspaceDetails(
+ workspaceId = testUUID.toString,
namespace = "testWorkspaceNamespace",
name = "testWorkspaceName",
authorizationDomain = Some(Set(testGroup1Ref, testGroup2Ref)),
@@ -81,7 +100,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
attributes = Some(Map.empty),
bucketName = "bucketName",
workflowCollectionName = Some("wf-collection"),
- workspaceVersion=WorkspaceVersions.V2,
+ workspaceVersion = WorkspaceVersions.V2,
googleProject = GoogleProjectId("googleProject"),
googleProjectNumber = Some(GoogleProjectNumber("googleProjectNumber")),
billingAccount = Some(RawlsBillingAccountName("billingAccount")),
@@ -91,7 +110,6 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
state = WorkspaceState.Ready
)
-
val DULAdditionalJsObject =
"""
|{
@@ -107,7 +125,8 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
| "library:IRB" : false
|}
""".stripMargin.parseJson.asJsObject
- val DULfields = (LibraryServiceSpec.testLibraryMetadataJsObject.fields-"library:orsp") ++ DULAdditionalJsObject.fields
+ val DULfields =
+ (LibraryServiceSpec.testLibraryMetadataJsObject.fields - "library:orsp") ++ DULAdditionalJsObject.fields
val testLibraryDULMetadata = LibraryServiceSpec.testLibraryMetadataJsObject.copy(DULfields).compactPrint
val dur = Duration(2, MINUTES)
@@ -129,12 +148,13 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"when new attrs are a subset" - {
"should calculate removals and updates" in {
- val newAttrs = """{"library:keyone":"valoneNew", "library:keytwo":"valtwoNew"}""".parseJson.convertTo[AttributeMap]
+ val newAttrs =
+ """{"library:keyone":"valoneNew", "library:keytwo":"valtwoNew"}""".parseJson.convertTo[AttributeMap]
val expected = Seq(
RemoveAttribute(toName("library:keythree")),
RemoveAttribute(toName("library:keyfour")),
- AddUpdateAttribute(toName("library:keyone"),AttributeString("valoneNew")),
- AddUpdateAttribute(toName("library:keytwo"),AttributeString("valtwoNew"))
+ AddUpdateAttribute(toName("library:keyone"), AttributeString("valoneNew")),
+ AddUpdateAttribute(toName("library:keytwo"), AttributeString("valtwoNew"))
)
assertResult(expected) {
generateAttributeOperations(existingLibraryAttrs, newAttrs, libraryAttributePredicate)
@@ -170,7 +190,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
val newAttrs = """{"library:keyone":"valoneNew"}""".parseJson.convertTo[AttributeMap]
val expected = Seq(
RemoveAttribute(toName("library:keytwo")),
- AddUpdateAttribute(toName("library:keyone"),AttributeString("valoneNew"))
+ AddUpdateAttribute(toName("library:keyone"), AttributeString("valoneNew"))
)
assertResult(expected) {
generateAttributeOperations(existingMixedAttrs, newAttrs, libraryAttributePredicate)
@@ -179,12 +199,14 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"when new attrs include non-library" - {
"should not touch new non-library attrs" in {
- val newAttrs = """{"library:keyone":"valoneNew", "library:keytwo":"valtwoNew", "333":"three", "444":"four"}""".parseJson.convertTo[AttributeMap]
+ val newAttrs =
+ """{"library:keyone":"valoneNew", "library:keytwo":"valtwoNew", "333":"three", "444":"four"}""".parseJson
+ .convertTo[AttributeMap]
val expected = Seq(
RemoveAttribute(toName("library:keythree")),
RemoveAttribute(toName("library:keyfour")),
- AddUpdateAttribute(toName("library:keyone"),AttributeString("valoneNew")),
- AddUpdateAttribute(toName("library:keytwo"),AttributeString("valtwoNew"))
+ AddUpdateAttribute(toName("library:keyone"), AttributeString("valoneNew")),
+ AddUpdateAttribute(toName("library:keytwo"), AttributeString("valtwoNew"))
)
assertResult(expected) {
generateAttributeOperations(existingLibraryAttrs, newAttrs, libraryAttributePredicate)
@@ -196,7 +218,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
val newAttrs = """{"library:keyone":"valoneNew"}""".parseJson.convertTo[AttributeMap]
val expected = Seq(
RemoveAttribute(toName("library:keytwo")),
- AddUpdateAttribute(toName("library:keyone"),AttributeString("valoneNew"))
+ AddUpdateAttribute(toName("library:keyone"), AttributeString("valoneNew"))
)
assertResult(expected) {
generateAttributeOperations(existingPublishedAttrs, newAttrs, libraryAttributePredicate)
@@ -205,12 +227,14 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"when new attrs include published flag" - {
"should not touch old published flag" in {
- val newAttrs = """{"library:published":"true","library:keyone":"valoneNew", "library:keytwo":"valtwoNew"}""".parseJson.convertTo[AttributeMap]
+ val newAttrs =
+ """{"library:published":"true","library:keyone":"valoneNew", "library:keytwo":"valtwoNew"}""".parseJson
+ .convertTo[AttributeMap]
val expected = Seq(
RemoveAttribute(toName("library:keythree")),
RemoveAttribute(toName("library:keyfour")),
- AddUpdateAttribute(toName("library:keyone"),AttributeString("valoneNew")),
- AddUpdateAttribute(toName("library:keytwo"),AttributeString("valtwoNew"))
+ AddUpdateAttribute(toName("library:keyone"), AttributeString("valoneNew")),
+ AddUpdateAttribute(toName("library:keytwo"), AttributeString("valtwoNew"))
)
assertResult(expected) {
generateAttributeOperations(existingLibraryAttrs, newAttrs, libraryAttributePredicate)
@@ -219,7 +243,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"when publishing a workspace" - {
"should add a library:published attribute" in {
- val expected = Seq(AddUpdateAttribute(toName("library:published"),AttributeBoolean(true)))
+ val expected = Seq(AddUpdateAttribute(toName("library:published"), AttributeBoolean(true)))
assertResult(expected) {
updatePublishAttribute(true)
}
@@ -235,18 +259,29 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with only library attributes in workspace" - {
"should generate indexable document" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -254,20 +289,31 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with only default attributes in workspace" - {
"should generate indexable document" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","discoverableByGroups")->AttributeValueList(Seq(AttributeString("Group1"))),
- AttributeName.withDefaultNS("baz")->AttributeString("defaultBaz"),
- AttributeName.withDefaultNS("qux")->AttributeString("defaultQux")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName.withDefaultNS("_discoverableByGroups") -> AttributeValueList(Seq(AttributeString("Group1"))),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "discoverableByGroups") -> AttributeValueList(Seq(AttributeString("Group1"))),
+ AttributeName.withDefaultNS("baz") -> AttributeString("defaultBaz"),
+ AttributeName.withDefaultNS("qux") -> AttributeString("defaultQux")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName.withDefaultNS("_discoverableByGroups") -> AttributeValueList(Seq(AttributeString("Group1"))),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -275,29 +321,100 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with discoverableByGroup attribute in workspace" - {
"should generate indexable document" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName.withDefaultNS("baz")->AttributeString("defaultBaz"),
- AttributeName.withDefaultNS("qux")->AttributeString("defaultQux")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName.withDefaultNS("baz") -> AttributeString("defaultBaz"),
+ AttributeName.withDefaultNS("qux") -> AttributeString("defaultQux")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
}
"should be the different for attribute operations" in {
- val empty = WorkspaceResponse(Some(WorkspaceAccessLevels.NoAccess), Some(false), Some(true), Some(false), testWorkspace.copy(attributes = Some(Map(discoverableWSAttribute->AttributeValueList(Seq.empty)))), Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- assert(isDiscoverableDifferent(empty, Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group1"))))))
- val one = WorkspaceResponse(Some(WorkspaceAccessLevels.NoAccess), Some(false), Some(true), Some(false), testWorkspace.copy(attributes = Some(Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group1")))))), Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- assert(isDiscoverableDifferent(one, Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group1"),AttributeString("group2"))))))
- assert(isDiscoverableDifferent(one, Map(discoverableWSAttribute->AttributeValueList(Seq.empty))))
- val two = WorkspaceResponse(Some(WorkspaceAccessLevels.NoAccess), Some(false), Some(true), Some(false), testWorkspace.copy(attributes = Some(Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group1"),AttributeString("group2")))))), Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- assert(isDiscoverableDifferent(two, Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group2"))))))
- assert(!isDiscoverableDifferent(two, Map(discoverableWSAttribute->AttributeValueList(Seq(AttributeString("group2"),AttributeString("group1"))))))
+ val empty = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.NoAccess),
+ Some(false),
+ Some(true),
+ Some(false),
+ testWorkspace.copy(attributes = Some(Map(discoverableWSAttribute -> AttributeValueList(Seq.empty)))),
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ assert(
+ isDiscoverableDifferent(empty,
+ Map(discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group1"))))
+ )
+ )
+ val one = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.NoAccess),
+ Some(false),
+ Some(true),
+ Some(false),
+ testWorkspace.copy(attributes =
+ Some(Map(discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group1")))))
+ ),
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ assert(
+ isDiscoverableDifferent(
+ one,
+ Map(
+ discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group1"), AttributeString("group2")))
+ )
+ )
+ )
+ assert(isDiscoverableDifferent(one, Map(discoverableWSAttribute -> AttributeValueList(Seq.empty))))
+ val two = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.NoAccess),
+ Some(false),
+ Some(true),
+ Some(false),
+ testWorkspace.copy(attributes =
+ Some(
+ Map(
+ discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group1"), AttributeString("group2")))
+ )
+ )
+ ),
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ assert(
+ isDiscoverableDifferent(two,
+ Map(discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group2"))))
+ )
+ )
+ assert(
+ !isDiscoverableDifferent(
+ two,
+ Map(
+ discoverableWSAttribute -> AttributeValueList(Seq(AttributeString("group2"), AttributeString("group1")))
+ )
+ )
+ )
}
}
"with no attributes in workspace" - {
@@ -305,12 +422,19 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
// the Map.empty below is currently the same as what's in testWorkspace;
// include explicitly here in case testWorkspace changes later
val w = testWorkspace.copy(attributes = Some(Map.empty))
- val expected = Document(testUUID.toString, Map(
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -319,20 +443,33 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"with just a (longish) description in workspace" - {
"should generate indexable document" in {
// https://hipsum.co/
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName.withDefaultNS("description")->AttributeString("Fingerstache copper mug edison bulb, actually austin mustache chartreuse bicycle rights." +
- " Plaid iceland artisan blog street art hammock, subway tile vice. Hammock put a bird on it pinterest tacos" +
- " kitsch gastropub. Chicharrones food truck edison bulb meh. Cardigan aesthetic vegan kitsch. Hell of" +
- " messenger bag chillwave hashtag, distillery thundercats aesthetic roof party lo-fi sustainable" +
- " jean shorts single-origin coffee. Distillery ugh green juice, hammock marfa gastropub mlkshk" +
- " chambray vegan aesthetic beard listicle skateboard ramps literally.")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName.withDefaultNS("description") -> AttributeString(
+ "Fingerstache copper mug edison bulb, actually austin mustache chartreuse bicycle rights." +
+ " Plaid iceland artisan blog street art hammock, subway tile vice. Hammock put a bird on it pinterest tacos" +
+ " kitsch gastropub. Chicharrones food truck edison bulb meh. Cardigan aesthetic vegan kitsch. Hell of" +
+ " messenger bag chillwave hashtag, distillery thundercats aesthetic roof party lo-fi sustainable" +
+ " jean shorts single-origin coffee. Distillery ugh green juice, hammock marfa gastropub mlkshk" +
+ " chambray vegan aesthetic beard listicle skateboard ramps literally."
+ )
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -340,20 +477,31 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with mixed library and default attributes in workspace" - {
"should generate indexable document" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar"),
- AttributeName.withDefaultNS("baz")->AttributeString("defaultBaz"),
- AttributeName.withDefaultNS("qux")->AttributeString("defaultQux")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("baz") -> AttributeString("defaultBaz"),
+ AttributeName.withDefaultNS("qux") -> AttributeString("defaultQux")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -361,22 +509,33 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with illegally-namespaced attributes in workspace" - {
"should generate indexable document" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar"),
- AttributeName.withDefaultNS("baz")->AttributeString("defaultBaz"),
- AttributeName.withDefaultNS("qux")->AttributeString("defaultQux"),
- AttributeName("nope","foo")->AttributeString("foo"),
- AttributeName("default","bar")->AttributeString("bar")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName("library","foo")->AttributeString("foo"),
- AttributeName("library","bar")->AttributeString("bar"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("baz") -> AttributeString("defaultBaz"),
+ AttributeName.withDefaultNS("qux") -> AttributeString("defaultQux"),
+ AttributeName("nope", "foo") -> AttributeString("foo"),
+ AttributeName("default", "bar") -> AttributeString("bar")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName("library", "foo") -> AttributeString("foo"),
+ AttributeName("library", "bar") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -384,48 +543,94 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
"with diseaseOntologyID attribute" - {
"should generate indexable document with parent info when DOID valid" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_9220")
- )))
- val parentData = ontologyDao.data("http://purl.obolibrary.org/obo/DOID_9220").head.parents.get.map(_.toESTermParent)
- val expected = Document(testUUID.toString, Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_9220"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("parents") -> AttributeValueRawJson(parentData.toJson.compactPrint),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_9220"
+ )
+ )
+ )
+ )
+ val parentData =
+ ontologyDao.data("http://purl.obolibrary.org/obo/DOID_9220").head.parents.get.map(_.toESTermParent)
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_9220"
+ ),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("parents") -> AttributeValueRawJson(parentData.toJson.compactPrint),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
}
"should generate indexable document with no parent info when DOID has no parents" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_4")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_4"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_4"
+ )
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_4"
+ ),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
}
"should generate indexable document with no parent info when DOID not valid" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_99999")
- )))
- val expected = Document(testUUID.toString, Map(
- AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString("http://purl.obolibrary.org/obo/DOID_99999"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_99999"
+ )
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ AttributeName.withLibraryNS("diseaseOntologyID") -> AttributeString(
+ "http://purl.obolibrary.org/obo/DOID_99999"
+ ),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -434,16 +639,27 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"with an ORSP id in attributes" - {
// most of this is unit-tested in DataUseRestrictionSupportSpec; the test here is intentionally high level
"should generate indexable document without any data use restrictions if ORSP id is present" in {
- val w = testWorkspace.copy(attributes = Some(Map(
- orspIdAttribute -> AttributeString("MOCK-NOTFOUND")
- )))
- val expected = Document(testUUID.toString, Map(
- orspIdAttribute -> AttributeString("MOCK-NOTFOUND"),
- AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
- AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
- AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
- AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(Seq(AttributeString(testGroup1Ref.membersGroupName.value), AttributeString(testGroup2Ref.membersGroupName.value)))
- ))
+ val w = testWorkspace.copy(attributes =
+ Some(
+ Map(
+ orspIdAttribute -> AttributeString("MOCK-NOTFOUND")
+ )
+ )
+ )
+ val expected = Document(
+ testUUID.toString,
+ Map(
+ orspIdAttribute -> AttributeString("MOCK-NOTFOUND"),
+ AttributeName.withDefaultNS("name") -> AttributeString(testWorkspace.name),
+ AttributeName.withDefaultNS("namespace") -> AttributeString(testWorkspace.namespace),
+ AttributeName.withDefaultNS("workspaceId") -> AttributeString(testWorkspace.workspaceId),
+ AttributeName.withDefaultNS("authorizationDomain") -> AttributeValueList(
+ Seq(AttributeString(testGroup1Ref.membersGroupName.value),
+ AttributeString(testGroup2Ref.membersGroupName.value)
+ )
+ )
+ )
+ )
assertResult(expected) {
Await.result(indexableDocuments(Seq(w), ontologyDao), dur).head
}
@@ -452,7 +668,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"in its runtime schema definition" - {
"has valid JSON" in {
val fileContents = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
- val jsonVal:Try[JsValue] = Try(fileContents.parseJson)
+ val jsonVal: Try[JsValue] = Try(fileContents.parseJson)
assert(jsonVal.isSuccess, "Schema should be valid json")
}
"has valid JSON Schema" in {
@@ -473,35 +689,37 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(31){ex.getViolationCount}
+ assertResult(31)(ex.getViolationCount)
}
"fails with one missing key" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:datasetName").compactPrint
+ val sampleData = defaultData.copy(defaultData.fields - "library:datasetName").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(1){ex.getViolationCount}
+ assertResult(1)(ex.getViolationCount)
assert(ex.getCausingExceptions.asScala.last.getMessage.contains("library:datasetName"))
}
"fails with two missing keys" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:datasetName"-"library:datasetOwner").compactPrint
+ val sampleData =
+ defaultData.copy(defaultData.fields - "library:datasetName" - "library:datasetOwner").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(2){ex.getViolationCount}
+ assertResult(2)(ex.getViolationCount)
}
"fails on a string that should be a number" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields.updated("library:numSubjects", JsString("isString"))).compactPrint
+ val sampleData =
+ defaultData.copy(defaultData.fields.updated("library:numSubjects", JsString("isString"))).compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(1){ex.getViolationCount}
+ assertResult(1)(ex.getViolationCount)
assert(ex.getCausingExceptions.asScala.last.getMessage.contains("library:numSubjects"))
}
"fails on a number out of bounds" in {
@@ -511,17 +729,18 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(1){ex.getViolationCount}
+ assertResult(1)(ex.getViolationCount)
assert(ex.getCausingExceptions.asScala.last.getMessage.contains("library:numSubjects"))
}
"fails on a value outside its enum" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields.updated("library:coverage", JsString("foobar"))).compactPrint
+ val sampleData =
+ defaultData.copy(defaultData.fields.updated("library:coverage", JsString("foobar"))).compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(1){ex.getViolationCount}
+ assertResult(1)(ex.getViolationCount)
// getSchemaValidationMessages is used at runtime to generate error messages to the user; it recurses through
// the exception and its causes.
val errMsgs = getSchemaValidationMessages(ex)
@@ -533,17 +752,18 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"fails on a string that should be an array" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields.updated("library:institute", JsString("isString"))).compactPrint
+ val sampleData =
+ defaultData.copy(defaultData.fields.updated("library:institute", JsString("isString"))).compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
- assertResult(1){ex.getViolationCount}
+ assertResult(1)(ex.getViolationCount)
assert(ex.getCausingExceptions.asScala.last.getMessage.contains("library:institute"))
}
"fails with missing ORSP key" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:orsp").compactPrint
+ val sampleData = defaultData.copy(defaultData.fields - "library:orsp").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
@@ -557,7 +777,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"fails with one missing key from the DUL set" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:NPU").compactPrint
+ val sampleData = defaultData.copy(defaultData.fields - "library:NPU").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
@@ -573,71 +793,90 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"validates on a complete metadata packet with all DUL keys and the correct option chosen" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields.updated("library:useLimitationOption", JsString("questionnaire"))).compactPrint
+ val sampleData = defaultData
+ .copy(defaultData.fields.updated("library:useLimitationOption", JsString("questionnaire")))
+ .compactPrint
validateJsonSchema(sampleData, testSchema)
}
"has error messages for top-level missing keys" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = LibraryServiceSpec.testLibraryMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:datasetName"-"library:datasetOwner").compactPrint
+ val sampleData =
+ defaultData.copy(defaultData.fields - "library:datasetName" - "library:datasetOwner").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
val errorMessages = getSchemaValidationMessages(ex)
- assert( errorMessages.contains("#: required key [library:datasetName] not found"),
- "does not have library:datasetName in error messages" )
- assert( errorMessages.contains("#: required key [library:datasetOwner] not found"),
- "does not have library:datasetOwner in error messages" )
+ assert(errorMessages.contains("#: required key [library:datasetName] not found"),
+ "does not have library:datasetName in error messages"
+ )
+ assert(errorMessages.contains("#: required key [library:datasetOwner] not found"),
+ "does not have library:datasetOwner in error messages"
+ )
}
"has error message for missing key from the DUR set" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields-"library:NPU").compactPrint
+ val sampleData = defaultData.copy(defaultData.fields - "library:NPU").compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
val errorMessages = getSchemaValidationMessages(ex)
- assert( errorMessages.contains("#: required key [library:NPU] not found"),
- "does not have library:NPU in error messages" )
+ assert(errorMessages.contains("#: required key [library:NPU] not found"),
+ "does not have library:NPU in error messages"
+ )
}
"has error message when primary DUL keys (GRU, HMB, DS) are specified but are all false/empty" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields ++ Map(
- "library:HMB" -> JsBoolean(false),
- "library:GRU" -> JsBoolean(false),
- "library:DS" -> JsArray.empty
- )).compactPrint
+ val sampleData = defaultData
+ .copy(
+ defaultData.fields ++ Map(
+ "library:HMB" -> JsBoolean(false),
+ "library:GRU" -> JsBoolean(false),
+ "library:DS" -> JsArray.empty
+ )
+ )
+ .compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
val errorMessages = getSchemaValidationMessages(ex)
- assert( errorMessages.contains("#/library:GRU: false is not a valid enum value") )
- assert( errorMessages.contains("#/library:HMB: false is not a valid enum value") )
- assert( errorMessages.contains("#/library:DS: expected minimum item count: 1, found: 0") )
+ assert(errorMessages.contains("#/library:GRU: false is not a valid enum value"))
+ assert(errorMessages.contains("#/library:HMB: false is not a valid enum value"))
+ assert(errorMessages.contains("#/library:DS: expected minimum item count: 1, found: 0"))
}
"has error message when library:DS is not an array" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields.updated(
- "library:DS", JsString("astring")
- )).compactPrint
+ val sampleData = defaultData
+ .copy(
+ defaultData.fields.updated(
+ "library:DS",
+ JsString("astring")
+ )
+ )
+ .compactPrint
val ex = intercept[ValidationException] {
validateJsonSchema(sampleData, testSchema)
}
val errorMessages = getSchemaValidationMessages(ex)
- assert( errorMessages.contains("#/library:DS: expected type: JSONArray, found: String") )
+ assert(errorMessages.contains("#/library:DS: expected type: JSONArray, found: String"))
}
"validates when multiple primary DUL keys are true" in {
val testSchema = FileUtils.readAllTextFromResource("library/attribute-definitions.json")
val defaultData = testLibraryDULMetadata.parseJson.asJsObject
- val sampleData = defaultData.copy(defaultData.fields ++ Map(
- "library:useLimitationOption" -> JsString("questionnaire"),
- "library:HMB" -> JsBoolean(true),
- "library:GRU" -> JsBoolean(true),
- "library:DS" -> JsArray(JsString("foo"))
- )).compactPrint
+ val sampleData = defaultData
+ .copy(
+ defaultData.fields ++ Map(
+ "library:useLimitationOption" -> JsString("questionnaire"),
+ "library:HMB" -> JsBoolean(true),
+ "library:GRU" -> JsBoolean(true),
+ "library:DS" -> JsArray(JsString("foo"))
+ )
+ )
+ .compactPrint
validateJsonSchema(sampleData, testSchema)
}
}
@@ -653,7 +892,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
"works for aggregatable string type" in {
val label = "library:attr"
val `type` = "string"
- val aggregateObject = JsObject("renderHint"->JsString("text"))
+ val aggregateObject = JsObject("renderHint" -> JsString("text"))
val expected = label -> ESType(`type`, false, true, true)
assertResult(expected) {
createType(label, AttributeDetail(`type`, None, Some(aggregateObject)))
@@ -674,7 +913,7 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
val label = "library:attr"
val `type` = "array"
val subtype = "string"
- val aggregateObject = JsObject("renderHint"->JsString("text"))
+ val aggregateObject = JsObject("renderHint" -> JsString("text"))
val detail = AttributeDetail(`type`, Some(AttributeDetail(subtype)), Some(aggregateObject))
val expected = label -> ESType(subtype, false, true, true)
assertResult(expected) {
@@ -720,9 +959,9 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authdomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -753,58 +992,68 @@ class LibraryServiceSpec extends BaseServiceSpec with AnyFreeSpecLike with Libra
}
}
"should return a list when some attributes" in {
- val workspaces = makeWorkspacesWithAttributes(Seq(
- Map(AttributeName.withLibraryNS("something") -> AttributeString("one")),
- Map(AttributeName.withLibraryNS("something") -> AttributeString("two")),
- Map(AttributeName.withLibraryNS("something") -> AttributeString("three"))
- ))
- assertResult(Set("one","two","three")) {
+ val workspaces = makeWorkspacesWithAttributes(
+ Seq(
+ Map(AttributeName.withLibraryNS("something") -> AttributeString("one")),
+ Map(AttributeName.withLibraryNS("something") -> AttributeString("two")),
+ Map(AttributeName.withLibraryNS("something") -> AttributeString("three"))
+ )
+ )
+ assertResult(Set("one", "two", "three")) {
uniqueWorkspaceStringAttributes(workspaces, AttributeName.withLibraryNS("something"))
}
}
"should not return duplicate attributes" in {
- val workspaces = makeWorkspacesWithAttributes(Seq(
- Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
- Map(AttributeName.withDefaultNS("something") -> AttributeString("two")),
- Map(AttributeName.withDefaultNS("something") -> AttributeString("two"))
- ))
- assertResult(Set("one","two")) {
+ val workspaces = makeWorkspacesWithAttributes(
+ Seq(
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("two")),
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("two"))
+ )
+ )
+ assertResult(Set("one", "two")) {
uniqueWorkspaceStringAttributes(workspaces, AttributeName.withDefaultNS("something"))
}
}
"should ignore non-string attributes" in {
- val workspaces = makeWorkspacesWithAttributes(Seq(
- Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
- Map(AttributeName.withDefaultNS("something") -> AttributeNumber(2)),
- Map(AttributeName.withDefaultNS("something") -> AttributeValueList(Seq(AttributeString("two"))))
- ))
+ val workspaces = makeWorkspacesWithAttributes(
+ Seq(
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
+ Map(AttributeName.withDefaultNS("something") -> AttributeNumber(2)),
+ Map(AttributeName.withDefaultNS("something") -> AttributeValueList(Seq(AttributeString("two"))))
+ )
+ )
assertResult(Set("one")) {
uniqueWorkspaceStringAttributes(workspaces, AttributeName.withDefaultNS("something"))
}
}
"should ignore attributes beyond our target name" in {
- val workspaces = makeWorkspacesWithAttributes(Seq(
- Map(
- AttributeName.withDefaultNS("something") -> AttributeString("one"),
- AttributeName.withLibraryNS("something") -> AttributeString("three") // different namespace
- ),
- Map(
- AttributeName.withDefaultNS("something") -> AttributeString("two"),
- AttributeName.withDefaultNS("hi") -> AttributeString("two") // different name
+ val workspaces = makeWorkspacesWithAttributes(
+ Seq(
+ Map(
+ AttributeName.withDefaultNS("something") -> AttributeString("one"),
+ AttributeName.withLibraryNS("something") -> AttributeString("three") // different namespace
+ ),
+ Map(
+ AttributeName.withDefaultNS("something") -> AttributeString("two"),
+ AttributeName.withDefaultNS("hi") -> AttributeString("two") // different name
+ )
)
- ))
- assertResult(Set("one","two")) {
+ )
+ assertResult(Set("one", "two")) {
uniqueWorkspaceStringAttributes(workspaces, AttributeName.withDefaultNS("something"))
}
}
"should ignore workspaces that don't have our target name" in {
- val workspaces = makeWorkspacesWithAttributes(Seq(
- Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
- Map(AttributeName.withDefaultNS("somethingElse") -> AttributeString("two")),
- Map(),
- Map(AttributeName.withDefaultNS("something") -> AttributeString("four"))
- ))
- assertResult(Set("one","four")) {
+ val workspaces = makeWorkspacesWithAttributes(
+ Seq(
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("one")),
+ Map(AttributeName.withDefaultNS("somethingElse") -> AttributeString("two")),
+ Map(),
+ Map(AttributeName.withDefaultNS("something") -> AttributeString("four"))
+ )
+ )
+ assertResult(Set("one", "four")) {
uniqueWorkspaceStringAttributes(workspaces, AttributeName.withDefaultNS("something"))
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceSpec.scala
index 371c26558..338b36abb 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceSpec.scala
@@ -36,7 +36,8 @@ class NihServiceSpec extends AnyFlatSpec with Matchers {
val usernames = Map("fcSubjectId1" -> "nihUsername1", "fcSubjectId2" -> "nihUsername2")
- val expiretimes1 = Map("fcSubjectId1" -> DateUtils.nowMinus24Hours.toString, "fcSubjectId2" -> DateUtils.nowPlus24Hours.toString)
+ val expiretimes1 =
+ Map("fcSubjectId1" -> DateUtils.nowMinus24Hours.toString, "fcSubjectId2" -> DateUtils.nowPlus24Hours.toString)
val currentUsernames1 = Map("fcSubjectId2" -> "nihUsername2")
val expiretimes2 = Map("fcSubjectId1" -> DateUtils.nowMinus24Hours.toString)
@@ -61,27 +62,40 @@ class NihServiceSpec extends AnyFlatSpec with Matchers {
}
it should "honor expiration of JWTs" in {
- //Set up a Mock Shibboleth with a public key that matches a private key we have access to
- //The private key that matches the public key in MockShibbolethDao is lost to time
+ // Set up a Mock Shibboleth with a public key that matches a private key we have access to
+ // The private key that matches the public key in MockShibbolethDao is lost to time
val keypairGen = KeyPairGenerator.getInstance("RSA")
keypairGen.initialize(1024)
val keypair = keypairGen.generateKeyPair()
val privKey: PrivateKey = keypair.getPrivate
- val pubKey: String = s"-----BEGIN PUBLIC KEY-----\n${Base64.getEncoder.encodeToString(keypair.getPublic.getEncoded)}\n-----END PUBLIC KEY-----"
+ val pubKey: String =
+ s"-----BEGIN PUBLIC KEY-----\n${Base64.getEncoder.encodeToString(keypair.getPublic.getEncoded)}\n-----END PUBLIC KEY-----"
val mockShibboleth = mock[ShibbolethDAO]
when(mockShibboleth.getPublicKey()).thenReturn(Future.successful(pubKey))
val nihServiceMock = new NihService(samDao, thurloeDao, googleDao, mockShibboleth, ecmDao)
// expires in 15 minutes
- val expiresInTheFuture: Long = Instant.ofEpochMilli(System.currentTimeMillis() + (15 * 60 * 1000)).getEpochSecond // 15 minutes * 60 seconds * 1000 milliseconds
- val validStr = Jwt.encode(JwtClaim("{\"eraCommonsUsername\": \"firecloud-dev\", \"iat\": 1652937842}").expiresAt(expiresInTheFuture), privKey, JwtAlgorithm.RS256)
+ val expiresInTheFuture: Long =
+ Instant
+ .ofEpochMilli(System.currentTimeMillis() + (15 * 60 * 1000))
+ .getEpochSecond // 15 minutes * 60 seconds * 1000 milliseconds
+ val validStr = Jwt.encode(
+ JwtClaim("{\"eraCommonsUsername\": \"firecloud-dev\", \"iat\": 1652937842}").expiresAt(expiresInTheFuture),
+ privKey,
+ JwtAlgorithm.RS256
+ )
val validJwt = JWTWrapper(validStr)
// expired 1 minute ago
- val expiresInThePast: Long = Instant.ofEpochMilli(System.currentTimeMillis() - (60 * 1000)).getEpochSecond // 60 seconds * 1000 milliseconds
- val expStr = Jwt.encode(JwtClaim("{\"eraCommonsUsername\": \"firecloud-dev\", \"iat\": 1655232707}").expiresAt(expiresInThePast), privKey, JwtAlgorithm.RS256)
+ val expiresInThePast: Long =
+ Instant.ofEpochMilli(System.currentTimeMillis() - (60 * 1000)).getEpochSecond // 60 seconds * 1000 milliseconds
+ val expStr = Jwt.encode(
+ JwtClaim("{\"eraCommonsUsername\": \"firecloud-dev\", \"iat\": 1655232707}").expiresAt(expiresInThePast),
+ privKey,
+ JwtAlgorithm.RS256
+ )
val expJwt = JWTWrapper(expStr)
val userToken: UserInfo = UserInfo("dummyToken", thurloeDao.TCGA_AND_TARGET_LINKED)
@@ -89,14 +103,13 @@ class NihServiceSpec extends AnyFlatSpec with Matchers {
val resp1 = Await.result(nihServiceMock.updateNihLinkAndSyncSelf(userToken, validJwt), 3.seconds)
val resp2 = Await.result(nihServiceMock.updateNihLinkAndSyncSelf(userToken, expJwt), 3.seconds)
-
resp1 match {
- case _@ RequestComplete((StatusCodes.OK, _)) => succeed
- case x => fail(s"Unexpired token should be accepted. Response was: $x")
+ case _ @RequestComplete((StatusCodes.OK, _)) => succeed
+ case x => fail(s"Unexpired token should be accepted. Response was: $x")
}
resp2 match {
- case _@ RequestComplete((StatusCodes.BadRequest, errorReport: ErrorReport)) =>
+ case _ @RequestComplete((StatusCodes.BadRequest, errorReport: ErrorReport)) =>
errorReport.message shouldBe "Failed to decode JWT"
case x =>
fail(s"Expired token should fail at the decode stage. Response was: $x")
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceUnitSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceUnitSpec.scala
index c3bb9d96d..8b56632df 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceUnitSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/NihServiceUnitSpec.scala
@@ -4,9 +4,33 @@ import akka.http.scaladsl.model.headers.OAuth2BearerToken
import akka.http.scaladsl.model.{StatusCode, StatusCodes}
import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.FireCloudException
-import org.broadinstitute.dsde.firecloud.dataaccess.{ExternalCredsDAO, GoogleServicesDAO, SamDAO, ShibbolethDAO, ThurloeDAO}
-import org.broadinstitute.dsde.firecloud.model.{FireCloudKeyValue, FireCloudManagedGroupMembership, JWTWrapper, LinkedEraAccount, ManagedGroupRoles, NihLink, ProfileWrapper, SamUser, UserInfo, WithAccessToken, WorkbenchUserInfo}
-import org.broadinstitute.dsde.workbench.model.{AzureB2CId, GoogleSubjectId, WorkbenchEmail, WorkbenchGroupName, WorkbenchUserId}
+import org.broadinstitute.dsde.firecloud.dataaccess.{
+ ExternalCredsDAO,
+ GoogleServicesDAO,
+ SamDAO,
+ ShibbolethDAO,
+ ThurloeDAO
+}
+import org.broadinstitute.dsde.firecloud.model.{
+ FireCloudKeyValue,
+ FireCloudManagedGroupMembership,
+ JWTWrapper,
+ LinkedEraAccount,
+ ManagedGroupRoles,
+ NihLink,
+ ProfileWrapper,
+ SamUser,
+ UserInfo,
+ WithAccessToken,
+ WorkbenchUserInfo
+}
+import org.broadinstitute.dsde.workbench.model.{
+ AzureB2CId,
+ GoogleSubjectId,
+ WorkbenchEmail,
+ WorkbenchGroupName,
+ WorkbenchUserId
+}
import org.broadinstitute.dsde.rawls.model.ErrorReport
import org.joda.time.DateTime
import org.mockito.{ArgumentMatchers, Mockito}
@@ -45,13 +69,20 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
val userTcgaOnly = genSamUser();
val userTargetOnly = genSamUser();
- var userNoAllowlistsLinkedAccount = LinkedEraAccount(userNoAllowlists.id.value, "nihUsername1", new DateTime().plusDays(30))
- var userTcgaAndTargetLinkedAccount = LinkedEraAccount(userTcgaAndTarget.id.value, "nihUsername2", new DateTime().plusDays(30))
+ var userNoAllowlistsLinkedAccount =
+ LinkedEraAccount(userNoAllowlists.id.value, "nihUsername1", new DateTime().plusDays(30))
+ var userTcgaAndTargetLinkedAccount =
+ LinkedEraAccount(userTcgaAndTarget.id.value, "nihUsername2", new DateTime().plusDays(30))
var userTcgaOnlyLinkedAccount = LinkedEraAccount(userTcgaOnly.id.value, "nihUsername3", new DateTime().plusDays(30))
- var userTargetOnlyLinkedAccount = LinkedEraAccount(userTargetOnly.id.value, "nihUsername4", new DateTime().plusDays(30))
+ var userTargetOnlyLinkedAccount =
+ LinkedEraAccount(userTargetOnly.id.value, "nihUsername4", new DateTime().plusDays(30))
val samUsers = Seq(userNoLinkedAccount, userNoAllowlists, userTcgaAndTarget, userTcgaOnly, userTargetOnly)
- val linkedAccounts = Seq(userNoAllowlistsLinkedAccount, userTcgaAndTargetLinkedAccount, userTcgaOnlyLinkedAccount, userTargetOnlyLinkedAccount)
+ val linkedAccounts = Seq(userNoAllowlistsLinkedAccount,
+ userTcgaAndTargetLinkedAccount,
+ userTcgaOnlyLinkedAccount,
+ userTargetOnlyLinkedAccount
+ )
val idToSamUser = samUsers.groupBy(_.id).view.mapValues(_.head).toMap
@@ -69,7 +100,7 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
userTargetOnlyLinkedAccount.linkedExternalId -> userTargetOnlyLinkedAccount
)
- val samUserToGroups = {
+ val samUserToGroups =
Map(
userNoLinkedAccount.id -> Set("other-group"),
userNoAllowlists.id -> Set("other-group"),
@@ -77,25 +108,22 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
userTcgaOnly.id -> Set("TCGA-dbGaP-Authorized", "other-group"),
userTargetOnly.id -> Set("TARGET-dbGaP-Authorized", "other-group")
)
- }
- val samGroupMemberships = {
+ val samGroupMemberships =
Map(
"TCGA-dbGaP-Authorized" -> Set(userTcgaAndTarget.id, userTcgaOnly.id),
"TARGET-dbGaP-Authorized" -> Set(userTcgaAndTarget.id, userTargetOnly.id),
"this-doesnt-matter" -> Set.empty
)
- }
- val accessTokenToUser = {
+ val accessTokenToUser =
Map(
UUID.randomUUID().toString -> userNoLinkedAccount.id,
- UUID.randomUUID().toString -> userNoAllowlists.id,
- UUID.randomUUID().toString -> userTcgaAndTarget.id,
- UUID.randomUUID().toString -> userTcgaOnly.id,
- UUID.randomUUID().toString -> userTargetOnly.id
+ UUID.randomUUID().toString -> userNoAllowlists.id,
+ UUID.randomUUID().toString -> userTcgaAndTarget.id,
+ UUID.randomUUID().toString -> userTcgaOnly.id,
+ UUID.randomUUID().toString -> userTargetOnly.id
)
- }
val userToAccessToken = accessTokenToUser.map(_.swap)
val adminAccessToken = UUID.randomUUID().toString
@@ -110,7 +138,10 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
mockEcmUsers()
val user = userTcgaAndTarget
val userInfo = UserInfo(userToAccessToken(user.id), userTcgaAndTarget.id.value)
- val nihStatus = Await.result(nihService.getNihStatus(userInfo), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[NihStatus]].response
+ val nihStatus = Await
+ .result(nihService.getNihStatus(userInfo), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[NihStatus]]
+ .response
nihStatus.linkedNihUsername shouldBe Some(linkedAccountsBySamUserId(WorkbenchUserId(userInfo.id)).linkedExternalId)
verifyNoInteractions(thurloeDao)
verify(ecmDao).getLinkedAccount(userInfo)
@@ -121,7 +152,10 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
when(ecmDao.getLinkedAccount(any[UserInfo])).thenReturn(Future.successful(None))
val user = userTcgaAndTarget
val userInfo = UserInfo(userToAccessToken(user.id), userTcgaAndTarget.id.value)
- val nihStatus = Await.result(nihService.getNihStatus(userInfo), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[NihStatus]].response
+ val nihStatus = Await
+ .result(nihService.getNihStatus(userInfo), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[NihStatus]]
+ .response
nihStatus.linkedNihUsername shouldBe Some(linkedAccountsBySamUserId(WorkbenchUserId(userInfo.id)).linkedExternalId)
verify(thurloeDao).getAllKVPs(user.id.value, userInfo)
}
@@ -131,35 +165,55 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
when(ecmDao.getLinkedAccount(any[UserInfo])).thenReturn(Future.successful(None))
val user = userNoLinkedAccount
val userInfo = UserInfo(userToAccessToken(user.id), userNoLinkedAccount.id.value)
- val nihStatus = Await.result(nihService.getNihStatus(userInfo), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[StatusCode]].response
+ val nihStatus = Await
+ .result(nihService.getNihStatus(userInfo), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[StatusCode]]
+ .response
nihStatus should be(StatusCodes.NotFound)
}
it should "return None if a user if found in Thurloe, but no linkedNihUsername exists" in {
when(thurloeDao.getAllKVPs(any[String], any[WithAccessToken]))
- .thenReturn(Future.successful(Some(ProfileWrapper(userNoLinkedAccount.id.value, List(FireCloudKeyValue(Some("email"), Some(userNoLinkedAccount.email.value)))))))
+ .thenReturn(
+ Future.successful(
+ Some(
+ ProfileWrapper(userNoLinkedAccount.id.value,
+ List(FireCloudKeyValue(Some("email"), Some(userNoLinkedAccount.email.value)))
+ )
+ )
+ )
+ )
when(ecmDao.getLinkedAccount(any[UserInfo])).thenReturn(Future.successful(None))
val user = userNoLinkedAccount
val userInfo = UserInfo(userToAccessToken(user.id), userNoLinkedAccount.id.value)
- val nihStatus = Await.result(nihService.getNihStatus(userInfo), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[StatusCode]].response
+ val nihStatus = Await
+ .result(nihService.getNihStatus(userInfo), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[StatusCode]]
+ .response
nihStatus should be(StatusCodes.NotFound)
}
private def verifyTargetGroupSynced(): Unit = {
val emailsToSync = Set(WorkbenchEmail(userTcgaAndTarget.email.value), WorkbenchEmail(userTargetOnly.email.value))
- val nihStatus = Await.result(nihService.syncAllowlistAllUsers("TARGET"), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[StatusCode]].response
+ val nihStatus = Await
+ .result(nihService.syncAllowlistAllUsers("TARGET"), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[StatusCode]]
+ .response
nihStatus should be(StatusCodes.NoContent)
verify(googleDao, never()).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "tcga-whitelist.txt")
- verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
+ verify(googleDao, times(1))
+ .getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
verify(samDao, times(1)).overwriteGroupMembers(
ArgumentMatchers.eq(WorkbenchGroupName("TARGET-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(emailsToSync)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(emailsToSync))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, never()).overwriteGroupMembers(
ArgumentMatchers.eq(WorkbenchGroupName("other-group")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(emailsToSync)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(emailsToSync))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
}
"syncWhitelistAllUsers" should "sync all users for a single allowlist from ECM" in {
@@ -184,62 +238,103 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
}
it should "sync all users by combining responses from ECM and Thurloe if they contain different users" in {
- when(ecmDao.getActiveLinkedEraAccounts(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(Seq(userTargetOnlyLinkedAccount)))
+ when(ecmDao.getActiveLinkedEraAccounts(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenReturn(Future.successful(Seq(userTargetOnlyLinkedAccount)))
when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("email")))
- .thenReturn(Future.successful(samUsers.filter(u => !u.id.equals(userTargetOnly.id)).map(user => user.id.value -> user.email.value).toMap))
+ .thenReturn(
+ Future.successful(
+ samUsers.filter(u => !u.id.equals(userTargetOnly.id)).map(user => user.id.value -> user.email.value).toMap
+ )
+ )
when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkedNihUsername")))
- .thenReturn(Future.successful(linkedAccountsBySamUserId.removed(WorkbenchUserId(userTargetOnlyLinkedAccount.userId)).map(tup => (tup._1.value, tup._2.linkedExternalId))))
+ .thenReturn(
+ Future.successful(
+ linkedAccountsBySamUserId
+ .removed(WorkbenchUserId(userTargetOnlyLinkedAccount.userId))
+ .map(tup => (tup._1.value, tup._2.linkedExternalId))
+ )
+ )
when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkExpireTime")))
- .thenReturn(Future.successful(linkedAccountsBySamUserId.removed(WorkbenchUserId(userTargetOnlyLinkedAccount.userId)).map(tup => (tup._1.value, (tup._2.linkExpireTime.getMillis / 1000L).toString))))
+ .thenReturn(
+ Future.successful(
+ linkedAccountsBySamUserId
+ .removed(WorkbenchUserId(userTargetOnlyLinkedAccount.userId))
+ .map(tup => (tup._1.value, (tup._2.linkExpireTime.getMillis / 1000L).toString))
+ )
+ )
verifyTargetGroupSynced()
}
it should "respond with NOT FOUND if no allowlist is found" in {
- val nihStatus = Await.result(nihService.syncAllowlistAllUsers("NOT_FOUND"), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[StatusCode]].response
+ val nihStatus = Await
+ .result(nihService.syncAllowlistAllUsers("NOT_FOUND"), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[StatusCode]]
+ .response
nihStatus should be(StatusCodes.NotFound)
}
- it should "recover from a Sam API Exception with a FirecloudException" in {
- val errorMessage = "Oops :("
- Mockito.reset(samDao)
- mockEcmUsers()
- mockThurloeUsers()
- when(samDao.getUsersForIds(any[Seq[WorkbenchUserId]])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenAnswer(args => {
- val userIds = args.getArgument(0).asInstanceOf[Seq[WorkbenchUserId]]
- Future.successful(samUsers.filter(user => userIds.contains(WorkbenchUserId(user.id.value))).map(user => WorkbenchUserInfo(user.id.value, user.email.value)))
- })
- when(samDao.overwriteGroupMembers(any(), any(), any())(any())).thenReturn(Future.failed(new RuntimeException(errorMessage)))
- when(samDao.listGroups(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(samGroupMemberships.keys.map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.org", "member")).toList))
- when(samDao.createGroup(any[WorkbenchGroupName])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(()))
-
- val ex = intercept[FireCloudException] {
- Await.result(nihService.syncAllowlistAllUsers("TARGET"), Duration.Inf)
- }
- ex.getMessage should include(errorMessage)
- }
+ it should "recover from a Sam API Exception with a FirecloudException" in {
+ val errorMessage = "Oops :("
+ Mockito.reset(samDao)
+ mockEcmUsers()
+ mockThurloeUsers()
+ when(samDao.getUsersForIds(any[Seq[WorkbenchUserId]])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenAnswer { args =>
+ val userIds = args.getArgument(0).asInstanceOf[Seq[WorkbenchUserId]]
+ Future.successful(
+ samUsers
+ .filter(user => userIds.contains(WorkbenchUserId(user.id.value)))
+ .map(user => WorkbenchUserInfo(user.id.value, user.email.value))
+ )
+ }
+ when(samDao.overwriteGroupMembers(any(), any(), any())(any()))
+ .thenReturn(Future.failed(new RuntimeException(errorMessage)))
+ when(samDao.listGroups(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(
+ Future.successful(
+ samGroupMemberships.keys
+ .map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.org", "member"))
+ .toList
+ )
+ )
+ when(samDao.createGroup(any[WorkbenchGroupName])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenReturn(Future.successful(()))
+
+ val ex = intercept[FireCloudException] {
+ Await.result(nihService.syncAllowlistAllUsers("TARGET"), Duration.Inf)
+ }
+ ex.getMessage should include(errorMessage)
+ }
"syncAllNihWhitelistsAllUsers" should "sync all allowlists for all users" in {
mockEcmUsers()
when(thurloeDao.getAllUserValuesForKey(any[String])).thenReturn(Future.successful(Map.empty))
- val targetEmailsToSync = Set(WorkbenchEmail(userTcgaAndTarget.email.value), WorkbenchEmail(userTargetOnly.email.value))
+ val targetEmailsToSync =
+ Set(WorkbenchEmail(userTcgaAndTarget.email.value), WorkbenchEmail(userTargetOnly.email.value))
val tcgaUsersToSync = Set(WorkbenchEmail(userTcgaAndTarget.email.value), WorkbenchEmail(userTcgaOnly.email.value))
- val nihStatus = Await.result(nihService.syncAllNihAllowlistsAllUsers(), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[StatusCode]].response
+ val nihStatus = Await
+ .result(nihService.syncAllNihAllowlistsAllUsers(), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[StatusCode]]
+ .response
nihStatus should be(StatusCodes.NoContent)
verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "tcga-whitelist.txt")
- verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
+ verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket,
+ "target-whitelist.txt"
+ )
verify(samDao, times(1)).overwriteGroupMembers(
ArgumentMatchers.eq(WorkbenchGroupName("TARGET-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(targetEmailsToSync)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(targetEmailsToSync))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, times(1)).overwriteGroupMembers(
ArgumentMatchers.eq(WorkbenchGroupName("TCGA-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(tcgaUsersToSync)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.argThat((list: List[WorkbenchEmail]) => list.toSet.equals(tcgaUsersToSync))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
}
"updateNihLinkAndSyncSelf" should "decode a JWT from Shibboleth and sync allowlists for a user" in {
@@ -247,102 +342,147 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
mockEcmUsers()
mockThurloeUsers()
val user = userTcgaOnly
- val userInfo = UserInfo(user.email.value, OAuth2BearerToken(user.id.value), Instant.now().plusSeconds(60).getEpochSecond, user.id.value)
+ val userInfo = UserInfo(user.email.value,
+ OAuth2BearerToken(user.id.value),
+ Instant.now().plusSeconds(60).getEpochSecond,
+ user.id.value
+ )
val linkedAccount = userTcgaOnlyLinkedAccount
val jwt = jwtForUser(linkedAccount)
- val (statusCode, nihStatus) = Await.result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[(StatusCode, NihStatus)]].response
+ val (statusCode, nihStatus) = Await
+ .result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[(StatusCode, NihStatus)]]
+ .response
nihStatus.linkedNihUsername should be(Some(linkedAccount.linkedExternalId))
nihStatus.linkExpireTime should be(Some(linkedAccount.linkExpireTime.getMillis / 1000L))
- nihStatus.datasetPermissions should be(Set(
- NihDatasetPermission("BROKEN", authorized = false),
- NihDatasetPermission("TARGET", authorized = false),
- NihDatasetPermission("TCGA", authorized = true)))
+ nihStatus.datasetPermissions should be(
+ Set(NihDatasetPermission("BROKEN", authorized = false),
+ NihDatasetPermission("TARGET", authorized = false),
+ NihDatasetPermission("TCGA", authorized = true)
+ )
+ )
statusCode should be(StatusCodes.OK)
verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "tcga-whitelist.txt")
- verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
+ verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket,
+ "target-whitelist.txt"
+ )
verify(samDao, times(1)).removeGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TARGET-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, times(1)).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TCGA-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, never()).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("this-doesnt-matter")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, never()).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("other-group")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
}
it should "continue, but return an error of ECM returns an error" in {
mockShibbolethDAO()
mockThurloeUsers()
- when(ecmDao.putLinkedEraAccount(any[LinkedEraAccount])(any[WithAccessToken])).thenReturn(Future.failed(new RuntimeException("ECM is down")))
+ when(ecmDao.putLinkedEraAccount(any[LinkedEraAccount])(any[WithAccessToken]))
+ .thenReturn(Future.failed(new RuntimeException("ECM is down")))
val user = userTcgaOnly
- val userInfo = UserInfo(user.email.value, OAuth2BearerToken(user.id.value), Instant.now().plusSeconds(60).getEpochSecond, user.id.value)
+ val userInfo = UserInfo(user.email.value,
+ OAuth2BearerToken(user.id.value),
+ Instant.now().plusSeconds(60).getEpochSecond,
+ user.id.value
+ )
val linkedAccount = userTcgaOnlyLinkedAccount
val jwt = jwtForUser(linkedAccount)
- val (statusCode, errorReport) = Await.result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[(StatusCode, ErrorReport)]].response
+ val (statusCode, errorReport) = Await
+ .result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[(StatusCode, ErrorReport)]]
+ .response
errorReport.message should include("Error updating NIH link")
statusCode should be(StatusCodes.InternalServerError)
verify(thurloeDao, times(1)).saveKeyValues(userInfo, NihLink(linkedAccount).propertyValueMap)
verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "tcga-whitelist.txt")
- verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
+ verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket,
+ "target-whitelist.txt"
+ )
verify(samDao, times(1)).removeGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TARGET-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, times(1)).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TCGA-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, never()).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("this-doesnt-matter")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
}
it should "continue, but return an error of Thurloe returns an error" in {
mockShibbolethDAO()
mockEcmUsers()
- when(thurloeDao.saveKeyValues(any[UserInfo], any[Map[String, String]])).thenReturn(Future.successful(Failure(new RuntimeException("Thurloe is down"))))
+ when(thurloeDao.saveKeyValues(any[UserInfo], any[Map[String, String]]))
+ .thenReturn(Future.successful(Failure(new RuntimeException("Thurloe is down"))))
val user = userTcgaOnly
- val userInfo = UserInfo(user.email.value, OAuth2BearerToken(user.id.value), Instant.now().plusSeconds(60).getEpochSecond, user.id.value)
+ val userInfo = UserInfo(user.email.value,
+ OAuth2BearerToken(user.id.value),
+ Instant.now().plusSeconds(60).getEpochSecond,
+ user.id.value
+ )
val linkedAccount = userTcgaOnlyLinkedAccount
val jwt = jwtForUser(linkedAccount)
- val (statusCode, errorReport) = Await.result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf).asInstanceOf[PerRequest.RequestComplete[(StatusCode, ErrorReport)]].response
+ val (statusCode, errorReport) = Await
+ .result(nihService.updateNihLinkAndSyncSelf(userInfo, jwt), Duration.Inf)
+ .asInstanceOf[PerRequest.RequestComplete[(StatusCode, ErrorReport)]]
+ .response
errorReport.message should include("Error updating NIH link")
statusCode should be(StatusCodes.InternalServerError)
// Tokens from Shibboleth have second, not millisecond, precision
- var expectedLinkedAccount = linkedAccount.copy(linkExpireTime = linkedAccount.linkExpireTime.minusMillis(linkedAccount.linkExpireTime.getMillisOfSecond))
+ var expectedLinkedAccount = linkedAccount.copy(linkExpireTime =
+ linkedAccount.linkExpireTime.minusMillis(linkedAccount.linkExpireTime.getMillisOfSecond)
+ )
- verify(ecmDao, times(1)).putLinkedEraAccount(ArgumentMatchers.eq(expectedLinkedAccount))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ verify(ecmDao, times(1)).putLinkedEraAccount(ArgumentMatchers.eq(expectedLinkedAccount))(
+ ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))
+ )
verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "tcga-whitelist.txt")
- verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket, "target-whitelist.txt")
+ verify(googleDao, times(1)).getBucketObjectAsInputStream(FireCloudConfig.Nih.whitelistBucket,
+ "target-whitelist.txt"
+ )
verify(samDao, times(1)).removeGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TARGET-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, times(1)).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TCGA-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
verify(samDao, never()).addGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("this-doesnt-matter")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
}
"unlinkNihAccountAndSyncSelf" should "remove links from ECM and Thurloe, and sync allowlists" in {
@@ -350,14 +490,21 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
mockThurloeUsers()
val user = userTcgaOnly
- val userInfo = UserInfo(user.email.value, OAuth2BearerToken(user.id.value), Instant.now().plusSeconds(60).getEpochSecond, user.id.value)
+ val userInfo = UserInfo(user.email.value,
+ OAuth2BearerToken(user.id.value),
+ Instant.now().plusSeconds(60).getEpochSecond,
+ user.id.value
+ )
Await.result(nihService.unlinkNihAccountAndSyncSelf(userInfo), Duration.Inf)
verify(samDao, times(1)).removeGroupMember(
ArgumentMatchers.eq(WorkbenchGroupName("TCGA-dbGaP-Authorized")),
ArgumentMatchers.eq(ManagedGroupRoles.Member),
- ArgumentMatchers.eq(WorkbenchEmail(user.email.value)))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
- verify(ecmDao, times(1)).deleteLinkedEraAccount(ArgumentMatchers.eq(userInfo))(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ ArgumentMatchers.eq(WorkbenchEmail(user.email.value))
+ )(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))
+ verify(ecmDao, times(1)).deleteLinkedEraAccount(ArgumentMatchers.eq(userInfo))(
+ ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))
+ )
verify(thurloeDao, times(1)).deleteKeyValue(user.id.value, "linkedNihUsername", userInfo)
verify(thurloeDao, times(1)).deleteKeyValue(user.id.value, "linkExpireTime", userInfo)
@@ -365,114 +512,150 @@ class NihServiceUnitSpec extends AnyFlatSpec with Matchers with BeforeAndAfterEa
private def mockSamUsers(): Unit = {
when(samDao.overwriteGroupMembers(any(), any(), any())(any())).thenReturn(Future.successful(()))
- when(samDao.listGroups(any[WithAccessToken])).thenAnswer(args => {
+ when(samDao.listGroups(any[WithAccessToken])).thenAnswer { args =>
Future {
val userInfo = args.getArgument(0).asInstanceOf[WithAccessToken]
if (userInfo.accessToken.token.equals(adminAccessToken)) {
- samGroupMemberships.keys.map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.org", "member")).toList
+ samGroupMemberships.keys
+ .map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.org", "member"))
+ .toList
}
val samUser = accessTokenToUser.get(userInfo.accessToken.token)
- samUser.map(samUserToGroups(_).map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.com", "member")).toList)
+ samUser
+ .map(
+ samUserToGroups(_)
+ .map(groupName => FireCloudManagedGroupMembership(groupName, groupName + "@firecloud.com", "member"))
+ .toList
+ )
.getOrElse(List.empty)
}
- })
+ }
when(samDao.addGroupMember(any(), any(), any())(any())).thenReturn(Future.successful(()))
when(samDao.removeGroupMember(any(), any(), any())(any())).thenReturn(Future.successful(()))
- when(samDao.isGroupMember(any[WorkbenchGroupName], any[UserInfo])).thenAnswer(args => Future {
- val groupName = args.getArgument(0).asInstanceOf[WorkbenchGroupName]
- val userInfo = args.getArgument(1).asInstanceOf[UserInfo]
- samGroupMemberships.get(groupName.value).exists(_.exists(_.value == userInfo.id))
- })
- when(samDao.createGroup(any[WorkbenchGroupName])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(()))
- when(samDao.getUsersForIds(any[Seq[WorkbenchUserId]])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenAnswer(args => {
- val userIds = args.getArgument(0).asInstanceOf[Seq[WorkbenchUserId]]
- Future.successful(samUsers.filter(user => userIds.contains(WorkbenchUserId(user.id.value))).map(user => WorkbenchUserInfo(user.id.value, user.email.value)))
- })
+ when(samDao.isGroupMember(any[WorkbenchGroupName], any[UserInfo])).thenAnswer(args =>
+ Future {
+ val groupName = args.getArgument(0).asInstanceOf[WorkbenchGroupName]
+ val userInfo = args.getArgument(1).asInstanceOf[UserInfo]
+ samGroupMemberships.get(groupName.value).exists(_.exists(_.value == userInfo.id))
+ }
+ )
+ when(samDao.createGroup(any[WorkbenchGroupName])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenReturn(Future.successful(()))
+ when(samDao.getUsersForIds(any[Seq[WorkbenchUserId]])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenAnswer { args =>
+ val userIds = args.getArgument(0).asInstanceOf[Seq[WorkbenchUserId]]
+ Future.successful(
+ samUsers
+ .filter(user => userIds.contains(WorkbenchUserId(user.id.value)))
+ .map(user => WorkbenchUserInfo(user.id.value, user.email.value))
+ )
+ }
}
private def mockEcmUsers(): Unit = {
- when(ecmDao.getLinkedAccount(any[UserInfo])).thenAnswer(args => {
+ when(ecmDao.getLinkedAccount(any[UserInfo])).thenAnswer { args =>
val userInfo = args.getArgument(0).asInstanceOf[UserInfo]
Future.successful(linkedAccountsBySamUserId.get(WorkbenchUserId(userInfo.id)))
- })
- when(ecmDao.putLinkedEraAccount(any[LinkedEraAccount])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(()))
- when(ecmDao.deleteLinkedEraAccount(any[UserInfo])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenReturn(Future.successful(()))
-
- when(ecmDao.getLinkedEraAccountForUsername(any[String])(ArgumentMatchers.eq(UserInfo(adminAccessToken, "")))).thenAnswer(args => {
- val externalId = args.getArgument(0).asInstanceOf[String]
- Future.successful(linkedAccountsByExternalId.get(externalId))
- })
+ }
+ when(ecmDao.putLinkedEraAccount(any[LinkedEraAccount])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenReturn(Future.successful(()))
+ when(ecmDao.deleteLinkedEraAccount(any[UserInfo])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenReturn(Future.successful(()))
+
+ when(ecmDao.getLinkedEraAccountForUsername(any[String])(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
+ .thenAnswer { args =>
+ val externalId = args.getArgument(0).asInstanceOf[String]
+ Future.successful(linkedAccountsByExternalId.get(externalId))
+ }
when(ecmDao.getActiveLinkedEraAccounts(ArgumentMatchers.eq(UserInfo(adminAccessToken, ""))))
.thenReturn(Future.successful(linkedAccountsBySamUserId.values.toSeq))
}
private def mockThurloeUsers(): Unit = {
- when(thurloeDao.getAllKVPs(any[String], any[WithAccessToken])).thenAnswer(args => Future {
- val userId = WorkbenchUserId(args.getArgument(0).asInstanceOf[String])
- val user = idToSamUser(userId)
- val linkedEraAccount = linkedAccountsBySamUserId.get(userId)
- Some(ProfileWrapper(userId.value, List(
- FireCloudKeyValue(Some("contactEmail"), Some(user.email.value)),
- FireCloudKeyValue(Some("linkedNihUsername"), linkedEraAccount.map(_.linkedExternalId)),
- FireCloudKeyValue(Some("linkExpireTime"), linkedEraAccount.map(_.linkExpireTime.getMillis.toString))
- )))
- })
+ when(thurloeDao.getAllKVPs(any[String], any[WithAccessToken])).thenAnswer(args =>
+ Future {
+ val userId = WorkbenchUserId(args.getArgument(0).asInstanceOf[String])
+ val user = idToSamUser(userId)
+ val linkedEraAccount = linkedAccountsBySamUserId.get(userId)
+ Some(
+ ProfileWrapper(
+ userId.value,
+ List(
+ FireCloudKeyValue(Some("contactEmail"), Some(user.email.value)),
+ FireCloudKeyValue(Some("linkedNihUsername"), linkedEraAccount.map(_.linkedExternalId)),
+ FireCloudKeyValue(Some("linkExpireTime"), linkedEraAccount.map(_.linkExpireTime.getMillis.toString))
+ )
+ )
+ )
+ }
+ )
when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("email")))
.thenReturn(Future.successful(samUsers.map(user => user.id.value -> user.email.value).toMap))
- when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkedNihUsername"))).thenReturn(Future.successful(linkedAccountsBySamUserId.map(tup => (tup._1.value, tup._2.linkedExternalId))))
- when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkExpireTime"))).thenReturn(Future.successful(linkedAccountsBySamUserId.map(tup => (tup._1.value, (tup._2.linkExpireTime.getMillis / 1000).toString))))
+ when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkedNihUsername")))
+ .thenReturn(Future.successful(linkedAccountsBySamUserId.map(tup => (tup._1.value, tup._2.linkedExternalId))))
+ when(thurloeDao.getAllUserValuesForKey(ArgumentMatchers.eq("linkExpireTime"))).thenReturn(
+ Future.successful(
+ linkedAccountsBySamUserId.map(tup => (tup._1.value, (tup._2.linkExpireTime.getMillis / 1000).toString))
+ )
+ )
when(thurloeDao.saveKeyValues(any[UserInfo], any[Map[String, String]])).thenReturn(Future.successful(Success(())))
- when(thurloeDao.saveKeyValues(any[String], any[WithAccessToken], any[Map[String, String]])).thenReturn(Future.successful(Success(())))
- when(thurloeDao.deleteKeyValue(any[String], any[String], any[WithAccessToken])).thenReturn(Future.successful(Success(())))
+ when(thurloeDao.saveKeyValues(any[String], any[WithAccessToken], any[Map[String, String]]))
+ .thenReturn(Future.successful(Success(())))
+ when(thurloeDao.deleteKeyValue(any[String], any[String], any[WithAccessToken]))
+ .thenReturn(Future.successful(Success(())))
}
private def mockGoogleServicesDAO(): Unit = {
- when(googleDao.getBucketObjectAsInputStream(ArgumentMatchers.eq(FireCloudConfig.Nih.whitelistBucket), any[String])).thenAnswer(args => {
- val filename = args.getArgument(1).asInstanceOf[String]
- val nihUsernames = filename match {
- case "tcga-whitelist.txt" => Seq(userTcgaAndTargetLinkedAccount.linkedExternalId, userTcgaOnlyLinkedAccount.linkedExternalId)
- case "target-whitelist.txt" => Seq(userTcgaAndTargetLinkedAccount.linkedExternalId, userTargetOnlyLinkedAccount.linkedExternalId)
- case "broken-whitelist.txt" => Seq.empty
+ when(googleDao.getBucketObjectAsInputStream(ArgumentMatchers.eq(FireCloudConfig.Nih.whitelistBucket), any[String]))
+ .thenAnswer { args =>
+ val filename = args.getArgument(1).asInstanceOf[String]
+ val nihUsernames = filename match {
+ case "tcga-whitelist.txt" =>
+ Seq(userTcgaAndTargetLinkedAccount.linkedExternalId, userTcgaOnlyLinkedAccount.linkedExternalId)
+ case "target-whitelist.txt" =>
+ Seq(userTcgaAndTargetLinkedAccount.linkedExternalId, userTargetOnlyLinkedAccount.linkedExternalId)
+ case "broken-whitelist.txt" => Seq.empty
+ }
+ new ByteArrayInputStream(nihUsernames.mkString("\n").getBytes(StandardCharsets.UTF_8))
}
- new ByteArrayInputStream(nihUsernames.mkString("\n").getBytes(StandardCharsets.UTF_8))
- })
when(googleDao.getAdminUserAccessToken).thenReturn(adminAccessToken)
}
- private def mockShibbolethDAO(): Unit = {
+ private def mockShibbolethDAO(): Unit =
when(shibbolethDao.getPublicKey()).thenReturn(Future.successful(pubKey))
- }
val keypairGen = KeyPairGenerator.getInstance("RSA")
keypairGen.initialize(1024)
val keypair = keypairGen.generateKeyPair()
val privKey: PrivateKey = keypair.getPrivate
- val pubKey: String = s"-----BEGIN PUBLIC KEY-----\n${Base64.getEncoder.encodeToString(keypair.getPublic.getEncoded)}\n-----END PUBLIC KEY-----"
+ val pubKey: String =
+ s"-----BEGIN PUBLIC KEY-----\n${Base64.getEncoder.encodeToString(keypair.getPublic.getEncoded)}\n-----END PUBLIC KEY-----"
private def jwtForUser(linkedEraAccount: LinkedEraAccount): JWTWrapper = {
val expiresInTheFuture: Long = linkedEraAccount.linkExpireTime.getMillis / 1000L
val issuedAt = Instant.ofEpochMilli(linkedEraAccount.linkExpireTime.minusDays(30).getMillis).getEpochSecond
val validStr = Jwt.encode(
- JwtClaim(s"""{"eraCommonsUsername": "${linkedEraAccount.linkedExternalId}"}""").issuedAt(issuedAt).expiresAt(expiresInTheFuture),
+ JwtClaim(s"""{"eraCommonsUsername": "${linkedEraAccount.linkedExternalId}"}""")
+ .issuedAt(issuedAt)
+ .expiresAt(expiresInTheFuture),
privKey,
- JwtAlgorithm.RS256)
+ JwtAlgorithm.RS256
+ )
JWTWrapper(validStr)
}
- private def genSamUser(): SamUser = {
+ private def genSamUser(): SamUser =
SamUser(
WorkbenchUserId(Random.nextInt().toString),
Some(GoogleSubjectId(Random.nextInt().toString)),
- WorkbenchEmail( UUID.randomUUID().toString + "@email.com"),
+ WorkbenchEmail(UUID.randomUUID().toString + "@email.com"),
Some(AzureB2CId(UUID.randomUUID().toString)),
enabled = true,
Instant.now(),
Some(Instant.now()),
- Instant.now())
- }
-
-
+ Instant.now()
+ )
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/OntologyServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/OntologyServiceSpec.scala
index 5bafafd48..0e4d4c520 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/OntologyServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/OntologyServiceSpec.scala
@@ -11,7 +11,6 @@ import spray.json.DefaultJsonProtocol._
import spray.json.{JsObject, JsValue, JsonParser}
import spray.json.lenses.JsonLenses._
-
class OntologyServiceSpec extends AnyFreeSpec with Matchers with ScalaFutures {
val ontologyDao = new MockOntologyDAO()
@@ -21,14 +20,15 @@ class OntologyServiceSpec extends AnyFreeSpec with Matchers with ScalaFutures {
val ontologyService = new OntologyService(ontologyDao, researchPurposeSupport)
- final implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(Span(300, Seconds)), interval = scaled(Span(2, Seconds)))
+ implicit final override val patienceConfig: PatienceConfig =
+ PatienceConfig(timeout = scaled(Span(300, Seconds)), interval = scaled(Span(2, Seconds)))
private def jsonFromResearchPurposeRequest(rpRequest: ResearchPurposeRequest): JsValue = {
val result = ontologyService.buildResearchPurposeQuery(rpRequest).futureValue
val resultString = result match {
case RequestComplete(response) => response.toString
- case _ => fail("expected a RequestComplete")
+ case _ => fail("expected a RequestComplete")
}
JsonParser(resultString)
@@ -62,7 +62,9 @@ class OntologyServiceSpec extends AnyFreeSpec with Matchers with ScalaFutures {
// Specifying specific diseases implies General Research Use (GRU) and Health/Medical/Biomedical (HMB)
Seq("GRU", "HMB") foreach { code =>
- json.extract[Boolean](termsPath / s"abc:structuredUseRestriction.$code".? / Symbol("value")) should contain theSameElementsAs Seq(true)
+ json.extract[Boolean](
+ termsPath / s"abc:structuredUseRestriction.$code".? / Symbol("value")
+ ) should contain theSameElementsAs Seq(true)
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpec.scala
index f06a8f303..9ab1203d1 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpec.scala
@@ -27,15 +27,13 @@ final class PassthroughDirectivesSpec extends BaseServiceSpec with FireCloudDire
override def beforeAll() = {
echoServer = startClientAndServer(echoPort)
- echoServer.when(request())
- .respond(
- callback().
- withCallbackClass("org.broadinstitute.dsde.firecloud.service.EchoCallback"))
+ echoServer
+ .when(request())
+ .respond(callback().withCallbackClass("org.broadinstitute.dsde.firecloud.service.EchoCallback"))
}
- override def afterAll() = {
+ override def afterAll() =
echoServer.stop
- }
"Passthrough Directives" - {
"passthrough() directive" - {
@@ -62,13 +60,13 @@ final class PassthroughDirectivesSpec extends BaseServiceSpec with FireCloudDire
"path with a single query parameter" - {
"should send the query parameter through" in {
- validateUri("/one/2/three?key=value", Some(Map("key"->"value")))
+ validateUri("/one/2/three?key=value", Some(Map("key" -> "value")))
}
}
"path with multiple query parameters" - {
"should send the query parameters through" in {
- validateUri("/one/2/three?key=value&key2=val2", Some(Map("key"->"value", "key2"->"val2")))
+ validateUri("/one/2/three?key=value&key2=val2", Some(Map("key" -> "value", "key2" -> "val2")))
}
}
@@ -77,7 +75,7 @@ final class PassthroughDirectivesSpec extends BaseServiceSpec with FireCloudDire
validateUri(
"/one/2/three?key=value&key2=1%323",
"/one/2/three?key=value&key2=123",
- Some(Map("key"->"value", "key2"->"123"))
+ Some(Map("key" -> "value", "key2" -> "123"))
)
}
}
@@ -88,7 +86,7 @@ final class PassthroughDirectivesSpec extends BaseServiceSpec with FireCloudDire
val specRoute = passthrough(echoUrl + "/", meth)
val reqMethod = new RequestBuilder(meth)
reqMethod() ~> sealRoute(specRoute) ~> check {
- assertResult(OK) {status}
+ assertResult(OK)(status)
// special handling for HEAD, because HEAD won't return a body
if (meth != HEAD && meth != CONNECT) {
val info = responseAs[RequestInfo]
@@ -106,13 +104,11 @@ final class PassthroughDirectivesSpec extends BaseServiceSpec with FireCloudDire
}
}
- private def validateUri(path: String, queryParams: Option[Map[String, String]] = None): Unit = {
+ private def validateUri(path: String, queryParams: Option[Map[String, String]] = None): Unit =
validateUri(path, path, queryParams)
- }
- private def validateUri(inpath: String, outpath: String): Unit = {
+ private def validateUri(inpath: String, outpath: String): Unit =
validateUri(inpath, outpath, None)
- }
private def validateUri(inpath: String, outpath: String, queryParams: Option[Map[String, String]]): Unit = {
val specRoute = passthrough(echoUrl + inpath, HttpMethods.GET)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpecSupport.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpecSupport.scala
index 244e9ea1a..6454f7af9 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpecSupport.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/PassthroughDirectivesSpecSupport.scala
@@ -17,7 +17,7 @@ class EchoCallback extends ExpectationResponseCallback {
override def handle(httpRequest: HttpRequest): HttpResponse = {
// translate the mockserver request to a spray Uri
val query: Query = Option(httpRequest.getQueryStringParameters) match {
- case None => Query.Empty
+ case None => Query.Empty
case Some(params) => Query(params.getRawParameterString)
}
@@ -32,18 +32,15 @@ class EchoCallback extends ExpectationResponseCallback {
sprayuri.toString()
)
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(OK.intValue)
.withHeader(MockUtils.header)
.withBody(requestInfoFormat.write(requestInfo).prettyPrint)
}
}
-case class RequestInfo(
- method: String,
- path: String,
- queryparams: Map[String,String],
- url: String)
+case class RequestInfo(method: String, path: String, queryparams: Map[String, String], url: String)
object PassthroughDirectivesSpecSupport {
implicit val requestInfoFormat: RootJsonFormat[RequestInfo] = jsonFormat4(RequestInfo)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/RegisterServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/RegisterServiceSpec.scala
index 68e1800b7..82ac370fb 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/RegisterServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/RegisterServiceSpec.scala
@@ -3,8 +3,17 @@ package org.broadinstitute.dsde.firecloud.service
import akka.http.scaladsl.model.headers.OAuth2BearerToken
import org.broadinstitute.dsde.firecloud.HealthChecks.termsOfServiceUrl
import org.broadinstitute.dsde.firecloud.dataaccess.{GoogleServicesDAO, RawlsDAO, SamDAO, ThurloeDAO}
-import org.broadinstitute.dsde.firecloud.model.{BasicProfile, RegistrationInfo, UserInfo, WorkbenchEnabled, WorkbenchUserInfo}
-import org.broadinstitute.dsde.workbench.model.Notifications.{ActivationNotification, AzurePreviewActivationNotification}
+import org.broadinstitute.dsde.firecloud.model.{
+ BasicProfile,
+ RegistrationInfo,
+ UserInfo,
+ WorkbenchEnabled,
+ WorkbenchUserInfo
+}
+import org.broadinstitute.dsde.workbench.model.Notifications.{
+ ActivationNotification,
+ AzurePreviewActivationNotification
+}
import org.mockito.ArgumentMatchers
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
@@ -28,9 +37,16 @@ class RegisterServiceSpec extends AnyFlatSpec with Matchers with BeforeAndAfterE
val registerService = new RegisterService(rawlsDAO, samDAO, thurloeDAO, googleServicesDAO)
- val azureB2CUserInfo: UserInfo = UserInfo("azure-b2c@example.com", OAuth2BearerToken("token"), 1, "0f3cd8e4-59c2-4bce-9c24-98c5a0c308c1", None)
- val googleB2CUserInfo: UserInfo = UserInfo("google-b2c@example.com", OAuth2BearerToken("token"), 1, "0617047d-a81f-4724-b783-b5af51af9a70", Some(OAuth2BearerToken("some-google-token")))
- val googleLegacyUserInfo: UserInfo = UserInfo("google-legacy@example.com", OAuth2BearerToken("token"), 1, "111111111111", None)
+ val azureB2CUserInfo: UserInfo =
+ UserInfo("azure-b2c@example.com", OAuth2BearerToken("token"), 1, "0f3cd8e4-59c2-4bce-9c24-98c5a0c308c1", None)
+ val googleB2CUserInfo: UserInfo = UserInfo("google-b2c@example.com",
+ OAuth2BearerToken("token"),
+ 1,
+ "0617047d-a81f-4724-b783-b5af51af9a70",
+ Some(OAuth2BearerToken("some-google-token"))
+ )
+ val googleLegacyUserInfo: UserInfo =
+ UserInfo("google-legacy@example.com", OAuth2BearerToken("token"), 1, "111111111111", None)
val profile: BasicProfile = BasicProfile(
firstName = "first",
@@ -76,16 +92,21 @@ class RegisterServiceSpec extends AnyFlatSpec with Matchers with BeforeAndAfterE
// user is not registered; registration check returns google=false and ldap=false
when(samDAO.getRegistrationStatus(userInfo)).thenReturn(
Future.successful(
- RegistrationInfo(
- WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
- WorkbenchEnabled(google = false, ldap = false, allUsersGroup = false),
- None)))
+ RegistrationInfo(WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
+ WorkbenchEnabled(google = false, ldap = false, allUsersGroup = false),
+ None
+ )
+ )
+ )
// registering this user returns successfully
- when(samDAO.registerUser(any())(ArgumentMatchers.eq(userInfo))).thenReturn(Future.successful(
- RegistrationInfo(
- WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
- WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true),
- None)))
+ when(samDAO.registerUser(any())(ArgumentMatchers.eq(userInfo))).thenReturn(
+ Future.successful(
+ RegistrationInfo(WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
+ WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true),
+ None
+ )
+ )
+ )
// saving to Thurloe returns successfully
when(thurloeDAO.saveProfile(userInfo, profile)).thenReturn(Future.successful(()))
when(thurloeDAO.saveKeyValues(ArgumentMatchers.eq(userInfo), any())).thenReturn(Future.successful(Success(())))
@@ -105,10 +126,12 @@ class RegisterServiceSpec extends AnyFlatSpec with Matchers with BeforeAndAfterE
// user is already registered; registration check returns google=true and ldap=true
when(samDAO.getRegistrationStatus(userInfo)).thenReturn(
Future.successful(
- RegistrationInfo(
- WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
- WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true),
- None)))
+ RegistrationInfo(WorkbenchUserInfo(userInfo.id, userInfo.userEmail),
+ WorkbenchEnabled(google = true, ldap = true, allUsersGroup = true),
+ None
+ )
+ )
+ )
// saving to Thurloe returns successfully
when(thurloeDAO.saveProfile(userInfo, profile)).thenReturn(Future.successful(()))
when(thurloeDAO.saveKeyValues(ArgumentMatchers.eq(userInfo), any())).thenReturn(Future.successful(Success(())))
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ServiceSpec.scala
index 42474d168..9e5d30b28 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/ServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/ServiceSpec.scala
@@ -21,13 +21,21 @@ import akka.testkit.TestKitBase
import scala.concurrent.duration._
// common Service Spec to be inherited by service tests
-trait ServiceSpec extends AnyFreeSpec with ScalaFutures with ScalatestRouteTest with Matchers with TestRequestBuilding with TestKitBase {
+trait ServiceSpec
+ extends AnyFreeSpec
+ with ScalaFutures
+ with ScalatestRouteTest
+ with Matchers
+ with TestRequestBuilding
+ with TestKitBase {
implicit val routeTestTimeout: RouteTestTimeout = RouteTestTimeout(5.seconds)
val allHttpMethods = Seq(CONNECT, DELETE, GET, HEAD, PATCH, POST, PUT, TRACE)
- def allHttpMethodsExcept(method: HttpMethod, methods: HttpMethod*): Seq[HttpMethod] = allHttpMethodsExcept(method +: methods)
+ def allHttpMethodsExcept(method: HttpMethod, methods: HttpMethod*): Seq[HttpMethod] = allHttpMethodsExcept(
+ method +: methods
+ )
def allHttpMethodsExcept(methods: Seq[HttpMethod]): Seq[HttpMethod] = allHttpMethods.diff(methods)
// is the response an ErrorReport with the given Source and StatusCode
@@ -37,9 +45,8 @@ trait ServiceSpec extends AnyFreeSpec with ScalaFutures with ScalatestRouteTest
report.statusCode.get should be(statusCode)
}
- def checkIfPassedThrough(route: Route, method: HttpMethod, uri: String, toBeHandled: Boolean): Unit = {
+ def checkIfPassedThrough(route: Route, method: HttpMethod, uri: String, toBeHandled: Boolean): Unit =
new RequestBuilder(method)(uri) ~> dummyAuthHeaders ~> route ~> check {
handled should be(toBeHandled)
}
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupportSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupportSpec.scala
index b7d5bde10..57c0bd160 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupportSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/TSVFileSupportSpec.scala
@@ -6,7 +6,15 @@ import org.broadinstitute.dsde.firecloud.FireCloudExceptionWithErrorReport
import org.broadinstitute.dsde.firecloud.mock.MockTSVLoadFiles
import org.broadinstitute.dsde.firecloud.model.{EntityUpdateDefinition, FlexibleModelSchema}
import org.broadinstitute.dsde.firecloud.utils.TSVLoadFile
-import org.broadinstitute.dsde.rawls.model.{AttributeBoolean, AttributeEntityReference, AttributeListElementable, AttributeName, AttributeNumber, AttributeString, AttributeValueRawJson}
+import org.broadinstitute.dsde.rawls.model.{
+ AttributeBoolean,
+ AttributeEntityReference,
+ AttributeListElementable,
+ AttributeName,
+ AttributeNumber,
+ AttributeString,
+ AttributeValueRawJson
+}
import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddUpdateAttribute, RemoveAttribute}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.must.Matchers.contain
@@ -17,16 +25,19 @@ import org.scalatest.matchers.should.Matchers.convertToAnyShouldWrapper
*/
class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
-
"getWorkspaceAttributeCalls" - {
"get AttributeUpdates for a valid Workspace Attribute TSV file" in {
val attributes = getWorkspaceAttributeCalls(MockTSVLoadFiles.validWorkspaceAttributes)
assertResult(attributes) {
- List(AddUpdateAttribute(AttributeName("default", "a1"), AttributeString("v1")),
+ List(
+ AddUpdateAttribute(AttributeName("default", "a1"), AttributeString("v1")),
AddUpdateAttribute(AttributeName("default", "a2"), AttributeString("2")),
AddUpdateAttribute(AttributeName("default", "a3"), AttributeString("[1,2,3]")),
- AddUpdateAttribute(AttributeName("default", "a4"), AttributeValueRawJson("""{"tables":{"sample":{"save":["participant",false,"sample",true]}}}""")
- ))
+ AddUpdateAttribute(
+ AttributeName("default", "a4"),
+ AttributeValueRawJson("""{"tables":{"sample":{"save":["participant",false,"sample",true]}}}""")
+ )
+ )
}
}
@@ -55,7 +66,8 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
val attributes = getWorkspaceAttributeCalls(MockTSVLoadFiles.validRemoveAddAttribute)
assertResult(attributes) {
List(RemoveAttribute(AttributeName("default", "a1")),
- AddUpdateAttribute(AttributeName("default", "a2"), AttributeString("v2")))
+ AddUpdateAttribute(AttributeName("default", "a2"), AttributeString("v2"))
+ )
}
}
}
@@ -106,44 +118,46 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
)
val stringTestCases = List("", "string", "true525600", ",")
val referenceTestCases = Map(
- """{"entityType":"targetType","entityName":"targetName"}""" -> AttributeEntityReference("targetType", "targetName")
+ """{"entityType":"targetType","entityName":"targetName"}""" -> AttributeEntityReference("targetType",
+ "targetName"
+ )
)
"should detect boolean values when applicable" in {
- booleanTestCases foreach {
- case (input, expected) => withClue(s"should handle potential boolean: $input") {
+ booleanTestCases foreach { case (input, expected) =>
+ withClue(s"should handle potential boolean: $input") {
stringToTypedAttribute(input) shouldBe expected
}
}
}
"should detect int values when applicable" in {
- integerTestCases foreach {
- case (input, expected) => withClue(s"should handle potential int: $input") {
+ integerTestCases foreach { case (input, expected) =>
+ withClue(s"should handle potential int: $input") {
stringToTypedAttribute(input) shouldBe expected
}
}
}
"should detect double values when applicable" in {
- doubleTestCases foreach {
- case (input, expected) => withClue(s"should handle potential double: $input") {
+ doubleTestCases foreach { case (input, expected) =>
+ withClue(s"should handle potential double: $input") {
stringToTypedAttribute(input) shouldBe expected
}
}
}
"should detect entity references when applicable" in {
- referenceTestCases foreach {
- case (input, expected) => withClue(s"should handle potential reference: $input") {
+ referenceTestCases foreach { case (input, expected) =>
+ withClue(s"should handle potential reference: $input") {
stringToTypedAttribute(input) shouldBe expected
}
}
}
"should detect string values when applicable" in {
- stringTestCases foreach {
- str => withClue(s"should handle string: $str") {
+ stringTestCases foreach { str =>
+ withClue(s"should handle string: $str") {
stringToTypedAttribute(str) shouldBe AttributeString(str)
}
}
@@ -175,20 +189,28 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
exemplarValue: AttributeListElementable,
expectedSize: Int = 4,
colname: String = "arrays",
- entityType: String = "some_type")
+ entityType: String = "some_type"
+ )
val testCases = List(
TsvArrayTestCase(MockTSVLoadFiles.entityWithAttributeStringArray, "all strings", AttributeString("")),
TsvArrayTestCase(MockTSVLoadFiles.entityWithAttributeNumberArray, "all numbers", AttributeNumber(0)),
TsvArrayTestCase(MockTSVLoadFiles.entityWithAttributeBooleanArray, "all booleans", AttributeBoolean(true)),
TsvArrayTestCase(MockTSVLoadFiles.entityWithAttributeEntityReferenceArray,
- "all entity references", AttributeEntityReference("entityType", "entityName"))
+ "all entity references",
+ AttributeEntityReference("entityType", "entityName")
+ )
)
testCases foreach { testCase =>
s"parse an attribute array consisting of ${testCase.testHint}" in {
- val resultingOps = setAttributesOnEntity(testCase.entityType, None, testCase.loadFile.tsvData.head, Seq((testCase.colname, None)), FlexibleModelSchema)
- resultingOps.operations.size shouldBe testCase.expectedSize //1 to remove any existing list, 3 to add the list elements
+ val resultingOps = setAttributesOnEntity(testCase.entityType,
+ None,
+ testCase.loadFile.tsvData.head,
+ Seq((testCase.colname, None)),
+ FlexibleModelSchema
+ )
+ resultingOps.operations.size shouldBe testCase.expectedSize // 1 to remove any existing list, 3 to add the list elements
resultingOps.entityType shouldBe testCase.entityType
// firstOp should be the RemoveAttribute
@@ -206,31 +228,46 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
op("op") shouldBe AttributeString("AddListMember")
op("attributeListName") shouldBe AttributeString(testCase.colname)
val element = op("newMember")
- element.getClass shouldBe(expectedClass)
+ element.getClass shouldBe expectedClass
}
}
}
"throw an exception when parsing an attribute array consisting of mixed attribute types" in {
val caught = intercept[FireCloudExceptionWithErrorReport] {
- setAttributesOnEntity("some_type", None, MockTSVLoadFiles.entityWithAttributeMixedArray.tsvData.head, Seq(("arrays", None)), FlexibleModelSchema)
+ setAttributesOnEntity("some_type",
+ None,
+ MockTSVLoadFiles.entityWithAttributeMixedArray.tsvData.head,
+ Seq(("arrays", None)),
+ FlexibleModelSchema
+ )
}
- caught.errorReport.statusCode should contain (BadRequest)
+ caught.errorReport.statusCode should contain(BadRequest)
caught.errorReport.message shouldBe "Mixed-type entity attribute lists are not supported."
}
"throw an exception when parsing an attribute array of objects" in {
val caught = intercept[FireCloudExceptionWithErrorReport] {
- setAttributesOnEntity("some_type", None, MockTSVLoadFiles.entityWithAttributeArrayOfObjects.tsvData.head, Seq(("arrays", None)), FlexibleModelSchema)
+ setAttributesOnEntity("some_type",
+ None,
+ MockTSVLoadFiles.entityWithAttributeArrayOfObjects.tsvData.head,
+ Seq(("arrays", None)),
+ FlexibleModelSchema
+ )
}
- caught.errorReport.statusCode should contain (BadRequest)
+ caught.errorReport.statusCode should contain(BadRequest)
caught.errorReport.message shouldBe UNSUPPORTED_ARRAY_TYPE_ERROR_MSG
}
"parse an attribute empty array" in {
- val resultingOps = setAttributesOnEntity("some_type", None, MockTSVLoadFiles.entityWithEmptyAttributeArray.tsvData.head, Seq(("arrays", None)), FlexibleModelSchema)
+ val resultingOps = setAttributesOnEntity("some_type",
+ None,
+ MockTSVLoadFiles.entityWithEmptyAttributeArray.tsvData.head,
+ Seq(("arrays", None)),
+ FlexibleModelSchema
+ )
- resultingOps.operations.size shouldBe 2 //1 to remove any existing attribute with this name, 1 to create the empty attr value list
+ resultingOps.operations.size shouldBe 2 // 1 to remove any existing attribute with this name, 1 to create the empty attr value list
// firstOp should be the RemoveAttribute
val firstOp = resultingOps.operations.head
@@ -246,7 +283,12 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
}
"parse an attribute array-of-arrays" in {
- val resultingOps = setAttributesOnEntity("array", None, MockTSVLoadFiles.entityWithNestedArrays.tsvData.head, Seq(("array", None)), FlexibleModelSchema)
+ val resultingOps = setAttributesOnEntity("array",
+ None,
+ MockTSVLoadFiles.entityWithNestedArrays.tsvData.head,
+ Seq(("array", None)),
+ FlexibleModelSchema
+ )
// 1 to remove any existing attribute with this name, 3 to add the AttributeValueRawJsons
resultingOps.operations.size shouldBe 4
@@ -255,12 +297,18 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
val expectedOps = Seq(
Map("op" -> AttributeString("RemoveAttribute"), "attributeName" -> col),
- Map("op" -> AttributeString("AddListMember"), "attributeListName" -> col,
- "newMember" -> AttributeValueRawJson("""["one","two"]""")),
- Map("op" -> AttributeString("AddListMember"), "attributeListName" -> col,
- "newMember" -> AttributeValueRawJson("""["three","four"]""")),
- Map("op" -> AttributeString("AddListMember"), "attributeListName" -> col,
- "newMember" -> AttributeValueRawJson("""["five","six"]"""))
+ Map("op" -> AttributeString("AddListMember"),
+ "attributeListName" -> col,
+ "newMember" -> AttributeValueRawJson("""["one","two"]""")
+ ),
+ Map("op" -> AttributeString("AddListMember"),
+ "attributeListName" -> col,
+ "newMember" -> AttributeValueRawJson("""["three","four"]""")
+ ),
+ Map("op" -> AttributeString("AddListMember"),
+ "attributeListName" -> col,
+ "newMember" -> AttributeValueRawJson("""["five","six"]""")
+ )
)
val expected = EntityUpdateDefinition("bla", "array", expectedOps)
@@ -269,13 +317,19 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
}
"remove attribute values when deleteEmptyValues is set to true" in {
- val colInfo = colNamesToAttributeNames( MockTSVLoadFiles.validWithBlanks.headers, Map.empty)
- val resultingOps = setAttributesOnEntity("some_type", None, MockTSVLoadFiles.validWithBlanks.tsvData.head, colInfo, FlexibleModelSchema, true)
+ val colInfo = colNamesToAttributeNames(MockTSVLoadFiles.validWithBlanks.headers, Map.empty)
+ val resultingOps = setAttributesOnEntity("some_type",
+ None,
+ MockTSVLoadFiles.validWithBlanks.tsvData.head,
+ colInfo,
+ FlexibleModelSchema,
+ true
+ )
resultingOps.operations.size shouldBe 2
- //TSV is 1 entity with 2 attributes, one of which is blank. deleteEmptyValues is set to true
- //We should see a RemoveAttribute op for the blank and an AddUpdateAttribute op for the non-null value
+ // TSV is 1 entity with 2 attributes, one of which is blank. deleteEmptyValues is set to true
+ // We should see a RemoveAttribute op for the blank and an AddUpdateAttribute op for the non-null value
val firstOp = resultingOps.operations.head
firstOp.keySet should contain theSameElementsAs List("op", "attributeName")
firstOp("op") shouldBe AttributeString("RemoveAttribute")
@@ -288,13 +342,19 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
}
"not remove attribute values when deleteEmptyValues is set to false" in {
- val colInfo = colNamesToAttributeNames( MockTSVLoadFiles.validWithBlanks.headers, Map.empty)
- val resultingOps = setAttributesOnEntity("some_type", None, MockTSVLoadFiles.validWithBlanks.tsvData.head, colInfo, FlexibleModelSchema, false)
+ val colInfo = colNamesToAttributeNames(MockTSVLoadFiles.validWithBlanks.headers, Map.empty)
+ val resultingOps = setAttributesOnEntity("some_type",
+ None,
+ MockTSVLoadFiles.validWithBlanks.tsvData.head,
+ colInfo,
+ FlexibleModelSchema,
+ false
+ )
resultingOps.operations.size shouldBe 1
- //TSV is 1 entity with 2 attributes, one of which is blank. deleteEmptyValues is set to false
- //We should only see an AddUpdateAttribute op for the non-null value
+ // TSV is 1 entity with 2 attributes, one of which is blank. deleteEmptyValues is set to false
+ // We should only see an AddUpdateAttribute op for the non-null value
val firstOp = resultingOps.operations.head
firstOp.keySet should contain theSameElementsAs List("op", "attributeName", "addUpdateAttribute")
firstOp("op") shouldBe AttributeString("AddUpdateAttribute")
@@ -304,14 +364,26 @@ class TSVFileSupportSpec extends AnyFreeSpec with TSVFileSupport {
"create AttributeBoolean and AttributeNumber when applicable" in {
val colInfo = colNamesToAttributeNames(MockTSVLoadFiles.entityWithBooleanAndNumberAttributes.headers, Map.empty)
- val resultingOpsFirst = setAttributesOnEntity("foo", None, MockTSVLoadFiles.entityWithBooleanAndNumberAttributes.tsvData.head, colInfo, FlexibleModelSchema)
- val resultingOpsSecond = setAttributesOnEntity("foo", None, MockTSVLoadFiles.entityWithBooleanAndNumberAttributes.tsvData(1), colInfo, FlexibleModelSchema)
+ val resultingOpsFirst = setAttributesOnEntity("foo",
+ None,
+ MockTSVLoadFiles.entityWithBooleanAndNumberAttributes.tsvData.head,
+ colInfo,
+ FlexibleModelSchema
+ )
+ val resultingOpsSecond = setAttributesOnEntity("foo",
+ None,
+ MockTSVLoadFiles.entityWithBooleanAndNumberAttributes.tsvData(1),
+ colInfo,
+ FlexibleModelSchema
+ )
val expectedOpsFirst = List(AttributeBoolean(true), AttributeNumber(0), AttributeString("string"))
val expectedOpsSecond = List(AttributeBoolean(false), AttributeNumber(3.14), AttributeString(","))
resultingOpsFirst.operations.map(_("addUpdateAttribute")) should contain theSameElementsInOrderAs expectedOpsFirst
- resultingOpsSecond.operations.map(_("addUpdateAttribute")) should contain theSameElementsInOrderAs expectedOpsSecond
+ resultingOpsSecond.operations.map(
+ _("addUpdateAttribute")
+ ) should contain theSameElementsInOrderAs expectedOpsSecond
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/UserServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/UserServiceSpec.scala
index f1a9f42d3..e23130951 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/UserServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/UserServiceSpec.scala
@@ -2,7 +2,12 @@ package org.broadinstitute.dsde.firecloud.service
import akka.http.scaladsl.model.headers.OAuth2BearerToken
import org.broadinstitute.dsde.firecloud.{Application, FireCloudConfig}
-import org.broadinstitute.dsde.firecloud.dataaccess.{DisabledExternalCredsDAO, MockCwdsDAO, MockResearchPurposeSupport, MockShareLogDAO}
+import org.broadinstitute.dsde.firecloud.dataaccess.{
+ DisabledExternalCredsDAO,
+ MockCwdsDAO,
+ MockResearchPurposeSupport,
+ MockShareLogDAO
+}
import org.broadinstitute.dsde.firecloud.mock.MockGoogleServicesDAO
import org.broadinstitute.dsde.firecloud.model.{ProfileWrapper, UserInfo}
import org.broadinstitute.dsde.firecloud.service.PerRequest.RequestComplete
@@ -11,10 +16,22 @@ import org.scalatest.BeforeAndAfterEach
import scala.concurrent.Await
import scala.concurrent.duration._
+class UserServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
-class UserServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
-
- val customApp = Application(agoraDao, new MockGoogleServicesFailedGroupsDAO(), ontologyDao, new MockRawlsDeleteWSDAO(), samDao, new MockSearchDeleteWSDAO(), new MockResearchPurposeSupport, thurloeDao, new MockShareLogDAO, shibbolethDao, new MockCwdsDAO, new DisabledExternalCredsDAO)
+ val customApp = Application(
+ agoraDao,
+ new MockGoogleServicesFailedGroupsDAO(),
+ ontologyDao,
+ new MockRawlsDeleteWSDAO(),
+ samDao,
+ new MockSearchDeleteWSDAO(),
+ new MockResearchPurposeSupport,
+ thurloeDao,
+ new MockShareLogDAO,
+ shibbolethDao,
+ new MockCwdsDAO,
+ new DisabledExternalCredsDAO
+ )
val userServiceConstructor: (UserInfo) => UserService = UserService.constructor(customApp)
@@ -40,18 +57,18 @@ class UserServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
"should return original keys if Google group creation fails" in {
val keys = ProfileWrapper(userToken.id, List())
val anonymousGroupName = "makeGoogleGroupCreationFail"
- val rqComplete = Await.
- result(userService.setupAnonymizedGoogleGroup(keys, anonymousGroupName), 3.seconds).
- asInstanceOf[RequestComplete[ProfileWrapper]]
+ val rqComplete = Await
+ .result(userService.setupAnonymizedGoogleGroup(keys, anonymousGroupName), 3.seconds)
+ .asInstanceOf[RequestComplete[ProfileWrapper]]
val returnedKeys = rqComplete.response
returnedKeys should equal(keys)
}
"should return original keys if adding a member to Google group fails" in {
val keys = ProfileWrapper(userToken.id, List())
val anonymousGroupName = "makeAddMemberFail"
- val rqComplete = Await.
- result(userService.setupAnonymizedGoogleGroup(keys, anonymousGroupName), 3.seconds).
- asInstanceOf[RequestComplete[ProfileWrapper]]
+ val rqComplete = Await
+ .result(userService.setupAnonymizedGoogleGroup(keys, anonymousGroupName), 3.seconds)
+ .asInstanceOf[RequestComplete[ProfileWrapper]]
val returnedKeys = rqComplete.response
returnedKeys should equal(keys)
}
@@ -65,16 +82,14 @@ class UserServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
* [Copied/modified from WorkspaceServiceSpec]
*/
class MockGoogleServicesFailedGroupsDAO extends MockGoogleServicesDAO {
- override def createGoogleGroup(groupName: String): Option[String] = {
+ override def createGoogleGroup(groupName: String): Option[String] =
groupName match {
case "makeGoogleGroupCreationFail" => Option.empty
- case _ => Option(groupName)
+ case _ => Option(groupName)
}
- }
- override def addMemberToAnonymizedGoogleGroup(groupName: String, targetUserEmail: String): Option[String] = {
+ override def addMemberToAnonymizedGoogleGroup(groupName: String, targetUserEmail: String): Option[String] =
groupName match {
case "makeAddMemberFail" => Option.empty
- case _ => Option(targetUserEmail)
+ case _ => Option(targetUserEmail)
}
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceServiceSpec.scala
index b84a2a373..688c6abbf 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceServiceSpec.scala
@@ -12,26 +12,37 @@ import org.scalatest.BeforeAndAfterEach
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
-
class WorkspaceServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
- val customApp = Application(agoraDao, googleServicesDao, ontologyDao, new MockRawlsDeleteWSDAO(), samDao, new MockSearchDeleteWSDAO(), new MockResearchPurposeSupport, thurloeDao, new MockShareLogDAO, shibbolethDao, new MockCwdsDAO, new DisabledExternalCredsDAO)
+ val customApp = Application(
+ agoraDao,
+ googleServicesDao,
+ ontologyDao,
+ new MockRawlsDeleteWSDAO(),
+ samDao,
+ new MockSearchDeleteWSDAO(),
+ new MockResearchPurposeSupport,
+ thurloeDao,
+ new MockShareLogDAO,
+ shibbolethDao,
+ new MockCwdsDAO,
+ new DisabledExternalCredsDAO
+ )
val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService = WorkspaceService.constructor(customApp)
lazy val ws: WorkspaceService = workspaceServiceConstructor(AccessToken(OAuth2BearerToken("")))
- override def beforeEach(): Unit = {
+ override def beforeEach(): Unit =
searchDao.reset()
- }
- override def afterEach(): Unit = {
+ override def afterEach(): Unit =
searchDao.reset()
- }
"export workspace attributes as TSV " - {
"export valid tsv" in {
- val rqComplete = Await.result(ws.exportWorkspaceAttributesTSV("attributes", "n", "fn"), Duration.Inf)
+ val rqComplete = Await
+ .result(ws.exportWorkspaceAttributesTSV("attributes", "n", "fn"), Duration.Inf)
.asInstanceOf[RequestCompleteWithHeaders[(StatusCode, String)]]
val (status, tsvString) = rqComplete.response
@@ -41,7 +52,8 @@ class WorkspaceServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val tsvReturnString = List(
List("workspace:e", "d", "b", "c", "a", "f").mkString("\t"),
- List("\"this\thas\ttabs\tin\tit\"", "escape quo\"te", 1.23, "", "true", "[\"v6\",999,true]").mkString("\t")).mkString("\n")
+ List("\"this\thas\ttabs\tin\tit\"", "escape quo\"te", 1.23, "", "true", "[\"v6\",999,true]").mkString("\t")
+ ).mkString("\n")
assertResult(tsvReturnString) {
tsvString
@@ -56,43 +68,43 @@ class WorkspaceServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
"should delete an unpublished workspace successfully" in {
val workspaceNamespace = "projectowner"
- val rqComplete = Await.
- result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf).
- asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
+ val rqComplete = Await
+ .result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf)
+ .asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
val (status, workspaceDeleteResponse) = rqComplete.response
- workspaceDeleteResponse.isDefined should be (true)
- status should be (StatusCodes.Accepted)
+ workspaceDeleteResponse.isDefined should be(true)
+ status should be(StatusCodes.Accepted)
}
"should delete a published workspace successfully" in {
val workspaceNamespace = "unpublishsuccess"
- val rqComplete = Await.
- result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf).
- asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
+ val rqComplete = Await
+ .result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf)
+ .asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
val (status, workspaceDeleteResponse) = rqComplete.response
- workspaceDeleteResponse.isDefined should be (true)
- workspaceDeleteResponse.get should include (ws.unPublishSuccessMessage(workspaceNamespace, workspaceName))
- status should be (StatusCodes.Accepted)
+ workspaceDeleteResponse.isDefined should be(true)
+ workspaceDeleteResponse.get should include(ws.unPublishSuccessMessage(workspaceNamespace, workspaceName))
+ status should be(StatusCodes.Accepted)
}
"should not delete a published workspace if un-publish fails" in {
val workspaceNamespace = "unpublishfailure"
- val rqComplete = Await.
- result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf).
- asInstanceOf[RequestComplete[(StatusCode, ErrorReport)]]
+ val rqComplete = Await
+ .result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf)
+ .asInstanceOf[RequestComplete[(StatusCode, ErrorReport)]]
val (status, error) = rqComplete.response
- status should be (StatusCodes.InternalServerError)
+ status should be(StatusCodes.InternalServerError)
}
"should delete a workspace and skip unpublishing if a user has lost access to view a workspace" in {
val workspaceNamespace = "deleteWithoutUnpublish"
- val rqComplete = Await.
- result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf).
- asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
+ val rqComplete = Await
+ .result(ws.deleteWorkspace(workspaceNamespace, workspaceName), Duration.Inf)
+ .asInstanceOf[RequestComplete[(StatusCode, Option[String])]]
val (status, workspaceDeleteResponse) = rqComplete.response
- workspaceDeleteResponse.isDefined should be (true)
+ workspaceDeleteResponse.isDefined should be(true)
workspaceDeleteResponse.get should not include (ws.unPublishSuccessMessage(workspaceNamespace, workspaceName))
- status should be (StatusCodes.Accepted)
+ status should be(StatusCodes.Accepted)
}
}
}
@@ -103,9 +115,10 @@ class WorkspaceServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
*/
class MockRawlsDeleteWSDAO(implicit val executionContext: ExecutionContext) extends MockRawlsDAO {
- override def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Option[String]] = {
+ override def deleteWorkspace(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Option[String]] =
Future.successful(Some("Your Google bucket 'bucketId' will be deleted within 24h."))
- }
private val unpublishsuccess = publishedRawlsWorkspaceWithAttributes.copy(
namespace = "unpublishsuccess",
@@ -119,38 +132,100 @@ class MockRawlsDeleteWSDAO(implicit val executionContext: ExecutionContext) exte
workspaceId = "unpublishfailure"
)
- override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] = {
+ override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] =
ns match {
case "attributes" => Future(rawlsWorkspaceResponseWithAttributes)
- case "deleteWithoutUnpublish" => Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(source = "Mock Rawls", message = "You do not have access to view this workspace or it does not exist", statusCode = Some(StatusCodes.NotFound), causes = Seq.empty, stackTrace = Seq.empty, exceptionClass = None)))
- case "projectowner" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.ProjectOwner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "unpublishsuccess" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), unpublishsuccess, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case "unpublishfailure" => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), unpublishfailure, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
- case _ => Future(WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare = Some(true), canCompute = Some(true), catalog = Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None))
+ case "deleteWithoutUnpublish" =>
+ Future.failed(
+ new FireCloudExceptionWithErrorReport(
+ ErrorReport(
+ source = "Mock Rawls",
+ message = "You do not have access to view this workspace or it does not exist",
+ statusCode = Some(StatusCodes.NotFound),
+ causes = Seq.empty,
+ stackTrace = Seq.empty,
+ exceptionClass = None
+ )
+ )
+ )
+ case "projectowner" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.ProjectOwner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "unpublishsuccess" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ unpublishsuccess,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case "unpublishfailure" =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ unpublishfailure,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
+ case _ =>
+ Future(
+ WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(true),
+ canCompute = Some(true),
+ catalog = Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ )
}
- }
- override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
+ override def updateLibraryAttributes(ns: String, name: String, attributeOperations: Seq[AttributeUpdateOperation])(
+ implicit userToken: WithAccessToken
+ ): Future[WorkspaceDetails] =
ns match {
- case "projectowner" => Future(newWorkspace)
+ case "projectowner" => Future(newWorkspace)
case "unpublishsuccess" => Future(publishedRawlsWorkspaceWithAttributes)
case "unpublishfailure" => Future(unpublishfailure)
- case _ => Future(newWorkspace)
+ case _ => Future(newWorkspace)
}
- }
}
class MockSearchDeleteWSDAO extends MockSearchDAO {
- override def deleteDocument(id: String): Unit = {
+ override def deleteDocument(id: String): Unit =
id match {
case "unpublishfailure" =>
deleteDocumentInvoked.set(false)
throw new FireCloudException(s"Failed to remove document with id $id from elastic search")
case _ => deleteDocumentInvoked.set(true)
}
- }
}
-
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceTagsServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceTagsServiceSpec.scala
index 737b9a6e2..0ee11f370 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceTagsServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/service/WorkspaceTagsServiceSpec.scala
@@ -7,7 +7,12 @@ import org.broadinstitute.dsde.firecloud.dataaccess.MockRawlsDAO
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.webservice.WorkspaceApiService
import org.broadinstitute.dsde.firecloud.{EntityService, FireCloudConfig}
-import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{AddListMember, AddUpdateAttribute, AttributeUpdateOperation, RemoveListMember}
+import org.broadinstitute.dsde.rawls.model.AttributeUpdateOperations.{
+ AddListMember,
+ AddUpdateAttribute,
+ AttributeUpdateOperation,
+ RemoveListMember
+}
import org.broadinstitute.dsde.rawls.model._
import org.joda.time.DateTime
import org.scalatest.{Assertions, BeforeAndAfterEach}
@@ -23,19 +28,25 @@ import scala.concurrent.{ExecutionContext, Future}
* Remember that the responses from the tag apis are sorted, so the expected values in unit
* tests may look funny - it's the sorting.
*/
-class WorkspaceTagsServiceSpec extends BaseServiceSpec with WorkspaceApiService with BeforeAndAfterEach with SprayJsonSupport {
+class WorkspaceTagsServiceSpec
+ extends BaseServiceSpec
+ with WorkspaceApiService
+ with BeforeAndAfterEach
+ with SprayJsonSupport {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
// Mock remote endpoints
- private final val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
+ final private val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
- def workspaceTagsPath(ns: String = "namespace", name: String = "name") = workspacesRoot + "/%s/%s/tags".format(ns, name)
+ def workspaceTagsPath(ns: String = "namespace", name: String = "name") =
+ workspacesRoot + "/%s/%s/tags".format(ns, name)
// use the MockTagsRawlsDao for these tests.
val testApp = app.copy(rawlsDAO = new MockTagsRawlsDao)
val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService = WorkspaceService.constructor(testApp)
- val permissionReportServiceConstructor: (UserInfo) => PermissionReportService = PermissionReportService.constructor(testApp)
+ val permissionReportServiceConstructor: (UserInfo) => PermissionReportService =
+ PermissionReportService.constructor(testApp)
val entityServiceConstructor: (ModelSchema) => EntityService = EntityService.constructor(app)
private def randUUID = java.util.UUID.randomUUID.toString
@@ -246,42 +257,59 @@ class WorkspaceTagsServiceSpec extends BaseServiceSpec with WorkspaceApiService
// ==========================================================================
// helpers for tests
// ==========================================================================
- private def testPut(tags: List[String], expected: List[String]) = {
+ private def testPut(tags: List[String], expected: List[String]) =
singlepassTest(tags, expected, Put)
- }
- private def testPut(firstTags: List[String], firstExpected: List[String], secondTags: List[String], secondExpected: List[String]) = {
+ private def testPut(firstTags: List[String],
+ firstExpected: List[String],
+ secondTags: List[String],
+ secondExpected: List[String]
+ ) =
multipassTest(firstTags, firstExpected, Put, secondTags, secondExpected)
- }
- private def testPatch(tags: List[String], expected: List[String]) = {
+ private def testPatch(tags: List[String], expected: List[String]) =
singlepassTest(tags, expected, Patch)
- }
- private def testPatch(firstTags: List[String], firstExpected: List[String], secondTags: List[String], secondExpected: List[String]) = {
+ private def testPatch(firstTags: List[String],
+ firstExpected: List[String],
+ secondTags: List[String],
+ secondExpected: List[String]
+ ) =
multipassTest(firstTags, firstExpected, Patch, secondTags, secondExpected)
- }
- private def testDelete(tags: List[String], expected: List[String]) = {
+ private def testDelete(tags: List[String], expected: List[String]) =
singlepassTest(tags, expected, Delete)
- }
- private def testDelete(firstTags: List[String], firstExpected: List[String], secondTags: List[String], secondExpected: List[String]) = {
+ private def testDelete(firstTags: List[String],
+ firstExpected: List[String],
+ secondTags: List[String],
+ secondExpected: List[String]
+ ) =
multipassTest(firstTags, firstExpected, Delete, secondTags, secondExpected)
- }
private def singlepassTest(tags: List[String], expected: List[String], method: RequestBuilder) = {
val name = randUUID
- method(workspaceTagsPath(method.method.value.toLowerCase, name), tags) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ method(workspaceTagsPath(method.method.value.toLowerCase, name), tags) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should be(OK)
responseAs[List[String]] should be(expected)
}
}
- private def multipassTest(firstTags: List[String], firstExpected: List[String], secondMethod: RequestBuilder, secondTags: List[String], secondExpected: List[String]) = {
+ private def multipassTest(firstTags: List[String],
+ firstExpected: List[String],
+ secondMethod: RequestBuilder,
+ secondTags: List[String],
+ secondExpected: List[String]
+ ) = {
val name = randUUID
- Put(workspaceTagsPath("put", name), firstTags) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ Put(workspaceTagsPath("put", name), firstTags) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should be(OK)
responseAs[List[String]] should be(firstExpected)
- secondMethod(workspaceTagsPath(secondMethod.method.value.toLowerCase, name), secondTags) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ secondMethod(workspaceTagsPath(secondMethod.method.value.toLowerCase, name), secondTags) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should be(OK)
responseAs[List[String]] should be(secondExpected)
}
@@ -310,9 +338,9 @@ class MockTagsRawlsDao extends MockRawlsDAO with Assertions {
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authdomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -325,7 +353,7 @@ class MockTagsRawlsDao extends MockRawlsDAO with Assertions {
WorkspaceState.Ready
)
- private def workspaceResponse(ws:WorkspaceDetails=workspace) = WorkspaceResponse(
+ private def workspaceResponse(ws: WorkspaceDetails = workspace) = WorkspaceResponse(
Some(WorkspaceAccessLevels.ProjectOwner),
canShare = Some(false),
canCompute = Some(true),
@@ -337,60 +365,98 @@ class MockTagsRawlsDao extends MockRawlsDAO with Assertions {
None
)
-
private def workspaceFromState(ns: String, name: String) = {
val tags = statefulTagMap.getOrElse(name, ListBuffer.empty[String])
val tagAttrs = (tags map AttributeString).toSeq
- workspace.copy(attributes = Option(Map(
- AttributeName.withTagsNS() -> AttributeValueList(tagAttrs)
- )))
+ workspace.copy(attributes =
+ Option(
+ Map(
+ AttributeName.withTagsNS() -> AttributeValueList(tagAttrs)
+ )
+ )
+ )
}
- override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] = {
+ override def getWorkspace(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceResponse] =
// AttributeName.withTagsNS() -> AttributeValueList(Seq(AttributeString("foo"),AttributeString("bar")))
ns match {
case "notags" => Future.successful(workspaceResponse())
- case "onetag" => Future.successful(workspaceResponse(workspace.copy(attributes = Option(Map(
- AttributeName.withTagsNS() -> AttributeValueList(Seq(AttributeString("wibble")))
- )))))
- case "threetags" => Future.successful(workspaceResponse(workspace.copy(attributes = Option(Map(
- AttributeName.withTagsNS() -> AttributeValueList(Seq(AttributeString("foo"),AttributeString("bar"),AttributeString("baz")))
- )))))
- case "mixedattrs" => Future.successful(workspaceResponse(workspace.copy(attributes = Option(Map(
- AttributeName.withTagsNS() -> AttributeValueList(Seq(AttributeString("boop"),AttributeString("blep"))),
- AttributeName.withDefaultNS("someDefault") -> AttributeNumber(123),
- AttributeName.withLibraryNS("someLibrary") -> AttributeBoolean(true)
- )))))
+ case "onetag" =>
+ Future.successful(
+ workspaceResponse(
+ workspace.copy(attributes =
+ Option(
+ Map(
+ AttributeName.withTagsNS() -> AttributeValueList(Seq(AttributeString("wibble")))
+ )
+ )
+ )
+ )
+ )
+ case "threetags" =>
+ Future.successful(
+ workspaceResponse(
+ workspace.copy(attributes =
+ Option(
+ Map(
+ AttributeName.withTagsNS() -> AttributeValueList(
+ Seq(AttributeString("foo"), AttributeString("bar"), AttributeString("baz"))
+ )
+ )
+ )
+ )
+ )
+ )
+ case "mixedattrs" =>
+ Future.successful(
+ workspaceResponse(
+ workspace.copy(attributes =
+ Option(
+ Map(
+ AttributeName.withTagsNS() -> AttributeValueList(
+ Seq(AttributeString("boop"), AttributeString("blep"))
+ ),
+ AttributeName.withDefaultNS("someDefault") -> AttributeNumber(123),
+ AttributeName.withLibraryNS("someLibrary") -> AttributeBoolean(true)
+ )
+ )
+ )
+ )
+ )
case "put" | "patch" | "delete" =>
Future.successful(workspaceResponse(workspaceFromState(ns, name)))
case _ =>
Future.successful(workspaceResponse())
}
- }
- override def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit userToken: WithAccessToken): Future[WorkspaceDetails] = {
+ override def patchWorkspaceAttributes(ns: String, name: String, attributes: Seq[AttributeUpdateOperation])(implicit
+ userToken: WithAccessToken
+ ): Future[WorkspaceDetails] = {
ns match {
// unsafe casts throughout here - we want to throw exceptions if anything is the wrong type
case "put" =>
attributes match {
- case Seq(op:AddUpdateAttribute) =>
+ case Seq(op: AddUpdateAttribute) =>
val tags = op.addUpdateAttribute.asInstanceOf[AttributeValueList].list map {
_.asInstanceOf[AttributeString].value
}
- statefulTagMap.put(name, ListBuffer( tags:_* ))
+ statefulTagMap.put(name, ListBuffer(tags: _*))
case _ => fail("Put operation should consist of one AddUpdateAttribute operation")
}
case "patch" =>
- assert( attributes.forall(_.isInstanceOf[AddListMember]),
- "Patch operation should consist of only AddListMember operations" )
+ assert(attributes.forall(_.isInstanceOf[AddListMember]),
+ "Patch operation should consist of only AddListMember operations"
+ )
val newTags = attributes.map(_.asInstanceOf[AddListMember].newMember.asInstanceOf[AttributeString].value)
val currentTags = statefulTagMap.getOrElse(name, ListBuffer.empty[String])
val finalTags = currentTags ++ newTags
statefulTagMap.put(name, finalTags)
case "delete" =>
- assert( attributes.forall(_.isInstanceOf[RemoveListMember]),
- "Delete operation should consist of only AddListMember operations" )
- val removeTags = attributes.map(_.asInstanceOf[RemoveListMember].removeMember.asInstanceOf[AttributeString].value)
+ assert(attributes.forall(_.isInstanceOf[RemoveListMember]),
+                 "Delete operation should consist of only RemoveListMember operations"
+ )
+ val removeTags =
+ attributes.map(_.asInstanceOf[RemoveListMember].removeMember.asInstanceOf[AttributeString].value)
val currentTags = statefulTagMap.getOrElse(name, ListBuffer.empty[String])
val finalTags = currentTags --= removeTags
statefulTagMap.put(name, finalTags)
@@ -400,5 +466,4 @@ class MockTagsRawlsDao extends MockRawlsDAO with Assertions {
Future.successful(workspaceFromState(ns, name))
}
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectivesSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectivesSpec.scala
index 7408ef777..b34e0a501 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectivesSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/EnabledUserDirectivesSpec.scala
@@ -24,94 +24,115 @@ import org.scalatest.matchers.should.Matchers
import scala.concurrent.ExecutionContext
class EnabledUserDirectivesSpec
- extends AnyFreeSpec
+ extends AnyFreeSpec
with EnabledUserDirectives
with Matchers
with ScalatestRouteTest
with BeforeAndAfterAll
with SprayJsonSupport {
- override implicit val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
+ implicit override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
val enabledUser: UserInfo = UserInfo("enabled@nowhere.com", OAuth2BearerToken("enabled"), 123456, "enabled-id")
val disabledUser: UserInfo = UserInfo("disabled@nowhere.com", OAuth2BearerToken("disabled"), 123456, "disabled-id")
- val unregisteredUser: UserInfo = UserInfo("unregistered@nowhere.com", OAuth2BearerToken("unregistered"), 123456, "unregistered-id")
- val samApiExceptionUser: UserInfo = UserInfo("samapiexception@nowhere.com", OAuth2BearerToken("samapiexception"), 123456, "samapiexception-id")
+ val unregisteredUser: UserInfo =
+ UserInfo("unregistered@nowhere.com", OAuth2BearerToken("unregistered"), 123456, "unregistered-id")
+ val samApiExceptionUser: UserInfo =
+ UserInfo("samapiexception@nowhere.com", OAuth2BearerToken("samapiexception"), 123456, "samapiexception-id")
val samUserInfoPath = "/register/user/v2/self/info"
var mockSamServer: ClientAndServer = _
- def stopMockSamServer(): Unit = {
+ def stopMockSamServer(): Unit =
mockSamServer.stop()
- }
def startMockSamServer(): Unit = {
mockSamServer = startClientAndServer(MockUtils.samServerPort)
// enabled user
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath(samUserInfoPath)
- .withHeader(new Header("Authorization", "Bearer enabled")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath(samUserInfoPath)
+ .withHeader(new Header("Authorization", "Bearer enabled"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody("""{
- | "adminEnabled": true,
- | "enabled": true,
- | "userEmail": "enabled@nowhere.com",
- | "userSubjectId": "enabled-id"
- |}""".stripMargin).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "adminEnabled": true,
+ | "enabled": true,
+ | "userEmail": "enabled@nowhere.com",
+ | "userSubjectId": "enabled-id"
+ |}""".stripMargin)
+ .withStatusCode(OK.intValue)
)
// disabled user
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath(samUserInfoPath)
- .withHeader(new Header("Authorization", "Bearer disabled")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath(samUserInfoPath)
+ .withHeader(new Header("Authorization", "Bearer disabled"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody("""{
- | "adminEnabled": false,
- | "enabled": false,
- | "userEmail": "disabled@nowhere.com",
- | "userSubjectId": "disabled-id"
- |}""".stripMargin).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "adminEnabled": false,
+ | "enabled": false,
+ | "userEmail": "disabled@nowhere.com",
+ | "userSubjectId": "disabled-id"
+ |}""".stripMargin)
+ .withStatusCode(OK.intValue)
)
// unregistered user
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath(samUserInfoPath)
- .withHeader(new Header("Authorization", "Bearer unregistered")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath(samUserInfoPath)
+ .withHeader(new Header("Authorization", "Bearer unregistered"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody("""{
- | "causes": [],
- | "message": "Google Id unregistered-id not found in sam",
- | "source": "sam",
- | "stackTrace": [],
- | "statusCode": 404
- |}""".stripMargin).withStatusCode(NotFound.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "causes": [],
+ | "message": "Google Id unregistered-id not found in sam",
+ | "source": "sam",
+ | "stackTrace": [],
+ | "statusCode": 404
+ |}""".stripMargin)
+ .withStatusCode(NotFound.intValue)
)
// ApiException from the Sam client
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath(samUserInfoPath)
- .withHeader(new Header("Authorization", "Bearer samapiexception")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath(samUserInfoPath)
+ .withHeader(new Header("Authorization", "Bearer samapiexception"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody("""{
- | "source": "Sam",
- | "message": "unit test error",
- | "statusCode": 418,
- | "causes": [],
- |}""".stripMargin).withStatusCode(ImATeapot.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "source": "Sam",
+ | "message": "unit test error",
+ | "statusCode": 418,
+ | "causes": [],
+ |}""".stripMargin)
+ .withStatusCode(ImATeapot.intValue)
)
}
@@ -125,13 +146,13 @@ class EnabledUserDirectivesSpec
implicit val exceptionHandler: ExceptionHandler = FireCloudApiService.exceptionHandler
// define a simple route that uses requireEnabledUser
- def userEnabledRoute(userInfo: UserInfo): Route = seal({
+ def userEnabledRoute(userInfo: UserInfo): Route = seal {
get {
requireEnabledUser(userInfo, s"http://localhost:${MockUtils.samServerPort}") {
complete("route was successful")
}
}
- })
+ }
"requireEnabledUser" - {
"should allow enabled users" in {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupportSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupportSpec.scala
index 636327c02..765de7c9c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupportSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/PermissionsSupportSpec.scala
@@ -11,22 +11,21 @@ import akka.http.scaladsl.model.StatusCodes
import scala.concurrent.duration.{Duration, SECONDS}
import scala.concurrent.{Await, ExecutionContext, Future}
-
class PermissionsSupportSpec extends PermissionsSupport with AnyFreeSpecLike {
protected val rawlsDAO: RawlsDAO = new MockRawlsDAO
protected val samDao: SamDAO = new PermissionsSupportMockSamDAO
implicit protected val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
- val dur:Duration = Duration(60, SECONDS)
+ val dur: Duration = Duration(60, SECONDS)
"tryIsGroupMember" - {
"should return true if user is a member" in {
- assert( Await.result(tryIsGroupMember(UserInfo("", "alice"), "apples"), dur) )
- assert( Await.result(tryIsGroupMember(UserInfo("", "bob"), "bananas"), dur) )
+ assert(Await.result(tryIsGroupMember(UserInfo("", "alice"), "apples"), dur))
+ assert(Await.result(tryIsGroupMember(UserInfo("", "bob"), "bananas"), dur))
}
"should return false if user is not a member" in {
- assert( !Await.result(tryIsGroupMember(UserInfo("", "alice"), "bananas"), dur) )
- assert( !Await.result(tryIsGroupMember(UserInfo("", "bob"), "apples"), dur) )
+ assert(!Await.result(tryIsGroupMember(UserInfo("", "alice"), "bananas"), dur))
+ assert(!Await.result(tryIsGroupMember(UserInfo("", "bob"), "apples"), dur))
}
"should catch and wrap source exceptions" in {
val ex = intercept[FireCloudExceptionWithErrorReport] {
@@ -39,18 +38,18 @@ class PermissionsSupportSpec extends PermissionsSupport with AnyFreeSpecLike {
"asGroupMember" - {
"should allow inner function to succeed if user is a member" in {
implicit val userInfo = UserInfo("", "alice")
- def command = asGroupMember("apples") { Future.successful(RequestComplete(StatusCodes.OK)) }
+ def command = asGroupMember("apples")(Future.successful(RequestComplete(StatusCodes.OK)))
val x = Await.result(command, dur)
- assertResult(RequestComplete(StatusCodes.OK)) { x }
+ assertResult(RequestComplete(StatusCodes.OK))(x)
}
"should throw FireCloudExceptionWithErrorReport if user is not a member" in {
implicit val userInfo = UserInfo("", "bob")
- def command = asGroupMember("apples") { Future.successful(RequestComplete(StatusCodes.OK)) }
+ def command = asGroupMember("apples")(Future.successful(RequestComplete(StatusCodes.OK)))
val x = intercept[FireCloudExceptionWithErrorReport] {
Await.result(command, dur)
}
- assertResult(Some(StatusCodes.Forbidden)) { x.errorReport.statusCode }
- assertResult("You must be in the appropriate group.") { x.errorReport.message }
+ assertResult(Some(StatusCodes.Forbidden))(x.errorReport.statusCode)
+ assertResult("You must be in the appropriate group.")(x.errorReport.message)
}
}
}
@@ -61,11 +60,10 @@ class PermissionsSupportMockSamDAO extends MockSamDAO {
"bananas" -> Seq("bob")
)
- override def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean] = {
+ override def isGroupMember(groupName: WorkbenchGroupName, userInfo: UserInfo): Future[Boolean] =
userInfo.id match {
case "failme" => Future.failed(new Exception("intentional exception for unit tests"))
- case _ => Future.successful(groupMap.getOrElse(groupName.value, Seq.empty[String]).contains(userInfo.id))
+ case _ => Future.successful(groupMap.getOrElse(groupName.value, Seq.empty[String]).contains(userInfo.id))
}
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthroughSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthroughSpec.scala
index 8ec3ccf93..1651b1519 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthroughSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/StreamingPassthroughSpec.scala
@@ -18,9 +18,12 @@ import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.util.Try
-class StreamingPassthroughSpec extends AnyFreeSpec
- with Matchers with BeforeAndAfterAll with ScalatestRouteTest
- with StreamingPassthrough {
+class StreamingPassthroughSpec
+ extends AnyFreeSpec
+ with Matchers
+ with BeforeAndAfterAll
+ with ScalatestRouteTest
+ with StreamingPassthrough {
implicit val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.global
@@ -38,11 +41,13 @@ class StreamingPassthroughSpec extends AnyFreeSpec
// set up mockserver responses for each testable status code
testableStatusCodes foreach { statusCode =>
- val request = org.mockserver.model.HttpRequest.request()
+ val request = org.mockserver.model.HttpRequest
+ .request()
.withMethod("GET")
.withPath(s"/statuscode/checker/${statusCode.intValue()}")
- val response = org.mockserver.model.HttpResponse.response()
+ val response = org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(statusCode.intValue())
.withBody(statusCode.reason)
@@ -52,10 +57,8 @@ class StreamingPassthroughSpec extends AnyFreeSpec
}
}
- override protected def afterAll(): Unit = {
+ override protected def afterAll(): Unit =
localMockserver.stop()
- }
-
"convertToRemoteUri" - {
"should calculate a remainder" in {
@@ -125,45 +128,51 @@ class StreamingPassthroughSpec extends AnyFreeSpec
// fixtures for the next set of tests
val fixtureHeaders = Seq(Accept(Seq(MediaRanges.`application/*`)))
val fixtureRequest = HttpRequest(method = HttpMethods.POST,
- uri = Uri("http://localhost:8123/foo/bar/baz/qux"),
- headers = fixtureHeaders)
+ uri = Uri("http://localhost:8123/foo/bar/baz/qux"),
+ headers = fixtureHeaders
+ )
"should NOT forward Timeout-Access header" in {
val requestHeaders = fixtureHeaders :+ RawHeader("Timeout-Access", "doesnt matter")
val expectedHeaders = fixtureHeaders :+ Host("example.com")
val req = fixtureRequest.withHeaders(requestHeaders)
// call transformToPassthroughRequest
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
- actual.headers should contain theSameElementsAs (expectedHeaders)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ actual.headers should contain theSameElementsAs expectedHeaders
}
"should rewrite Host header" in {
val requestHeaders = fixtureHeaders :+ Host("overwritten")
val expectedHeaders = fixtureHeaders :+ Host("example.com")
val req = fixtureRequest.withHeaders(requestHeaders)
// call transformToPassthroughRequest
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
- actual.headers should contain theSameElementsAs (expectedHeaders)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ actual.headers should contain theSameElementsAs expectedHeaders
}
"should forward Authorization header" in {
val requestHeaders = fixtureHeaders :+ Authorization(OAuth2BearerToken("123456"))
val expectedHeaders = requestHeaders :+ Host("example.com")
val req = fixtureRequest.withHeaders(requestHeaders)
// call transformToPassthroughRequest
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
- actual.headers should contain theSameElementsAs(expectedHeaders)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ actual.headers should contain theSameElementsAs expectedHeaders
}
"should forward miscellaneous headers" in {
val requestHeaders = fixtureHeaders :+ RawHeader("X-FireCloud-Id", FireCloudConfig.FireCloud.fireCloudId)
val expectedHeaders = requestHeaders :+ Host("example.com")
val req = fixtureRequest.withHeaders(requestHeaders)
// call transformToPassthroughRequest
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
- actual.headers should contain theSameElementsAs (expectedHeaders)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ actual.headers should contain theSameElementsAs expectedHeaders
}
List(CONNECT, DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT, TRACE) foreach { methodUnderTest =>
s"should preserve request method $methodUnderTest" in {
val req = fixtureRequest.withMethod(methodUnderTest)
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
actual.method shouldBe methodUnderTest
}
}
@@ -171,16 +180,16 @@ class StreamingPassthroughSpec extends AnyFreeSpec
val randomJson = JsObject("mykey" -> JsString(UUID.randomUUID().toString))
val requestEntity = HttpEntity.Strict(ContentTypes.`application/json`, ByteString.apply(randomJson.compactPrint))
val req = fixtureRequest.withEntity(requestEntity)
- val actual = transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
+ val actual =
+ transformToPassthroughRequest(Path("/foo/bar"), Uri("https://example.com/api/version/foo"), None)(req)
actual.entity shouldBe requestEntity
}
}
"mockserver-based tests" - {
- val testRoute = {
+ val testRoute =
streamingPassthrough(Uri(s"http://localhost:$localMockserverPort/statuscode/checker"))
- }
testableStatusCodes foreach { codeUnderTest =>
s"should reply with remote-system ${codeUnderTest.intValue} (${codeUnderTest.reason()}) responses" in {
@@ -203,6 +212,4 @@ class StreamingPassthroughSpec extends AnyFreeSpec
}
}
-
-
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatterSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatterSpec.scala
index d09ea0412..6fcb05b48 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatterSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVFormatterSpec.scala
@@ -23,15 +23,34 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
"Sparse data fields should pass for" - {
"Entity and Membership Set Data" in {
- val samples = AttributeEntityReferenceList(Seq(
- AttributeEntityReference(entityType = "sample", entityName = "sample_01"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_02"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_03"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_04")))
+ val samples = AttributeEntityReferenceList(
+ Seq(
+ AttributeEntityReference(entityType = "sample", entityName = "sample_01"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_02"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_03"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_04")
+ )
+ )
val sampleSetList = List(
- Entity("sample_set_1", "sample_set", Map(AttributeName.withDefaultNS("foo") -> AttributeString("bar"), AttributeName.withDefaultNS("samples") -> samples)),
- Entity("sample_set_2", "sample_set", Map(AttributeName.withDefaultNS("bar") -> AttributeString("foo"), AttributeName.withDefaultNS("samples") -> samples)),
- Entity("sample_set_3", "sample_set", Map(AttributeName.withDefaultNS("baz") -> AttributeString("?#*"), AttributeName.withDefaultNS("samples") -> samples)))
+ Entity("sample_set_1",
+ "sample_set",
+ Map(AttributeName.withDefaultNS("foo") -> AttributeString("bar"),
+ AttributeName.withDefaultNS("samples") -> samples
+ )
+ ),
+ Entity("sample_set_2",
+ "sample_set",
+ Map(AttributeName.withDefaultNS("bar") -> AttributeString("foo"),
+ AttributeName.withDefaultNS("samples") -> samples
+ )
+ ),
+ Entity("sample_set_3",
+ "sample_set",
+ Map(AttributeName.withDefaultNS("baz") -> AttributeString("?#*"),
+ AttributeName.withDefaultNS("samples") -> samples
+ )
+ )
+ )
testEntityDataSet("sample_set", sampleSetList, None)
testMembershipDataSet("sample_set", sampleSetList, sampleSetList.size * samples.list.size)
@@ -41,14 +60,13 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
"Sample tests should pass for" - {
"Entity Data" in {
- val sampleAtts = {
+ val sampleAtts =
Map(
AttributeName.withDefaultNS("sample_type") -> AttributeString("Blood"),
AttributeName.withDefaultNS("header_1") -> AttributeString(MockUtils.randomAlpha()),
AttributeName.withDefaultNS("header_2") -> AttributeString(MockUtils.randomAlpha()),
- AttributeName.withDefaultNS("participant") -> AttributeEntityReference("participant","participant_name")
+ AttributeName.withDefaultNS("participant") -> AttributeEntityReference("participant", "participant_name")
)
- }
val sampleList = List(
Entity("sample_01", "sample", sampleAtts),
Entity("sample_02", "sample", sampleAtts),
@@ -57,16 +75,31 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
)
val results = testEntityDataSet("sample", sampleList, None)
- results should contain theSameElementsAs Seq("entity:sample_id", "sample_type", "header_1", "header_2", "participant")
- results.head should be ("entity:sample_id")
+ results should contain theSameElementsAs Seq("entity:sample_id",
+ "sample_type",
+ "header_1",
+ "header_2",
+ "participant"
+ )
+ results.head should be("entity:sample_id")
val results2 = testEntityDataSet("sample", sampleList, Option(IndexedSeq.empty))
- results2 should contain theSameElementsAs Seq("entity:sample_id", "sample_type", "header_1", "header_2", "participant")
- results2.head should be ("entity:sample_id")
+ results2 should contain theSameElementsAs Seq("entity:sample_id",
+ "sample_type",
+ "header_1",
+ "header_2",
+ "participant"
+ )
+ results2.head should be("entity:sample_id")
val results3 = testEntityDataSet("sample", sampleList, Option(IndexedSeq("")))
- results3 should contain theSameElementsAs Seq("entity:sample_id", "sample_type", "header_1", "header_2", "participant")
- results3.head should be ("entity:sample_id")
+ results3 should contain theSameElementsAs Seq("entity:sample_id",
+ "sample_type",
+ "header_1",
+ "header_2",
+ "participant"
+ )
+ results3.head should be("entity:sample_id")
Seq(
IndexedSeq("header_2", "does_not_exist", "header_1"),
@@ -74,23 +107,30 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
IndexedSeq("header_1", "header_2"),
IndexedSeq("header_1")
).foreach { requestedHeaders =>
- val resultsWithSpecificHeaders = testEntityDataSet("sample", sampleList, Option(requestedHeaders), TsvTypes.UPDATE)
- resultsWithSpecificHeaders should contain theSameElementsInOrderAs Seq("update:sample_id") ++ requestedHeaders.filterNot(_.equals("sample_id"))
+ val resultsWithSpecificHeaders =
+ testEntityDataSet("sample", sampleList, Option(requestedHeaders), TsvTypes.UPDATE)
+ resultsWithSpecificHeaders should contain theSameElementsInOrderAs Seq("update:sample_id") ++ requestedHeaders
+ .filterNot(_.equals("sample_id"))
}
- testEntityDataSet("sample", sampleList, Option(IndexedSeq("participant"))) should contain theSameElementsInOrderAs Seq("entity:sample_id", "participant")
+ testEntityDataSet("sample",
+ sampleList,
+ Option(IndexedSeq("participant"))
+ ) should contain theSameElementsInOrderAs Seq("entity:sample_id", "participant")
}
"Set Data" in {
- val samples = AttributeEntityReferenceList(Seq(
- AttributeEntityReference(entityType = "sample", entityName = "sample_01"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_02"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_03"),
- AttributeEntityReference(entityType = "sample", entityName = "sample_04")))
- val sampleSetAtts = {
+ val samples = AttributeEntityReferenceList(
+ Seq(
+ AttributeEntityReference(entityType = "sample", entityName = "sample_01"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_02"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_03"),
+ AttributeEntityReference(entityType = "sample", entityName = "sample_04")
+ )
+ )
+ val sampleSetAtts =
Map(AttributeName.withDefaultNS("samples") -> samples)
- }
val sampleSetList = List(Entity("sample_set_1", "sample_set", sampleSetAtts))
testMembershipDataSet("sample_set", sampleSetList, samples.list.size)
}
@@ -99,35 +139,39 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
"Participant tests should pass for" - {
"Entity Data" in {
- val participantAtts1 = {
+ val participantAtts1 =
Map(
- AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference(entityType = "participant", entityName = "1143"),
+ AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference(entityType = "participant",
+ entityName = "1143"
+ ),
AttributeName.withDefaultNS("gender") -> AttributeString("F"),
AttributeName.withDefaultNS("age") -> AttributeString("52")
)
- }
- val participantAtts2 = {
+ val participantAtts2 =
Map(
- AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference(entityType = "participant", entityName = "1954"),
+ AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference(entityType = "participant",
+ entityName = "1954"
+ ),
AttributeName.withDefaultNS("gender") -> AttributeString("M"),
AttributeName.withDefaultNS("age") -> AttributeString("61")
)
- }
- val participantList = List(Entity("1143", "participant", participantAtts1),
- Entity("1954", "participant", participantAtts2))
+ val participantList =
+ List(Entity("1143", "participant", participantAtts1), Entity("1954", "participant", participantAtts2))
val results = testEntityDataSet("participant", participantList, None)
results should contain theSameElementsAs Seq("entity:participant_id", "participant_id", "gender", "age")
- results.head should be ("entity:participant_id")
+ results.head should be("entity:participant_id")
}
"Set Data" in {
- val participants = AttributeEntityReferenceList(Seq(
- AttributeEntityReference(entityType = "participant", entityName = "subject_HCC1143"),
- AttributeEntityReference(entityType = "participant", entityName = "subject_HCC1144")))
- val participantSetAtts = {
+ val participants = AttributeEntityReferenceList(
+ Seq(
+ AttributeEntityReference(entityType = "participant", entityName = "subject_HCC1143"),
+ AttributeEntityReference(entityType = "participant", entityName = "subject_HCC1144")
+ )
+ )
+ val participantSetAtts =
Map(AttributeName.withDefaultNS("participants") -> participants)
- }
val participantSetList = List(Entity("participant_set_1", "participant_set", participantSetAtts))
testMembershipDataSet("participant_set", participantSetList, participants.list.size)
}
@@ -136,37 +180,52 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
"Pair tests should pass for" - {
"Entity data" in {
- val pairAtts1 = {
+ val pairAtts1 =
Map(
- AttributeName.withDefaultNS("case_sample") -> AttributeEntityReference(entityType = "sample", entityName = "345"),
- AttributeName.withDefaultNS("control_sample") -> AttributeEntityReference(entityType = "sample", entityName = "456"),
- AttributeName.withDefaultNS("participant") -> AttributeEntityReference(entityType = "participant", entityName = "1143"),
+ AttributeName.withDefaultNS("case_sample") -> AttributeEntityReference(entityType = "sample",
+ entityName = "345"
+ ),
+ AttributeName.withDefaultNS("control_sample") -> AttributeEntityReference(entityType = "sample",
+ entityName = "456"
+ ),
+ AttributeName.withDefaultNS("participant") -> AttributeEntityReference(entityType = "participant",
+ entityName = "1143"
+ ),
AttributeName.withDefaultNS("header_1") -> AttributeString(MockUtils.randomAlpha())
)
- }
- val pairAtts2 = {
+ val pairAtts2 =
Map(
- AttributeName.withDefaultNS("case_sample") -> AttributeEntityReference(entityType = "sample", entityName = "567"),
- AttributeName.withDefaultNS("control_sample") -> AttributeEntityReference(entityType = "sample", entityName = "678"),
- AttributeName.withDefaultNS("participant") -> AttributeEntityReference(entityType = "participant", entityName = "1954"),
+ AttributeName.withDefaultNS("case_sample") -> AttributeEntityReference(entityType = "sample",
+ entityName = "567"
+ ),
+ AttributeName.withDefaultNS("control_sample") -> AttributeEntityReference(entityType = "sample",
+ entityName = "678"
+ ),
+ AttributeName.withDefaultNS("participant") -> AttributeEntityReference(entityType = "participant",
+ entityName = "1954"
+ ),
AttributeName.withDefaultNS("header_1") -> AttributeString(MockUtils.randomAlpha())
)
- }
- val pairList = List(Entity("1", "pair", pairAtts1),
- Entity("2", "pair", pairAtts2))
+ val pairList = List(Entity("1", "pair", pairAtts1), Entity("2", "pair", pairAtts2))
val results = testEntityDataSet("pair", pairList, None)
- results should contain theSameElementsAs Seq("entity:pair_id", "case_sample", "control_sample", "participant", "header_1")
- results.head should be ("entity:pair_id")
+ results should contain theSameElementsAs Seq("entity:pair_id",
+ "case_sample",
+ "control_sample",
+ "participant",
+ "header_1"
+ )
+ results.head should be("entity:pair_id")
}
"Set data" in {
- val pairs = AttributeEntityReferenceList(Seq(
- AttributeEntityReference(entityType = "pair", entityName = "1"),
- AttributeEntityReference(entityType = "pair", entityName = "2")))
- val pairSetAtts = {
+ val pairs = AttributeEntityReferenceList(
+ Seq(AttributeEntityReference(entityType = "pair", entityName = "1"),
+ AttributeEntityReference(entityType = "pair", entityName = "2")
+ )
+ )
+ val pairSetAtts =
Map(AttributeName.withDefaultNS("pairs") -> pairs)
- }
val pairSetList = List(Entity("pair_set_1", "pair_set", pairSetAtts))
testMembershipDataSet("pair_set", pairSetList, pairs.list.size)
}
@@ -174,30 +233,26 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
"Values containing tabs should be quoted" in {
val entityType = "sample"
- val attrs1 = {
+ val attrs1 =
Map(
AttributeName.withDefaultNS("nowhitespace") -> AttributeString("abcdefg"),
AttributeName.withDefaultNS("tabs") -> AttributeString("this\tvalue\thas\ttabs"),
AttributeName.withDefaultNS("spaces") -> AttributeString("this value has spaces")
)
- }
- val attrs2 = {
+ val attrs2 =
Map(
AttributeName.withDefaultNS("nowhitespace") -> AttributeString("hijklm"),
AttributeName.withDefaultNS("tabs") -> AttributeString("another\tvalue\twith\ttabs"),
AttributeName.withDefaultNS("spaces") -> AttributeString("another value with spaces")
)
- }
- val entities = List(
- Entity("1", entityType, attrs1),
- Entity("2", entityType, attrs2))
+ val entities = List(Entity("1", entityType, attrs1), Entity("2", entityType, attrs2))
val tsvHeaders = TSVFormatter.makeEntityHeaders(entityType, List("nowhitespace", "tabs", "spaces"), None)
val tsvRows = TSVFormatter.makeEntityRows(entityType, entities, tsvHeaders)
tsvRows shouldBe Seq(
Seq("1", "abcdefg", "\"this\tvalue\thas\ttabs\"", "this value has spaces"),
- Seq("2", "hijklm", "\"another\tvalue\twith\ttabs\"", "another value with spaces"),
+ Seq("2", "hijklm", "\"another\tvalue\twith\ttabs\"", "another value with spaces")
)
}
@@ -207,25 +262,34 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
AttributeNumber(123.45) -> "123.45",
AttributeBoolean(true) -> "true",
AttributeBoolean(false) -> "false",
- AttributeValueList(Seq(AttributeString("one"), AttributeString("two"), AttributeString("three"))) -> """["one","two","three"]""",
- AttributeValueRawJson(JsObject(Map("foo" -> JsString("bar"), "baz" -> JsNumber(123)))) -> """{"foo":"bar","baz":123}""",
- AttributeEntityReference("targetType", "targetName") -> """{"entityType":"targetType","entityName":"targetName"}""",
- AttributeEntityReferenceList(Seq(
- AttributeEntityReference("type1", "name1"),
- AttributeEntityReference("type2", "name2"))) -> """[{"entityType":"type1","entityName":"name1"},{"entityType":"type2","entityName":"name2"}]"""
+ AttributeValueList(
+ Seq(AttributeString("one"), AttributeString("two"), AttributeString("three"))
+ ) -> """["one","two","three"]""",
+ AttributeValueRawJson(
+ JsObject(Map("foo" -> JsString("bar"), "baz" -> JsNumber(123)))
+ ) -> """{"foo":"bar","baz":123}""",
+ AttributeEntityReference("targetType",
+ "targetName"
+ ) -> """{"entityType":"targetType","entityName":"targetName"}""",
+ AttributeEntityReferenceList(
+ Seq(AttributeEntityReference("type1", "name1"), AttributeEntityReference("type2", "name2"))
+ ) -> """[{"entityType":"type1","entityName":"name1"},{"entityType":"type2","entityName":"name2"}]"""
)
"tsvSafeAttribute() method" - {
- tsvSafeAttributeTestData foreach {
- case (input, expected) =>
- s"should stringify correctly for input $input" in {
- TSVFormatter.tsvSafeAttribute(input) shouldBe expected
- }
+ tsvSafeAttributeTestData foreach { case (input, expected) =>
+ s"should stringify correctly for input $input" in {
+ TSVFormatter.tsvSafeAttribute(input) shouldBe expected
+ }
}
}
}
- private def testEntityDataSet(entityType: String, entities: List[Entity], requestedHeaders: Option[IndexedSeq[String]], tsvType: TsvType = TsvTypes.ENTITY) = {
+ private def testEntityDataSet(entityType: String,
+ entities: List[Entity],
+ requestedHeaders: Option[IndexedSeq[String]],
+ tsvType: TsvType = TsvTypes.ENTITY
+ ) = {
val allHeaders = entities flatMap { e =>
e.attributes map { a => a._1.name }
@@ -247,7 +311,7 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
headers(0) should be(s"${tsvType.toString}:${entityType}_id")
// Check that all lines have the same number of columns as the header.
- lines foreach( _.split("\t", -1).length should equal(headers.size) )
+ lines foreach (_.split("\t", -1).length should equal(headers.size))
headers
}
@@ -267,7 +331,11 @@ class TSVFormatterSpec extends AnyFreeSpec with ScalaFutures with Matchers with
lines foreach { _.split("\t", -1).length should equal(2) }
- lines.head.split("\t") should be(Array(s"${TsvTypes.MEMBERSHIP.toString}:${entityType}_id", FirecloudModelSchema.getCollectionMemberType(entityType).get.get))
+ lines.head.split("\t") should be(
+ Array(s"${TsvTypes.MEMBERSHIP.toString}:${entityType}_id",
+ FirecloudModelSchema.getCollectionMemberType(entityType).get.get
+ )
+ )
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVParserSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVParserSpec.scala
index f3fcd5171..a720ae26e 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVParserSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TSVParserSpec.scala
@@ -103,74 +103,58 @@ class TSVParserSpec extends AnyFlatSpec {
"EntityClient.backwardsCompatStripIdSuffixes" should "fix up the names of attributes for certain reference types for pairs" in {
val entityType: String = "pair"
- val requiredAttributes: Map[String, String] = Map("case_sample_id" -> "sample",
- "control_sample_id" -> "sample",
- "participant_id" -> "participant")
-
- val input = Seq(
- "entity:pair_id",
- "case_sample_id",
- "control_sample_id",
- "participant_id",
- "some_other_id",
- "ref_dict",
- "ref_fasta")
-
- val expect = Seq(
- "entity:pair_id",
- "case_sample",
- "control_sample",
- "participant",
- "some_other_id",
- "ref_dict",
- "ref_fasta")
+ val requiredAttributes: Map[String, String] =
+ Map("case_sample_id" -> "sample", "control_sample_id" -> "sample", "participant_id" -> "participant")
+
+ val input = Seq("entity:pair_id",
+ "case_sample_id",
+ "control_sample_id",
+ "participant_id",
+ "some_other_id",
+ "ref_dict",
+ "ref_fasta"
+ )
+
+ val expect =
+ Seq("entity:pair_id", "case_sample", "control_sample", "participant", "some_other_id", "ref_dict", "ref_fasta")
assertResult(TSVLoadFile(input.head, expect, Seq.empty), entityType) {
- EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty), entityType, FirecloudModelSchema)
+ EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty),
+ entityType,
+ FirecloudModelSchema
+ )
}
}
it should "fix up the names of attributes for certain reference types for samples" in {
val entityType: String = "sample"
- val requiredAttributes: Map[String, String] = Map(
- "participant_id" -> "participant")
-
- val input = Seq(
- "entity:sample_id",
- "participant_id",
- "some_other_id",
- "ref_dict",
- "ref_fasta")
-
- val expect = Seq(
- "entity:sample_id",
- "participant",
- "some_other_id",
- "ref_dict",
- "ref_fasta")
+ val requiredAttributes: Map[String, String] = Map("participant_id" -> "participant")
+
+ val input = Seq("entity:sample_id", "participant_id", "some_other_id", "ref_dict", "ref_fasta")
+
+ val expect = Seq("entity:sample_id", "participant", "some_other_id", "ref_dict", "ref_fasta")
assertResult(TSVLoadFile(input.head, expect, Seq.empty), entityType) {
- EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty), entityType, FirecloudModelSchema)
+ EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty),
+ entityType,
+ FirecloudModelSchema
+ )
}
}
-
it should "fix up the names of attributes for certain reference types for participant sets" in {
val entityType: String = "participant_set"
val requiredAttributes: Map[String, String] = Map.empty
- val input = Seq(
- "entity:participant_set_id",
- "participant_id",
- "some_other_id")
+ val input = Seq("entity:participant_set_id", "participant_id", "some_other_id")
- val expect = Seq(
- "entity:participant_set_id",
- "participant",
- "some_other_id")
+ val expect = Seq("entity:participant_set_id", "participant", "some_other_id")
assertResult(TSVLoadFile(input.head, expect, Seq.empty), entityType) {
- EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty), entityType, FirecloudModelSchema)
+ EntityService.backwardsCompatStripIdSuffixes(TSVLoadFile(input.head, input, Seq.empty),
+ entityType,
+ FirecloudModelSchema
+ )
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TestRequestBuilding.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TestRequestBuilding.scala
index 4f80ecdf7..5b15720ec 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TestRequestBuilding.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/utils/TestRequestBuilding.scala
@@ -8,19 +8,19 @@ trait TestRequestBuilding extends FireCloudRequestBuilding {
val dummyToken: String = "mF_9.B5f-4.1JqM"
- def dummyAuthHeaders: RequestTransformer = {
+ def dummyAuthHeaders: RequestTransformer =
addCredentials(OAuth2BearerToken(dummyToken))
- }
- def dummyUserIdHeaders(userId: String, token: String = "access_token", email: String = "random@site.com"): WithTransformerConcatenation[HttpRequest, HttpRequest] = {
+ def dummyUserIdHeaders(userId: String,
+ token: String = "access_token",
+ email: String = "random@site.com"
+ ): WithTransformerConcatenation[HttpRequest, HttpRequest] =
addCredentials(OAuth2BearerToken(token)) ~>
addHeader(RawHeader("OIDC_CLAIM_user_id", userId)) ~>
addHeader(RawHeader("OIDC_access_token", token)) ~>
addHeader(RawHeader("OIDC_CLAIM_email", email)) ~>
addHeader(RawHeader("OIDC_CLAIM_expires_in", "100000"))
- }
- def dummyCookieAuthHeaders: RequestTransformer = {
+ def dummyCookieAuthHeaders: RequestTransformer =
addHeader(Cookie("FCtoken", dummyToken))
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ApiServiceSpec.scala
index 44ec8cc25..072734e2f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ApiServiceSpec.scala
@@ -17,7 +17,12 @@ import scala.concurrent.duration._
*/
// common trait to be inherited by API service tests
-trait ApiServiceSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest with SprayJsonSupport with TestRequestBuilding {
+trait ApiServiceSpec
+ extends AnyFlatSpec
+ with Matchers
+ with ScalatestRouteTest
+ with SprayJsonSupport
+ with TestRequestBuilding {
// increase the timeout for ScalatestRouteTest from the default of 1 second, otherwise
// intermittent failures occur on requests not completing in time
implicit val routeTestTimeout: RouteTestTimeout = RouteTestTimeout(5.seconds)
@@ -41,14 +46,25 @@ trait ApiServiceSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest w
def actorRefFactory = system
val nihServiceConstructor = NihService.constructor(
- new Application(agoraDao, googleDao, ontologyDao, rawlsDao, samDao, searchDao, researchPurposeSupport, thurloeDao, shareLogDao, shibbolethDao, cwdsDao, ecmDao)
+ new Application(agoraDao,
+ googleDao,
+ ontologyDao,
+ rawlsDao,
+ samDao,
+ searchDao,
+ researchPurposeSupport,
+ thurloeDao,
+ shareLogDao,
+ shibbolethDao,
+ cwdsDao,
+ ecmDao
+ )
) _
}
// lifted from rawls. prefer this to using theSameElementsAs directly, because its functionality depends on whitespace
- def assertSameElements[T](expected: IterableOnce[T], actual: IterableOnce[T]): Unit = {
+ def assertSameElements[T](expected: IterableOnce[T], actual: IterableOnce[T]): Unit =
expected.iterator.to(Iterable) should contain theSameElementsAs actual.iterator.to(Iterable)
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/BillingApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/BillingApiServiceSpec.scala
index d7f7e68f3..40da1d358 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/BillingApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/BillingApiServiceSpec.scala
@@ -24,39 +24,53 @@ final class BillingApiServiceSpec extends BaseServiceSpec with BillingApiService
workspaceServer = startClientAndServer(MockUtils.workspaceServerPort)
- workspaceServer.when(
- request()
- .withMethod(POST.name)
- .withPath(billingPath))
- .respond(HttpResponse.response()
+ workspaceServer
+ .when(
+ request()
+ .withMethod(POST.name)
+ .withPath(billingPath)
+ )
+ .respond(
+ HttpResponse
+ .response()
.withHeaders(MockUtils.header)
- .withStatusCode(Created.intValue))
+ .withStatusCode(Created.intValue)
+ )
- workspaceServer.when(
- request()
- .withMethod(GET.name)
- .withPath(billingPath + "/project1/members"))
- .respond(HttpResponse.response()
+ workspaceServer
+ .when(
+ request()
+ .withMethod(GET.name)
+ .withPath(billingPath + "/project1/members")
+ )
+ .respond(
+ HttpResponse
+ .response()
.withHeaders(MockUtils.header)
- .withStatusCode(OK.intValue))
+ .withStatusCode(OK.intValue)
+ )
List(PUT, DELETE).foreach { method =>
- workspaceServer.when(
- request()
- .withMethod(method.name)
- .withPath(billingPath + "/project2/user/foo@bar.com"))
- .respond(HttpResponse.response()
+ workspaceServer
+ .when(
+ request()
+ .withMethod(method.name)
+ .withPath(billingPath + "/project2/user/foo@bar.com")
+ )
+ .respond(
+ HttpResponse
+ .response()
.withHeaders(MockUtils.header)
- .withStatusCode(OK.intValue))
+ .withStatusCode(OK.intValue)
+ )
}
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
workspaceServer.stop()
- }
// streamingPassthrough directive needs to see the routes under "/api", which is how FireCloudApiService starts them
- val testableRoutes = pathPrefix("api") { billingServiceRoutes }
+ val testableRoutes = pathPrefix("api")(billingServiceRoutes)
"BillingApiService" - {
"list project members" in {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiServiceSpec.scala
index dc0ce31c6..a1bc86255 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/CromIamApiServiceSpec.scala
@@ -29,7 +29,7 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
}
// streamingPassthrough directive needs to see the routes under "/api", which is how FireCloudApiService starts them
- val testableRoutes = pathPrefix("api") { cromIamApiServiceRoutes }
+ val testableRoutes = pathPrefix("api")(cromIamApiServiceRoutes)
"CromIAM passthrough" - {
@@ -99,13 +99,15 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
"should forward query parameters on GET" in {
- val request = org.mockserver.model.HttpRequest.request()
+ val request = org.mockserver.model.HttpRequest
+ .request()
.withMethod("GET")
.withPath(s"$endpoint")
.withQueryStringParameter("includeKey", "hit")
.withQueryStringParameter("includeKey", "hitFailure")
- val response = org.mockserver.model.HttpResponse.response()
+ val response = org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(200)
.withBody("We got all of your includeKeys")
@@ -113,7 +115,9 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
.when(request)
.respond(response)
- Get(Uri(endpoint).withQuery(Query("includeKey=hit&includeKey=hitFailure"))) ~> dummyUserIdHeaders("1234") ~> sealRoute(testableRoutes) ~> check {
+ Get(Uri(endpoint).withQuery(Query("includeKey=hit&includeKey=hitFailure"))) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(testableRoutes) ~> check {
cromiamServer.verify(request)
status.intValue should equal(200)
@@ -129,8 +133,10 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
// "workspaceServer" mockserver, not the "cromiamServer" mockserver.
val endpointPapiV1 = workflowRoot + "/my-bogus-workflow-id-565656/backend/metadata/operations/foobar"
- val endpointPapiV2 = workflowRoot + "/my-bogus-workflow-id-565656/backend/metadata/projects/proj/operations/foobar"
- val endpointGoogleLifeSciencesBeta = workflowRoot + "/my-bogus-workflow-id-565656/backend/metadata/projects/proj/projId/locations/us-somewhere/operations/opId"
+ val endpointPapiV2 =
+ workflowRoot + "/my-bogus-workflow-id-565656/backend/metadata/projects/proj/operations/foobar"
+ val endpointGoogleLifeSciencesBeta =
+ workflowRoot + "/my-bogus-workflow-id-565656/backend/metadata/projects/proj/projId/locations/us-somewhere/operations/opId"
val myMethods = List(HttpMethods.GET)
"should pass through my methods PAPIv1" in {
@@ -166,13 +172,15 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
"should forward query parameters on GET" in {
- val request = org.mockserver.model.HttpRequest.request()
+ val request = org.mockserver.model.HttpRequest
+ .request()
.withMethod("GET")
.withPath(s"$endpoint")
.withQueryStringParameter("start", "start value")
.withQueryStringParameter("end", "end value")
- val response = org.mockserver.model.HttpResponse.response()
+ val response = org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(200)
.withBody("Got a query with start and end values")
@@ -180,7 +188,9 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
.when(request)
.respond(response)
- Get(Uri(endpoint).withQuery(Query("start=start%20value&end=end%20value"))) ~> dummyUserIdHeaders("1234") ~> sealRoute(testableRoutes) ~> check {
+ Get(Uri(endpoint).withQuery(Query("start=start%20value&end=end%20value"))) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(testableRoutes) ~> check {
cromiamServer.verify(request)
status.intValue should equal(200)
@@ -203,13 +213,15 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
"should forward query parameters on GET" in {
- val request = org.mockserver.model.HttpRequest.request()
+ val request = org.mockserver.model.HttpRequest
+ .request()
.withMethod("GET")
.withPath(s"$endpoint")
.withQueryStringParameter("workflowA", "workflowA value")
.withQueryStringParameter("workflowB", "workflowB value")
- val response = org.mockserver.model.HttpResponse.response()
+ val response = org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(200)
.withBody("Got a query with workflowA and workflowB values")
@@ -217,7 +229,9 @@ class CromIamApiServiceSpec extends BaseServiceSpec with CromIamApiService with
.when(request)
.respond(response)
- Get(Uri(endpoint).withQuery(Query("workflowA=workflowA%20value&workflowB=workflowB%20value"))) ~> dummyUserIdHeaders("1234") ~> sealRoute(testableRoutes) ~> check {
+ Get(
+ Uri(endpoint).withQuery(Query("workflowA=workflowA%20value&workflowB=workflowB%20value"))
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(testableRoutes) ~> check {
cromiamServer.verify(request)
status.intValue should equal(200)
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiServiceSpec.scala
index 5b2a214aa..163ead6fe 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/EntityApiServiceSpec.scala
@@ -38,27 +38,36 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
val invalidFireCloudEntitiesCopyPath = apiPrefix + "/broad-dsde-dev/invalid/entities/copy"
val validEntityCopy = EntityCopyWithoutDestinationDefinition(
- sourceWorkspace = WorkspaceName(namespace="broad-dsde-dev", name="other-ws"),
- entityType = "sample", Seq("sample_01"))
+ sourceWorkspace = WorkspaceName(namespace = "broad-dsde-dev", name = "other-ws"),
+ entityType = "sample",
+ Seq("sample_01")
+ )
val invalidEntityCopy = EntityCopyWithoutDestinationDefinition(
- sourceWorkspace = WorkspaceName(namespace="invalid", name="other-ws"),
- entityType = "sample", Seq("sample_01"))
+ sourceWorkspace = WorkspaceName(namespace = "invalid", name = "other-ws"),
+ entityType = "sample",
+ Seq("sample_01")
+ )
- val validEntityDelete = Seq(EntityId("sample","id"),EntityId("sample","bar"))
+ val validEntityDelete = Seq(EntityId("sample", "id"), EntityId("sample", "bar"))
val invalidEntityDelete = validEntityCopy // we're testing that the payload can't be unmarshalled to a Seq[EntityId]
- val mixedFailEntityDelete = Seq(EntityId("sample","foo"),EntityId("failme","kthxbai"),EntityId("sample","bar"))
- val allFailEntityDelete = Seq(EntityId("failme","kthxbai"))
+ val mixedFailEntityDelete = Seq(EntityId("sample", "foo"), EntityId("failme", "kthxbai"), EntityId("sample", "bar"))
+ val allFailEntityDelete = Seq(EntityId("failme", "kthxbai"))
def entityCopyWithDestination(copyDef: EntityCopyDefinition) = new EntityCopyDefinition(
sourceWorkspace = copyDef.sourceWorkspace,
destinationWorkspace = WorkspaceName("broad-dsde-dev", "valid"),
entityType = copyDef.entityType,
- entityNames = copyDef.entityNames)
+ entityNames = copyDef.entityNames
+ )
val sampleAtts = Map(
AttributeName.withDefaultNS("sample_type") -> AttributeString("Blood"),
- AttributeName.withDefaultNS("ref_fasta") -> AttributeString("gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"),
- AttributeName.withDefaultNS("ref_dict") -> AttributeString("gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.dict"),
+ AttributeName.withDefaultNS("ref_fasta") -> AttributeString(
+ "gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.fasta"
+ ),
+ AttributeName.withDefaultNS("ref_dict") -> AttributeString(
+ "gs://cancer-exome-pipeline-demo-data/Homo_sapiens_assembly19.dict"
+ ),
AttributeName.withDefaultNS("participant_id") -> AttributeEntityReference("participant", "subject_HCC1143")
)
val validSampleEntities = List(Entity("sample_01", "sample", sampleAtts))
@@ -70,9 +79,15 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
.when(
request()
.withMethod("GET")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "valid") + "/sample"))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev",
+ "valid"
+ ) + "/sample"
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withHeaders(MockUtils.header)
.withBody(validSampleEntities.toJson.compactPrint)
.withStatusCode(OK.intValue)
@@ -82,30 +97,42 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
.when(
request()
.withMethod("GET")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "valid")))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "valid")
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
// Valid entity query case
workspaceServer
.when(
request()
.withMethod("GET")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entityQueryPath.format("broad-dsde-dev", "valid") + "/sample"))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entityQueryPath.format("broad-dsde-dev",
+ "valid"
+ ) + "/sample"
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
// Valid/Invalid Copy cases
workspaceServer
.when(
request()
.withMethod("POST")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesEntitiesCopyPath))
+ .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesEntitiesCopyPath)
+ )
.respond(
- callback().
- withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidEntityCopyCallback")
+ callback().withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidEntityCopyCallback")
)
// Invalid Entities by sample type case
@@ -113,9 +140,15 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
.when(
request()
.withMethod("GET")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "invalid") + "/sample"))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev",
+ "invalid"
+ ) + "/sample"
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withHeaders(MockUtils.header)
.withStatusCode(NotFound.intValue)
.withBody(MockUtils.rawlsErrorReport(NotFound).toJson.compactPrint)
@@ -125,9 +158,13 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
.when(
request()
.withMethod("GET")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "invalid")))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "invalid")
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withHeaders(MockUtils.header)
.withStatusCode(NotFound.intValue)
.withBody(MockUtils.rawlsErrorReport(NotFound).toJson.compactPrint)
@@ -137,16 +174,19 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
.when(
request()
.withMethod("POST")
- .withPath(FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev", "valid") + "/delete"))
+ .withPath(
+ FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.entitiesPath.format("broad-dsde-dev",
+ "valid"
+ ) + "/delete"
+ )
+ )
.respond(
- callback().
- withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidEntityDeleteCallback")
+ callback().withCallbackClass("org.broadinstitute.dsde.firecloud.mock.ValidEntityDeleteCallback")
)
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
workspaceServer.stop()
- }
"EntityService" - {
@@ -176,7 +216,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling GET on valid entityQuery with params" - {
"OK response is returned" in {
- Get(validFireCloudEntityQuerySamplePath + "?page=1&pageSize=1") ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Get(validFireCloudEntityQuerySamplePath + "?page=1&pageSize=1") ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(OK)
}
}
@@ -184,7 +226,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling POST on valid copy entities" - {
"Created response is returned" in {
- Post(validFireCloudEntitiesCopyPath, validEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesCopyPath, validEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(Created)
}
}
@@ -192,7 +236,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling POST on invalid copy entities" - {
"NotFound response is returned" in {
- Post(validFireCloudEntitiesCopyPath, invalidEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesCopyPath, invalidEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(NotFound)
errorReportCheck("Rawls", NotFound)
}
@@ -219,7 +265,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling POST on copy entities in an unknown workspace" - {
"NotFound response is returned with an ErrorReport" in {
- Post(invalidFireCloudEntitiesCopyPath, validEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(invalidFireCloudEntitiesCopyPath, validEntityCopy) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(NotFound)
errorReportCheck("Rawls", NotFound)
}
@@ -228,7 +276,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling bulk entity delete with a valid payload" - {
"response is NoContent" in {
- Post(validFireCloudEntitiesBulkDeletePath, validEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesBulkDeletePath, validEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(NoContent)
}
}
@@ -236,7 +286,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling bulk entity delete with an invalid payload" - {
"BadRequest is returned" in {
- Post(validFireCloudEntitiesBulkDeletePath, invalidEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesBulkDeletePath, invalidEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(BadRequest)
}
}
@@ -244,7 +296,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling bulk entity delete with some missing entities" - {
"BadRequest is returned with an ErrorReport" in {
- Post(validFireCloudEntitiesBulkDeletePath, mixedFailEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesBulkDeletePath, mixedFailEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(BadRequest)
errorReportCheck("Rawls", BadRequest)
}
@@ -253,7 +307,9 @@ class EntityApiServiceSpec extends BaseServiceSpec with EntityApiService with Sp
"when calling bulk entity delete with all missing entities" - {
"BadRequest is returned with an ErrorReport" in {
- Post(validFireCloudEntitiesBulkDeletePath, allFailEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(entityRoutes) ~> check {
+ Post(validFireCloudEntitiesBulkDeletePath, allFailEntityDelete) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ entityRoutes
+ ) ~> check {
status should be(BadRequest)
errorReportCheck("Rawls", BadRequest)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiServiceSpec.scala
index a1c8d997d..a4a5ae67c 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/Ga4ghApiServiceSpec.scala
@@ -30,22 +30,24 @@ class Ga4ghApiServiceSpec extends BaseServiceSpec with Ga4ghApiService with Befo
// The following paths are currently unimplemented in Agora, but handled.
"/ga4gh/v1/tools/namespace:name/versions/1/dockerfile",
"/ga4gh/v1/tools/namespace:name/versions/1/WDL/descriptor/1",
- "/ga4gh/v1/tools/namespace:name/versions/1/WDL/tests")
+ "/ga4gh/v1/tools/namespace:name/versions/1/WDL/tests"
+ )
override def beforeAll(): Unit = {
toolRegistryServer = startClientAndServer(MockUtils.methodsServerPort)
toolPaths.map { path =>
- toolRegistryServer.when(request().withMethod(HttpMethods.GET.name).withPath(path))
+ toolRegistryServer
+ .when(request().withMethod(HttpMethods.GET.name).withPath(path))
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withStatusCode(OK.intValue)
)
}
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
toolRegistryServer.stop()
- }
"GA4GH API service" - {
"Tool Registry" - {
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ImportPermissionApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ImportPermissionApiServiceSpec.scala
index bf0327104..55c2650d2 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ImportPermissionApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ImportPermissionApiServiceSpec.scala
@@ -23,7 +23,7 @@ class ImportPermissionApiServiceSpec extends BaseServiceSpec with UserApiService
val testApp = app.copy(rawlsDAO = new ImportPermissionMockRawlsDAO)
- val userServiceConstructor:(UserInfo) => UserService = UserService.constructor(testApp)
+ val userServiceConstructor: (UserInfo) => UserService = UserService.constructor(testApp)
"UserService /api/profile/importstatus endpoint tests" - {
@@ -35,56 +35,72 @@ class ImportPermissionApiServiceSpec extends BaseServiceSpec with UserApiService
}
}
"should accept GET" in {
- Get(endpoint) ~> dummyUserIdHeaders("foo","noWorkspaces;noProjects") ~> userServiceRoutes ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("foo", "noWorkspaces;noProjects") ~> userServiceRoutes ~> check {
assert(handled)
}
}
"should return billingProject: true if user has at least one billing project" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","noWorkspaces;hasProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;hasProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].billingProject shouldBe true
}
}
"should return billingProject: false if user has no billing projects" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;noProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;noProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].billingProject shouldBe false
}
}
"should return billingProject: false if user has billing projects, but none that are ready" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;projectsNotReady") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;projectsNotReady") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].billingProject shouldBe false
}
}
"should return writableWorkspace: true if user has a writable workspace" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","hasWorkspaces;noProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "hasWorkspaces;noProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].writableWorkspace shouldBe true
}
}
"should return writableWorkspace: false if user has no workspaces" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","noWorkspaces;noProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "noWorkspaces;noProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].writableWorkspace shouldBe false
}
}
"should return writableWorkspace: false if user has workspaces, but none that are writable" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","onlyReadableWorkspaces;noProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "onlyReadableWorkspaces;noProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].writableWorkspace shouldBe false
}
}
"should return both writableWorkspace: true and billingProject: true if both conditions are satisfied" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","hasWorkspaces;hasProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "hasWorkspaces;hasProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].billingProject shouldBe true
responseAs[UserImportPermission].writableWorkspace shouldBe true
}
}
"should return both writableWorkspace: false and billingProject: false if both conditions failed" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","onlyReadableWorkspaces;projectsNotReady") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "onlyReadableWorkspaces;projectsNotReady") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(OK)
responseAs[UserImportPermission].billingProject shouldBe false
responseAs[UserImportPermission].writableWorkspace shouldBe false
@@ -92,16 +108,20 @@ class ImportPermissionApiServiceSpec extends BaseServiceSpec with UserApiService
}
"should propagate an error if the call to get workspaces fails" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","thisWillError;hasProjects") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "thisWillError;hasProjects") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(InternalServerError)
- val err:ErrorReport = responseAs[ErrorReport]
+ val err: ErrorReport = responseAs[ErrorReport]
err.message shouldBe "intentional exception for getWorkspaces catchall case"
}
}
"should propagate an error if the call to get billing projects fails" in {
- Get(endpoint) ~> dummyUserIdHeaders("userid","hasWorkspaces;thisWillError") ~> sealRoute(userServiceRoutes) ~> check {
+ Get(endpoint) ~> dummyUserIdHeaders("userid", "hasWorkspaces;thisWillError") ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
status should equal(InternalServerError)
- val err:ErrorReport = responseAs[ErrorReport]
+ val err: ErrorReport = responseAs[ErrorReport]
err.message shouldBe "intentional exception for getProjects catchall case"
}
}
@@ -111,44 +131,119 @@ class ImportPermissionApiServiceSpec extends BaseServiceSpec with UserApiService
class ImportPermissionMockRawlsDAO extends MockRawlsDAO {
- override def getProjects(implicit userToken: WithAccessToken): Future[Seq[Project.RawlsBillingProjectMembership]] = {
+ override def getProjects(implicit userToken: WithAccessToken): Future[Seq[Project.RawlsBillingProjectMembership]] =
parseTestToken(userToken)._2 match {
- case "hasProjects" => Future.successful(Seq(
- RawlsBillingProjectMembership(RawlsBillingProjectName("projectone"), ProjectRoles.User, CreationStatuses.Ready, None),
- RawlsBillingProjectMembership(RawlsBillingProjectName("projecttwo"), ProjectRoles.Owner, CreationStatuses.Creating, None)
- ))
- case "projectsNotReady" => Future.successful(Seq(
- RawlsBillingProjectMembership(RawlsBillingProjectName("projectone"), ProjectRoles.User, CreationStatuses.Creating, None),
- RawlsBillingProjectMembership(RawlsBillingProjectName("projecttwo"), ProjectRoles.Owner, CreationStatuses.Creating, None)
-
- ))
+ case "hasProjects" =>
+ Future.successful(
+ Seq(
+ RawlsBillingProjectMembership(RawlsBillingProjectName("projectone"),
+ ProjectRoles.User,
+ CreationStatuses.Ready,
+ None
+ ),
+ RawlsBillingProjectMembership(RawlsBillingProjectName("projecttwo"),
+ ProjectRoles.Owner,
+ CreationStatuses.Creating,
+ None
+ )
+ )
+ )
+ case "projectsNotReady" =>
+ Future.successful(
+ Seq(
+ RawlsBillingProjectMembership(RawlsBillingProjectName("projectone"),
+ ProjectRoles.User,
+ CreationStatuses.Creating,
+ None
+ ),
+ RawlsBillingProjectMembership(RawlsBillingProjectName("projecttwo"),
+ ProjectRoles.Owner,
+ CreationStatuses.Creating,
+ None
+ )
+ )
+ )
case "noProjects" => Future.successful(Seq.empty[RawlsBillingProjectMembership])
- case _ => Future.failed(new FireCloudException("intentional exception for getProjects catchall case"))
+ case _ => Future.failed(new FireCloudException("intentional exception for getProjects catchall case"))
}
- }
- override def getWorkspaces(implicit userInfo: WithAccessToken): Future[Seq[WorkspaceListResponse]] = {
+ override def getWorkspaces(implicit userInfo: WithAccessToken): Future[Seq[WorkspaceListResponse]] =
parseTestToken(userInfo)._1 match {
- case "hasWorkspaces" => Future.successful(Seq(
- WorkspaceListResponse(WorkspaceAccessLevels.ProjectOwner, Some(true), Some(true), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Read, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Owner, Some(true), Some(true), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.NoAccess, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false)
- ))
- case "onlyReadableWorkspaces" => Future.successful(Seq(
- WorkspaceListResponse(WorkspaceAccessLevels.Read, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Read, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.Read, Some(false), Some(false), publishedRawlsWorkspaceWithAttributes, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false),
- WorkspaceListResponse(WorkspaceAccessLevels.NoAccess, Some(false), Some(false), newWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), false)
- ))
+ case "hasWorkspaces" =>
+ Future.successful(
+ Seq(
+ WorkspaceListResponse(WorkspaceAccessLevels.ProjectOwner,
+ Some(true),
+ Some(true),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.Read,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(
+ WorkspaceAccessLevels.Owner,
+ Some(true),
+ Some(true),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.NoAccess,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ )
+ )
+ )
+ case "onlyReadableWorkspaces" =>
+ Future.successful(
+ Seq(
+ WorkspaceListResponse(WorkspaceAccessLevels.Read,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.Read,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(
+ WorkspaceAccessLevels.Read,
+ Some(false),
+ Some(false),
+ publishedRawlsWorkspaceWithAttributes,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ ),
+ WorkspaceListResponse(WorkspaceAccessLevels.NoAccess,
+ Some(false),
+ Some(false),
+ newWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ false
+ )
+ )
+ )
case "noWorkspaces" => Future.successful(Seq.empty[WorkspaceListResponse])
case _ => Future.failed(new FireCloudException("intentional exception for getWorkspaces catchall case"))
}
- }
// this is hacky, but the only argument to getProjects and getWorkspaces is the access token. Therefore,
// we need to encode our test criteria into a string, and we can do so using a delimiter.
- private def parseTestToken(userInfo: WithAccessToken): (String,String) = {
+ private def parseTestToken(userInfo: WithAccessToken): (String, String) = {
val tokenParts = userInfo.accessToken.token.split(";")
assert(tokenParts.length == 2)
(tokenParts(0), tokenParts(1))
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiServiceSpec.scala
index 24783e0d8..af5a60570 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/LibraryApiServiceSpec.scala
@@ -13,7 +13,12 @@ import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.{BaseServiceSpec, LibraryService, OntologyService}
import org.broadinstitute.dsde.rawls.model.Attributable.AttributeMap
-import org.broadinstitute.dsde.rawls.model.{AttributeFormat, AttributeName, AttributeString, PlainArrayAttributeListSerializer}
+import org.broadinstitute.dsde.rawls.model.{
+ AttributeFormat,
+ AttributeName,
+ AttributeString,
+ PlainArrayAttributeListSerializer
+}
import org.mockserver.integration.ClientAndServer
import org.mockserver.integration.ClientAndServer._
import org.scalatest.BeforeAndAfterEach
@@ -25,26 +30,29 @@ import scala.jdk.CollectionConverters._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext}
-
-class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
- with SamMockserverUtils with BeforeAndAfterEach with SprayJsonSupport {
+class LibraryApiServiceSpec
+ extends BaseServiceSpec
+ with LibraryApiService
+ with SamMockserverUtils
+ with BeforeAndAfterEach
+ with SprayJsonSupport {
def actorRefFactory = system
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
lazy val isCuratorPath = "/api/library/user/role/curator"
- private def publishedPath(ns:String="namespace", name:String="name") =
+ private def publishedPath(ns: String = "namespace", name: String = "name") =
"/api/library/%s/%s/published".format(ns, name)
private def setMetadataPath(ns: String = "republish", name: String = "name") =
"/api/library/%s/%s/metadata".format(ns, name)
private def setDiscoverableGroupsPath(ns: String = "discoverableGroups", name: String = "name") =
"/api/library/%s/%s/discoverableGroups".format(ns, name)
- private final val librarySearchPath = "/api/library/search"
- private final val librarySuggestPath = "/api/library/suggest"
- private final val libraryPopulateSuggestPath = "/api/library/populate/suggest/"
- private final val libraryGroupsPath = "/api/library/groups"
+ final private val librarySearchPath = "/api/library/search"
+ final private val librarySuggestPath = "/api/library/suggest"
+ final private val libraryPopulateSuggestPath = "/api/library/populate/suggest/"
+ final private val libraryGroupsPath = "/api/library/groups"
- private final val duosResearchPurposeQuery = "/duos/researchPurposeQuery"
+ final private val duosResearchPurposeQuery = "/duos/researchPurposeQuery"
val libraryServiceConstructor: (UserInfo) => LibraryService = LibraryService.constructor(app)
val ontologyServiceConstructor: () => OntologyService = OntologyService.constructor(app)
@@ -104,17 +112,14 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
returnEnabledUser(mockSamServer)
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
mockSamServer.stop()
- }
- override def beforeEach(): Unit = {
+ override def beforeEach(): Unit =
searchDao.reset()
- }
- override def afterEach(): Unit = {
+ override def afterEach(): Unit =
searchDao.reset()
- }
"LibraryService" - {
@@ -123,11 +128,12 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
"when calling the isCurator endpoint" - {
"PUT, POST, DELETE on /api/library/user/role/curator" - {
"should receive a MethodNotAllowed" in {
- List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(isCuratorPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(isCuratorPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -135,56 +141,72 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
"when saving metadata" - {
"complete data can be saved for an unpublished workspace" in {
- val content = HttpEntity(ContentTypes.`application/json`, testLibraryMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter"), content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
- status should equal(OK)
- }
+ val content = HttpEntity(ContentTypes.`application/json`, testLibraryMetadata)
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter"), content) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
+ status should equal(OK)
}
+ }
"incomplete data can be saved for an unpublished workspace" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter"), content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter"), content) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
}
}
"complete data can be saved for a published workspace" in {
val content = HttpEntity(ContentTypes.`application/json`, testLibraryMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter"), content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter"), content) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
}
}
"cannot save incomplete data if already published dataset" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter"), content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter"), content) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(BadRequest)
}
}
"validates for unpublished dataset if user specifies validate=true" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter") + "?validate=true", content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter") + "?validate=true",
+ content
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
status should equal(BadRequest)
}
}
"validation defaults to false if user specifies a non-boolean value" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter") + "?validate=cat", content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("unpublishedwriter") + "?validate=cat",
+ content
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
}
}
"always validates for published workspace even if user specifies validate=false" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter") + "?validate=false", content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter") + "?validate=false",
+ content
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
status should equal(BadRequest)
}
}
"always validates for published workspace even if user specifies a non-boolean value" in {
val content = HttpEntity(ContentTypes.`application/json`, incompleteMetadata)
- new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter") + "?validate=cat", content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.PUT)(setMetadataPath("publishedwriter") + "?validate=cat",
+ content
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
status should equal(BadRequest)
}
}
@@ -200,8 +222,8 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
status should equal(OK)
val meta = responseAs[AttributeMap]
// see MockRawlsDAO.publishedRawlsWorkspaceWithAttributes
- val expected:AttributeMap = Map( AttributeName("library", "projectName") -> AttributeString("testing") )
- assertResult(expected) {meta}
+ val expected: AttributeMap = Map(AttributeName("library", "projectName") -> AttributeString("testing"))
+ assertResult(expected)(meta)
}
}
"complete data can be retrieved for a valid workspace" in {
@@ -209,7 +231,7 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
status should equal(OK)
val meta = responseAs[AttributeMap]
val expected = new MockRawlsDAO().unpublishedRawlsWorkspaceLibraryValid.attributes.get
- assertResult (expected) {meta}
+ assertResult(expected)(meta)
}
}
"will return empty set if no metadata exists" in {
@@ -224,19 +246,25 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
"when calling publish" - {
"POST on " + publishedPath() - {
"should return No Content for already published workspace " in {
- new RequestBuilder(HttpMethods.POST)(publishedPath("publishedwriter")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.POST)(publishedPath("publishedwriter")) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(NoContent)
}
}
"should return OK and invoke indexDocument for unpublished workspace with valid dataset" in {
- new RequestBuilder(HttpMethods.POST)(publishedPath("libraryValid")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.POST)(publishedPath("libraryValid")) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
assert(this.searchDao.indexDocumentInvoked.get(), "indexDocument should have been invoked")
assert(!this.searchDao.deleteDocumentInvoked.get(), "deleteDocument should not have been invoked")
}
}
"should return BadRequest and not invoke indexDocument for unpublished workspace with invalid dataset" in {
- new RequestBuilder(HttpMethods.POST)(publishedPath()) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.POST)(publishedPath()) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(BadRequest)
assert(!this.searchDao.indexDocumentInvoked.get(), "indexDocument should not have been invoked")
assert(!this.searchDao.deleteDocumentInvoked.get(), "deleteDocument should not have been invoked")
@@ -245,12 +273,16 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
}
"DELETE on " + publishedPath() - {
"should be No Content for unpublished workspace" in {
- new RequestBuilder(HttpMethods.DELETE)(publishedPath("unpublishedwriter")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.DELETE)(publishedPath("unpublishedwriter")) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(NoContent)
}
}
"as return OK and invoke deleteDocument for published workspace" in {
- new RequestBuilder(HttpMethods.DELETE)(publishedPath("publishedowner")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.DELETE)(publishedPath("publishedowner")) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
assert(this.searchDao.deleteDocumentInvoked.get(), "deleteDocument should have been invoked")
assert(!this.searchDao.indexDocumentInvoked.get(), "indexDocument should not have been invoked")
@@ -262,7 +294,9 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
"POST with no searchterm on " + librarySearchPath - {
"should retrieve all datasets" in {
val content = HttpEntity(ContentTypes.`application/json`, "{}")
- new RequestBuilder(HttpMethods.POST)(librarySearchPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.POST)(librarySearchPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
assert(this.searchDao.findDocumentsInvoked.get(), "findDocuments should have been invoked")
}
@@ -270,8 +304,11 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
}
"POST on " + librarySearchPath - {
"should search for datasets" in {
- val content = HttpEntity(ContentTypes.`application/json`, "{\"searchTerm\":\"test\", \"from\":0, \"size\":10}")
- new RequestBuilder(HttpMethods.POST)(librarySearchPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ val content =
+ HttpEntity(ContentTypes.`application/json`, "{\"searchTerm\":\"test\", \"from\":0, \"size\":10}")
+ new RequestBuilder(HttpMethods.POST)(librarySearchPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
assert(this.searchDao.findDocumentsInvoked.get(), "findDocuments should have been invoked")
val respdata = Await.result(Unmarshal(response).to[LibrarySearchResponse], Duration.Inf)
@@ -282,8 +319,11 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
}
"POST on " + librarySuggestPath - {
"should return autcomplete suggestions" in {
- val content = HttpEntity(ContentTypes.`application/json`, "{\"searchTerm\":\"test\", \"from\":0, \"size\":10}")
- new RequestBuilder(HttpMethods.POST)(librarySuggestPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ val content =
+ HttpEntity(ContentTypes.`application/json`, "{\"searchTerm\":\"test\", \"from\":0, \"size\":10}")
+ new RequestBuilder(HttpMethods.POST)(librarySuggestPath, content) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
assert(this.searchDao.autocompleteInvoked.get(), "autocompleteInvoked should have been invoked")
val respdata = Await.result(Unmarshal(response).to[LibrarySearchResponse], Duration.Inf)
@@ -294,7 +334,9 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
}
"GET on " + libraryPopulateSuggestPath - {
"should return autcomplete suggestions" in {
- new RequestBuilder(HttpMethods.GET)(libraryPopulateSuggestPath + "library:datasetOwner?q=aha") ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.GET)(
+ libraryPopulateSuggestPath + "library:datasetOwner?q=aha"
+ ) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
assert(this.searchDao.populateSuggestInvoked.get(), "populateSuggestInvoked should have been invoked")
val respdata = Await.result(Unmarshal(response).to[String], Duration.Inf)
@@ -305,10 +347,12 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
}
"GET on " + libraryGroupsPath - {
"should return the all broad users group" in {
- new RequestBuilder(HttpMethods.GET)(libraryGroupsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ new RequestBuilder(HttpMethods.GET)(libraryGroupsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
val respdata = Await.result(Unmarshal(response).to[Seq[String]], Duration.Inf)
- assert(respdata.toSet == FireCloudConfig.ElasticSearch.discoverGroupNames.asScala.toSet)
+ assert(respdata.toSet == FireCloudConfig.ElasticSearch.discoverGroupNames.asScala.toSet)
}
}
}
@@ -316,15 +360,19 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
"when working with Library discoverable groups" - {
"should return the right groups on get" in {
- Get(setDiscoverableGroupsPath("libraryValid","unittest")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ Get(setDiscoverableGroupsPath("libraryValid", "unittest")) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
- assertResult(List("group1","group2")) {responseAs[List[String]]}
+ assertResult(List("group1", "group2"))(responseAs[List[String]])
}
}
"should return an empty array if no groups are assigned" in {
- Get(setDiscoverableGroupsPath("publishedwriter","unittest")) ~> dummyUserIdHeaders("1234") ~> sealRoute(libraryRoutes) ~> check {
+ Get(setDiscoverableGroupsPath("publishedwriter", "unittest")) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ libraryRoutes
+ ) ~> check {
status should equal(OK)
- assertResult(List.empty[String]) {responseAs[List[String]]}
+ assertResult(List.empty[String])(responseAs[List[String]])
}
}
}
@@ -342,7 +390,9 @@ class LibraryApiServiceSpec extends BaseServiceSpec with LibraryApiService
val request = ResearchPurposeRequest.empty.copy(DS = Some(Seq(s"${doidPrefix}1234", s"${doidPrefix}5678")))
new RequestBuilder(HttpMethods.POST)(duosResearchPurposeQuery, request) ~> sealRoute(libraryRoutes) ~> check {
status should equal(OK)
- val diseaseIds = responseAs[JsObject].extract[Int](Symbol("bool") / Symbol("should") / * / Symbol("term") / "structuredUseRestriction.DS" / Symbol("value"))
+ val diseaseIds = responseAs[JsObject].extract[Int](
+ Symbol("bool") / Symbol("should") / * / Symbol("term") / "structuredUseRestriction.DS" / Symbol("value")
+ )
diseaseIds should equal(Seq(1234, 5678))
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiServiceSpec.scala
index 276074100..92c331e5d 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ManagedGroupApiServiceSpec.scala
@@ -16,7 +16,7 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
- val managedGroupServiceConstructor:(WithAccessToken) => ManagedGroupService = ManagedGroupService.constructor(app)
+ val managedGroupServiceConstructor: (WithAccessToken) => ManagedGroupService = ManagedGroupService.constructor(app)
val uniqueId = "normal-user"
@@ -26,8 +26,8 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Get("/api/groups") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(OK)
- }
+ status should equal(OK)
+ }
}
}
@@ -35,13 +35,13 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"Created response is returned" in {
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
}
}
@@ -49,13 +49,13 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
Get("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(OK)
- }
+ status should equal(OK)
+ }
}
}
@@ -63,13 +63,13 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
Delete("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(NoContent)
- }
+ status should equal(NoContent)
+ }
}
}
@@ -77,13 +77,13 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
Put("/api/groups/example-group/admin/test@test.test") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(NoContent)
- }
+ status should equal(NoContent)
+ }
}
}
@@ -91,13 +91,13 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Post("/api/groups/example-group") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(Created)
- }
+ status should equal(Created)
+ }
Delete("/api/groups/example-group/admin/test@test.test") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(NoContent)
- }
+ status should equal(NoContent)
+ }
}
}
@@ -105,8 +105,8 @@ class ManagedGroupApiServiceSpec extends BaseServiceSpec with ManagedGroupApiSer
"OK response is returned" in {
Post("/api/groups/example-group/requestAccess") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(managedGroupServiceRoutes) ~> check {
- status should equal(NoContent)
- }
+ status should equal(NoContent)
+ }
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiServiceSpec.scala
index 1bb9c863e..b390df7be 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodConfigurationApiServiceSpec.scala
@@ -8,7 +8,14 @@ import org.broadinstitute.dsde.firecloud.mock.MockUtils
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.{CopyConfigurationIngest, PublishConfigurationIngest}
import org.broadinstitute.dsde.firecloud.service.ServiceSpec
-import org.broadinstitute.dsde.rawls.model.{GoogleProjectId, GoogleProjectNumber, RawlsBillingAccountName, WorkspaceDetails, WorkspaceState, WorkspaceVersions}
+import org.broadinstitute.dsde.rawls.model.{
+ GoogleProjectId,
+ GoogleProjectNumber,
+ RawlsBillingAccountName,
+ WorkspaceDetails,
+ WorkspaceState,
+ WorkspaceVersions
+}
import org.joda.time.DateTime
import org.mockserver.integration.ClientAndServer
import org.mockserver.integration.ClientAndServer._
@@ -21,7 +28,7 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
var workspaceServer: ClientAndServer = _
- private final val mockWorkspace = WorkspaceDetails(
+ final private val mockWorkspace = WorkspaceDetails(
"namespace",
"name",
"workspace_id",
@@ -30,9 +37,9 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authdomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authdomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -49,66 +56,91 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
workspaceServer = startClientAndServer(MockUtils.workspaceServerPort)
List(MethodConfigurationApiService.remoteTemplatePath, MethodConfigurationApiService.remoteInputsOutputsPath) map {
path =>
- workspaceServer.when(
- request().withMethod("POST").withPath(path))
- .respond(org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue))
- }
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
workspaceServer
- .when(request().withMethod(method.name).withPath(
- MethodConfigurationApiService.remoteMethodConfigPath(
- mockWorkspace.namespace,
- mockWorkspace.name,
- mockWorkspace.namespace,
- mockWorkspace.name)))
+ .when(request().withMethod("POST").withPath(path))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
}
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ workspaceServer
+ .when(
+ request()
+ .withMethod(method.name)
+ .withPath(
+ MethodConfigurationApiService.remoteMethodConfigPath(mockWorkspace.namespace,
+ mockWorkspace.name,
+ mockWorkspace.namespace,
+ mockWorkspace.name
+ )
+ )
+ )
+ .respond(
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
+ )
+ }
workspaceServer
- .when(request().withMethod("POST").withPath(
- MethodConfigurationApiService.remoteMethodConfigRenamePath(
- mockWorkspace.namespace,
- mockWorkspace.name,
- mockWorkspace.namespace,
- mockWorkspace.name)))
+ .when(
+ request()
+ .withMethod("POST")
+ .withPath(
+ MethodConfigurationApiService.remoteMethodConfigRenamePath(mockWorkspace.namespace,
+ mockWorkspace.name,
+ mockWorkspace.namespace,
+ mockWorkspace.name
+ )
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
workspaceServer
- .when(request().withMethod("GET").withPath(
- MethodConfigurationApiService.remoteMethodConfigValidatePath(
- mockWorkspace.namespace,
- mockWorkspace.name,
- mockWorkspace.namespace,
- mockWorkspace.name)))
+ .when(
+ request()
+ .withMethod("GET")
+ .withPath(
+ MethodConfigurationApiService.remoteMethodConfigValidatePath(mockWorkspace.namespace,
+ mockWorkspace.name,
+ mockWorkspace.namespace,
+ mockWorkspace.name
+ )
+ )
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
workspaceServer
- .when(request().withMethod("POST").withPath(
- MethodConfigurationApiService.remoteCopyFromMethodRepoConfigPath))
+ .when(request().withMethod("POST").withPath(MethodConfigurationApiService.remoteCopyFromMethodRepoConfigPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(Created.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(Created.intValue)
)
workspaceServer
- .when(request().withMethod("POST").withPath(
- MethodConfigurationApiService.remoteCopyToMethodRepoConfigPath))
+ .when(request().withMethod("POST").withPath(MethodConfigurationApiService.remoteCopyToMethodRepoConfigPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(Created.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(Created.intValue)
)
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
workspaceServer.stop()
- }
"MethodConfigurationService" - {
@@ -118,41 +150,42 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
val localInputsOutputsPath = "/inputsOutputs"
"when calling the passthrough service" - {
- List(localTemplatePath, localInputsOutputsPath) foreach {
- path =>
- s"POST on $path" - {
- "should not receive a MethodNotAllowed" in {
- Post(path) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status shouldNot equal(MethodNotAllowed)
- }
+ List(localTemplatePath, localInputsOutputsPath) foreach { path =>
+ s"POST on $path" - {
+ "should not receive a MethodNotAllowed" in {
+ Post(path) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
+ status shouldNot equal(MethodNotAllowed)
}
}
+ }
- s"GET, PUT, DELETE on $path" - {
- "should receive a MethodNotAllowed" in {
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.DELETE) foreach {
- method =>
- new RequestBuilder(method)(path) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ s"GET, PUT, DELETE on $path" - {
+ "should receive a MethodNotAllowed" in {
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.DELETE) foreach { method =>
+ new RequestBuilder(method)(path) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
}
}
}
+ }
}
- val localMethodConfigPath = "/workspaces/%s/%s/method_configs/%s/%s".format(
- mockWorkspace.namespace,
- mockWorkspace.name,
- mockWorkspace.namespace,
- mockWorkspace.name)
+ val localMethodConfigPath = "/workspaces/%s/%s/method_configs/%s/%s".format(mockWorkspace.namespace,
+ mockWorkspace.name,
+ mockWorkspace.namespace,
+ mockWorkspace.name
+ )
s"GET, PUT, POST, and DELETE on $localMethodConfigPath " - {
"should not receive a MethodNotAllowed" in {
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(localMethodConfigPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status shouldNot equal(MethodNotAllowed)
- }
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(localMethodConfigPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
+ status shouldNot equal(MethodNotAllowed)
+ }
}
}
}
@@ -169,7 +202,9 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"POST on $localMethodConfigRenamePath " - {
"should not receive a MethodNotAllowed" in {
- Post(localMethodConfigRenamePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
+ Post(localMethodConfigRenamePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
status shouldNot equal(MethodNotAllowed)
}
}
@@ -177,11 +212,12 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"GET, PATCH, PUT, DELETE on $localMethodConfigRenamePath " - {
"should receive a MethodNotAllowed" in {
- List(HttpMethods.GET, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(localMethodConfigRenamePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.GET, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(localMethodConfigRenamePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -190,7 +226,9 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"GET on $localMethodConfigValidatePath " - {
"should not receive a MethodNotAllowed" in {
- Get(localMethodConfigValidatePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
+ Get(localMethodConfigValidatePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
status shouldNot equal(MethodNotAllowed)
}
}
@@ -198,11 +236,12 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"PUT, POST, PATCH, DELETE on $localMethodConfigValidatePath " - {
"should receive a MethodNotAllowed" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.POST, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(localMethodConfigValidatePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.POST, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(localMethodConfigValidatePath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -224,7 +263,9 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"when calling POST on the $validCopyFromRepoUrl path with valid workspace and configuration data" - {
"Created response is returned" in {
- Post(validCopyFromRepoUrl, configurationCopyFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
+ Post(validCopyFromRepoUrl, configurationCopyFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
status should equal(Created)
}
}
@@ -232,11 +273,12 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"GET, PUT, PATCH, DELETE on $validCopyFromRepoUrl " - {
"should receive a MethodNotAllowed" in {
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(validCopyFromRepoUrl, configurationCopyFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(validCopyFromRepoUrl, configurationCopyFormData) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(methodConfigurationRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -256,7 +298,9 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"when calling POST on the $validCopyToRepoUrl path with valid workspace and configuration data" - {
"Created response is returned" in {
- Post(validCopyToRepoUrl, configurationPublishFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
+ Post(validCopyToRepoUrl, configurationPublishFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodConfigurationRoutes
+ ) ~> check {
status should equal(Created)
}
}
@@ -264,11 +308,12 @@ class MethodConfigurationApiServiceSpec extends ServiceSpec with MethodConfigura
s"GET, PUT, PATCH, DELETE on $validCopyToRepoUrl " - {
"should receive a MethodNotAllowed" in {
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(validCopyToRepoUrl, configurationPublishFormData) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodConfigurationRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(validCopyToRepoUrl, configurationPublishFormData) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(methodConfigurationRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceACLSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceACLSpec.scala
index 94a87e5c0..4deb443b5 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceACLSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceACLSpec.scala
@@ -15,7 +15,6 @@ import spray.json._
import scala.concurrent.ExecutionContext
-
class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService with SprayJsonSupport {
def actorRefFactory = system
@@ -29,30 +28,30 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// we have to manually create this faulty json; we can't create it via FireCloudPermission objects, because
// they don't allow faulty values!
- val sourceBadRole = """[{"user":"foo@broadinstitute.org","role":"OWNER"},{"user":"bar@broadinstitute.org","role":"UNKNOWN"}]"""
+ val sourceBadRole =
+ """[{"user":"foo@broadinstitute.org","role":"OWNER"},{"user":"bar@broadinstitute.org","role":"UNKNOWN"}]"""
val jsonBadRole = sourceBadRole.parseJson.asInstanceOf[JsArray]
val sourceBadUser = """[{"user":"foo@broadinstitute.org","role":"OWNER"},{"user":"","role":"READER"}]"""
val jsonBadUser = sourceBadUser.parseJson.asInstanceOf[JsArray]
- /* ACL endpoints.
+ /* ACL endpoints.
We unit test the individual translations, so we only need to augment the unit tests here. We need to test:
- * handling of lists (unit tests work with single objects)
- * configs vs. methods paths are not mixed up (we use a single code path for both)
- * rejections when invalid data POSTed
- * DELETE/PUT methods are rejected (we supersede those with POST, and should block passthroughs)
- */
+ * handling of lists (unit tests work with single objects)
+ * configs vs. methods paths are not mixed up (we use a single code path for both)
+ * rejections when invalid data POSTed
+ * DELETE/PUT methods are rejected (we supersede those with POST, and should block passthroughs)
+ */
"MethodsServiceACLs" - {
"when testing DELETE, PUT methods on the permissions paths" - {
"MethodNotAllowed is returned" in {
- List(HttpMethods.DELETE, HttpMethods.PUT) map {
- method =>
- new RequestBuilder(method)("/" + localMethodPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
- new RequestBuilder(method)("/" + localConfigPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.DELETE, HttpMethods.PUT) map { method =>
+ new RequestBuilder(method)("/" + localMethodPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
+ new RequestBuilder(method)("/" + localConfigPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -60,28 +59,36 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// BAD INPUTS
"when posting bad roles to methods" - {
"BadRequest is returned" in {
- Post("/" + localMethodPermissionsPath, jsonBadRole) ~> dummyAuthHeaders ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localMethodPermissionsPath, jsonBadRole) ~> dummyAuthHeaders ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
}
"when posting bad roles to configs" - {
"BadRequest is returned" in {
- Post("/" + localConfigPermissionsPath, jsonBadRole) ~> dummyAuthHeaders ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localConfigPermissionsPath, jsonBadRole) ~> dummyAuthHeaders ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
}
"when posting bad users to methods" - {
"BadRequest is returned" in {
- Post("/" + localMethodPermissionsPath, jsonBadUser) ~> dummyAuthHeaders ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localMethodPermissionsPath, jsonBadUser) ~> dummyAuthHeaders ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
}
"when posting bad users to configs" - {
"BadRequest is returned" in {
- Post("/" + localConfigPermissionsPath, jsonBadUser) ~> dummyAuthHeaders ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localConfigPermissionsPath, jsonBadUser) ~> dummyAuthHeaders ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
@@ -91,7 +98,9 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// configuration endpoints return the mock data in the proper order
"when retrieving ACLs from configs" - {
"the entire list is successfully translated" in {
- Get("/" + localConfigsPath + MockAgoraACLData.standardPermsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Get("/" + localConfigsPath + MockAgoraACLData.standardPermsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(OK)
var perms = responseAs[List[FireCloudPermission]]
perms shouldBe standardFC
@@ -101,7 +110,9 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// methods endpoints return the mock data in reverse order - this way we can differentiate methods vs. configs
"when retrieving ACLs from methods" - {
"the entire (reversed) list is successfully translated" in {
- Get("/" + localMethodsPath + MockAgoraACLData.standardPermsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Get("/" + localMethodsPath + MockAgoraACLData.standardPermsPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(OK)
var perms = responseAs[List[FireCloudPermission]]
perms shouldBe standardFC.reverse
@@ -112,14 +123,18 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// AGORA RETURNS FAULTY DATA
"when retrieving bad Agora data from configs" - {
"InternalServerError is returned" in {
- Get("/" + localConfigsPath + MockAgoraACLData.withEdgeCasesPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Get("/" + localConfigsPath + MockAgoraACLData.withEdgeCasesPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(InternalServerError)
}
}
}
"when retrieving bad Agora data from methods" - {
"InternalServerError is returned" in {
- Get("/" + localMethodsPath + MockAgoraACLData.withEdgeCasesPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Get("/" + localMethodsPath + MockAgoraACLData.withEdgeCasesPath) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(InternalServerError)
}
}
@@ -129,7 +144,9 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// configs endpoint returns good data from Agora on post
"when posting good data to configs, expecting a good response" - {
"a good response is returned" in {
- Post("/" + localConfigsPath + MockAgoraACLData.standardPermsPath, standardFC) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localConfigsPath + MockAgoraACLData.standardPermsPath, standardFC) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(methodsApiServiceRoutes) ~> check {
status should equal(OK)
var perms = responseAs[List[FireCloudPermission]]
perms shouldBe standardFC
@@ -139,7 +156,9 @@ class MethodsApiServiceACLSpec extends BaseServiceSpec with MethodsApiService wi
// methods endpoint returns faulty data from Agora on post
"when posting good data to methods, expecting an invalid response" - {
"an invalid response is returned and we throw an error" in {
- Post("/" + localMethodsPath + MockAgoraACLData.standardPermsPath, standardFC) ~> dummyUserIdHeaders("1234") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Post("/" + localMethodsPath + MockAgoraACLData.standardPermsPath, standardFC) ~> dummyUserIdHeaders(
+ "1234"
+ ) ~> sealRoute(methodsApiServiceRoutes) ~> check {
status should equal(InternalServerError)
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceMultiACLSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceMultiACLSpec.scala
index 05d81bf0e..7a968a535 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceMultiACLSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceMultiACLSpec.scala
@@ -14,8 +14,11 @@ import spray.json._
import scala.concurrent.ExecutionContext
-
-class MethodsApiServiceMultiACLSpec extends BaseServiceSpec with ServiceSpec with MethodsApiService with SprayJsonSupport {
+class MethodsApiServiceMultiACLSpec
+ extends BaseServiceSpec
+ with ServiceSpec
+ with MethodsApiService
+ with SprayJsonSupport {
def actorRefFactory = system
@@ -31,11 +34,10 @@ class MethodsApiServiceMultiACLSpec extends BaseServiceSpec with ServiceSpec wit
"Methods Repository multi-ACL upsert endpoint" - {
"when testing DELETE, GET, POST methods on the multi-permissions path" - {
"NotFound is returned" in {
- List(HttpMethods.DELETE, HttpMethods.GET, HttpMethods.POST) foreach {
- method =>
- new RequestBuilder(method)(localMethodPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.DELETE, HttpMethods.GET, HttpMethods.POST) foreach { method =>
+ new RequestBuilder(method)(localMethodPermissionsPath) ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -43,10 +45,12 @@ class MethodsApiServiceMultiACLSpec extends BaseServiceSpec with ServiceSpec wit
"when sending valid input" - {
"returns OK and translates responses" in {
val payload = Seq(
- MethodAclPair(MethodRepoMethod("ns1","n1",1), Seq(FireCloudPermission("user1@example.com","OWNER"))),
- MethodAclPair(MethodRepoMethod("ns2","n2",2), Seq(FireCloudPermission("user2@example.com","READER")))
+ MethodAclPair(MethodRepoMethod("ns1", "n1", 1), Seq(FireCloudPermission("user1@example.com", "OWNER"))),
+ MethodAclPair(MethodRepoMethod("ns2", "n2", 2), Seq(FireCloudPermission("user2@example.com", "READER")))
)
- Put(localMethodPermissionsPath, payload) ~> dummyUserIdHeaders("MethodsApiServiceMultiACLSpec") ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Put(localMethodPermissionsPath, payload) ~> dummyUserIdHeaders("MethodsApiServiceMultiACLSpec") ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(OK)
val resp = responseAs[Seq[MethodAclPair]]
@@ -55,12 +59,13 @@ class MethodsApiServiceMultiACLSpec extends BaseServiceSpec with ServiceSpec wit
}
}
-
// BAD INPUTS
"when posting malformed data" - {
"BadRequest is returned" in {
// endpoint expects a JsArray; send it a JsObject and expect BadRequest.
- Put(localMethodPermissionsPath, JsObject(Map("foo"->JsString("bar")))) ~> dummyAuthHeaders ~> sealRoute(methodsApiServiceRoutes) ~> check {
+ Put(localMethodPermissionsPath, JsObject(Map("foo" -> JsString("bar")))) ~> dummyAuthHeaders ~> sealRoute(
+ methodsApiServiceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceSpec.scala
index d3a5b8d46..cb1ca6c73 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/MethodsApiServiceSpec.scala
@@ -21,7 +21,7 @@ import scala.concurrent.ExecutionContext
final class MethodsApiServiceSpec extends BaseServiceSpec with ServiceSpec with MethodsApiService {
- def actorRefFactory:ActorSystem = system
+ def actorRefFactory: ActorSystem = system
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
@@ -40,20 +40,32 @@ final class MethodsApiServiceSpec extends BaseServiceSpec with ServiceSpec with
NB: we don't test the permissions endpoints here, because they are not passthroughs;
those are tested elsewhere
- */
+ */
val testCases = Seq(
- Api("/configurations", GET, "/api/v1/configurations", allowQueryParams=true),
- Api("/configurations", POST, "/api/v1/configurations", allowQueryParams=false),
- Api("/configurations/namespace/name/1", GET, "/api/v1/configurations/namespace/name/1", allowQueryParams=true),
- Api("/configurations/namespace/name/1", DELETE, "/api/v1/configurations/namespace/name/1", allowQueryParams=false),
- Api("/methods", GET, "/api/v1/methods", allowQueryParams=true),
- Api("/methods", POST, "/api/v1/methods", allowQueryParams=false),
- Api("/methods/namespace/name/1", GET, "/api/v1/methods/namespace/name/1", allowQueryParams=true),
- Api("/methods/namespace/name/1", DELETE, "/api/v1/methods/namespace/name/1", allowQueryParams=false),
- Api("/methods/namespace/name/1", POST, "/api/v1/methods/namespace/name/1", allowQueryParams=true),
- Api("/methods/namespace/name/1/configurations", GET, "/api/v1/methods/namespace/name/1/configurations", allowQueryParams=false),
- Api("/methods/definitions", GET, "/api/v1/methods/definitions", allowQueryParams=false),
- Api("/methods/namespace/name/configurations", GET, "/api/v1/methods/namespace/name/configurations", allowQueryParams=false)
+ Api("/configurations", GET, "/api/v1/configurations", allowQueryParams = true),
+ Api("/configurations", POST, "/api/v1/configurations", allowQueryParams = false),
+ Api("/configurations/namespace/name/1", GET, "/api/v1/configurations/namespace/name/1", allowQueryParams = true),
+ Api("/configurations/namespace/name/1",
+ DELETE,
+ "/api/v1/configurations/namespace/name/1",
+ allowQueryParams = false
+ ),
+ Api("/methods", GET, "/api/v1/methods", allowQueryParams = true),
+ Api("/methods", POST, "/api/v1/methods", allowQueryParams = false),
+ Api("/methods/namespace/name/1", GET, "/api/v1/methods/namespace/name/1", allowQueryParams = true),
+ Api("/methods/namespace/name/1", DELETE, "/api/v1/methods/namespace/name/1", allowQueryParams = false),
+ Api("/methods/namespace/name/1", POST, "/api/v1/methods/namespace/name/1", allowQueryParams = true),
+ Api("/methods/namespace/name/1/configurations",
+ GET,
+ "/api/v1/methods/namespace/name/1/configurations",
+ allowQueryParams = false
+ ),
+ Api("/methods/definitions", GET, "/api/v1/methods/definitions", allowQueryParams = false),
+ Api("/methods/namespace/name/configurations",
+ GET,
+ "/api/v1/methods/namespace/name/configurations",
+ allowQueryParams = false
+ )
)
/*
@@ -81,13 +93,14 @@ final class MethodsApiServiceSpec extends BaseServiceSpec with ServiceSpec with
testCases foreach { api =>
methodsServer
.when(request().withMethod(api.verb.name).withPath(api.remotePath))
- .respond(callback().withCallbackClass("org.broadinstitute.dsde.firecloud.webservice.MethodsApiServiceSpecCallback"))
+ .respond(
+ callback().withCallbackClass("org.broadinstitute.dsde.firecloud.webservice.MethodsApiServiceSpecCallback")
+ )
}
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
methodsServer.stop()
- }
// tests
"MethodsApiService uses of passthrough directive" - {
@@ -130,13 +143,14 @@ final class MethodsApiServiceSpec extends BaseServiceSpec with ServiceSpec with
final class MethodsApiServiceSpecCallback extends ExpectationResponseCallback {
override def handle(httpRequest: HttpRequest): HttpResponse = {
- val method:String = httpRequest.getMethod.getValue
- val path:String = httpRequest.getPath.getValue
- val hasParams:Boolean = !httpRequest.getQueryStringParameterList.isEmpty
+ val method: String = httpRequest.getMethod.getValue
+ val path: String = httpRequest.getPath.getValue
+ val hasParams: Boolean = !httpRequest.getQueryStringParameterList.isEmpty
val content = s"$method $path $hasParams"
- val resp = response().withHeaders(MockUtils.header).withStatusCode(NonAuthoritativeInformation.intValue).withBody(content)
+ val resp =
+ response().withHeaders(MockUtils.header).withStatusCode(NonAuthoritativeInformation.intValue).withBody(content)
resp
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiServiceSpec.scala
index ecdc3046b..17ab38e06 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NamespaceApiServiceSpec.scala
@@ -26,40 +26,38 @@ class NamespaceApiServiceSpec extends BaseServiceSpec with NamespaceApiService {
"when calling GET on a namespace permissions path" - {
"a valid list of FireCloud permissions is returned" in {
- urls map {
- url =>
- Get(url) ~> dummyUserIdHeaders("1234") ~> sealRoute(namespaceRoutes) ~> check {
- status should equal(OK)
- val permissions = responseAs[List[FireCloudPermission]]
- permissions should be (fcPermissions)
- }
+ urls map { url =>
+ Get(url) ~> dummyUserIdHeaders("1234") ~> sealRoute(namespaceRoutes) ~> check {
+ status should equal(OK)
+ val permissions = responseAs[List[FireCloudPermission]]
+ permissions should be(fcPermissions)
+ }
}
}
}
"when calling POST on a namespace permissions path" - {
"a valid FireCloud permission is returned" in {
- urls map {
- url =>
- Post(url, fcPermissions) ~> dummyUserIdHeaders("1234") ~> sealRoute(namespaceRoutes) ~> check {
- status should equal(OK)
- val permissions = responseAs[List[FireCloudPermission]]
- permissions should be (fcPermissions)
- }
+ urls map { url =>
+ Post(url, fcPermissions) ~> dummyUserIdHeaders("1234") ~> sealRoute(namespaceRoutes) ~> check {
+ status should equal(OK)
+ val permissions = responseAs[List[FireCloudPermission]]
+ permissions should be(fcPermissions)
+ }
}
}
}
"when calling PUT or DELETE on a namespace permissions path" - {
"a Method Not Allowed response is returned" in {
- urls map {
- url =>
- List(HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(url, fcPermissions) ~> dummyUserIdHeaders("1234") ~> sealRoute(namespaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ urls map { url =>
+ List(HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(url, fcPermissions) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ namespaceRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
}
+ }
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiServiceSpec.scala
index ae8be3e3b..65f5b49a5 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiServiceSpec.scala
@@ -33,31 +33,68 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
val targetDbGaPAuthorized = FireCloudConfig.Nih.whitelists.filter(_.name.equals("TARGET")).head.groupToSync
// These tokens were encoded using the private key that pairs with the public key in MockShibbolethDAO
- //JWT for NIH username "firecloud-dev"
- val firecloudDevJwt = JWTWrapper("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJmaXJlY2xvdWQtZGV2IiwiaWF0IjoxNjE0ODc3MTk3MDB9.k2HVt74OedfgP_bVHSz6U-1c25_XRMw2v8YtuiPHWZUPdYdXR8qZRzYq9YIUI1wbWtr6M7_w1XgBC9ubl7aLFtOcm00CSFAYkTA23NvF3jzrW_qoCArUfYP5GfvUAsA-8RPn-jIOpT5xBWp6vnoTElddiujrZ3_ykToB0s2ZE_cpi2uRUl6SQvNxsWmVdnAKi84NvPHKNwb3Z8HCQ9WdMJ53K2a_ks8psviQao-RvtLUO2hZY4G8cPM581WpfhZ_FM61EHqGQlflJlOSYceI6tiKuKoqPHvWHUAEkd5TdUtee1FVVgLYVEq6hidACMFSsanhqCfmnt4bA7Wlfzyt3A")
-
- //JWT for NIH username "tcga-user"
- val tcgaUserJwt = JWTWrapper("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJ0Y2dhLXVzZXIiLCJpYXQiOjE2MTQ4ODIzMDIwMH0.S7RrbT8dfNVuQ-KdKXwjyvMiLSiaKHtsX00l8FzXuTMzb1FmS7xDxQlf2ZMTX2-BW1KRb8Hc7zMZ57LaizjBV4A-IGbbminOcdkIxtBsnmUWrT_UZrrcQD7AiXObVJdNx80CaozggVaAkWzd2WC-E_QRNC1C3YbQqCdErHxrBaLKrE7mU7RevCLQybrLCdcWFaKrrY8Lyvp_0yAJ0yd1iB86cr2tMvne7VGDGOmAWrFBm0FPr5J1tjzVYdpU9dY_Dpcd1E9tnQ9dCqaOmlC13V5dzI1BDt5oM74iwiuqQ8HbvHhgYE1oFJismKieW6VHDlKggie82dfG_Z86ajBOzg")
-
- //JWT for NIH username "target-user"
- val targetUserJwt = JWTWrapper("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJ0YXJnZXQtdXNlciIsImlhdCI6MTYxNDg3NzUwNzAwfQ.pHJj6yt-lCowUp5cXP7UObU9yFsoQUOBfWX93jnnRBqPkIyEj2e5nKO_DMQl73oSj7WX3H_LVBExUbBUuFTjvJZ977nb6YouSg2IBqj3_bB8QGBrBqQT-ZlsoBfvQ8Q02pVSWBbppqueP4IqFdBgl8ot9pyEx2I_utpohL2VKwwQJrOE4IewGURxA1Ie8F-NIzpAIN2b2N2uV_dkeD5pM7DP7kHUpfnAdLlSkqKTj0pu_jVtdsdF29rWDaxU1uAoJN9YgkVtULaTZ3pTwrRE31WAvCIQhfBAF7CRzXRJwv9fubktiGC1mWeJ7eHH8wpOvysm7OL-kS0R7boNlA9qhA")
-
- //JWT for NIH username "not-on-whitelist" (don't ever add this to the mock whitelists in MockGoogleServicesDAO.scala)
- val validJwtNotOnWhitelist = JWTWrapper("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJub3Qtb24td2hpdGVsaXN0IiwiaWF0IjoxNjE0ODc3NTc4MDB9.WpDrgtui5mOgDc5WvdWYC-l6vljGVyRI7DbBnpRYm7QOq00VLU6FI5YzVFe1eyjnHIqdz_KkkQD604Bi3G1qdyzhk_KKFCSeT4k5in-zS4Em_I2rcyUFs9DeHyFqVrBMZK8eZM_oKtSs23AtwGJASQ-sMvfXeXLcjTFuLWUdeiQEYedj9oOOA93ne-5Kaw9V7sR1foX-ybLDDHfHuAwTN2Vnvpmz0Qlk5osvvv-NunCo4M6A4fQ2FQWjrCwXk8-1N4Wf06dgDJ7ymsw9HtwHhzctVDzodaVlVU_RaC2gtSOWeD5nPaAJ7h6aNmNeLRmNwzCBm3TyPDY-qznPVM0DRg")
-
- case class TestApiService(agoraDao: MockAgoraDAO, googleDao: MockGoogleServicesDAO, ontologyDao: MockOntologyDAO, rawlsDao: MockRawlsDAO, samDao: MockSamDAO, searchDao: MockSearchDAO, researchPurposeSupport: MockResearchPurposeSupport, thurloeDao: MockThurloeDAO, shareLogDao: MockShareLogDAO, shibbolethDao: MockShibbolethDAO, cwdsDao: CwdsDAO, ecmDao: ExternalCredsDAO)(implicit val executionContext: ExecutionContext, implicit val materializer: Materializer) extends ApiServices
+ // JWT for NIH username "firecloud-dev"
+ val firecloudDevJwt = JWTWrapper(
+ "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJmaXJlY2xvdWQtZGV2IiwiaWF0IjoxNjE0ODc3MTk3MDB9.k2HVt74OedfgP_bVHSz6U-1c25_XRMw2v8YtuiPHWZUPdYdXR8qZRzYq9YIUI1wbWtr6M7_w1XgBC9ubl7aLFtOcm00CSFAYkTA23NvF3jzrW_qoCArUfYP5GfvUAsA-8RPn-jIOpT5xBWp6vnoTElddiujrZ3_ykToB0s2ZE_cpi2uRUl6SQvNxsWmVdnAKi84NvPHKNwb3Z8HCQ9WdMJ53K2a_ks8psviQao-RvtLUO2hZY4G8cPM581WpfhZ_FM61EHqGQlflJlOSYceI6tiKuKoqPHvWHUAEkd5TdUtee1FVVgLYVEq6hidACMFSsanhqCfmnt4bA7Wlfzyt3A"
+ )
+
+ // JWT for NIH username "tcga-user"
+ val tcgaUserJwt = JWTWrapper(
+ "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJ0Y2dhLXVzZXIiLCJpYXQiOjE2MTQ4ODIzMDIwMH0.S7RrbT8dfNVuQ-KdKXwjyvMiLSiaKHtsX00l8FzXuTMzb1FmS7xDxQlf2ZMTX2-BW1KRb8Hc7zMZ57LaizjBV4A-IGbbminOcdkIxtBsnmUWrT_UZrrcQD7AiXObVJdNx80CaozggVaAkWzd2WC-E_QRNC1C3YbQqCdErHxrBaLKrE7mU7RevCLQybrLCdcWFaKrrY8Lyvp_0yAJ0yd1iB86cr2tMvne7VGDGOmAWrFBm0FPr5J1tjzVYdpU9dY_Dpcd1E9tnQ9dCqaOmlC13V5dzI1BDt5oM74iwiuqQ8HbvHhgYE1oFJismKieW6VHDlKggie82dfG_Z86ajBOzg"
+ )
+
+ // JWT for NIH username "target-user"
+ val targetUserJwt = JWTWrapper(
+ "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJ0YXJnZXQtdXNlciIsImlhdCI6MTYxNDg3NzUwNzAwfQ.pHJj6yt-lCowUp5cXP7UObU9yFsoQUOBfWX93jnnRBqPkIyEj2e5nKO_DMQl73oSj7WX3H_LVBExUbBUuFTjvJZ977nb6YouSg2IBqj3_bB8QGBrBqQT-ZlsoBfvQ8Q02pVSWBbppqueP4IqFdBgl8ot9pyEx2I_utpohL2VKwwQJrOE4IewGURxA1Ie8F-NIzpAIN2b2N2uV_dkeD5pM7DP7kHUpfnAdLlSkqKTj0pu_jVtdsdF29rWDaxU1uAoJN9YgkVtULaTZ3pTwrRE31WAvCIQhfBAF7CRzXRJwv9fubktiGC1mWeJ7eHH8wpOvysm7OL-kS0R7boNlA9qhA"
+ )
+
+ // JWT for NIH username "not-on-whitelist" (don't ever add this to the mock whitelists in MockGoogleServicesDAO.scala)
+ val validJwtNotOnWhitelist = JWTWrapper(
+ "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJlcmFDb21tb25zVXNlcm5hbWUiOiJub3Qtb24td2hpdGVsaXN0IiwiaWF0IjoxNjE0ODc3NTc4MDB9.WpDrgtui5mOgDc5WvdWYC-l6vljGVyRI7DbBnpRYm7QOq00VLU6FI5YzVFe1eyjnHIqdz_KkkQD604Bi3G1qdyzhk_KKFCSeT4k5in-zS4Em_I2rcyUFs9DeHyFqVrBMZK8eZM_oKtSs23AtwGJASQ-sMvfXeXLcjTFuLWUdeiQEYedj9oOOA93ne-5Kaw9V7sR1foX-ybLDDHfHuAwTN2Vnvpmz0Qlk5osvvv-NunCo4M6A4fQ2FQWjrCwXk8-1N4Wf06dgDJ7ymsw9HtwHhzctVDzodaVlVU_RaC2gtSOWeD5nPaAJ7h6aNmNeLRmNwzCBm3TyPDY-qznPVM0DRg"
+ )
+
+ case class TestApiService(agoraDao: MockAgoraDAO,
+ googleDao: MockGoogleServicesDAO,
+ ontologyDao: MockOntologyDAO,
+ rawlsDao: MockRawlsDAO,
+ samDao: MockSamDAO,
+ searchDao: MockSearchDAO,
+ researchPurposeSupport: MockResearchPurposeSupport,
+ thurloeDao: MockThurloeDAO,
+ shareLogDao: MockShareLogDAO,
+ shibbolethDao: MockShibbolethDAO,
+ cwdsDao: CwdsDAO,
+ ecmDao: ExternalCredsDAO
+ )(implicit val executionContext: ExecutionContext, implicit val materializer: Materializer)
+ extends ApiServices
def withDefaultApiServices[T](testCode: TestApiService => T): T = {
- val apiService = TestApiService(new MockAgoraDAO, new MockGoogleServicesDAO, new MockOntologyDAO, new MockRawlsDAO, new MockSamDAO, new MockSearchDAO, new MockResearchPurposeSupport, new MockThurloeDAO, new MockShareLogDAO, new MockShibbolethDAO, new MockCwdsDAO, new DisabledExternalCredsDAO)
+ val apiService = TestApiService(
+ new MockAgoraDAO,
+ new MockGoogleServicesDAO,
+ new MockOntologyDAO,
+ new MockRawlsDAO,
+ new MockSamDAO,
+ new MockSearchDAO,
+ new MockResearchPurposeSupport,
+ new MockThurloeDAO,
+ new MockShareLogDAO,
+ new MockShibbolethDAO,
+ new MockCwdsDAO,
+ new DisabledExternalCredsDAO
+ )
testCode(apiService)
}
- "NihApiService" should "return NotFound when GET-ting a profile with no NIH username" in withDefaultApiServices { services =>
- val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_UNLINKED)
+ "NihApiService" should "return NotFound when GET-ting a profile with no NIH username" in withDefaultApiServices {
+ services =>
+ val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_UNLINKED)
- Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
- status should equal(NotFound)
- }
+ Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
+ status should equal(NotFound)
+ }
}
it should "return NotFound when GET-ting a non-existent profile" in withDefaultApiServices { services =>
@@ -69,7 +106,10 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
it should "return BadRequest when NIH linking with an invalid JWT" in withDefaultApiServices { services =>
val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_UNLINKED)
- Post("/nih/callback", JWTWrapper("bad-token")) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", JWTWrapper("bad-token")) ~> dummyUserIdHeaders(toLink.value,
+ "access_token",
+ toLink.value
+ ) ~> sealRoute(services.nihRoutes) ~> check {
status should equal(BadRequest)
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
@@ -81,7 +121,9 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- Post("/nih/callback", tcgaUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", tcgaUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(OK)
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
@@ -93,7 +135,9 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- Post("/nih/callback", targetUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", targetUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(OK)
assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
@@ -105,13 +149,17 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- Post("/nih/callback", targetUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", targetUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(OK)
assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
}
// notice tcgaUserJwt, not targetUserJwt as above
- Post("/nih/callback", tcgaUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", tcgaUserJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(OK)
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
@@ -123,68 +171,85 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- Post("/nih/callback", firecloudDevJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Post("/nih/callback", firecloudDevJwt) ~> dummyUserIdHeaders(toLink.value,
+ "access_token",
+ toLink.value
+ ) ~> sealRoute(services.nihRoutes) ~> check {
status should equal(OK)
assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
}
}
- it should "link but not sync when user is on neither the TARGET nor the TCGA whitelist" in withDefaultApiServices { services =>
- val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_UNLINKED)
-
- Post("/nih/callback", validJwtNotOnWhitelist) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
- status should equal(OK)
- assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
- }
+ it should "link but not sync when user is on neither the TARGET nor the TCGA whitelist" in withDefaultApiServices {
+ services =>
+ val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_UNLINKED)
+
+ Post("/nih/callback", validJwtNotOnWhitelist) ~> dummyUserIdHeaders(toLink.value,
+ "access_token",
+ toLink.value
+ ) ~> sealRoute(services.nihRoutes) ~> check {
+ status should equal(OK)
+ assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
+ assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
+ }
}
- it should "return OK when an expired user re-links. their new link time should be in the future" in withDefaultApiServices { services =>
- //verify that their link is indeed already expired
- val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_LINKED_EXPIRED)
-
- Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
- status should equal(OK)
- assert(responseAs[NihStatus].linkExpireTime.get < DateUtils.now)
- }
-
- //link them using a valid JWT for a user on the whitelist
- Post("/nih/callback", firecloudDevJwt) ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
- status should equal(OK)
- }
-
- //verify that their link expiration has been updated
- Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
- status should equal(OK)
- val linkExpireTime = responseAs[NihStatus].linkExpireTime.get
-
- assert(linkExpireTime >= DateUtils.now) //link expire time is fresh
- assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
- assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
- }
+ it should "return OK when an expired user re-links. their new link time should be in the future" in withDefaultApiServices {
+ services =>
+ // verify that their link is indeed already expired
+ val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_LINKED_EXPIRED)
+
+ Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
+ status should equal(OK)
+ assert(responseAs[NihStatus].linkExpireTime.get < DateUtils.now)
+ }
+
+ // link them using a valid JWT for a user on the whitelist
+ Post("/nih/callback", firecloudDevJwt) ~> dummyUserIdHeaders(toLink.value,
+ "access_token",
+ toLink.value
+ ) ~> sealRoute(services.nihRoutes) ~> check {
+ status should equal(OK)
+ }
+
+ // verify that their link expiration has been updated
+ Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
+ status should equal(OK)
+ val linkExpireTime = responseAs[NihStatus].linkExpireTime.get
+
+ assert(linkExpireTime >= DateUtils.now) // link expire time is fresh
+ assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
+ assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
+ }
}
it should "unlink an NIH account for a user that is already linked" in withDefaultApiServices { services =>
val toLink = WorkbenchEmail(services.thurloeDao.TCGA_AND_TARGET_LINKED)
- //Assert that the keys are present in Thurloe
+ // Assert that the keys are present in Thurloe
assert(services.thurloeDao.mockKeyValues(toLink.value).map(_.key).contains(Some("linkedNihUsername")))
assert(services.thurloeDao.mockKeyValues(toLink.value).map(_.key).contains(Some("linkExpireTime")))
- //Assert that the user is a member of the TCGA and TARGET NIH groups
+ // Assert that the user is a member of the TCGA and TARGET NIH groups
assert(services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
assert(services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
- Delete("/nih/account") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Delete("/nih/account") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(NoContent)
}
- //Assert that the keys were removed from Thurloe
+ // Assert that the keys were removed from Thurloe
assert(!services.thurloeDao.mockKeyValues(toLink.value).map(_.key).contains(Some("linkedNihUsername")))
assert(!services.thurloeDao.mockKeyValues(toLink.value).map(_.key).contains(Some("linkExpireTime")))
- //Assert that the user has been removed from the relevant NIH groups
+ // Assert that the user has been removed from the relevant NIH groups
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
}
@@ -192,16 +257,20 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
it should "tolerate unlinking an NIH account that is not linked" in withDefaultApiServices { services =>
val toLink = WorkbenchEmail(services.thurloeDao.TCGA_UNLINKED)
- Delete("/nih/account") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ Delete("/nih/account") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(NoContent)
}
- //Assert that there is no NIH account link
- Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(services.nihRoutes) ~> check {
+ // Assert that there is no NIH account link
+ Get("/nih/status") ~> dummyUserIdHeaders(toLink.value, "access_token", toLink.value) ~> sealRoute(
+ services.nihRoutes
+ ) ~> check {
status should equal(NotFound)
}
- //Assert the user is not in any of the NIH groups
+ // Assert the user is not in any of the NIH groups
assert(!services.samDao.groups(tcgaDbGaPAuthorized).contains(toLink))
assert(!services.samDao.groups(targetDbGaPAuthorized).contains(toLink))
}
@@ -215,18 +284,25 @@ class NihApiServiceSpec extends ApiServiceSpec with BeforeAndAfterAll with SamMo
1 user that is linked and has active TARGET access. they should remain in the TARGET group
1 user that is linked and has active TARGET & TCGA access. they should remain in the TARGET and TCGA groups
*/
- it should "return NoContent and properly sync the whitelist for users of different link statuses across whitelists" in withDefaultApiServices { services =>
- Post("/sync_whitelist") ~> sealRoute(services.syncRoute) ~> check {
- status should equal(NoContent)
- assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TCGA_LINKED), services.samDao.groups(tcgaDbGaPAuthorized).map(_.value))
- assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TARGET_LINKED), services.samDao.groups(targetDbGaPAuthorized).map(_.value))
- }
+ it should "return NoContent and properly sync the whitelist for users of different link statuses across whitelists" in withDefaultApiServices {
+ services =>
+ Post("/sync_whitelist") ~> sealRoute(services.syncRoute) ~> check {
+ status should equal(NoContent)
+ assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TCGA_LINKED),
+ services.samDao.groups(tcgaDbGaPAuthorized).map(_.value)
+ )
+ assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TARGET_LINKED),
+ services.samDao.groups(targetDbGaPAuthorized).map(_.value)
+ )
+ }
}
it should "return NoContent and properly sync a single whitelist" in withDefaultApiServices { services =>
Post("/sync_whitelist/TCGA") ~> sealRoute(services.syncRoute) ~> check {
status should equal(NoContent)
- assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TCGA_LINKED), services.samDao.groups(tcgaDbGaPAuthorized).map(_.value))
+ assertSameElements(Set(services.thurloeDao.TCGA_AND_TARGET_LINKED, services.thurloeDao.TCGA_LINKED),
+ services.samDao.groups(tcgaDbGaPAuthorized).map(_.value)
+ )
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiServiceSpec.scala
index bfa4d6314..6d8ee4efa 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/NotificationsApiServiceSpec.scala
@@ -13,13 +13,11 @@ final class NotificationsApiServiceSpec extends BaseServiceSpec with Notificatio
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
- override def beforeAll(): Unit = {
+ override def beforeAll(): Unit =
MockWorkspaceServer.startWorkspaceServer()
- }
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
MockWorkspaceServer.stopWorkspaceServer()
- }
"NotificationsApiService" - {
"get workspace notifications" in {
@@ -54,9 +52,8 @@ final class NotificationsApiServiceSpec extends BaseServiceSpec with Notificatio
}
}
- private def doAssert(method: HttpMethod, uri: String, expectedStatus: StatusCode): Unit = {
+ private def doAssert(method: HttpMethod, uri: String, expectedStatus: StatusCode): Unit =
new RequestBuilder(method)(uri) ~> dummyAuthHeaders ~> sealRoute(notificationsRoutes) ~> check {
status should be(expectedStatus)
}
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PerimeterApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PerimeterApiServiceSpec.scala
index 51c8f6a35..f816a58e5 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PerimeterApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PerimeterApiServiceSpec.scala
@@ -31,65 +31,92 @@ final class PerimeterApiServiceSpec extends BaseServiceSpec with PerimeterApiSer
mockWorkspaceServer = startClientAndServer(MockUtils.workspaceServerPort)
- mockWorkspaceServer.when(
- request()
- .withMethod(PUT.name)
- .withPath(s"$perimeterPath/$validPerimeter/projects/$validProject"))
- .respond(HttpResponse.response()
+ mockWorkspaceServer
+ .when(
+ request()
+ .withMethod(PUT.name)
+ .withPath(s"$perimeterPath/$validPerimeter/projects/$validProject")
+ )
+ .respond(
+ HttpResponse
+ .response()
.withHeaders(MockUtils.header)
- .withStatusCode(Accepted.intValue))
-
- mockWorkspaceServer.when(
- request()
- .withMethod(PUT.name)
- .withPath(s"$perimeterPath/$invalidPerimeter/projects/$validProject"))
- .respond(HttpResponse.response()
- .withHeaders(MockUtils.header)
- .withStatusCode(NotFound.intValue))
-
- mockWorkspaceServer.when(
- request()
- .withMethod(PUT.name)
- .withPath(s"$perimeterPath/$validPerimeter/projects/$invalidProject"))
- .respond(HttpResponse.response()
- .withHeaders(MockUtils.header)
- .withStatusCode(Forbidden.intValue))
-
- mockWorkspaceServer.when(
- request()
- .withMethod(PUT.name)
- .withPath(s"$perimeterPath/$validPerimeter/projects/$notReadyProject"))
- .respond(HttpResponse.response()
- .withHeaders(MockUtils.header)
- .withStatusCode(BadRequest.intValue))
+ .withStatusCode(Accepted.intValue)
+ )
+
+ mockWorkspaceServer
+ .when(
+ request()
+ .withMethod(PUT.name)
+ .withPath(s"$perimeterPath/$invalidPerimeter/projects/$validProject")
+ )
+ .respond(
+ HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(NotFound.intValue)
+ )
+
+ mockWorkspaceServer
+ .when(
+ request()
+ .withMethod(PUT.name)
+ .withPath(s"$perimeterPath/$validPerimeter/projects/$invalidProject")
+ )
+ .respond(
+ HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(Forbidden.intValue)
+ )
+
+ mockWorkspaceServer
+ .when(
+ request()
+ .withMethod(PUT.name)
+ .withPath(s"$perimeterPath/$validPerimeter/projects/$notReadyProject")
+ )
+ .respond(
+ HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(BadRequest.intValue)
+ )
}
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
mockWorkspaceServer.stop()
- }
"PerimeterApiService" - {
"add project to perimeter" in {
- Put(s"/servicePerimeters/$validPerimeter/projects/$validProject") ~> dummyAuthHeaders ~> sealRoute(perimeterServiceRoutes) ~> check {
+ Put(s"/servicePerimeters/$validPerimeter/projects/$validProject") ~> dummyAuthHeaders ~> sealRoute(
+ perimeterServiceRoutes
+ ) ~> check {
status should be(Accepted)
}
}
"add project to invalid perimeter" in {
- Put(s"/servicePerimeters/$invalidPerimeter/projects/$validProject") ~> dummyAuthHeaders ~> sealRoute(perimeterServiceRoutes) ~> check {
+ Put(s"/servicePerimeters/$invalidPerimeter/projects/$validProject") ~> dummyAuthHeaders ~> sealRoute(
+ perimeterServiceRoutes
+ ) ~> check {
status should be(NotFound)
}
}
"add invalid project to perimeter" in {
- Put(s"/servicePerimeters/$validPerimeter/projects/$invalidProject") ~> dummyAuthHeaders ~> sealRoute(perimeterServiceRoutes) ~> check {
+ Put(s"/servicePerimeters/$validPerimeter/projects/$invalidProject") ~> dummyAuthHeaders ~> sealRoute(
+ perimeterServiceRoutes
+ ) ~> check {
status should be(Forbidden)
}
}
"add unready project to perimeter" in {
- Put(s"/servicePerimeters/$validPerimeter/projects/$notReadyProject") ~> dummyAuthHeaders ~> sealRoute(perimeterServiceRoutes) ~> check {
+ Put(s"/servicePerimeters/$validPerimeter/projects/$notReadyProject") ~> dummyAuthHeaders ~> sealRoute(
+ perimeterServiceRoutes
+ ) ~> check {
status should be(BadRequest)
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PermissionReportApiSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PermissionReportApiSpec.scala
index 16bf4f0ac..41f06b87b 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PermissionReportApiSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/PermissionReportApiSpec.scala
@@ -6,7 +6,14 @@ import org.broadinstitute.dsde.firecloud.{EntityService, FireCloudExceptionWithE
import org.broadinstitute.dsde.firecloud.dataaccess.{MockAgoraDAO, MockRawlsDAO}
import org.broadinstitute.dsde.firecloud.model.ErrorReportExtensions.FCErrorReport
import org.broadinstitute.dsde.firecloud.model.OrchMethodRepository._
-import org.broadinstitute.dsde.firecloud.model.{OrchMethodConfigurationName, ModelSchema, PermissionReport, PermissionReportRequest, UserInfo, WithAccessToken}
+import org.broadinstitute.dsde.firecloud.model.{
+ ModelSchema,
+ OrchMethodConfigurationName,
+ PermissionReport,
+ PermissionReportRequest,
+ UserInfo,
+ WithAccessToken
+}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.service.{BaseServiceSpec, PermissionReportService, WorkspaceService}
import org.broadinstitute.dsde.rawls.model.{MethodConfigurationShort, MethodRepoMethod, _}
@@ -17,7 +24,11 @@ import akka.http.scaladsl.server.Route.{seal => sealRoute}
import scala.concurrent.{ExecutionContext, Future}
-class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService with BeforeAndAfterEach with SprayJsonSupport {
+class PermissionReportApiSpec
+ extends BaseServiceSpec
+ with WorkspaceApiService
+ with BeforeAndAfterEach
+ with SprayJsonSupport {
import PermissionReportMockMethods._
@@ -28,57 +39,69 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
val testApp = app.copy(agoraDAO = new PermissionReportMockAgoraDAO(), rawlsDAO = new PermissionReportMockRawlsDAO())
val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService = WorkspaceService.constructor(testApp)
- val permissionReportServiceConstructor: (UserInfo) => PermissionReportService = PermissionReportService.constructor(testApp)
+ val permissionReportServiceConstructor: (UserInfo) => PermissionReportService =
+ PermissionReportService.constructor(testApp)
val entityServiceConstructor: (ModelSchema) => EntityService = EntityService.constructor(app)
- def permissionReportPath(ns:String,name:String) = s"/api/workspaces/$ns/$name/permissionReport"
+ def permissionReportPath(ns: String, name: String) = s"/api/workspaces/$ns/$name/permissionReport"
"Permission Report API" - {
"should only accept POST" in {
- List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(permissionReportPath("foo","bar")) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.GET, HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(permissionReportPath("foo", "bar")) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
"should reject malformed input" in {
// endpoint expects an object; send it an array
- val badPayload = Seq("one","two","three")
- Post(permissionReportPath("foo","bar"), badPayload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val badPayload = Seq("one", "two", "three")
+ Post(permissionReportPath("foo", "bar"), badPayload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(BadRequest)
}
}
"should return 404 if workspace doesn't exist" in {
- val payload = PermissionReportRequest(None,None)
- Post(permissionReportPath("notfound","notfound"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, None)
+ Post(permissionReportPath("notfound", "notfound"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(NotFound)
}
}
"should accept correctly-formed input" in {
- val payload = PermissionReportRequest(Some(Seq("foo")),Some(Seq(OrchMethodConfigurationName("ns","name"))))
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(Some(Seq("foo")), Some(Seq(OrchMethodConfigurationName("ns", "name"))))
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
}
}
"should treat both users and configs as optional inputs" in {
- val payload = PermissionReportRequest(None,None)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, None)
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
}
}
"should return all users and all configs if caller omits inputs" in {
- val payload = PermissionReportRequest(None,None)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, None)
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("alice@example.com","bob@example.com","carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("alice@example.com", "bob@example.com", "carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns1", "configname1") -> Some(mockMethod1),
@@ -86,18 +109,22 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
OrchMethodConfigurationName("configns3", "configname3") -> Some(mockMethod3)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should filter users if caller specifies" in {
- val payload = PermissionReportRequest(Some(Seq("carol@example.com")),None)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(Some(Seq("carol@example.com")), None)
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns1", "configname1") -> Some(mockMethod1),
@@ -105,49 +132,63 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
OrchMethodConfigurationName("configns3", "configname3") -> Some(mockMethod3)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should filter configs if caller specifies" in {
- val payload = PermissionReportRequest(None,Some(Seq(OrchMethodConfigurationName("configns2","configname2"))))
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, Some(Seq(OrchMethodConfigurationName("configns2", "configname2"))))
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("alice@example.com","bob@example.com","carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("alice@example.com", "bob@example.com", "carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns2", "configname2") -> Some(mockMethod2)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should filter both users and configs if caller specifies" in {
- val payload = PermissionReportRequest(Some(Seq("carol@example.com")),Some(Seq(OrchMethodConfigurationName("configns2","configname2"))))
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(Some(Seq("carol@example.com")),
+ Some(Seq(OrchMethodConfigurationName("configns2", "configname2")))
+ )
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns2", "configname2") -> Some(mockMethod2)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should propagate method-specific error message from Agora" in {
- val payload = PermissionReportRequest(None,None)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, None)
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
@@ -159,11 +200,13 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
}
"should omit a caller-specified user if user doesn't exist in the workspace" in {
- val payload = PermissionReportRequest(Some(Seq("carol@example.com", "dan@example.com")),None)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(Some(Seq("carol@example.com", "dan@example.com")), None)
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns1", "configname1") -> Some(mockMethod1),
@@ -171,37 +214,47 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
OrchMethodConfigurationName("configns3", "configname3") -> Some(mockMethod3)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should omit a caller-specified config if config doesn't exist in the workspace" in {
val payload = PermissionReportRequest(None,
- Some(Seq(
- OrchMethodConfigurationName("configns2","configname2"),
- OrchMethodConfigurationName("confignsZZZ","confignameZZZ")
- ))
+ Some(
+ Seq(
+ OrchMethodConfigurationName("configns2", "configname2"),
+ OrchMethodConfigurationName("confignsZZZ", "confignameZZZ")
+ )
+ )
)
- Post(permissionReportPath("foo","bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ Post(permissionReportPath("foo", "bar"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
- assertResult(Set("alice@example.com","bob@example.com","carol@example.com")) {report.workspaceACL.keySet}
+ assertResult(Set("alice@example.com", "bob@example.com", "carol@example.com"))(report.workspaceACL.keySet)
val expectedConfigsNoAcls = Map(
OrchMethodConfigurationName("configns2", "configname2") -> Some(mockMethod2)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
"should return empty workspace ACLs but still get method info if caller is not owner of workspace" in {
- val payload = PermissionReportRequest(None,None)
- Post(permissionReportPath("notowner","notowner"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(workspaceRoutes) ~> check {
+ val payload = PermissionReportRequest(None, None)
+ Post(permissionReportPath("notowner", "notowner"), payload) ~> dummyUserIdHeaders("1234") ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
val report = responseAs[PermissionReport]
@@ -213,9 +266,11 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
OrchMethodConfigurationName("configns3", "configname3") -> Some(mockMethod3)
)
- assertResult(expectedConfigsNoAcls) {(report.referencedMethods map {
- x => x.referencedBy -> x.method
- }).toMap}
+ assertResult(expectedConfigsNoAcls) {
+ (report.referencedMethods map { x =>
+ x.referencedBy -> x.method
+ }).toMap
+ }
}
}
@@ -224,11 +279,13 @@ class PermissionReportApiSpec extends BaseServiceSpec with WorkspaceApiService w
class PermissionReportMockRawlsDAO extends MockRawlsDAO {
- val mockACL = WorkspaceACL(Map(
- "alice@example.com" -> AccessEntry(WorkspaceAccessLevels.Owner, false, false, true),
- "bob@example.com" -> AccessEntry(WorkspaceAccessLevels.Write, false, false, true),
- "carol@example.com" -> AccessEntry(WorkspaceAccessLevels.Read, false, true, false)
- ))
+ val mockACL = WorkspaceACL(
+ Map(
+ "alice@example.com" -> AccessEntry(WorkspaceAccessLevels.Owner, false, false, true),
+ "bob@example.com" -> AccessEntry(WorkspaceAccessLevels.Write, false, false, true),
+ "carol@example.com" -> AccessEntry(WorkspaceAccessLevels.Read, false, true, false)
+ )
+ )
val mockConfigs = Seq(
AgoraConfigurationShort("configname1", "participant", MethodRepoMethod("methodns1", "methodname1", 1), "configns1"),
@@ -236,24 +293,36 @@ class PermissionReportMockRawlsDAO extends MockRawlsDAO {
AgoraConfigurationShort("configname3", "participant", MethodRepoMethod("methodns3", "methodname3", 3), "configns3")
)
- override def getWorkspaceACL(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceACL] = {
+ override def getWorkspaceACL(ns: String, name: String)(implicit userToken: WithAccessToken): Future[WorkspaceACL] =
ns match {
- case "notfound" => Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(NotFound, "Not Found response from Mock")))
- case "notowner" => Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(Forbidden, "Forbidden response from Mock")))
+ case "notfound" =>
+ Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(NotFound, "Not Found response from Mock")))
+ case "notowner" =>
+ Future.failed(new FireCloudExceptionWithErrorReport(ErrorReport(Forbidden, "Forbidden response from Mock")))
case _ => Future.successful(mockACL)
}
- }
- override def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit userToken: WithAccessToken): Future[Seq[AgoraConfigurationShort]] = {
+ override def getAgoraMethodConfigs(workspaceNamespace: String, workspaceName: String)(implicit
+ userToken: WithAccessToken
+ ): Future[Seq[AgoraConfigurationShort]] =
Future.successful(mockConfigs)
- }
}
object PermissionReportMockMethods {
- val mockMethod1 = Method(Some("methodns1"), Some("methodname1"), Some(1), managers=Some(Seq("alice@example.com")), public=Some(true))
- val mockMethod2 = Method(Some("methodns2"), Some("methodname2"), Some(2), managers=Some(Seq("bob@example.com")), public=Some(false))
- val mockMethod3 = Method(Some("methodns3"), Some("methodname3"), Some(3), public=Some(false))
+ val mockMethod1 = Method(Some("methodns1"),
+ Some("methodname1"),
+ Some(1),
+ managers = Some(Seq("alice@example.com")),
+ public = Some(true)
+ )
+ val mockMethod2 = Method(Some("methodns2"),
+ Some("methodname2"),
+ Some(2),
+ managers = Some(Seq("bob@example.com")),
+ public = Some(false)
+ )
+ val mockMethod3 = Method(Some("methodns3"), Some("methodname3"), Some(3), public = Some(false))
}
class PermissionReportMockAgoraDAO extends MockAgoraDAO {
@@ -261,27 +330,30 @@ class PermissionReportMockAgoraDAO extends MockAgoraDAO {
import PermissionReportMockMethods._
val mockEntityAccessControlList = List(
- EntityAccessControlAgora(mockMethod1,
+ EntityAccessControlAgora(
+ mockMethod1,
Seq(
AgoraPermission(Some("alice@example.com"), Some(ACLNames.ListOwner)),
AgoraPermission(Some("bob@example.com"), Some(ACLNames.ListReader)),
AgoraPermission(Some("public"), Some(ACLNames.ListReader))
),
- None),
- EntityAccessControlAgora(mockMethod2,
+ None
+ ),
+ EntityAccessControlAgora(
+ mockMethod2,
Seq(
AgoraPermission(Some("bob@example.com"), Some(ACLNames.ListOwner)),
AgoraPermission(Some("carol@example.com"), Some(ACLNames.ListReader)),
AgoraPermission(Some("public"), Some(List.empty[String]))
),
- None),
- EntityAccessControlAgora(mockMethod3,
- Seq.empty[AgoraPermission],
- Some("this method's mock response has an error"))
+ None
+ ),
+ EntityAccessControlAgora(mockMethod3, Seq.empty[AgoraPermission], Some("this method's mock response has an error"))
)
- override def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit userInfo: UserInfo) = {
+ override def getMultiEntityPermissions(entityType: AgoraEntityType.Value, entities: List[Method])(implicit
+ userInfo: UserInfo
+ ) =
Future.successful(mockEntityAccessControlList)
- }
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiServiceSpec.scala
index 3f5a66f29..9ae1944e9 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/RegisterApiServiceSpec.scala
@@ -21,9 +21,14 @@ import spray.json.DefaultJsonProtocol
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Success
-final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiService with UserApiService
- with DefaultJsonProtocol with SprayJsonSupport
- with BeforeAndAfterAll with SamMockserverUtils {
+final class RegisterApiServiceSpec
+ extends BaseServiceSpec
+ with RegisterApiService
+ with UserApiService
+ with DefaultJsonProtocol
+ with SprayJsonSupport
+ with BeforeAndAfterAll
+ with SamMockserverUtils {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
@@ -36,47 +41,54 @@ final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiServi
mockSamServer = startClientAndServer(MockUtils.samServerPort)
// disabled user
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath("/register/user/v2/self/info")
- .withHeader(new Header("Authorization", "Bearer disabled")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath("/register/user/v2/self/info")
+ .withHeader(new Header("Authorization", "Bearer disabled"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody(
- """{
- | "adminEnabled": false,
- | "enabled": false,
- | "userEmail": "disabled@nowhere.com",
- | "userSubjectId": "disabled-id"
- |}""".stripMargin).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "adminEnabled": false,
+ | "enabled": false,
+ | "userEmail": "disabled@nowhere.com",
+ | "userSubjectId": "disabled-id"
+ |}""".stripMargin)
+ .withStatusCode(OK.intValue)
)
// unregistered user
mockSamServer
- .when(request
- .withMethod("GET")
- .withPath("/register/user/v2/self/info")
- .withHeader(new Header("Authorization", "Bearer unregistered")))
+ .when(
+ request
+ .withMethod("GET")
+ .withPath("/register/user/v2/self/info")
+ .withHeader(new Header("Authorization", "Bearer unregistered"))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody(
- """{
- | "causes": [],
- | "message": "Google Id unregistered-id not found in sam",
- | "source": "sam",
- | "stackTrace": [],
- | "statusCode": 404
- |}""".stripMargin).withStatusCode(NotFound.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody("""{
+ | "causes": [],
+ | "message": "Google Id unregistered-id not found in sam",
+ | "source": "sam",
+ | "stackTrace": [],
+ | "statusCode": 404
+ |}""".stripMargin)
+ .withStatusCode(NotFound.intValue)
)
returnEnabledUser(mockSamServer)
}
-
- override val registerServiceConstructor:() => RegisterService =
+ override val registerServiceConstructor: () => RegisterService =
RegisterService.constructor(app.copy(thurloeDAO = new RegisterApiServiceSpecThurloeDAO))
- override val userServiceConstructor:(UserInfo) => UserService =
+ override val userServiceConstructor: (UserInfo) => UserService =
UserService.constructor(app.copy(thurloeDAO = new RegisterApiServiceSpecThurloeDAO))
def makeBasicProfile(hasTermsOfService: Boolean): BasicProfile = {
@@ -152,21 +164,27 @@ final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiServi
"register-profile API POST" - {
"should fail with no terms of service" in {
val payload = makeBasicProfile(false)
- Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(registerRoutes) ~> check {
+ Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(
+ registerRoutes
+ ) ~> check {
status should be(Forbidden)
}
}
"should succeed with terms of service" in {
val payload = makeBasicProfile(true)
- Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(registerRoutes) ~> check {
+ Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(
+ registerRoutes
+ ) ~> check {
status should be(OK)
}
}
"should succeed user who already exists" in {
val payload = makeBasicProfile(true)
- Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec") ~> sealRoute(registerRoutes) ~> check {
+ Post("/register/profile", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec") ~> sealRoute(
+ registerRoutes
+ ) ~> check {
status should be(OK)
}
}
@@ -175,14 +193,18 @@ final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiServi
"register-with-profile API POST" - {
"should fail if Sam does not register the user" in {
val payload = makeBasicProfile(false)
- Post("/users/v1/registerWithProfile", RegisterRequest(acceptsTermsOfService = false, profile = payload)) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(v1RegisterRoutes) ~> check {
+ Post("/users/v1/registerWithProfile",
+ RegisterRequest(acceptsTermsOfService = false, profile = payload)
+ ) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(v1RegisterRoutes) ~> check {
status should be(BadRequest)
}
}
"should succeed if Sam does register the user" in {
val payload = makeBasicProfile(true)
- Post("/users/v1/registerWithProfile", RegisterRequest(acceptsTermsOfService = true, profile = payload)) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(v1RegisterRoutes) ~> check {
+ Post("/users/v1/registerWithProfile",
+ RegisterRequest(acceptsTermsOfService = true, profile = payload)
+ ) ~> dummyUserIdHeaders("RegisterApiServiceSpec", "new") ~> sealRoute(v1RegisterRoutes) ~> check {
status should be(OK)
}
}
@@ -196,7 +218,9 @@ final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiServi
// These tests will fail if GET /register/profile is put behind requireEnabledUser().
List("enabled", "disabled", "unregistered") foreach { testCase =>
s"should succeed for a(n) $testCase user" in {
- Get("/register/profile") ~> dummyUserIdHeaders(userId = testCase, token = testCase) ~> sealRoute(userServiceRoutes) ~> check {
+ Get("/register/profile") ~> dummyUserIdHeaders(userId = testCase, token = testCase) ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
withClue(s"with actual response body: ${responseAs[String]}, got error message ->") {
status should be(OK)
}
@@ -206,15 +230,15 @@ final class RegisterApiServiceSpec extends BaseServiceSpec with RegisterApiServi
}
}
- private def assertPreferencesUpdate(payload: Map[String, String], expectedStatus: StatusCode): Unit = {
- Post("/profile/preferences", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec") ~> sealRoute(profileRoutes) ~> check {
+ private def assertPreferencesUpdate(payload: Map[String, String], expectedStatus: StatusCode): Unit =
+ Post("/profile/preferences", payload) ~> dummyUserIdHeaders("RegisterApiServiceSpec") ~> sealRoute(
+ profileRoutes
+ ) ~> check {
status should be(expectedStatus)
}
- }
// for purposes of these tests, we treat Thurloe as if it is always successful.
final class RegisterApiServiceSpecThurloeDAO extends MockThurloeDAO {
- override def saveKeyValues(userInfo: UserInfo, keyValues: Map[String, String])= Future.successful(Success(()))
+ override def saveKeyValues(userInfo: UserInfo, keyValues: Map[String, String]) = Future.successful(Success(()))
}
}
-
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiServiceSpec.scala
index d6b92bdc8..5d2eceee4 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/ShareLogApiServiceSpec.scala
@@ -17,7 +17,11 @@ import spray.json.DefaultJsonProtocol._
import scala.concurrent.ExecutionContext
-final class ShareLogApiServiceSpec extends BaseServiceSpec with ShareLogApiService with SamMockserverUtils with BeforeAndAfterAll {
+final class ShareLogApiServiceSpec
+ extends BaseServiceSpec
+ with ShareLogApiService
+ with SamMockserverUtils
+ with BeforeAndAfterAll {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
@@ -41,21 +45,27 @@ final class ShareLogApiServiceSpec extends BaseServiceSpec with ShareLogApiServi
val localShareLogDao = new ShareLogApiServiceSpecShareLogDAO
- override val shareLogServiceConstructor: () => ShareLogService = ShareLogService.constructor(app.copy(shareLogDAO = localShareLogDao))
-
+ override val shareLogServiceConstructor: () => ShareLogService =
+ ShareLogService.constructor(app.copy(shareLogDAO = localShareLogDao))
+
"ShareLogApiService" - {
"when getting all sharees" in {
- Get(getShareesPath) ~> getUserHeaders("fake1", "fake1@gmail.com") ~> sealRoute(shareLogServiceRoutes) ~> check {
- assertResult(OK) { status }
- responseAs[Seq[String]] should contain theSameElementsAs ElasticSearchShareLogDAOSpecFixtures.fixtureShares.map(_.sharee)
+ Get(getShareesPath) ~> getUserHeaders("fake1", "fake1@gmail.com") ~> sealRoute(shareLogServiceRoutes) ~> check {
+ assertResult(OK)(status)
+ responseAs[Seq[String]] should contain theSameElementsAs ElasticSearchShareLogDAOSpecFixtures.fixtureShares.map(
+ _.sharee
+ )
}
}
"when getting workspace sharees" in {
- Get(makeGetShareesPath(ShareType.WORKSPACE)) ~> getUserHeaders("fake1", "fake1@gmail.com") ~> sealRoute(shareLogServiceRoutes) ~> check {
- assertResult(OK) { status }
- responseAs[Seq[String]] should contain theSameElementsAs ElasticSearchShareLogDAOSpecFixtures.fixtureShares.map(_.sharee)
+ Get(makeGetShareesPath(ShareType.WORKSPACE)) ~> getUserHeaders("fake1", "fake1@gmail.com") ~> sealRoute(
+ shareLogServiceRoutes
+ ) ~> check {
+ assertResult(OK)(status)
+ responseAs[Seq[String]] should contain theSameElementsAs ElasticSearchShareLogDAOSpecFixtures.fixtureShares.map(
+ _.sharee
+ )
}
}
}
}
-
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiServiceSpec.scala
index 903754bfc..b327c76c0 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/StatusApiServiceSpec.scala
@@ -12,7 +12,6 @@ import akka.http.scaladsl.model.StatusCodes.OK
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
-
/* We don't do much testing of the HealthMonitor itself, because that's tested as part of
workbench-libs. Here, we test routing, de/serialization, and the config we send into
the HealthMonitor.
@@ -24,17 +23,17 @@ class StatusApiServiceSpec extends BaseServiceSpec with StatusApiService with Sp
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
val healthMonitorChecks = new HealthChecks(app).healthMonitorChecks
- val healthMonitor = system.actorOf(HealthMonitor.props(healthMonitorChecks().keySet)( healthMonitorChecks ), "health-monitor")
- val monitorSchedule = system.scheduler.scheduleWithFixedDelay(Duration.Zero, 1.second, healthMonitor, HealthMonitor.CheckAll)
+ val healthMonitor =
+ system.actorOf(HealthMonitor.props(healthMonitorChecks().keySet)(healthMonitorChecks), "health-monitor")
+ val monitorSchedule =
+ system.scheduler.scheduleWithFixedDelay(Duration.Zero, 1.second, healthMonitor, HealthMonitor.CheckAll)
- override def beforeAll() = {
+ override def beforeAll() =
// wait for the healthMonitor to start up ...
Thread.sleep(3000)
- }
- override def afterAll() = {
+ override def afterAll() =
monitorSchedule.cancel()
- }
override val statusServiceConstructor: () => StatusService = StatusService.constructor(healthMonitor)
@@ -64,7 +63,7 @@ class StatusApiServiceSpec extends BaseServiceSpec with StatusApiService with Sp
// changing the values of expectedSystems may affect the orch liveness probe
// https://github.com/broadinstitute/terra-helmfile/blob/master/charts/firecloudorch/templates/probe/configmap.yaml
val expectedSystems = Set(Agora, GoogleBuckets, LibraryIndex, OntologyIndex, Rawls, Sam, Thurloe)
- assertResult(expectedSystems) { statusCheckResponse.systems.keySet }
+ assertResult(expectedSystems)(statusCheckResponse.systems.keySet)
}
}
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiServiceSpec.scala
index 66a8ac39c..56fc7db5f 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/SubmissionApiServiceSpec.scala
@@ -15,61 +15,64 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
- override def beforeAll(): Unit = {
+ override def beforeAll(): Unit =
MockWorkspaceServer.startWorkspaceServer()
- }
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
MockWorkspaceServer.stopWorkspaceServer()
- }
- val localSubmissionsCountPath = FireCloudConfig.Rawls.submissionsCountPath.format(
- MockWorkspaceServer.mockValidWorkspace.namespace,
- MockWorkspaceServer.mockValidWorkspace.name)
+ val localSubmissionsCountPath = FireCloudConfig.Rawls.submissionsCountPath
+ .format(MockWorkspaceServer.mockValidWorkspace.namespace, MockWorkspaceServer.mockValidWorkspace.name)
- val localSubmissionsPath = FireCloudConfig.Rawls.submissionsPath.format(
- MockWorkspaceServer.mockValidWorkspace.namespace,
- MockWorkspaceServer.mockValidWorkspace.name)
+ val localSubmissionsPath = FireCloudConfig.Rawls.submissionsPath
+ .format(MockWorkspaceServer.mockValidWorkspace.namespace, MockWorkspaceServer.mockValidWorkspace.name)
val localSubmissionIdPath = FireCloudConfig.Rawls.submissionsIdPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
- MockWorkspaceServer.mockValidId)
+ MockWorkspaceServer.mockValidId
+ )
val localInvalidSubmissionIdPath = FireCloudConfig.Rawls.submissionsIdPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
- MockWorkspaceServer.mockInvalidId)
+ MockWorkspaceServer.mockInvalidId
+ )
val localSubmissionWorkflowIdPath = FireCloudConfig.Rawls.submissionsWorkflowIdPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
MockWorkspaceServer.mockValidId,
- MockWorkspaceServer.mockValidId)
+ MockWorkspaceServer.mockValidId
+ )
val localSpacedWorkspaceWorkflowIdPath = FireCloudConfig.Rawls.submissionsWorkflowIdPath.format(
MockWorkspaceServer.mockSpacedWorkspace.namespace,
MockWorkspaceServer.mockSpacedWorkspace.name,
MockWorkspaceServer.mockValidId,
- MockWorkspaceServer.mockValidId)
+ MockWorkspaceServer.mockValidId
+ )
val localInvalidSubmissionWorkflowIdPath = FireCloudConfig.Rawls.submissionsWorkflowIdPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
MockWorkspaceServer.mockInvalidId,
- MockWorkspaceServer.mockInvalidId)
+ MockWorkspaceServer.mockInvalidId
+ )
val localSubmissionWorkflowIdOutputsPath = FireCloudConfig.Rawls.submissionsWorkflowIdOutputsPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
MockWorkspaceServer.mockValidId,
- MockWorkspaceServer.mockValidId)
+ MockWorkspaceServer.mockValidId
+ )
val localInvalidSubmissionWorkflowIdOutputsPath = FireCloudConfig.Rawls.submissionsWorkflowIdOutputsPath.format(
MockWorkspaceServer.mockValidWorkspace.namespace,
MockWorkspaceServer.mockValidWorkspace.name,
MockWorkspaceServer.mockInvalidId,
- MockWorkspaceServer.mockInvalidId)
+ MockWorkspaceServer.mockInvalidId
+ )
"SubmissionApiService" - {
"when hitting the /submissions/queueStatus path" - {
@@ -95,7 +98,7 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
"a list of submissions is returned" in {
(Get(localSubmissionsPath)
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
- status should equal(OK)
+ status should equal(OK)
}
}
}
@@ -104,9 +107,9 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
"OK response is returned" in {
(Post(localSubmissionsPath, MockWorkspaceServer.mockValidSubmission)
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
- status should equal(OK)
- val submission = responseAs[OrchSubmissionRequest]
- submission shouldNot be (None)
+ status should equal(OK)
+ val submission = responseAs[OrchSubmissionRequest]
+ submission shouldNot be(None)
}
}
}
@@ -115,8 +118,8 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
"BadRequest response is returned" in {
(Post(localSubmissionsPath, MockWorkspaceServer.mockInvalidSubmission)
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
- status should equal(BadRequest)
- errorReportCheck("Rawls", BadRequest)
+ status should equal(BadRequest)
+ errorReportCheck("Rawls", BadRequest)
}
}
}
@@ -126,7 +129,9 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
*/
"when calling POST on the /workspaces/*/*/submissions path without a valid authentication token" - {
"Found (302 redirect) response is returned" in {
- Post(localSubmissionsPath, MockWorkspaceServer.mockValidSubmission) ~> sealRoute(submissionServiceRoutes) ~> check {
+ Post(localSubmissionsPath, MockWorkspaceServer.mockValidSubmission) ~> sealRoute(
+ submissionServiceRoutes
+ ) ~> check {
status should equal(Found)
}
}
@@ -136,17 +141,17 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
"with a valid submission, OK response is returned" in {
(Post(s"$localSubmissionsPath/validate", MockWorkspaceServer.mockValidSubmission)
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
- status should equal(OK)
- val submission = responseAs[OrchSubmissionRequest]
- submission shouldNot be (None)
+ status should equal(OK)
+ val submission = responseAs[OrchSubmissionRequest]
+ submission shouldNot be(None)
}
}
"with an invalid submission, BadRequest response is returned" in {
(Post(s"$localSubmissionsPath/validate", MockWorkspaceServer.mockInvalidSubmission)
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
- status should equal(BadRequest)
- errorReportCheck("Rawls", BadRequest)
+ status should equal(BadRequest)
+ errorReportCheck("Rawls", BadRequest)
}
}
}
@@ -188,13 +193,16 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
"when calling PATCH on the /workspaces/*/*/submissions/* path" - {
MockWorkspaceServer.submissionIdPatchResponseMapping.foreach { case (id, expectedResponseCode, _) =>
s"HTTP $expectedResponseCode responses are forwarded back correctly" in {
- val submissionIdPath = FireCloudConfig.Rawls.submissionsIdPath.format(
- MockWorkspaceServer.mockValidWorkspace.namespace,
- MockWorkspaceServer.mockValidWorkspace.name,
- id)
-
- (Patch(submissionIdPath, "PATCH request body. The mock server will ignore this content and respond " +
- "entirely based on submission ID instead")
+ val submissionIdPath =
+ FireCloudConfig.Rawls.submissionsIdPath.format(MockWorkspaceServer.mockValidWorkspace.namespace,
+ MockWorkspaceServer.mockValidWorkspace.name,
+ id
+ )
+
+ (Patch(submissionIdPath,
+ "PATCH request body. The mock server will ignore this content and respond " +
+ "entirely based on submission ID instead"
+ )
~> dummyAuthHeaders) ~> sealRoute(submissionServiceRoutes) ~> check {
status should equal(expectedResponseCode)
if (status != OK) {
@@ -216,7 +224,9 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
// the request inbound to orchestration should be encoded, so we replace spaces with %20 in the test below.
// this test really verifies that the runtime orch code can accept an encoded URI and maintain the encoding
// when it passes through the request to rawls - i.e. it doesn't decode the request at any point.
- Get(localSpacedWorkspaceWorkflowIdPath.replace(" ","%20")) ~> dummyAuthHeaders ~> sealRoute(submissionServiceRoutes) ~> check {
+ Get(localSpacedWorkspaceWorkflowIdPath.replace(" ", "%20")) ~> dummyAuthHeaders ~> sealRoute(
+ submissionServiceRoutes
+ ) ~> check {
status should equal(OK)
}
}
@@ -237,7 +247,9 @@ final class SubmissionApiServiceSpec extends BaseServiceSpec with SubmissionApiS
}
"with an invalid id, NotFound response is returned" in {
- Get(localInvalidSubmissionWorkflowIdOutputsPath) ~> dummyAuthHeaders ~> sealRoute(submissionServiceRoutes) ~> check {
+ Get(localInvalidSubmissionWorkflowIdOutputsPath) ~> dummyAuthHeaders ~> sealRoute(
+ submissionServiceRoutes
+ ) ~> check {
status should equal(NotFound)
errorReportCheck("Rawls", NotFound)
}
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiServiceSpec.scala
index 6561a93d9..ae6bf9396 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/UserApiServiceSpec.scala
@@ -19,18 +19,28 @@ import spray.json._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext}
-class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
- with RegisterApiService with UserApiService with SprayJsonSupport {
+class UserApiServiceSpec
+ extends BaseServiceSpec
+ with SamMockserverUtils
+ with RegisterApiService
+ with UserApiService
+ with SprayJsonSupport {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
- val registerServiceConstructor:() => RegisterService = RegisterService.constructor(app)
- val userServiceConstructor:(UserInfo) => UserService = UserService.constructor(app)
+ val registerServiceConstructor: () => RegisterService = RegisterService.constructor(app)
+ val userServiceConstructor: (UserInfo) => UserService = UserService.constructor(app)
var workspaceServer: ClientAndServer = _
var profileServer: ClientAndServer = _
var samServer: ClientAndServer = _
- val httpMethods = List(HttpMethods.GET, HttpMethods.POST, HttpMethods.PUT,
- HttpMethods.DELETE, HttpMethods.PATCH, HttpMethods.OPTIONS, HttpMethods.HEAD)
+ val httpMethods = List(HttpMethods.GET,
+ HttpMethods.POST,
+ HttpMethods.PUT,
+ HttpMethods.DELETE,
+ HttpMethods.PATCH,
+ HttpMethods.OPTIONS,
+ HttpMethods.HEAD
+ )
val userWithGoogleGroup = "have-google-group"
val userWithEmptyGoogleGroup = "have-empty-google-group"
@@ -39,7 +49,7 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
val exampleKey = "favoriteColor"
val exampleVal = "green"
val fullProfile = BasicProfile(
- firstName= randomAlpha(),
+ firstName = randomAlpha(),
lastName = randomAlpha(),
title = randomAlpha(),
contactEmail = None,
@@ -66,9 +76,12 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
| }
|}""".stripMargin
- val enabledV1UserBody = """{"enabled": {"google": true, "ldap": true, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
- val noLdapV1UserBody = """{"enabled": {"google": true, "ldap": false, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
- val noGoogleV1UserBody = """{"enabled": {"google": false, "ldap": true, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
+ val enabledV1UserBody =
+ """{"enabled": {"google": true, "ldap": true, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
+ val noLdapV1UserBody =
+ """{"enabled": {"google": true, "ldap": false, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
+ val noGoogleV1UserBody =
+ """{"enabled": {"google": false, "ldap": true, "allUsersGroup": true}, "userInfo": {"userSubjectId": "1111111111", "userEmail": "no@nope.org"}}"""
val enabledV2UserBody = """{"userSubjectId": "1111111111", "userEmail": "no@nope.org", "enabled": true}"""
val noLdapV2UserBody = """{"userSubjectId": "1111111111", "userEmail": "no@nope.org", "enabled": false}"""
@@ -79,44 +92,54 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
val uglyJsonBody = """{"userInfo": "whaaaaaaat??"}"""
-
override def beforeAll(): Unit = {
workspaceServer = startClientAndServer(workspaceServerPort)
workspaceServer
.when(request.withMethod("GET").withPath(UserApiService.billingPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
samServer = startClientAndServer(samServerPort)
samServer
.when(request.withMethod("GET").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
samServer
.when(request.withMethod("POST").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(Created.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(Created.intValue)
)
samServer
.when(request.withMethod("GET").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withBody(userStatus).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withBody(userStatus)
+ .withStatusCode(OK.intValue)
)
samServer
.when(request.withMethod("GET").withPath(UserApiService.samUserProxyGroupPath("test@test.test")))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
returnEnabledUser(samServer)
@@ -124,38 +147,62 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
profileServer = startClientAndServer(thurloeServerPort)
// Generate a mock response for all combinations of profile properties
// to ensure that all posts to any combination will yield a successful response.
- allProperties.keys foreach {
- key =>
- profileServer
- .when(request().withMethod("POST").withHeader(fireCloudHeader.name, fireCloudHeader.value).withPath(
- UserApiService.remoteGetKeyPath.format(uniqueId, key)))
- .respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
- )
+ allProperties.keys foreach { key =>
+ profileServer
+ .when(
+ request()
+ .withMethod("POST")
+ .withHeader(fireCloudHeader.name, fireCloudHeader.value)
+ .withPath(UserApiService.remoteGetKeyPath.format(uniqueId, key))
+ )
+ .respond(
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
+ )
}
- List(HttpMethods.GET, HttpMethods.POST, HttpMethods.DELETE) foreach {
- method =>
- profileServer
- .when(request().withMethod(method.name).withHeader(fireCloudHeader.name, fireCloudHeader.value).withPath(
- UserApiService.remoteGetKeyPath.format(uniqueId, exampleKey)))
- .respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
- )
+ List(HttpMethods.GET, HttpMethods.POST, HttpMethods.DELETE) foreach { method =>
+ profileServer
+ .when(
+ request()
+ .withMethod(method.name)
+ .withHeader(fireCloudHeader.name, fireCloudHeader.value)
+ .withPath(UserApiService.remoteGetKeyPath.format(uniqueId, exampleKey))
+ )
+ .respond(
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
+ )
}
profileServer
- .when(request().withMethod("GET").withHeader(fireCloudHeader.name, fireCloudHeader.value).withPath(UserApiService.remoteGetAllPath.format(uniqueId)))
+ .when(
+ request()
+ .withMethod("GET")
+ .withHeader(fireCloudHeader.name, fireCloudHeader.value)
+ .withPath(UserApiService.remoteGetAllPath.format(uniqueId))
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
profileServer
- .when(request().withMethod("POST").withHeader(fireCloudHeader.name, fireCloudHeader.value).withPath(UserApiService.remoteSetKeyPath))
+ .when(
+ request()
+ .withMethod("POST")
+ .withHeader(fireCloudHeader.name, fireCloudHeader.value)
+ .withPath(UserApiService.remoteSetKeyPath)
+ )
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(OK.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(OK.intValue)
)
}
@@ -195,32 +242,56 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
}
"if anonymousGroup KVP does not exist, it gets assigned" in {
Get("/register/profile") ~> dummyUserIdHeaders(uniqueId) ~> sealRoute(userServiceRoutes) ~> check {
- assert(entityAs[String].parseJson.convertTo[ProfileWrapper].keyValuePairs
- .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
- .flatMap(_.value).equals(Option("new-google-group@support.something.firecloud.org")))
+ assert(
+ entityAs[String].parseJson
+ .convertTo[ProfileWrapper]
+ .keyValuePairs
+ .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
+ .flatMap(_.value)
+ .equals(Option("new-google-group@support.something.firecloud.org"))
+ )
}
}
"if anonymousGroup key exists but value is empty, a new group gets assigned, and MethodNotAllowed is not returned" in {
- Get("/register/profile") ~> dummyUserIdHeaders(userWithEmptyGoogleGroup) ~> sealRoute(userServiceRoutes) ~> check {
- assert(entityAs[String].parseJson.convertTo[ProfileWrapper].keyValuePairs
- .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
- .flatMap(_.value).equals(Option("new-google-group@support.something.firecloud.org")))
+ Get("/register/profile") ~> dummyUserIdHeaders(userWithEmptyGoogleGroup) ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
+ assert(
+ entityAs[String].parseJson
+ .convertTo[ProfileWrapper]
+ .keyValuePairs
+ .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
+ .flatMap(_.value)
+ .equals(Option("new-google-group@support.something.firecloud.org"))
+ )
status shouldNot equal(MethodNotAllowed)
}
}
"existing anonymousGroup is not overwritten, and MethodNotAllowed is not returned" in {
Get("/register/profile") ~> dummyUserIdHeaders(userWithGoogleGroup) ~> sealRoute(userServiceRoutes) ~> check {
- assert(entityAs[String].parseJson.convertTo[ProfileWrapper].keyValuePairs
- .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
- .flatMap(_.value).equals(Option("existing-google-group@support.something.firecloud.org")))
+ assert(
+ entityAs[String].parseJson
+ .convertTo[ProfileWrapper]
+ .keyValuePairs
+ .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
+ .flatMap(_.value)
+ .equals(Option("existing-google-group@support.something.firecloud.org"))
+ )
status shouldNot equal(MethodNotAllowed)
}
}
"a user with no contact email still gets assigned a new anonymousGroup, and MethodNotAllowed is not returned" in {
- Get("/register/profile") ~> dummyUserIdHeaders(userWithNoContactEmail) ~> sealRoute(userServiceRoutes) ~> check {
- assert(entityAs[String].parseJson.convertTo[ProfileWrapper].keyValuePairs
- .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
- .flatMap(_.value).equals(Option("new-google-group@support.something.firecloud.org")))
+ Get("/register/profile") ~> dummyUserIdHeaders(userWithNoContactEmail) ~> sealRoute(
+ userServiceRoutes
+ ) ~> check {
+ assert(
+ entityAs[String].parseJson
+ .convertTo[ProfileWrapper]
+ .keyValuePairs
+ .find(_.key.contains("anonymousGroup")) // .find returns Option[FireCloudKeyValue]
+ .flatMap(_.value)
+ .equals(Option("new-google-group@support.something.firecloud.org"))
+ )
status shouldNot equal(MethodNotAllowed)
}
}
@@ -229,10 +300,10 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
"when POST-ting a complete profile" - {
"OK response is returned" in {
Post(s"/$ApiPrefix", fullProfile) ~> dummyUserIdHeaders(uniqueId) ~>
- sealRoute(registerRoutes) ~> check {
- log.debug(s"POST /$ApiPrefix: " + status)
- status should equal(OK)
- }
+ sealRoute(registerRoutes) ~> check {
+ log.debug(s"POST /$ApiPrefix: " + status)
+ status should equal(OK)
+ }
}
}
@@ -241,9 +312,9 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
val incompleteProfile = Map("name" -> randomAlpha())
Post(s"/$ApiPrefix", incompleteProfile) ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(registerRoutes) ~> check {
- log.debug(s"POST /$ApiPrefix: " + status)
- status should equal(BadRequest)
- }
+ log.debug(s"POST /$ApiPrefix: " + status)
+ status should equal(BadRequest)
+ }
}
}
@@ -251,15 +322,15 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
"OK response is returned for valid user" in {
Get("/api/proxyGroup/test@test.test") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(userServiceRoutes) ~> check {
- status should equal(OK)
- }
+ status should equal(OK)
+ }
}
"NotFound response is returned for invalid user" in {
Get("/api/proxyGroup/test@not.found") ~>
dummyUserIdHeaders(uniqueId) ~> sealRoute(userServiceRoutes) ~> check {
- status should equal(NotFound)
- }
+ status should equal(NotFound)
+ }
}
}
}
@@ -272,14 +343,16 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
samServer
.when(request.withMethod("GET").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(NotFound.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(NotFound.intValue)
)
Post(s"/$ApiPrefix", fullProfile) ~> dummyUserIdHeaders(uniqueId) ~>
- sealRoute(registerRoutes) ~> check {
- log.debug(s"POST /$ApiPrefix: " + status)
- status should equal(OK)
- }
+ sealRoute(registerRoutes) ~> check {
+ log.debug(s"POST /$ApiPrefix: " + status)
+ status should equal(OK)
+ }
}
}
@@ -290,22 +363,25 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
samServer
.when(request.withMethod("GET").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(NotFound.intValue)
+ org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(NotFound.intValue)
)
samServer
.when(request.withMethod("POST").withPath(UserApiService.samRegisterUserPath))
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withHeaders(MockUtils.header)
.withStatusCode(InternalServerError.intValue)
.withBody(s"${Conflict.intValue} ${Conflict.reason}")
)
Post(s"/$ApiPrefix", fullProfile) ~> dummyUserIdHeaders(uniqueId) ~>
- sealRoute(registerRoutes) ~> check {
- log.debug(s"POST /$ApiPrefix: " + status)
- status should equal(OK)
- }
+ sealRoute(registerRoutes) ~> check {
+ log.debug(s"POST /$ApiPrefix: " + status)
+ status should equal(OK)
+ }
}
}
@@ -508,9 +584,11 @@ class UserApiServiceSpec extends BaseServiceSpec with SamMockserverUtils
samServer
.when(request.withMethod("GET").withPath(path))
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withBody(body)
- .withHeaders(MockUtils.header).withStatusCode(statusCode)
+ .withHeaders(MockUtils.header)
+ .withStatusCode(statusCode)
)
}
}
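As a hedged aside on the UserApiServiceSpec changes above: the mockserver stubs are now laid out with one builder call per line. A minimal, self-contained sketch of that request/response stubbing style (port, path, and header values are placeholders for illustration, not identifiers from this codebase) could look like:

import org.mockserver.integration.ClientAndServer.startClientAndServer
import org.mockserver.model.Header
import org.mockserver.model.HttpRequest.request
import org.mockserver.model.HttpResponse.response

object MockServerStubSketch extends App {
  // placeholder values for illustration only
  val profilePath = "/register/profile"
  val server = startClientAndServer(9999)

  server
    .when(
      request()
        .withMethod("GET")
        .withPath(profilePath)
    )
    .respond(
      response()
        .withHeaders(new Header("Content-Type", "application/json"))
        .withStatusCode(200)
        .withBody("""{"keyValuePairs": []}""")
    )

  server.stop()
}

Scalafmt wraps long method chains one call per line once they exceed the configured column limit, which is what produced the rewrapped stubs above.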
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceJobSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceJobSpec.scala
index c4a661c2c..6b79ed063 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceJobSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceJobSpec.scala
@@ -17,27 +17,49 @@ import org.scalatestplus.mockito.MockitoSugar
import java.util.UUID
import scala.concurrent.ExecutionContext
-class WorkspaceApiServiceJobSpec extends BaseServiceSpec with WorkspaceApiService with MockitoSugar with
- BeforeAndAfterEach {
+class WorkspaceApiServiceJobSpec
+ extends BaseServiceSpec
+ with WorkspaceApiService
+ with MockitoSugar
+ with BeforeAndAfterEach {
// mocks for Cwds and Rawls
private val mockitoCwdsDao = mock[CwdsDAO]
// setup for the WorkspaceApiService routes
override val executionContext: ExecutionContext = ExecutionContext.Implicits.global
- override val workspaceServiceConstructor: WithAccessToken => WorkspaceService = WorkspaceService.constructor(app.copy())
+ override val workspaceServiceConstructor: WithAccessToken => WorkspaceService =
+ WorkspaceService.constructor(app.copy())
override val permissionReportServiceConstructor: UserInfo => PermissionReportService = PermissionReportService
.constructor(app)
- override val entityServiceConstructor: ModelSchema => EntityService = EntityService.constructor(app.copy(cwdsDAO = mockitoCwdsDao))
+ override val entityServiceConstructor: ModelSchema => EntityService =
+ EntityService.constructor(app.copy(cwdsDAO = mockitoCwdsDao))
// dummy data for use in tests below
private val dummyUserId = "1234"
- private val workspace = WorkspaceDetails("namespace", "name", MockRawlsDAO.mockWorkspaceId, "buckety_bucket", Some
- ("wf-collection"), DateTime.now(), DateTime.now(), "my_workspace_creator", Some(Map()), //attributes
- isLocked = false, //locked
- Some(Set.empty), //authorizationDomain
- WorkspaceVersions.V2, GoogleProjectId("googleProject"), Some(GoogleProjectNumber("googleProjectNumber")), Some
- (RawlsBillingAccountName("billingAccount")), None, None, Option(DateTime.now()), None, None, WorkspaceState.Ready)
+ private val workspace = WorkspaceDetails(
+ "namespace",
+ "name",
+ MockRawlsDAO.mockWorkspaceId,
+ "buckety_bucket",
+ Some("wf-collection"),
+ DateTime.now(),
+ DateTime.now(),
+ "my_workspace_creator",
+ Some(Map()), // attributes
+ isLocked = false, // locked
+ Some(Set.empty), // authorizationDomain
+ WorkspaceVersions.V2,
+ GoogleProjectId("googleProject"),
+ Some(GoogleProjectNumber("googleProjectNumber")),
+ Some(RawlsBillingAccountName("billingAccount")),
+ None,
+ None,
+ Option(DateTime.now()),
+ None,
+ None,
+ WorkspaceState.Ready
+ )
private val importList = List(
CwdsListResponse(UUID.randomUUID().toString, "running", "filetype1", None),
CwdsListResponse(UUID.randomUUID().toString, "error", "filetype2", Some("my error message")),
@@ -45,9 +67,9 @@ class WorkspaceApiServiceJobSpec extends BaseServiceSpec with WorkspaceApiServic
)
// a few shortcuts for accessing the routes
- private final val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
- private final val pfbImportPath = workspacesRoot + "/%s/%s/importPFB".format(workspace.namespace, workspace.name)
- private final val importJobPath = workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
+ final private val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
+ final private val pfbImportPath = workspacesRoot + "/%s/%s/importPFB".format(workspace.namespace, workspace.name)
+ final private val importJobPath = workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
"WorkspaceService list-jobs API" - {
// test both the importPFB and importJob routes
@@ -60,13 +82,15 @@ class WorkspaceApiServiceJobSpec extends BaseServiceSpec with WorkspaceApiServic
clearInvocations(mockitoCwdsDao)
when(mockitoCwdsDao.listJobsV1(any[String], any[Boolean])(any[UserInfo])).thenReturn(importList)
// execute the route
- (Get(s"$pathUnderTest?running_only=$runningOnly") ~> dummyUserIdHeaders(dummyUserId) ~> seal
- (workspaceRoutes)) ~> check {
+ (Get(s"$pathUnderTest?running_only=$runningOnly") ~> dummyUserIdHeaders(dummyUserId) ~> seal(
+ workspaceRoutes
+ )) ~> check {
// route should return 200 OK
status should equal(OK)
// we should have invoked the CwdsDAO correctly
verify(mockitoCwdsDao, times(1)).listJobsV1(ArgumentMatchers.eq(workspace.workspaceId),
- ArgumentMatchers.eq(runningOnly))(any[UserInfo])
+ ArgumentMatchers.eq(runningOnly)
+ )(any[UserInfo])
}
}
}
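Likewise, a hedged aside on the list-jobs test above: it follows the standard Mockito stub-then-verify flow. A minimal self-contained sketch of that flow, using a hypothetical JobStore trait rather than the real CwdsDAO, might look like:

import org.mockito.ArgumentMatchers.{anyBoolean, anyString, eq => eqTo}
import org.mockito.Mockito.{mock, times, verify, when}

// hypothetical stand-in for the DAO being mocked; not a type from this codebase
trait JobStore {
  def listJobs(workspaceId: String, runningOnly: Boolean): List[String]
}

object MockitoVerifySketch extends App {
  val store = mock(classOf[JobStore])

  // stub: any workspace id and flag return a fixed job list
  when(store.listJobs(anyString(), anyBoolean())).thenReturn(List("job-1", "job-2"))

  // exercise the collaborator as the route under test would
  val jobs = store.listJobs("workspace-123", runningOnly = true)
  println(jobs)

  // verify exactly one call with the expected arguments
  verify(store, times(1)).listJobs(eqTo("workspace-123"), eqTo(true))
}

The real spec additionally passes an implicit UserInfo in a second parameter list, matched with any[UserInfo], but the stub/verify shape is the same.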
diff --git a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceSpec.scala b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceSpec.scala
index 6e311d2c3..446d2b8f4 100644
--- a/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceSpec.scala
+++ b/src/test/scala/org/broadinstitute/dsde/firecloud/webservice/WorkspaceApiServiceSpec.scala
@@ -7,7 +7,12 @@ import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route.{seal => sealRoute}
import org.broadinstitute.dsde.firecloud.dataaccess.LegacyFileTypes.{FILETYPE_PFB, FILETYPE_TDR}
-import org.broadinstitute.dsde.firecloud.dataaccess.{MockCwdsDAO, MockRawlsDAO, MockShareLogDAO, WorkspaceApiServiceSpecShareLogDAO}
+import org.broadinstitute.dsde.firecloud.dataaccess.{
+ MockCwdsDAO,
+ MockRawlsDAO,
+ MockShareLogDAO,
+ WorkspaceApiServiceSpecShareLogDAO
+}
import org.broadinstitute.dsde.firecloud.mock.MockUtils._
import org.broadinstitute.dsde.firecloud.mock.{MockTSVFormData, MockUtils}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
@@ -37,9 +42,9 @@ object WorkspaceApiServiceSpec {
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map(AttributeName("library", "published") -> AttributeBoolean(true))), //attributes
- false, //locked
- Some(Set.empty), //authorizationDomain
+ Some(Map(AttributeName("library", "published") -> AttributeBoolean(true))), // attributes
+ false, // locked
+ Some(Set.empty), // authorizationDomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -54,7 +59,11 @@ object WorkspaceApiServiceSpec {
}
-class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService with BeforeAndAfterEach with SprayJsonSupport {
+class WorkspaceApiServiceSpec
+ extends BaseServiceSpec
+ with WorkspaceApiService
+ with BeforeAndAfterEach
+ with SprayJsonSupport {
override val executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
@@ -67,9 +76,9 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
DateTime.now(),
DateTime.now(),
"my_workspace_creator",
- Some(Map()), //attributes
- false, //locked
- Some(Set.empty), //authorizationDomain
+ Some(Map()), // attributes
+ false, // locked
+ Some(Set.empty), // authorizationDomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -85,33 +94,45 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
val jobId = "testOp"
// Mock remote endpoints
- private final val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
- private final val workspacesPath = workspacesRoot + "/%s/%s".format(workspace.namespace, workspace.name)
- private final val methodconfigsPath = workspacesRoot + "/%s/%s/methodconfigs".format(workspace.namespace, workspace.name)
- private final val updateAttributesPath = workspacesRoot + "/%s/%s/updateAttributes".format(workspace.namespace, workspace.name)
- private final val setAttributesPath = workspacesRoot + "/%s/%s/setAttributes".format(workspace.namespace, workspace.name)
- private final val tsvAttributesImportPath = workspacesRoot + "/%s/%s/importAttributesTSV".format(workspace.namespace, workspace.name)
- private final val tsvAttributesExportPath = workspacesRoot + "/%s/%s/exportAttributesTSV".format(workspace.namespace, workspace.name)
- private final val batchUpsertPath = s"${workspacesRoot}/${workspace.namespace}/${workspace.name}/entities/batchUpsert"
- private final val aclPath = workspacesRoot + "/%s/%s/acl".format(workspace.namespace, workspace.name)
- private final val sendChangeNotificationPath = workspacesRoot + "/%s/%s/sendChangeNotification".format(workspace.namespace, workspace.name)
- private final val accessInstructionsPath = workspacesRoot + "/%s/%s/accessInstructions".format(workspace.namespace, workspace.name)
- private final val clonePath = workspacesRoot + "/%s/%s/clone".format(workspace.namespace, workspace.name)
- private final val lockPath = workspacesRoot + "/%s/%s/lock".format(workspace.namespace, workspace.name)
- private final val unlockPath = workspacesRoot + "/%s/%s/unlock".format(workspace.namespace, workspace.name)
- private final val bucketPath = workspacesRoot + "/%s/%s/checkBucketReadAccess".format(workspace.namespace, workspace.name)
- private final val tsvImportPath = workspacesRoot + "/%s/%s/importEntities".format(workspace.namespace, workspace.name)
- private final val tsvImportFlexiblePath = workspacesRoot + "/%s/%s/flexibleImportEntities".format(workspace.namespace, workspace.name)
- private final val pfbImportPath = workspacesRoot + "/%s/%s/importPFB".format(workspace.namespace, workspace.name)
- private final val importJobPath = workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
- private final val importJobStatusPath = workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
- private final val bucketUsagePath = s"$workspacesPath/bucketUsage"
- private final val usBucketStorageCostEstimatePath = workspacesRoot + "/%s/%s/storageCostEstimate".format("usBucketWorkspace", workspace.name)
- private final val europeWest1storageCostEstimatePath = workspacesRoot + "/%s/%s/storageCostEstimate".format("europeWest1BucketWorkspace", workspace.name)
- private final val tagAutocompletePath = s"$workspacesRoot/tags"
- private final val executionEngineVersionPath = "/version/executionEngine"
-
- private def catalogPath(ns:String=workspace.namespace, name:String=workspace.name) =
+ final private val workspacesRoot = FireCloudConfig.Rawls.authPrefix + FireCloudConfig.Rawls.workspacesPath
+ final private val workspacesPath = workspacesRoot + "/%s/%s".format(workspace.namespace, workspace.name)
+ final private val methodconfigsPath =
+ workspacesRoot + "/%s/%s/methodconfigs".format(workspace.namespace, workspace.name)
+ final private val updateAttributesPath =
+ workspacesRoot + "/%s/%s/updateAttributes".format(workspace.namespace, workspace.name)
+ final private val setAttributesPath =
+ workspacesRoot + "/%s/%s/setAttributes".format(workspace.namespace, workspace.name)
+ final private val tsvAttributesImportPath =
+ workspacesRoot + "/%s/%s/importAttributesTSV".format(workspace.namespace, workspace.name)
+ final private val tsvAttributesExportPath =
+ workspacesRoot + "/%s/%s/exportAttributesTSV".format(workspace.namespace, workspace.name)
+ final private val batchUpsertPath = s"${workspacesRoot}/${workspace.namespace}/${workspace.name}/entities/batchUpsert"
+ final private val aclPath = workspacesRoot + "/%s/%s/acl".format(workspace.namespace, workspace.name)
+ final private val sendChangeNotificationPath =
+ workspacesRoot + "/%s/%s/sendChangeNotification".format(workspace.namespace, workspace.name)
+ final private val accessInstructionsPath =
+ workspacesRoot + "/%s/%s/accessInstructions".format(workspace.namespace, workspace.name)
+ final private val clonePath = workspacesRoot + "/%s/%s/clone".format(workspace.namespace, workspace.name)
+ final private val lockPath = workspacesRoot + "/%s/%s/lock".format(workspace.namespace, workspace.name)
+ final private val unlockPath = workspacesRoot + "/%s/%s/unlock".format(workspace.namespace, workspace.name)
+ final private val bucketPath =
+ workspacesRoot + "/%s/%s/checkBucketReadAccess".format(workspace.namespace, workspace.name)
+ final private val tsvImportPath = workspacesRoot + "/%s/%s/importEntities".format(workspace.namespace, workspace.name)
+ final private val tsvImportFlexiblePath =
+ workspacesRoot + "/%s/%s/flexibleImportEntities".format(workspace.namespace, workspace.name)
+ final private val pfbImportPath = workspacesRoot + "/%s/%s/importPFB".format(workspace.namespace, workspace.name)
+ final private val importJobPath = workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
+ final private val importJobStatusPath =
+ workspacesRoot + "/%s/%s/importJob".format(workspace.namespace, workspace.name)
+ final private val bucketUsagePath = s"$workspacesPath/bucketUsage"
+ final private val usBucketStorageCostEstimatePath =
+ workspacesRoot + "/%s/%s/storageCostEstimate".format("usBucketWorkspace", workspace.name)
+ final private val europeWest1storageCostEstimatePath =
+ workspacesRoot + "/%s/%s/storageCostEstimate".format("europeWest1BucketWorkspace", workspace.name)
+ final private val tagAutocompletePath = s"$workspacesRoot/tags"
+ final private val executionEngineVersionPath = "/version/executionEngine"
+
+ private def catalogPath(ns: String = workspace.namespace, name: String = workspace.name) =
workspacesRoot + "/%s/%s/catalog".format(ns, name)
val localShareLogDao: MockShareLogDAO = new WorkspaceApiServiceSpecShareLogDAO
@@ -119,9 +140,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
// use a disabled cWDS for these tests; enabled cWDS has test coverage elsewhere
val mockCwdsDao: MockCwdsDAO = new MockCwdsDAO(enabled = false)
- val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService = WorkspaceService.constructor(app.copy(shareLogDAO = localShareLogDao))
- val permissionReportServiceConstructor: (UserInfo) => PermissionReportService = PermissionReportService.constructor(app)
- val entityServiceConstructor: (ModelSchema) => EntityService = EntityService.constructor(app.copy(cwdsDAO = mockCwdsDao))
+ val workspaceServiceConstructor: (WithAccessToken) => WorkspaceService =
+ WorkspaceService.constructor(app.copy(shareLogDAO = localShareLogDao))
+ val permissionReportServiceConstructor: (UserInfo) => PermissionReportService =
+ PermissionReportService.constructor(app)
+ val entityServiceConstructor: (ModelSchema) => EntityService =
+ EntityService.constructor(app.copy(cwdsDAO = mockCwdsDao))
val nihProtectedAuthDomain = ManagedGroupRef(RawlsGroupName("dbGapAuthorizedUsers"))
@@ -131,14 +155,14 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"mb",
- Some(Map()), //attrs
+ Some(Map()), // attrs
false,
- Some(Set(nihProtectedAuthDomain)), //authorizationDomain
+ Some(Set(nihProtectedAuthDomain)), // authorizationDomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -155,14 +179,14 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"mb",
- Some(Map()), //attrs
+ Some(Map()), // attrs
false,
- Some(Set(ManagedGroupRef(RawlsGroupName("secret_realm")))), //authorizationDomain
+ Some(Set(ManagedGroupRef(RawlsGroupName("secret_realm")))), // authorizationDomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -179,14 +203,14 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"attributes",
"att",
"id",
- "", //bucketname
+ "", // bucketname
Some("wf-collection"),
DateTime.now(),
DateTime.now(),
"mb",
- Some(Map()), //attrs
+ Some(Map()), // attrs
false,
- Some(Set.empty), //authorizationDomain
+ Some(Set.empty), // authorizationDomain
WorkspaceVersions.V2,
GoogleProjectId("googleProject"),
Some(GoogleProjectNumber("googleProjectNumber")),
@@ -199,9 +223,39 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
WorkspaceState.Ready
)
- val protectedRawlsWorkspaceResponse = WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare=Some(false), canCompute=Some(true), catalog=Some(false), protectedRawlsWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- val authDomainRawlsWorkspaceResponse = WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare=Some(false), canCompute=Some(true), catalog=Some(false), authDomainRawlsWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
- val nonAuthDomainRawlsWorkspaceResponse = WorkspaceResponse(Some(WorkspaceAccessLevels.Owner), canShare=Some(false), canCompute=Some(true), catalog=Some(false), nonAuthDomainRawlsWorkspace, Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)), Some(WorkspaceBucketOptions(false)), Some(Set.empty), None)
+ val protectedRawlsWorkspaceResponse = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ protectedRawlsWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ val authDomainRawlsWorkspaceResponse = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ authDomainRawlsWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
+ val nonAuthDomainRawlsWorkspaceResponse = WorkspaceResponse(
+ Some(WorkspaceAccessLevels.Owner),
+ canShare = Some(false),
+ canCompute = Some(true),
+ catalog = Some(false),
+ nonAuthDomainRawlsWorkspace,
+ Some(WorkspaceSubmissionStats(None, None, runningSubmissionsCount = 0)),
+ Some(WorkspaceBucketOptions(false)),
+ Some(Set.empty),
+ None
+ )
var rawlsServer: ClientAndServer = _
@@ -211,15 +265,24 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
* @param path request path
* @param status status for the response
*/
- def stubRawlsService(method: HttpMethod, path: String, status: StatusCode, body: Option[String] = None, query: Option[(String, String)] = None, requestBody: Option[String] = None): Unit = {
+ def stubRawlsService(method: HttpMethod,
+ path: String,
+ status: StatusCode,
+ body: Option[String] = None,
+ query: Option[(String, String)] = None,
+ requestBody: Option[String] = None
+ ): Unit = {
rawlsServer.reset()
- val request = org.mockserver.model.HttpRequest.request()
+ val request = org.mockserver.model.HttpRequest
+ .request()
.withMethod(method.name)
.withPath(path)
if (query.isDefined) request.withQueryStringParameter(query.get._1, query.get._2)
requestBody.foreach(request.withBody)
- val response = org.mockserver.model.HttpResponse.response()
- .withHeaders(MockUtils.header).withStatusCode(status.intValue)
+ val response = org.mockserver.model.HttpResponse
+ .response()
+ .withHeaders(MockUtils.header)
+ .withStatusCode(status.intValue)
if (body.isDefined) response.withBody(body.get)
rawlsServer
.when(request)
@@ -237,10 +300,35 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
* @param authDomain (optional) authorization domain for the new workspace
* @return pair of expected WorkspaceRequest and the Workspace that the stub will respond with
*/
- def stubRawlsCreateWorkspace(namespace: String, name: String, authDomain: Set[ManagedGroupRef] = Set.empty): (WorkspaceRequest, WorkspaceDetails) = {
+ def stubRawlsCreateWorkspace(namespace: String,
+ name: String,
+ authDomain: Set[ManagedGroupRef] = Set.empty
+ ): (WorkspaceRequest, WorkspaceDetails) = {
rawlsServer.reset()
val rawlsRequest = WorkspaceRequest(namespace, name, Map(), Option(authDomain))
- val rawlsResponse = WorkspaceDetails(namespace, name, "foo", "bar", Some("wf-collection"), DateTime.now(), DateTime.now(), "bob", Some(Map()), false, Some(authDomain), WorkspaceVersions.V2, GoogleProjectId("googleProject"), Some(GoogleProjectNumber("googleProjectNumber")), Some(RawlsBillingAccountName("billingAccount")), None, None, Option(DateTime.now()), None, None, WorkspaceState.Ready)
+ val rawlsResponse = WorkspaceDetails(
+ namespace,
+ name,
+ "foo",
+ "bar",
+ Some("wf-collection"),
+ DateTime.now(),
+ DateTime.now(),
+ "bob",
+ Some(Map()),
+ false,
+ Some(authDomain),
+ WorkspaceVersions.V2,
+ GoogleProjectId("googleProject"),
+ Some(GoogleProjectNumber("googleProjectNumber")),
+ Some(RawlsBillingAccountName("billingAccount")),
+ None,
+ None,
+ Option(DateTime.now()),
+ None,
+ None,
+ WorkspaceState.Ready
+ )
stubRawlsService(HttpMethods.POST, workspacesRoot, Created, Option(rawlsResponse.toJson.compactPrint))
(rawlsRequest, rawlsResponse)
}
@@ -257,12 +345,39 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
* @param attributes (optional) attributes expected to be given to rawls for the new cloned workspace
* @return pair of expected WorkspaceRequest and the Workspace that the stub will respond with
*/
- def stubRawlsCloneWorkspace(namespace: String, name: String, authDomain: Set[ManagedGroupRef] = Set.empty, attributes: Attributable.AttributeMap = Map()): (WorkspaceRequest, WorkspaceDetails) = {
+ def stubRawlsCloneWorkspace(namespace: String,
+ name: String,
+ authDomain: Set[ManagedGroupRef] = Set.empty,
+ attributes: Attributable.AttributeMap = Map()
+ ): (WorkspaceRequest, WorkspaceDetails) = {
rawlsServer.reset()
val published: (AttributeName, AttributeBoolean) = AttributeName("library", "published") -> AttributeBoolean(false)
val discoverable = AttributeName("library", "discoverableByGroups") -> AttributeValueEmptyList
- val rawlsRequest: WorkspaceRequest = WorkspaceRequest(namespace, name, attributes + published + discoverable, Option(authDomain))
- val rawlsResponse = WorkspaceDetails(namespace, name, "foo", "bar", Some("wf-collection"), DateTime.now(), DateTime.now(), "bob", Some(attributes + published + discoverable), false, Some(authDomain), WorkspaceVersions.V2, GoogleProjectId("googleProject"), Some(GoogleProjectNumber("googleProjectNumber")), Some(RawlsBillingAccountName("billingAccount")), None, None, Option(DateTime.now()), None, None, WorkspaceState.Ready)
+ val rawlsRequest: WorkspaceRequest =
+ WorkspaceRequest(namespace, name, attributes + published + discoverable, Option(authDomain))
+ val rawlsResponse = WorkspaceDetails(
+ namespace,
+ name,
+ "foo",
+ "bar",
+ Some("wf-collection"),
+ DateTime.now(),
+ DateTime.now(),
+ "bob",
+ Some(attributes + published + discoverable),
+ false,
+ Some(authDomain),
+ WorkspaceVersions.V2,
+ GoogleProjectId("googleProject"),
+ Some(GoogleProjectNumber("googleProjectNumber")),
+ Some(RawlsBillingAccountName("billingAccount")),
+ None,
+ None,
+ Option(DateTime.now()),
+ None,
+ None,
+ WorkspaceState.Ready
+ )
stubRawlsService(HttpMethods.POST, clonePath, Created, Option(rawlsResponse.toJson.compactPrint))
(rawlsRequest, rawlsResponse)
}
@@ -272,31 +387,28 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
rawlsServer
.when(request().withMethod(method.name).withPath(path))
.respond(
- org.mockserver.model.HttpResponse.response()
+ org.mockserver.model.HttpResponse
+ .response()
.withHeaders(MockUtils.header)
.withStatusCode(status.intValue)
.withBody(rawlsErrorReport(status).toJson.compactPrint)
)
}
- override def beforeAll(): Unit = {
+ override def beforeAll(): Unit =
rawlsServer = startClientAndServer(MockUtils.workspaceServerPort)
- }
- override def afterAll(): Unit = {
+ override def afterAll(): Unit =
rawlsServer.stop
- }
- override def beforeEach(): Unit = {
+ override def beforeEach(): Unit =
this.searchDao.reset()
- }
- override def afterEach(): Unit = {
+ override def afterEach(): Unit =
this.searchDao.reset()
- }
- //there are many values in the response that in reality cannot be predicted
- //we will only compare the key details: namespace, name, authdomain, attributes
+ // there are many values in the response that in reality cannot be predicted
+ // we will only compare the key details: namespace, name, authdomain, attributes
def assertWorkspaceDetailsEqual(expected: WorkspaceDetails, actual: WorkspaceDetails) = {
actual.namespace should equal(expected.namespace)
actual.name should equal(expected.name)
@@ -308,9 +420,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces path" - {
"MethodNotAllowed error is returned for HTTP PUT, PATCH, DELETE methods" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -319,9 +432,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment path" - {
"MethodNotAllowed error is returned for HTTP PUT, PATCH, POST methods" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.POST) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.POST) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -330,17 +444,28 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/methodconfigs path" - {
"MethodNotAllowed error is returned for HTTP PUT, PATCH, DELETE methods" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/methodconfigs") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/methodconfigs") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
}
- Seq("this","workspace") foreach { prefix =>
+ Seq("this", "workspace") foreach { prefix =>
s"Forbidden error is returned for HTTP POST with an output to $prefix.library:" in {
- val methodConfigs = MethodConfiguration("namespace", "name", Some("root"), None, Map.empty, Map("value" -> AttributeString(s"$prefix.library:param")), MethodRepoMethod("methodnamespace", "methodname", 1))
- Post(methodconfigsPath, methodConfigs) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ val methodConfigs = MethodConfiguration(
+ "namespace",
+ "name",
+ Some("root"),
+ None,
+ Map.empty,
+ Map("value" -> AttributeString(s"$prefix.library:param")),
+ MethodRepoMethod("methodnamespace", "methodname", 1)
+ )
+ Post(methodconfigsPath, methodConfigs) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(Forbidden)
}
}
@@ -349,9 +474,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/acl path" - {
"MethodNotAllowed error is returned for HTTP PUT, POST, DELETE methods" in {
- List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/acl") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/acl") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -360,9 +486,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/clone path" - {
"MethodNotAllowed error is returned for HTTP PUT, PATCH, GET, DELETE methods" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/clone") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/clone") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -371,9 +498,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/lock path" - {
"MethodNotAllowed error is returned for HTTP POST, PATCH, GET, DELETE methods" in {
- List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/lock") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/lock") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -382,9 +510,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/unlock path" - {
"MethodNotAllowed error is returned for HTTP POST, PATCH, GET, DELETE methods" in {
- List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/unlock") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/unlock") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -393,9 +522,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/checkBucketReadAccess path" - {
"MethodNotAllowed error is returned for HTTP POST, PATCH, PUT, DELETE methods" in {
- List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/checkBucketReadAccess") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/checkBucketReadAccess") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -404,9 +534,10 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/sendChangeNotification path" - {
"MethodNotAllowed error is returned for HTTP GET, PATCH, PUT, DELETE methods" in {
- List(HttpMethods.GET, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/sendChangeNotification") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ List(HttpMethods.GET, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/sendChangeNotification") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -415,11 +546,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/accessInstructions path" - {
"MethodNotAllowed error is returned for HTTP POST, PATCH, PUT, DELETE methods" in {
- List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)("/api/workspaces/namespace/name/accessInstructions") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)("/api/workspaces/namespace/name/accessInstructions") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -427,7 +559,9 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/segment/segment/bucketUsage path" - {
List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) foreach { method =>
s"MethodNotAllowed error is returned for $method" in {
- new RequestBuilder(method)("/api/workspaces/namespace/name/bucketUsage") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(method)("/api/workspaces/namespace/name/bucketUsage") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -437,7 +571,9 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the /workspaces/tags path" - {
List(HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE) foreach { method =>
s"MethodNotAllowed error is returned for $method" in {
- new RequestBuilder(method)("/api/workspaces/tags") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(method)("/api/workspaces/tags") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(MethodNotAllowed)
}
}
@@ -451,10 +587,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
List(HttpMethods.GET) foreach { method =>
s"OK status is returned for HTTP $method" in {
val dao = new MockRawlsDAO
- val rwr = dao.rawlsWorkspaceResponseWithAttributes.copy(canShare=Some(false))
- val lrwr = Seq.fill(2){rwr}
+ val rwr = dao.rawlsWorkspaceResponseWithAttributes.copy(canShare = Some(false))
+ val lrwr = Seq.fill(2)(rwr)
stubRawlsService(method, workspacesRoot, OK, Some(lrwr.toJson.compactPrint))
- new RequestBuilder(method)(workspacesRoot) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(method)(workspacesRoot) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
}
}
@@ -463,29 +601,43 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Passthrough tests on the GET /workspaces/%s/%s path" - {
s"OK status is returned for HTTP GET (workspace in authdomain)" in {
- stubRawlsService(HttpMethods.GET, workspacesPath, OK, Some(authDomainRawlsWorkspaceResponse.toJson.compactPrint))
+ stubRawlsService(HttpMethods.GET,
+ workspacesPath,
+ OK,
+ Some(authDomainRawlsWorkspaceResponse.toJson.compactPrint)
+ )
Get(workspacesPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(OK)
- //generally this is not how we want to treat the response
- //it should already be returned as JSON but for some strange reason it's being returned as text/plain
- //here we take the plain text and force it to be json so we can get the test to work
- assert(entityAs[String].parseJson.convertTo[UIWorkspaceResponse].workspace.get.authorizationDomain.get.nonEmpty)
+ // generally this is not how we want to treat the response
+ // it should already be returned as JSON but for some strange reason it's being returned as text/plain
+ // here we take the plain text and force it to be json so we can get the test to work
+ assert(
+ entityAs[String].parseJson.convertTo[UIWorkspaceResponse].workspace.get.authorizationDomain.get.nonEmpty
+ )
}
}
s"OK status is returned for HTTP GET (non-auth-domained workspace)" in {
- stubRawlsService(HttpMethods.GET, workspacesPath, OK, Some(nonAuthDomainRawlsWorkspaceResponse.toJson.compactPrint))
+ stubRawlsService(HttpMethods.GET,
+ workspacesPath,
+ OK,
+ Some(nonAuthDomainRawlsWorkspaceResponse.toJson.compactPrint)
+ )
Get(workspacesPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(OK)
- //generally this is not how we want to treat the response
- //it should already be returned as JSON but for some strange reason it's being returned as text/plain
- //here we take the plain text and force it to be json so we can get the test to work
- assert(entityAs[String].parseJson.convertTo[UIWorkspaceResponse].workspace.get.authorizationDomain.get.isEmpty)
+ // generally this is not how we want to treat the response
+ // it should already be returned as JSON but for some strange reason it's being returned as text/plain
+ // here we take the plain text and force it to be json so we can get the test to work
+ assert(
+ entityAs[String].parseJson.convertTo[UIWorkspaceResponse].workspace.get.authorizationDomain.get.isEmpty
+ )
}
}
s"Accepted status is returned for HTTP DELETE" in {
- new RequestBuilder(HttpMethods.DELETE)(workspacesPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(HttpMethods.DELETE)(workspacesPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(Accepted)
}
}
@@ -495,7 +647,9 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
List(HttpMethods.GET) foreach { method =>
s"OK status is returned for HTTP $method" in {
stubRawlsService(method, methodconfigsPath, OK)
- new RequestBuilder(method)(methodconfigsPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(method)(methodconfigsPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
}
}
@@ -507,8 +661,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
Seq("allRepos" -> "true", "allRepos" -> "false", "allRepos" -> "banana") foreach { query =>
stubRawlsService(HttpMethods.GET, methodconfigsPath, OK, None, Some(query))
- Get(Uri(methodconfigsPath).withQuery(Query(query))) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- rawlsServer.verify(request().withPath(methodconfigsPath).withMethod("GET").withQueryStringParameter(query._1, query._2))
+ Get(Uri(methodconfigsPath).withQuery(Query(query))) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ rawlsServer.verify(
+ request().withPath(methodconfigsPath).withMethod("GET").withQueryStringParameter(query._1, query._2)
+ )
status should equal(OK)
}
@@ -552,7 +710,6 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
}
-
"Passthrough tests on the /workspaces/%s/%s/unlock path" - {
"OK status is returned for PUT" in {
stubRawlsService(HttpMethods.PUT, unlockPath, OK)
@@ -562,7 +719,6 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
}
-
"Passthrough tests on the /workspaces/%s/%s/checkBucketReadAccess path" - {
"OK status is returned for GET" in {
stubRawlsService(HttpMethods.GET, bucketPath, OK)
@@ -582,21 +738,25 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
"Passthrough tests on the /version/executionEngine path" - {
- "OK status is returned for GET" in {
- stubRawlsService(HttpMethods.GET, executionEngineVersionPath, OK)
- Get(executionEngineVersionPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(OK)
- }
+ "OK status is returned for GET" in {
+ stubRawlsService(HttpMethods.GET, executionEngineVersionPath, OK)
+ Get(executionEngineVersionPath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ status should equal(OK)
}
}
+ }
"Passthrough tests on the workspaces/tags path" - {
"OK status is returned for GET" in {
val tagJsonString = """{ "tag": "tagtest", "count": 3 }"""
stubRawlsService(HttpMethods.GET, tagAutocompletePath, OK, Some(tagJsonString), Some("q", "tag"))
Get("/api/workspaces/tags", ("q", "tag"))
- new RequestBuilder(HttpMethods.GET)("/api/workspaces/tags?q=tag") ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- rawlsServer.verify(request().withPath(tagAutocompletePath).withMethod("GET").withQueryStringParameter("q", "tag"))
+ new RequestBuilder(HttpMethods.GET)("/api/workspaces/tags?q=tag") ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
+ rawlsServer.verify(
+ request().withPath(tagAutocompletePath).withMethod("GET").withQueryStringParameter("q", "tag")
+ )
status should equal(OK)
responseAs[String] should equal(tagJsonString)
}
@@ -609,26 +769,37 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
val (rawlsRequest, rawlsResponse) = stubRawlsCreateWorkspace("namespace", "name")
val orchestrationRequest = WorkspaceRequest("namespace", "name", Map())
- Post(workspacesRoot, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- rawlsServer.verify(request().withPath(workspacesRoot).withMethod("POST").withBody(rawlsRequest.toJson.compactPrint))
+ Post(workspacesRoot, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ rawlsServer.verify(
+ request().withPath(workspacesRoot).withMethod("POST").withBody(rawlsRequest.toJson.compactPrint)
+ )
status should equal(Created)
responseAs[WorkspaceDetails] should equal(rawlsResponse)
}
}
"POST on /workspaces with 'protected' workspace request sends NIH-realm WorkspaceRequest to Rawls and passes back the Rawls status and body" in {
- val (rawlsRequest, rawlsResponse) = stubRawlsCreateWorkspace("namespace", "name", authDomain = Set(nihProtectedAuthDomain))
+ val (rawlsRequest, rawlsResponse) =
+ stubRawlsCreateWorkspace("namespace", "name", authDomain = Set(nihProtectedAuthDomain))
val orchestrationRequest = WorkspaceRequest("namespace", "name", Map(), Option(Set(nihProtectedAuthDomain)))
- Post(workspacesRoot, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- rawlsServer.verify(request().withPath(workspacesRoot).withMethod("POST").withBody(rawlsRequest.toJson.compactPrint))
+ Post(workspacesRoot, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ rawlsServer.verify(
+ request().withPath(workspacesRoot).withMethod("POST").withBody(rawlsRequest.toJson.compactPrint)
+ )
status should equal(Created)
responseAs[WorkspaceDetails] should equal(rawlsResponse)
}
}
"OK status is returned from PATCH on /workspaces/%s/%s/acl" in {
- Patch(aclPath, List(WorkspaceACLUpdate("dummy@test.org", WorkspaceAccessLevels.NoAccess, Some(false)))) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ Patch(aclPath,
+ List(WorkspaceACLUpdate("dummy@test.org", WorkspaceAccessLevels.NoAccess, Some(false)))
+ ) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(OK)
}
}
@@ -646,7 +817,8 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"POST on /workspaces/.../.../clone for 'protected' workspace sends NIH-realm WorkspaceRequest to Rawls and passes back the Rawls status and body" in {
val (_, rawlsResponse) = stubRawlsCloneWorkspace("namespace", "name", authDomain = Set(nihProtectedAuthDomain))
- val orchestrationRequest: WorkspaceRequest = WorkspaceRequest("namespace", "name", Map(), Option(Set(nihProtectedAuthDomain)))
+ val orchestrationRequest: WorkspaceRequest =
+ WorkspaceRequest("namespace", "name", Map(), Option(Set(nihProtectedAuthDomain)))
Post(clonePath, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(Created)
assertWorkspaceDetailsEqual(rawlsResponse, responseAs[WorkspaceDetails])
@@ -654,11 +826,17 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
"When cloning a published workspace, the clone should not be published" in {
- val (_, rawlsResponse) = stubRawlsCloneWorkspace("namespace", "name",
- attributes = Map(AttributeName("library", "published") -> AttributeBoolean(false), AttributeName("library", "discoverableByGroups") -> AttributeValueEmptyList))
+ val (_, rawlsResponse) = stubRawlsCloneWorkspace(
+ "namespace",
+ "name",
+ attributes = Map(AttributeName("library", "published") -> AttributeBoolean(false),
+ AttributeName("library", "discoverableByGroups") -> AttributeValueEmptyList
+ )
+ )
val published = AttributeName("library", "published") -> AttributeBoolean(true)
- val discoverable = AttributeName("library", "discoverableByGroups") -> AttributeValueList(Seq(AttributeString("all_broad_users")))
+ val discoverable =
+ AttributeName("library", "discoverableByGroups") -> AttributeValueList(Seq(AttributeString("all_broad_users")))
val orchestrationRequest = WorkspaceRequest("namespace", "name", Map(published, discoverable))
Post(clonePath, orchestrationRequest) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(Created)
@@ -669,35 +847,48 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Catalog permission tests on /workspaces/.../.../catalog" - {
"when calling PATCH" - {
"should be Forbidden as reader" in {
- val content = HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
- new RequestBuilder(HttpMethods.PATCH)(catalogPath("reader"), content) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ val content =
+ HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
+ new RequestBuilder(HttpMethods.PATCH)(catalogPath("reader"), content) ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(Forbidden)
}
}
"should be Forbidden as writer" in {
- val content = HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
- new RequestBuilder(HttpMethods.PATCH)(catalogPath("unpublishedwriter"), content) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ val content =
+ HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
+ new RequestBuilder(HttpMethods.PATCH)(catalogPath("unpublishedwriter"), content) ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(Forbidden)
}
}
"should be OK as owner" in {
- val content = HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
- new RequestBuilder(HttpMethods.PATCH)(catalogPath(), content) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ val content =
+ HttpEntity(ContentTypes.`application/json`, "[ {\"email\": \"user@gmail.com\",\"catalog\": true} ]")
+ new RequestBuilder(HttpMethods.PATCH)(catalogPath(), content) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
- val expected = WorkspaceCatalogUpdateResponseList(Seq(WorkspaceCatalogResponse("userid", true)),Seq.empty)
- responseAs[WorkspaceCatalogUpdateResponseList] should equal (expected)
+ val expected = WorkspaceCatalogUpdateResponseList(Seq(WorkspaceCatalogResponse("userid", true)), Seq.empty)
+ responseAs[WorkspaceCatalogUpdateResponseList] should equal(expected)
}
}
}
"when calling GET" - {
"should be OK as reader" in {
- new RequestBuilder(HttpMethods.GET)(catalogPath("reader")) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(HttpMethods.GET)(catalogPath("reader")) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
status should equal(OK)
}
}
"should be OK as writer" in {
- new RequestBuilder(HttpMethods.GET)(catalogPath("unpublishedwriter")) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ new RequestBuilder(HttpMethods.GET)(catalogPath("unpublishedwriter")) ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
status should equal(OK)
}
}
@@ -708,11 +899,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"when calling any method other than POST on workspaces/*/*/importEntities path" - {
"should receive a MethodNotAllowed error" in {
- List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(tsvImportPath, MockTSVFormData.membershipValid) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.PATCH, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(tsvImportPath, MockTSVFormData.membershipValid) ~> dummyUserIdHeaders(
+ dummyUserId
+ ) ~> sealRoute(workspaceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -907,7 +1099,8 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(BadRequest)
- errorReportCheck("FireCloud", BadRequest) }
+ errorReportCheck("FireCloud", BadRequest)
+ }
}
"should 200 OK if the entity type is non-FC model when calling the flexible import" in {
@@ -1016,9 +1209,9 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
(Post(pfbImportPath, PFBImportRequest("https://bad.request.avro"))
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
- status should equal(BadRequest)
- responseAs[String] should include ("Bad request as reported by cwds")
- }
+ status should equal(BadRequest)
+ responseAs[String] should include("Bad request as reported by cwds")
+ }
}
"should bubble up 403 from cwds" in {
@@ -1026,7 +1219,7 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(Forbidden)
- responseAs[String] should include ("Missing Authorization: Bearer token in header")
+ responseAs[String] should include("Missing Authorization: Bearer token in header")
}
}
"should propagate any other errors from cWDS" in {
@@ -1035,7 +1228,7 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(UnavailableForLegalReasons)
- responseAs[String] should include ("cwds message")
+ responseAs[String] should include("cwds message")
}
}
@@ -1043,18 +1236,20 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
val pfbPath = "https://good.avro"
- val orchExpectedPayload = AsyncImportResponse(url = pfbPath, jobId = "MockCwdsDAO will generate a random UUID",
- workspace = WorkspaceName(workspace.namespace, workspace.name))
+ val orchExpectedPayload = AsyncImportResponse(url = pfbPath,
+ jobId = "MockCwdsDAO will generate a random UUID",
+ workspace = WorkspaceName(workspace.namespace, workspace.name)
+ )
(Post(pfbImportPath, PFBImportRequest("https://good.avro"))
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
- status should equal(Accepted)
- val jobResponse = responseAs[AsyncImportResponse]
- jobResponse.url should be (orchExpectedPayload.url)
- jobResponse.workspace should be (orchExpectedPayload.workspace)
- jobResponse.jobId should not be empty
- }
+ status should equal(Accepted)
+ val jobResponse = responseAs[AsyncImportResponse]
+ jobResponse.url should be(orchExpectedPayload.url)
+ jobResponse.workspace should be(orchExpectedPayload.workspace)
+ jobResponse.jobId should not be empty
+ }
}
}
@@ -1062,7 +1257,6 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"WorkspaceService POST importJob Tests" - {
List(FILETYPE_PFB, FILETYPE_TDR) foreach { filetype =>
-
s"for filetype $filetype" - {
"should bubble up 400 from cwds" in {
@@ -1070,7 +1264,7 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(BadRequest)
- responseAs[String] should include ("Bad request as reported by cwds")
+ responseAs[String] should include("Bad request as reported by cwds")
}
}
@@ -1079,7 +1273,7 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(Forbidden)
- responseAs[String] should include ("Missing Authorization: Bearer token in header")
+ responseAs[String] should include("Missing Authorization: Bearer token in header")
}
}
"should propagate any other errors from cWDS" in {
@@ -1088,7 +1282,7 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(UnavailableForLegalReasons)
- responseAs[String] should include ("cwds message")
+ responseAs[String] should include("cwds message")
}
}
@@ -1097,17 +1291,18 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
val pfbPath = "https://good.avro"
val orchExpectedPayload = AsyncImportResponse(url = pfbPath,
- jobId = "MockCwdsDAO will generate a random UUID",
- workspace = WorkspaceName(workspace.namespace, workspace.name))
+ jobId = "MockCwdsDAO will generate a random UUID",
+ workspace = WorkspaceName(workspace.namespace, workspace.name)
+ )
(Post(importJobPath, AsyncImportRequest("https://good.avro", filetype))
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(Accepted)
val jobResponse = responseAs[AsyncImportResponse]
- jobResponse.url should be (orchExpectedPayload.url)
- jobResponse.workspace should be (orchExpectedPayload.workspace)
- jobResponse.jobId should not be empty
+ jobResponse.url should be(orchExpectedPayload.url)
+ jobResponse.workspace should be(orchExpectedPayload.workspace)
+ jobResponse.jobId should not be empty
}
}
}
@@ -1118,11 +1313,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Workspace updateAttributes tests" - {
"when calling any method other than PATCH on workspaces/*/*/updateAttributes path" - {
"should receive a MethodNotAllowed error" in {
- List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(updateAttributesPath, HttpEntity(MediaTypes.`application/json`, "{}")) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(updateAttributesPath,
+ HttpEntity(MediaTypes.`application/json`, "{}")
+ ) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -1137,14 +1333,19 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
"should 200 OK if the payload is ok" in {
- (Patch(updateAttributesPath,
- HttpEntity(MediaTypes.`application/json`, """[
- | {
- | "op": "AddUpdateAttribute",
- | "attributeName": "library:dataCategory",
- | "addUpdateAttribute": "test-attribute-value"
- | }
- |]""".stripMargin))
+ (Patch(
+ updateAttributesPath,
+ HttpEntity(
+ MediaTypes.`application/json`,
+ """[
+ | {
+ | "op": "AddUpdateAttribute",
+ | "attributeName": "library:dataCategory",
+ | "addUpdateAttribute": "test-attribute-value"
+ | }
+ |]""".stripMargin
+ )
+ )
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(OK)
@@ -1154,18 +1355,27 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"should republish if the document is already published" in {
- (Patch(workspacesRoot + "/%s/%s/updateAttributes".format(WorkspaceApiServiceSpec.publishedWorkspace.namespace, WorkspaceApiServiceSpec.publishedWorkspace.name),
- HttpEntity(MediaTypes.`application/json`, """[
- | {
- | "op": "AddUpdateAttribute",
- | "attributeName": "library:dataCategory",
- | "addUpdateAttribute": "test-attribute-value"
- | }
- |]""".stripMargin))
+ (Patch(
+ workspacesRoot + "/%s/%s/updateAttributes".format(WorkspaceApiServiceSpec.publishedWorkspace.namespace,
+ WorkspaceApiServiceSpec.publishedWorkspace.name
+ ),
+ HttpEntity(
+ MediaTypes.`application/json`,
+ """[
+ | {
+ | "op": "AddUpdateAttribute",
+ | "attributeName": "library:dataCategory",
+ | "addUpdateAttribute": "test-attribute-value"
+ | }
+ |]""".stripMargin
+ )
+ )
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(OK)
- assert(this.searchDao.indexDocumentInvoked.get(), "Should have republished this published WS when changing attributes")
+ assert(this.searchDao.indexDocumentInvoked.get(),
+ "Should have republished this published WS when changing attributes"
+ )
}
}
@@ -1175,11 +1385,12 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Workspace setAttributes tests" - {
"when calling any method other than PATCH on workspaces/*/*/setAttributes path" - {
"should receive a MethodNotAllowed error" in {
- List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.GET, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(setAttributesPath, HttpEntity(MediaTypes.`application/json`, "{}")) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should equal(MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.GET, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(setAttributesPath,
+ HttpEntity(MediaTypes.`application/json`, "{}")
+ ) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
+ status should equal(MethodNotAllowed)
+ }
}
}
}
@@ -1194,10 +1405,15 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
}
"should 200 OK if the payload is ok" in {
- (Patch(setAttributesPath,
- HttpEntity(MediaTypes.`application/json`, """{"description": "something",
- | "array": [1, 2, 3]
- | }""".stripMargin))
+ (Patch(
+ setAttributesPath,
+ HttpEntity(
+ MediaTypes.`application/json`,
+ """{"description": "something",
+ | "array": [1, 2, 3]
+ | }""".stripMargin
+ )
+ )
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(OK)
@@ -1207,14 +1423,23 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"should republish if the document is already published" in {
- (Patch(workspacesRoot + "/%s/%s/setAttributes".format(WorkspaceApiServiceSpec.publishedWorkspace.namespace, WorkspaceApiServiceSpec.publishedWorkspace.name),
- HttpEntity(MediaTypes.`application/json`, """{"description": "something",
- | "array": [1, 2, 3]
- | }""".stripMargin))
+ (Patch(
+ workspacesRoot + "/%s/%s/setAttributes".format(WorkspaceApiServiceSpec.publishedWorkspace.namespace,
+ WorkspaceApiServiceSpec.publishedWorkspace.name
+ ),
+ HttpEntity(
+ MediaTypes.`application/json`,
+ """{"description": "something",
+ | "array": [1, 2, 3]
+ | }""".stripMargin
+ )
+ )
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes)) ~> check {
status should equal(OK)
- assert(this.searchDao.indexDocumentInvoked.get(), "Should have republished this published WS when changing attributes")
+ assert(this.searchDao.indexDocumentInvoked.get(),
+ "Should have republished this published WS when changing attributes"
+ )
}
}
@@ -1225,48 +1450,48 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
(Post(tsvAttributesImportPath, MockTSVFormData.addNewWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(OK)
- })
+ status should equal(OK)
+ })
}
"should 400 Bad Request if first row does not start with \"workspace\"" in {
(Post(tsvAttributesImportPath, MockTSVFormData.wrongHeaderWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(BadRequest)
- })
+ status should equal(BadRequest)
+ })
}
"should 400 Bad Request if there are more names than values" in {
(Post(tsvAttributesImportPath, MockTSVFormData.tooManyNamesWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(BadRequest)
- })
+ status should equal(BadRequest)
+ })
}
"should 400 Bad Request if there are more values than names" in {
(Post(tsvAttributesImportPath, MockTSVFormData.tooManyValuesWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(BadRequest)
- })
+ status should equal(BadRequest)
+ })
}
"should 400 Bad Request if there are more than 2 rows" in {
(Post(tsvAttributesImportPath, MockTSVFormData.tooManyRowsWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(BadRequest)
- })
+ status should equal(BadRequest)
+ })
}
"should 400 Bad Request if there are fewer than 2 rows" in {
(Post(tsvAttributesImportPath, MockTSVFormData.tooFewRowsWorkspaceAttributes)
~> dummyUserIdHeaders(dummyUserId)
~> sealRoute(workspaceRoutes) ~> check {
- status should equal(BadRequest)
- })
+ status should equal(BadRequest)
+ })
}
}
@@ -1275,31 +1500,36 @@ class WorkspaceApiServiceSpec extends BaseServiceSpec with WorkspaceApiService w
"Workspace storage cost estimate tests" - {
"when calling any method other than GET on workspaces/*/*/storageCostEstimate" - {
"should return 405 Method Not Allowed for anything other than GET" in {
- List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.PATCH, HttpMethods.DELETE) map {
- method =>
- new RequestBuilder(method)(usBucketStorageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should be (MethodNotAllowed)
- }
+ List(HttpMethods.PUT, HttpMethods.POST, HttpMethods.PATCH, HttpMethods.DELETE) map { method =>
+ new RequestBuilder(method)(usBucketStorageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ status should be(MethodNotAllowed)
+ }
}
}
}
"when calling GET on workspaces/*/*/storageCostEstimate" - {
"should return 200 with result for us region" in {
- Get(usBucketStorageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should be (OK)
+ Get(usBucketStorageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ status should be(OK)
// 256000000000 / (1024 * 1024 * 1024) *0.01
- responseAs[WorkspaceStorageCostEstimate].estimate should be ("$2.38")
+ responseAs[WorkspaceStorageCostEstimate].estimate should be("$2.38")
}
}
}
"when calling GET on workspaces/*/*/storageCostEstimate" - {
"should return 200 with result for different europe east 1 region." in {
- Get(europeWest1storageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(workspaceRoutes) ~> check {
- status should be (OK)
+ Get(europeWest1storageCostEstimatePath) ~> dummyUserIdHeaders(dummyUserId) ~> sealRoute(
+ workspaceRoutes
+ ) ~> check {
+ status should be(OK)
// 256000000000 / (1024 * 1024 * 1024) *0.02
- responseAs[WorkspaceStorageCostEstimate].estimate should be ("$4.77")
+ responseAs[WorkspaceStorageCostEstimate].estimate should be("$4.77")
}
}
}