Upgrade to scala 2.13 #7327

Merged on Oct 4, 2023 (26 commits)

Changes from all commits

1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -15,6 +15,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Added social media link previews for links to datasets and annotations (only if they are public or if the links contain sharing tokens). [#7331](https://github.com/scalableminds/webknossos/pull/7331)

### Changed
- Updated backend code to Scala 2.13, with upgraded dependencies for improved performance. [#7327](https://github.com/scalableminds/webknossos/pull/7327)

### Fixed
- Fixed that segment statistics were requested in the wrong resolution and without properly considering the dataset scale. [#7355](https://github.com/scalableminds/webknossos/pull/7355)
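The build definition itself is not shown in this diff. As a rough sketch of what the core of such an upgrade typically looks like in sbt (the exact Scala patch version and compiler flags below are assumptions for illustration, not taken from this PR):

```scala
// build.sbt (illustrative only, not the actual webknossos build definition)
ThisBuild / scalaVersion := "2.13.12" // assumed patch version

// Commonly enabled alongside such an upgrade to surface the new 2.13 deprecation warnings
ThisBuild / scalacOptions ++= Seq("-deprecation", "-feature")
```
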
10 changes: 6 additions & 4 deletions app/controllers/AnnotationIOController.scala
@@ -6,7 +6,7 @@ import akka.actor.ActorSystem
import akka.stream.Materializer
import com.mohiva.play.silhouette.api.Silhouette
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
import com.scalableminds.util.io.{NamedEnumeratorStream, ZipIO}
import com.scalableminds.util.io.ZipIO
import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils}
import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings}
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings}
@@ -383,7 +383,8 @@ Expects:
tracingStoreClient.getSkeletonTracing(_, skeletonVersion))
user <- userService.findOneCached(annotation._user)(GlobalAccessContext)
taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne)
nmlStream = nmlWriter.toNmlStream(fetchedAnnotationLayers,
nmlStream = nmlWriter.toNmlStream("temp",
fetchedAnnotationLayers,
Some(annotation),
dataset.scale,
None,
@@ -394,7 +395,7 @@ Expects:
taskOpt)
nmlTemporaryFile = temporaryFileCreator.create()
temporaryFileStream = new BufferedOutputStream(new FileOutputStream(nmlTemporaryFile))
_ <- NamedEnumeratorStream("", nmlStream).writeTo(temporaryFileStream)
_ <- nmlStream.writeTo(temporaryFileStream)
_ = temporaryFileStream.close()
} yield nmlTemporaryFile

@@ -415,6 +416,7 @@ Expects:
user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed"
taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne)
nmlStream = nmlWriter.toNmlStream(
name,
fetchedSkeletonLayers ::: fetchedVolumeLayers,
Some(annotation),
dataset.scale,
@@ -428,7 +430,7 @@
)
temporaryFile = temporaryFileCreator.create()
zipper = ZipIO.startZip(new BufferedOutputStream(new FileOutputStream(new File(temporaryFile.path.toString))))
_ <- zipper.addFileFromEnumerator(name + ".nml", nmlStream) ?~> "annotation.download.zipNml.failed"
_ <- zipper.addFileFromNamedStream(nmlStream, suffix = ".nml") ?~> "annotation.download.zipNml.failed"
_ = fetchedVolumeLayers.zipWithIndex.map {
case (volumeLayer, index) =>
volumeLayer.volumeDataOpt.foreach { volumeData =>
4 changes: 2 additions & 2 deletions app/controllers/Application.scala
@@ -47,8 +47,8 @@ class Application @Inject()(multiUserDAO: MultiUserDAO,
addRemoteOriginHeaders(
Ok(
Json.obj(
"webknossos" -> webknossos.BuildInfo.toMap.mapValues(_.toString),
"webknossos-wrap" -> webknossoswrap.BuildInfo.toMap.mapValues(_.toString),
"webknossos" -> Json.toJson(webknossos.BuildInfo.toMap.view.mapValues(_.toString).toMap),
"webknossos-wrap" -> Json.toJson(webknossoswrap.BuildInfo.toMap.view.mapValues(_.toString).toMap),
"schemaVersion" -> schemaVersion.toOption,
"localDataStoreEnabled" -> storeModules.localDataStoreEnabled,
"localTracingStoreEnabled" -> storeModules.localTracingStoreEnabled
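The Application.scala change reflects one of the most common 2.13 migration points: `Map#mapValues` is deprecated and now returns a lazy `MapView` rather than a strict `Map`, and play-json has no `Writes` for views, so the result is materialized with `.toMap` and serialized via `Json.toJson`. A minimal sketch of the pattern, with stand-in values rather than the real `BuildInfo` map:

```scala
import play.api.libs.json.{JsValue, Json}

val buildInfo: Map[String, Any] = Map("version" -> 23, "commit" -> "abc123") // stand-in values

// Scala 2.12 style (deprecated in 2.13; the result is a MapView, not a Map):
//   val asStrings = buildInfo.mapValues(_.toString)

// Scala 2.13 style: go through .view, then materialize the result explicitly.
val asStrings: Map[String, String] = buildInfo.view.mapValues(_.toString).toMap

val json: JsValue = Json.toJson(asStrings) // Writes[Map[String, String]] is available out of the box
```
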
354 changes: 184 additions & 170 deletions app/controllers/AuthenticationController.scala

Large diffs are not rendered by default.

22 changes: 11 additions & 11 deletions app/controllers/DataStoreController.scala
@@ -25,19 +25,19 @@ class DataStoreController @Inject()(dataStoreDAO: DataStoreDAO,
with FoxImplicits {

private val dataStoreReads: Reads[DataStore] =
((__ \ 'name).read[String] and
(__ \ 'url).read[String] and
(__ \ 'publicUrl).read[String] and
(__ \ 'key).read[String] and
(__ \ 'isScratch).readNullable[Boolean] and
(__ \ 'allowsUpload).readNullable[Boolean])(DataStore.fromForm _)
((__ \ "name").read[String] and
(__ \ "url").read[String] and
(__ \ "publicUrl").read[String] and
(__ \ "key").read[String] and
(__ \ "isScratch").readNullable[Boolean] and
(__ \ "allowsUpload").readNullable[Boolean])(DataStore.fromForm _)

private val dataStorePublicReads: Reads[DataStore] =
((__ \ 'name).read[String] and
(__ \ 'url).read[String] and
(__ \ 'publicUrl).read[String] and
(__ \ 'isScratch).readNullable[Boolean] and
(__ \ 'allowsUpload).readNullable[Boolean])(DataStore.fromUpdateForm _)
((__ \ "name").read[String] and
(__ \ "url").read[String] and
(__ \ "publicUrl").read[String] and
(__ \ "isScratch").readNullable[Boolean] and
(__ \ "allowsUpload").readNullable[Boolean])(DataStore.fromUpdateForm _)
@ApiOperation(value = "List all available datastores", nickname = "datastoreList")
@ApiResponses(
Array(new ApiResponse(code = 200, message = "JSON list of objects containing datastore information"),
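This controller (and several below) replaces symbol-literal JSON paths such as `__ \ 'name` with string keys. Single-quoted symbol literals are deprecated in Scala 2.13 and gone in Scala 3, so string-based paths are the forward-compatible spelling. A self-contained sketch, using a simplified stand-in for the real form case classes:

```scala
import play.api.libs.json._
import play.api.libs.functional.syntax._

case class DataStoreForm(name: String, url: String) // simplified stand-in, not the real model

// Scala 2.12 style (symbol literals, deprecated in 2.13):
//   ((__ \ 'name).read[String] and (__ \ 'url).read[String])(DataStoreForm.apply _)

// Scala 2.13 style (string keys):
val dataStoreFormReads: Reads[DataStoreForm] =
  ((__ \ "name").read[String] and
    (__ \ "url").read[String])(DataStoreForm.apply _)
```
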
12 changes: 6 additions & 6 deletions app/controllers/DatasetController.scala
@@ -78,12 +78,12 @@ class DatasetController @Inject()(userService: UserService,
extends Controller {

private val datasetPublicReads =
((__ \ 'description).readNullable[String] and
(__ \ 'displayName).readNullable[String] and
(__ \ 'sortingKey).readNullable[Instant] and
(__ \ 'isPublic).read[Boolean] and
(__ \ 'tags).read[List[String]] and
(__ \ 'folderId).readNullable[ObjectId]).tupled
((__ \ "description").readNullable[String] and
(__ \ "displayName").readNullable[String] and
(__ \ "sortingKey").readNullable[Instant] and
(__ \ "isPublic").read[Boolean] and
(__ \ "tags").read[List[String]] and
(__ \ "folderId").readNullable[ObjectId]).tupled

@ApiOperation(hidden = true, value = "")
def removeFromThumbnailCache(organizationName: String, dataSetName: String): Action[AnyContent] =
2 changes: 1 addition & 1 deletion app/controllers/JobsController.scala
@@ -291,7 +291,7 @@ class JobsController @Inject()(jobDAO: JobDAO,
}
}

def export(jobId: String): Action[AnyContent] =
def redirectToExport(jobId: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
for {
jobIdValidated <- ObjectId.fromString(jobId)
4 changes: 2 additions & 2 deletions app/controllers/OrganizationController.scala
@@ -189,8 +189,8 @@ class OrganizationController @Inject()(
}

private val organizationUpdateReads =
((__ \ 'displayName).read[String] and
(__ \ 'newUserMailingList).read[String]).tupled
((__ \ "displayName").read[String] and
(__ \ "newUserMailingList").read[String]).tupled

def sendExtendPricingPlanEmail(): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
for {
6 changes: 3 additions & 3 deletions app/controllers/ScriptController.scala
@@ -23,9 +23,9 @@ class ScriptController @Inject()(scriptDAO: ScriptDAO,
with FoxImplicits {

private val scriptPublicReads =
((__ \ 'name).read[String](minLength[String](2) or maxLength[String](50)) and
(__ \ 'gist).read[String] and
(__ \ 'owner).read[ObjectId])(Script.fromForm _)
((__ \ "name").read[String](minLength[String](2) or maxLength[String](50)) and
(__ \ "gist").read[String] and
(__ \ "owner").read[ObjectId])(Script.fromForm _)

def create: Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request =>
withJsonBodyUsing(scriptPublicReads) { script =>
22 changes: 12 additions & 10 deletions app/controllers/SitemapController.scala
@@ -1,23 +1,25 @@
package controllers

import akka.stream.scaladsl.Source
import com.google.inject.Inject
import com.mohiva.play.silhouette.api.Silhouette
import oxalis.security.WkEnv
import play.api.libs.iteratee.streams.IterateeStreams
import play.api.mvc.{Action, AnyContent}
import utils.SitemapWriter

class SitemapController @Inject()(sitemapWriter: SitemapWriter, sil: Silhouette[WkEnv]) extends Controller {
import scala.concurrent.ExecutionContext

// Only called explicitly via RequestHandler
def getSitemap(prefix: String): Action[AnyContent] = sil.UserAwareAction {
val downloadStream = sitemapWriter.toSitemapStream(prefix)
class SitemapController @Inject()(sitemapWriter: SitemapWriter, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext)
extends Controller {

Ok.chunked(Source.fromPublisher(IterateeStreams.enumeratorToPublisher(downloadStream)))
.as(xmlMimeType)
.withHeaders(CONTENT_DISPOSITION ->
"""sitemap.xml""")
// Only called explicitly via RequestHandler
def getSitemap(prefix: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request =>
for {
sitemap <- sitemapWriter.getSitemap(prefix)
} yield
Ok(sitemap)
.as(xmlMimeType)
.withHeaders(CONTENT_DISPOSITION ->
"""sitemap.xml""")
}

}
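The SitemapController rewrite drops `play.api.libs.iteratee.Enumerator`-based chunked streaming, since the iteratees library is not published for Scala 2.13, and instead builds the sitemap in memory inside an async action. If chunked streaming were still needed, Akka Streams (already in use in this codebase) would be the 2.13-era replacement; a hedged sketch, with an invented helper and fragment list:

```scala
import akka.stream.scaladsl.Source
import akka.util.ByteString
import play.api.mvc.Result
import play.api.mvc.Results.Ok

// Stream pre-rendered XML fragments as a chunked response without play-iteratees.
// `fragments` is an invented stand-in for whatever produces the sitemap pieces.
def chunkedSitemap(fragments: List[String]): Result =
  Ok.chunked(Source(fragments).map(ByteString(_))).as("application/xml")
```
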
2 changes: 1 addition & 1 deletion app/controllers/TaskController.scala
@@ -69,7 +69,7 @@ class TaskController @Inject()(taskCreationService: TaskCreationService,
taskParameters)
volumeBaseOpts: List[Option[(VolumeTracing, Option[File])]] <- taskCreationService
.createTaskVolumeTracingBases(taskParameters, request.identity._organization)
paramsWithTracings = (taskParameters, skeletonBaseOpts, volumeBaseOpts).zipped.map {
paramsWithTracings = taskParameters.lazyZip(skeletonBaseOpts).lazyZip(volumeBaseOpts).map {
case (params, skeletonOpt, volumeOpt) => Full((params, skeletonOpt, volumeOpt))
}
result <- taskCreationService.createTasks(paramsWithTracings, request.identity)
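The TaskController change swaps `(as, bs, cs).zipped` for `lazyZip`: the `Tuple3#zipped` syntax is deprecated in Scala 2.13 and `lazyZip` is its replacement. A minimal illustration with invented lists:

```scala
val names   = List("task-a", "task-b")
val widths  = List(10, 20)
val enabled = List(true, false)

// Scala 2.12 style (deprecated in 2.13):
//   (names, widths, enabled).zipped.map { case (n, w, e) => s"$n: width=$w, enabled=$e" }

// Scala 2.13 style:
val rows: List[String] = names.lazyZip(widths).lazyZip(enabled).map {
  case (n, w, e) => s"$n: width=$w, enabled=$e"
}
// rows == List("task-a: width=10, enabled=true", "task-b: width=20, enabled=false")
```
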
12 changes: 6 additions & 6 deletions app/controllers/TaskTypeController.scala
@@ -27,12 +27,12 @@ class TaskTypeController @Inject()(taskTypeDAO: TaskTypeDAO,
with FoxImplicits {

private val taskTypePublicReads =
((__ \ 'summary).read[String](minLength[String](2) or maxLength[String](50)) and
(__ \ 'description).read[String] and
(__ \ 'teamId).read[ObjectId] and
(__ \ 'settings).read[AnnotationSettings] and
(__ \ 'recommendedConfiguration).readNullable[JsValue] and
(__ \ 'tracingType).read[TracingType.Value])(taskTypeService.fromForm _)
((__ \ "summary").read[String](minLength[String](2) or maxLength[String](50)) and
(__ \ "description").read[String] and
(__ \ "teamId").read[ObjectId] and
(__ \ "settings").read[AnnotationSettings] and
(__ \ "recommendedConfiguration").readNullable[JsValue] and
(__ \ "tracingType").read[TracingType.Value])(taskTypeService.fromForm _)

def create: Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request =>
withJsonBodyUsing(taskTypePublicReads) { taskType =>
6 changes: 3 additions & 3 deletions app/controllers/TracingStoreController.scala
@@ -17,9 +17,9 @@ class TracingStoreController @Inject()(tracingStoreService: TracingStoreService,
extends Controller
with FoxImplicits {
private val tracingStorePublicReads: Reads[TracingStore] =
((__ \ 'name).read[String] and
(__ \ 'url).read[String] and
(__ \ 'publicUrl).read[String])(TracingStore.fromUpdateForm _)
((__ \ "name").read[String] and
(__ \ "url").read[String] and
(__ \ "publicUrl").read[String])(TracingStore.fromUpdateForm _)

def listOne: Action[AnyContent] = sil.UserAwareAction.async { implicit request =>
for {
2 changes: 1 addition & 1 deletion app/models/annotation/AnnotationIdentifier.scala
@@ -9,7 +9,7 @@ import scala.concurrent.ExecutionContext
case class AnnotationIdentifier(annotationType: AnnotationType, identifier: ObjectId) {

def toUniqueString: String =
annotationType + "__" + identifier
f"${annotationType}__$identifier"

}

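The `toUniqueString` change avoids `+`-concatenating a non-String value: that only compiled via the `any2stringadd` implicit, which Scala 2.13 deprecates, so string interpolation is used instead. A minimal sketch with an invented stand-in type (the real `AnnotationType` is an enumeration value):

```scala
sealed trait AnnotationKind // illustrative stand-in for AnnotationType
case object Explorational extends AnnotationKind

val annotationType: AnnotationKind = Explorational
val identifier = "64f1c0ffee" // illustrative id

// Scala 2.12 style (relies on the deprecated any2stringadd conversion):
//   val key = annotationType + "__" + identifier

// Scala 2.13 style (interpolation, no implicit conversion involved; the PR uses the equivalent f-interpolator):
val key = s"${annotationType}__$identifier"
```
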
1 change: 0 additions & 1 deletion app/models/annotation/AnnotationPrivateLink.scala
@@ -6,7 +6,6 @@ import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.schema.Tables._
import oxalis.security.RandomIDGenerator
import play.api.libs.json.{JsValue, Json, OFormat}
import slick.jdbc.PostgresProfile.api._
import slick.lifted.Rep
import utils.ObjectId
import utils.sql.{SQLDAO, SqlClient, SqlToken}
50 changes: 24 additions & 26 deletions app/models/annotation/AnnotationService.scala
@@ -4,7 +4,7 @@ import akka.actor.ActorSystem
import akka.stream.Materializer
import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext}
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.io.ZipIO
import com.scalableminds.util.io.{NamedStream, ZipIO}
import com.scalableminds.util.mvc.Formatter
import com.scalableminds.util.time.Instant
import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils}
@@ -43,7 +43,6 @@ import models.annotation.AnnotationType.AnnotationType
import models.annotation.handler.SavedTracingInformationHandler
import models.annotation.nml.NmlWriter
import models.binary._
import models.mesh.{MeshDAO, MeshService}
import models.organization.OrganizationDAO
import models.project.ProjectDAO
import models.task.{Task, TaskDAO, TaskService, TaskTypeDAO}
@@ -52,7 +51,6 @@ import models.user.{User, UserDAO, UserService}
import net.liftweb.common.{Box, Full}
import play.api.i18n.{Messages, MessagesProvider}
import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator}
import play.api.libs.iteratee.Enumerator
import play.api.libs.json.{JsNull, JsObject, JsValue, Json}
import utils.{ObjectId, WkConf}

@@ -106,8 +104,6 @@ class AnnotationService @Inject()(
annotationRestrictionDefults: AnnotationRestrictionDefaults,
nmlWriter: NmlWriter,
temporaryFileCreator: TemporaryFileCreator,
meshDAO: MeshDAO,
meshService: MeshService,
conf: WkConf,
)(implicit ec: ExecutionContext, val materializer: Materializer)
extends BoxImplicits
@@ -643,7 +639,7 @@
ctx: DBAccessContext): Fox[TemporaryFile] =
for {
downloadAnnotations <- getTracingsScalesAndNamesFor(annotations, skipVolumeData)
nmlsAndNames <- Fox.serialCombined(downloadAnnotations.flatten) {
nmlsAndVolumes <- Fox.serialCombined(downloadAnnotations.flatten) {
case DownloadAnnotation(skeletonTracingIdOpt,
volumeTracingIdOpt,
skeletonTracingOpt,
@@ -661,18 +657,21 @@
volumeTracingIdOpt,
skeletonTracingOpt,
volumeTracingOpt)
nml = nmlWriter.toNmlStream(fetchedAnnotationLayersForAnnotation,
Some(annotation),
scaleOpt,
Some(name + "_data.zip"),
organizationName,
conf.Http.uri,
datasetName,
Some(user),
taskOpt)
} yield (nml, name, volumeDataOpt)
nml = nmlWriter.toNmlStream(
name,
fetchedAnnotationLayersForAnnotation,
Some(annotation),
scaleOpt,
Some(name + "_data.zip"),
organizationName,
conf.Http.uri,
datasetName,
Some(user),
taskOpt
)
} yield (nml, volumeDataOpt)
}
zip <- createZip(nmlsAndNames, zipFileName)
zip <- createZip(nmlsAndVolumes, zipFileName)
} yield zip

private def getTracingsScalesAndNamesFor(annotations: List[Annotation], skipVolumeData: Boolean)(
@@ -768,27 +767,26 @@
Fox.combined(tracingsGrouped.toList)
}

private def createZip(nmls: List[(Enumerator[Array[Byte]], String, Option[Array[Byte]])],
zipFileName: String): Future[TemporaryFile] = {
private def createZip(nmls: List[(NamedStream, Option[Array[Byte]])], zipFileName: String): Fox[TemporaryFile] = {
val zipped = temporaryFileCreator.create(TextUtils.normalize(zipFileName), ".zip")
val zipper = ZipIO.startZip(new BufferedOutputStream(new FileOutputStream(new File(zipped.path.toString))))

def addToZip(nmls: List[(Enumerator[Array[Byte]], String, Option[Array[Byte]])]): Future[Boolean] =
def addToZip(nmls: List[(NamedStream, Option[Array[Byte]])]): Fox[Boolean] =
nmls match {
case (nml, name, volumeDataOpt) :: tail =>
case (nml, volumeDataOpt) :: tail =>
if (volumeDataOpt.isDefined) {
val subZip = temporaryFileCreator.create(TextUtils.normalize(name), ".zip")
val subZip = temporaryFileCreator.create(TextUtils.normalize(nml.name), ".zip")
val subZipper =
ZipIO.startZip(new BufferedOutputStream(new FileOutputStream(new File(subZip.path.toString))))
volumeDataOpt.foreach(volumeData => subZipper.addFileFromBytes(name + "_data.zip", volumeData))
volumeDataOpt.foreach(volumeData => subZipper.addFileFromBytes(nml.name + "_data.zip", volumeData))
for {
_ <- subZipper.addFileFromEnumerator(name + ".nml", nml)
_ <- subZipper.addFileFromNamedStream(nml, suffix = ".nml")
_ = subZipper.close()
_ = zipper.addFileFromTemporaryFile(name + ".zip", subZip)
_ = zipper.addFileFromTemporaryFile(nml.name + ".zip", subZip)
res <- addToZip(tail)
} yield res
} else {
zipper.addFileFromEnumerator(name + ".nml", nml).flatMap(_ => addToZip(tail))
zipper.addFileFromNamedStream(nml, suffix = ".nml").flatMap(_ => addToZip(tail))
}
case _ =>
Future.successful(true)
21 changes: 9 additions & 12 deletions app/models/annotation/nml/NmlParser.scala
@@ -262,8 +262,8 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
depth <- getSingleAttribute(node, "depth").toIntOpt
} yield BoundingBox(Vec3Int(topLeftX, topLeftY, topLeftZ), width, height, depth)

private def parseAdditionalAxes(nodes: NodeSeq)(implicit m: MessagesProvider) = {
val additionalAxes = nodes.headOption.map(
private def parseAdditionalAxes(nodes: NodeSeq)(implicit m: MessagesProvider): Box[Seq[AdditionalAxisProto]] = {
val additionalAxes: Option[collection.Seq[AdditionalAxisProto]] = nodes.headOption.map(
_.child.flatMap(
additionalAxisNode => {
for {
Expand All @@ -281,7 +281,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
additionalAxes match {
case Some(axes) =>
if (axes.map(_.name).distinct.size == axes.size) {
Full(axes)
Full(axes.toSeq)
} else {
Failure(Messages("nml.additionalCoordinates.notUnique"))
}
@@ -519,17 +519,14 @@
}

private def parseAdditionalCoordinateValues(node: XMLNode): Seq[AdditionalCoordinateProto] = {
val regex = "additionalCoordinate-(\\w)".r("name")
val regex = "^additionalCoordinate-(\\w)".r
node.attributes.flatMap {
case attribute: Attribute => {
if (attribute.key.startsWith("additionalCoordinate")) {
Some(
new AdditionalCoordinateProto(regex.findAllIn(attribute.key).group("name"),
attribute.value.toString().toInt))
} else {
None
case attribute: Attribute =>
attribute.key match {
case regex(axisName) =>
Some(new AdditionalCoordinateProto(axisName, attribute.value.toString().toInt))
case _ => None
}
}
case _ => None
}.toSeq
}
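The final hunk replaces named-group lookup (a regex built with `.r("name")` plus `findAllIn(...).group("name")`) with a plain regex used as an extractor in a pattern match, so checking whether an attribute key is relevant and capturing the axis name become a single step. A small sketch of the same idea with invented attribute keys (note that a regex extractor in a `match` must match the entire input string):

```scala
// One capturing group for the axis name; the anchor mirrors the diff above.
val additionalCoordinateKey = "^additionalCoordinate-(\\w)".r

val attributeKeys = List("additionalCoordinate-t", "rotation", "additionalCoordinate-q")

val axisNames: List[String] = attributeKeys.flatMap {
  case additionalCoordinateKey(axisName) => Some(axisName) // whole key matched, group captured
  case _                                 => None           // unrelated attribute, skipped
}
// axisNames == List("t", "q")
```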