
Feature/caws mutable data stream batch domain #91

Merged
merged 14 commits into from
Jun 21, 2024
CoreDataStreamService.kt
@@ -12,6 +12,7 @@ import dk.cachet.carp.webservices.data.domain.DataStreamSnapshot
import dk.cachet.carp.webservices.data.repository.DataStreamConfigurationRepository
import dk.cachet.carp.webservices.data.repository.DataStreamIdRepository
import dk.cachet.carp.webservices.data.repository.DataStreamSequenceRepository
import dk.cachet.carp.webservices.data.service.impl.CawsMutableDataStreamBatchWrapper
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import org.springframework.stereotype.Component
@@ -64,7 +65,7 @@ class CoreDataStreamService(
},
) { "The batch contains a sequence with a data stream which wasn't configured for this study deployment." }

val dataStreams = MutableDataStreamBatch()
val dataStreams = CawsMutableDataStreamBatchWrapper()

// appending sequences to batch
dataStreams.appendBatch(batch)
@@ -190,7 +191,7 @@ class CoreDataStreamService(
}
}
}
.fold(MutableDataStreamBatch()) { batch, sequence ->
.fold(CawsMutableDataStreamBatchWrapper()) { batch, sequence ->
batch.apply { appendSequence(sequence) }
}
}
@@ -242,7 +243,7 @@ class CoreDataStreamService(
val config = mapToCoreConfig(configOptional.get().config!!)

val ids =
config.expectedDataStreamIds.map { dataStream ->
config.expectedDataStreamIds.map { dataStream: DataStreamId ->
dataStreamIdRepository.findByStudyDeploymentIdAndDeviceRoleNameAndNameAndNameSpace(
studyDeploymentId = dataStream.studyDeploymentId.stringRepresentation,
deviceRoleName = dataStream.deviceRoleName,
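A minimal sketch (not part of the PR) of the pattern the swap above enables, under the assumption that the core MutableDataStreamBatch validates sequence ordering on append, as the wrapper's documentation below describes; loadSequencesFromDb is a hypothetical helper name.

// Hypothetical helper: returns stored sequences in whatever order the repository yields them.
val stored: List<DataStreamSequence<*>> = loadSequencesFromDb(studyDeploymentId)

// The wrapper skips the per-append sequence ID checks, so sequences read back from the
// database can be folded into a single batch regardless of the order they arrive in.
val rebuilt = stored.fold(CawsMutableDataStreamBatchWrapper()) { batch, sequence ->
    batch.apply { appendSequence(sequence) }
}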
CawsMutableDataStreamBatchWrapper.kt
@@ -0,0 +1,60 @@
package dk.cachet.carp.webservices.data.service.impl

import dk.cachet.carp.common.application.data.Data
import dk.cachet.carp.data.application.*

/** UNDER CONSTRUCTION --> TODO: create a function that checks sequence IDs before appending.
 * This would also be useful if we can get back the last preceding upload of a sequence, so the client knows what to send.
 * This is a "trick" to make the class `DataStreamBatch` open, allowing data streams to be appended without checking
 * all the sequence IDs in the append method. It is a temporary solution until we find a better way to handle this.
 */
class CawsMutableDataStreamBatchWrapper : Sequence<DataStreamPoint<*>>, DataStreamBatch {
private val sequenceMap: MutableMap<DataStreamId, MutableList<MutableDataStreamSequence<*>>> = mutableMapOf()

/**
* A list of sequences covering all sequences so far appended to [CawsMutableDataStreamBatchWrapper].
 * This may return fewer sequences than originally appended, in case appended sequences were merged with prior ones.
*/
override val sequences: Sequence<DataStreamSequence<*>>
get() = sequenceMap.asSequence().flatMap { it.value }.map { it }

/**
 * Consider a val for `sequence.toMutableDataStreamSequence()`, but it is not necessary.
 */
@Suppress("UNCHECKED_CAST")
fun appendSequence(sequence: DataStreamSequence<*>) {
val sequenceList = sequenceMap[sequence.dataStream]

if (sequenceList == null) {
sequenceMap[sequence.dataStream] = mutableListOf(sequence.toMutableDataStreamSequence())
return
}

val last = sequenceList.last() as MutableDataStreamSequence<Data>

if (last.isImmediatelyFollowedBy(sequence)) {
last.appendSequence(sequence as MutableDataStreamSequence<Data>)
} else {
sequenceList.add(sequence.toMutableDataStreamSequence())
}
}

/**
 * Append all data stream sequences contained in [batch] to this batch.
 * This gets rid of the premature optimization and sequential checks: let anything in and check later.
 * This is a temporary solution.
 */
fun appendBatch(batch: DataStreamBatch) {
batch.sequences.forEach(::appendSequence)
}

override fun equals(other: Any?): Boolean {
if (this === other) return true
if (other !is DataStreamBatch) return false

return toList() == other.toList()
}

override fun hashCode(): Int = sequences.hashCode()
}
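A short usage sketch (not part of the PR) of the merging behaviour documented on `sequences`; it reuses the createStubSequence(deploymentId, firstSequenceId, data) helper from the tests below, whose argument meaning is assumed here.

val wrapper = CawsMutableDataStreamBatchWrapper()
val deploymentId = UUID.randomUUID()

// Two stub sequences for the same deployment, assumed to cover sequence IDs 0 and 1.
wrapper.appendSequence(createStubSequence(deploymentId, 0, StubDataPoint()))
wrapper.appendSequence(createStubSequence(deploymentId, 1, StubDataPoint()))

// If the second sequence immediately follows the first, appendSequence merges them,
// so `sequences` can report fewer entries than were appended.
println(wrapper.sequences.count())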
CoreDataStreamServiceTest.kt
@@ -2,10 +2,12 @@ package dk.cachet.carp.webservices.data.service

import dk.cachet.carp.common.application.UUID
import dk.cachet.carp.common.infrastructure.test.StubDataPoint
import dk.cachet.carp.data.application.MutableDataStreamBatch
import dk.cachet.carp.webservices.data.repository.DataStreamConfigurationRepository
import dk.cachet.carp.webservices.data.service.core.CoreDataStreamService
import io.mockk.*
import dk.cachet.carp.webservices.data.service.impl.CawsMutableDataStreamBatchWrapper
import io.mockk.coEvery
import io.mockk.coVerify
import io.mockk.mockk
import kotlinx.coroutines.test.runTest
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
@@ -22,7 +24,7 @@ class CoreDataStreamServiceTest {
val incorrectId = UUID.randomUUID()
val correctId = UUID.randomUUID()

val batch = MutableDataStreamBatch()
val batch = CawsMutableDataStreamBatchWrapper()
batch.appendSequence(createStubSequence(correctId, 0, StubDataPoint()))
batch.appendSequence(createStubSequence(incorrectId, 0, StubDataPoint()))

@@ -45,7 +47,7 @@ class CoreDataStreamServiceTest {
val dataStreamConfigurationRepository = mockk<DataStreamConfigurationRepository>()
val studyDeploymentId = UUID.randomUUID()

val batch = MutableDataStreamBatch()
val batch = CawsMutableDataStreamBatchWrapper()
batch.appendSequence(createStubSequence(studyDeploymentId, 0, StubDataPoint()))

coEvery {
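A hedged sketch (not part of the PR) of how the wrapper's equals, which compares the flattened point lists, could be exercised against the core batch; it assumes the createStubSequence helper used above and re-imports MutableDataStreamBatch, which this PR removes from the test imports.

val sequence = createStubSequence(UUID.randomUUID(), 0, StubDataPoint())

val coreBatch = MutableDataStreamBatch().apply { appendSequence(sequence) }
val wrapper = CawsMutableDataStreamBatchWrapper().apply { appendSequence(sequence) }

// equals is defined in terms of toList(), so two batches holding the same points should compare equal.
check(wrapper == coreBatch)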