Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import com.simprints.infra.enrolment.records.store.domain.models.FaceIdentity
import com.simprints.infra.enrolment.records.store.domain.models.FingerprintIdentity
import com.simprints.infra.enrolment.records.store.domain.models.SubjectQuery
import com.simprints.infra.enrolment.records.store.usecases.CompareImplicitTokenizedStringsUseCase
import com.simprints.infra.events.event.cosync.CoSyncEnrolmentRecordCreationEventDeserializer
import com.simprints.infra.events.event.cosync.CoSyncEnrolmentRecordEvents
import com.simprints.infra.events.event.domain.models.subject.EnrolmentRecordCreationEvent
import com.simprints.infra.events.event.domain.models.subject.FaceReference
Expand Down Expand Up @@ -240,6 +241,10 @@ internal class CommCareIdentityDataSource @Inject constructor(
TokenizableString::class.java,
TokenizationClassNameDeserializer(),
)
addDeserializer(
EnrolmentRecordCreationEvent::class.java,
CoSyncEnrolmentRecordCreationEventDeserializer(),
)
}

override suspend fun count(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
package com.simprints.infra.events.event.cosync

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.DeserializationContext
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.simprints.core.domain.tokenization.TokenizableString
import com.simprints.infra.events.event.domain.models.subject.BiometricReference
import com.simprints.infra.events.event.domain.models.subject.EnrolmentRecordCreationEvent

/**
* Deserializer for [EnrolmentRecordCreationEvent] that reads the JSON node and constructs the
* [EnrolmentRecordCreationEvent] object.
* Accounts for past versions of the event where moduleId and attendantId were plain strings.
*/
class CoSyncEnrolmentRecordCreationEventDeserializer :
    StdDeserializer<EnrolmentRecordCreationEvent>(
        EnrolmentRecordCreationEvent::class.java,
    ) {
    /**
     * Reads the JSON tree and manually constructs an [EnrolmentRecordCreationEvent].
     *
     * Handles three historical wire formats for `moduleId` / `attendantId`:
     *  1. Current: an object with `className` discriminator — parsed by Jackson as [TokenizableString].
     *  2. Middle: an object with only a `value` field (no `className`) — treated as [TokenizableString.Raw];
     *     downstream code decides whether the value is actually encrypted.
     *  3. Legacy: a plain JSON string — wrapped in [TokenizableString.Raw].
     */
    override fun deserialize(
        p: JsonParser,
        ctxt: DeserializationContext,
    ): EnrolmentRecordCreationEvent {
        val node: JsonNode = p.codec.readTree(p)
        val id = node["id"].asText()
        val payload = node["payload"]

        val subjectId = payload["subjectId"].asText()
        val projectId = payload["projectId"].asText()

        val moduleId = readTokenizableString(ctxt, payload["moduleId"])
        val attendantId = readTokenizableString(ctxt, payload["attendantId"])

        val biometricReferences = ctxt.readTreeAsValue<List<BiometricReference>>(
            payload["biometricReferences"],
            ctxt.typeFactory.constructCollectionType(List::class.java, BiometricReference::class.java),
        )

        return EnrolmentRecordCreationEvent(
            id,
            EnrolmentRecordCreationEvent.EnrolmentRecordCreationPayload(
                subjectId,
                projectId,
                moduleId,
                attendantId,
                biometricReferences,
            ),
        )
    }

    /**
     * Parses [node] as a [TokenizableString], falling back to [TokenizableString.Raw] for
     * legacy payloads that Jackson cannot map to the sealed type.
     *
     * Fix over the naive `node.asText()` fallback: `asText()` on an *object* node returns `""`,
     * which would silently drop the value for the class-less `{ "value": "..." }` format.
     * For object nodes we read the `value` field instead.
     */
    private fun readTokenizableString(
        ctxt: DeserializationContext,
        node: JsonNode,
    ): TokenizableString = try {
        ctxt.readTreeAsValue(node, TokenizableString::class.java)
    } catch (_: Exception) {
        if (node.isObject) {
            // Class-less TokenizableString serialization: { "value": "..." }.
            TokenizableString.Raw(node["value"]?.asText().orEmpty())
        } else {
            // Oldest format: a plain JSON string.
            TokenizableString.Raw(node.asText())
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
package com.simprints.infra.events.event.cosync

import com.fasterxml.jackson.databind.DeserializationContext
import com.fasterxml.jackson.databind.JavaType
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.ObjectMapper
import com.simprints.core.domain.tokenization.TokenizableString
import com.simprints.infra.events.event.domain.models.subject.BiometricReference
import io.mockk.every
import io.mockk.mockk
import org.junit.Test
import kotlin.test.assertEquals

class CoSyncEnrolmentRecordCreationEventDeserializerTest {
    private val deserializer = CoSyncEnrolmentRecordCreationEventDeserializer()
    private val objectMapper = ObjectMapper()

    /**
     * Builds a mocked [DeserializationContext] whose biometric-reference lookup yields an
     * empty list. The [TokenizableString] lookup is intentionally left unstubbed so that,
     * unless a test stubs it, the deserializer's fallback path is exercised.
     */
    private fun contextWithEmptyBiometrics(): DeserializationContext {
        val ctxt = mockk<DeserializationContext>()
        every {
            ctxt.readTreeAsValue<List<BiometricReference>>(
                any<JsonNode>(),
                any<JavaType>(),
            )
        } returns emptyList()
        return ctxt
    }

    /** Runs the deserializer under test over [json] with the given mocked [ctxt]. */
    private fun parse(json: String, ctxt: DeserializationContext) =
        deserializer.deserialize(objectMapper.createParser(json), ctxt)

    @Test
    fun `deserialize handles old format with plain strings`() {
        // No TokenizableString stub: the typed parse throws and the Raw fallback kicks in.
        val ctxt = contextWithEmptyBiometrics()

        val event = parse(JSON_TEMPLATE.format(PLAIN_MODULE, PLAIN_ATTENDANT), ctxt)

        assertEquals(EVENT_ID, event.id)
        assertEquals(SUBJECT_ID, event.payload.subjectId)
        assertEquals(PROJECT_ID, event.payload.projectId)
        assertEquals(TokenizableString.Raw(MODULE_ID), event.payload.moduleId)
        assertEquals(TokenizableString.Raw(ATTENDANT_ID), event.payload.attendantId)
        assertEquals(emptyList<BiometricReference>(), event.payload.biometricReferences)
    }

    @Test
    fun `deserialize handles new format with TokenizableString`() {
        val ctxt = contextWithEmptyBiometrics()
        every {
            ctxt.readTreeAsValue(any(), TokenizableString::class.java)
        } returns TokenizableString.Tokenized(ENCRYPTED_MODULE) andThen TokenizableString.Raw(UNENCRYPTED_ATTENDANT)

        val event = parse(JSON_TEMPLATE.format(TOKENIZED_MODULE, RAW_ATTENDANT), ctxt)

        assertEquals(EVENT_ID, event.id)
        assertEquals(SUBJECT_ID, event.payload.subjectId)
        assertEquals(PROJECT_ID, event.payload.projectId)
        assertEquals(TokenizableString.Tokenized(ENCRYPTED_MODULE), event.payload.moduleId)
        assertEquals(TokenizableString.Raw(UNENCRYPTED_ATTENDANT), event.payload.attendantId)
        assertEquals(emptyList<BiometricReference>(), event.payload.biometricReferences)
    }

    @Test
    fun `deserialize handles new format with TokenizableString but without explicit class`() {
        val ctxt = contextWithEmptyBiometrics()
        every {
            ctxt.readTreeAsValue(any(), TokenizableString::class.java)
        } returns TokenizableString.Raw(ENCRYPTED_MODULE) andThen TokenizableString.Raw(UNENCRYPTED_ATTENDANT)

        val event = parse(JSON_TEMPLATE.format(TOKENIZED_MODULE_NO_CLASS, RAW_ATTENDANT_NO_CLASS), ctxt)

        assertEquals(EVENT_ID, event.id)
        assertEquals(SUBJECT_ID, event.payload.subjectId)
        assertEquals(PROJECT_ID, event.payload.projectId)
        assertEquals(TokenizableString.Raw(ENCRYPTED_MODULE), event.payload.moduleId)
        assertEquals(TokenizableString.Raw(UNENCRYPTED_ATTENDANT), event.payload.attendantId)
        assertEquals(emptyList<BiometricReference>(), event.payload.biometricReferences)
    }

    companion object {
        const val EVENT_ID = "event-id"
        const val SUBJECT_ID = "subject-1"
        const val PROJECT_ID = "project-1"
        const val MODULE_ID = "module-1"
        const val ATTENDANT_ID = "attendant-1"
        const val ENCRYPTED_MODULE = "encrypted-module-1"
        const val UNENCRYPTED_ATTENDANT = "unencrypted-attendant-1"

        const val JSON_TEMPLATE = """
        {
            "id": "$EVENT_ID",
            "payload": {
                "subjectId": "$SUBJECT_ID",
                "projectId": "$PROJECT_ID",
                %s,
                %s,
                "biometricReferences": []
            }
        }"""

        const val PLAIN_MODULE = """
                "moduleId": "$MODULE_ID""""
        const val PLAIN_ATTENDANT = """
                "attendantId": "$ATTENDANT_ID""""

        const val TOKENIZED_MODULE = """
                "moduleId": {
                    "className": "TokenizableString.Tokenized",
                    "value": "$ENCRYPTED_MODULE"
                }"""
        const val RAW_ATTENDANT = """
                "attendantId": {
                    "className": "TokenizableString.Raw",
                    "value": "$UNENCRYPTED_ATTENDANT"
                }"""

        const val TOKENIZED_MODULE_NO_CLASS = """
                "moduleId": {
                    "value": "$ENCRYPTED_MODULE"
                }"""
        const val RAW_ATTENDANT_NO_CLASS = """
                "attendantId": {
                    "value": "$UNENCRYPTED_ATTENDANT"
                }"""
    }
}