H2GIS deserializing Geometry type causes stack overflow error

My current project uses PostGIS for geolocation tools. I use the converter configuration shown further down so I can serialize and deserialize a Point type.

This works fine in production with Postgres and PostGIS - I can save, query and use the model - but as soon as I use it in test cases, with H2 and H2GIS, I get a stack overflow while decoding. More precisely, findAll() on the repository causes the overflow, while save() seems to work fine.
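
The converters are registered with Spring Data R2DBC through a configuration class along these lines (a minimal sketch; the class and bean names are placeholders, the exact registration in my project may differ slightly):

import io.r2dbc.spi.ConnectionFactory
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.data.r2dbc.convert.R2dbcCustomConversions
import org.springframework.data.r2dbc.dialect.DialectResolver

// Hypothetical registration class; the two converters themselves are shown further down.
@Configuration
class R2dbcConverterConfig {

    @Bean
    fun r2dbcCustomConversions(connectionFactory: ConnectionFactory): R2dbcCustomConversions {
        // Resolve the dialect from the active ConnectionFactory (Postgres in production,
        // H2 in the tests) and register both the reading and the writing converter.
        val dialect = DialectResolver.getDialect(connectionFactory)
        return R2dbcCustomConversions.of(dialect, listOf(ToPointConverter(), PointToWkbConverter()))
    }
}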

This is the test I'm trying to run: it simply saves an entity and reads it back from the repository.

@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = [Application::class])
class ParkingIntegrationTest {
    @Autowired
    lateinit var parkingRepository: ParkingRepository

    @Test
    fun createGetParking() {

        val geometryFactory = GeometryFactory(PrecisionModel(), 4326)
        val coordinate = Coordinate(51.5285262, -0.2664018)
        val point = geometryFactory.createPoint(coordinate)

        runBlocking {
            val parking = parkingRepository.save(
                Parking(
                    null,
                    point,
                ),
            )

            println(parking)

            println(parkingRepository.findAll().toList())
        }
    }
}
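
For the tests the application points at an in-memory H2 database with the H2GIS functions loaded at startup, wired roughly as below (a sketch; the configuration class, the SQL file name and the DDL in the comments are reconstructed placeholders, not copied verbatim from my project):

import io.r2dbc.spi.ConnectionFactory
import org.springframework.boot.test.context.TestConfiguration
import org.springframework.context.annotation.Bean
import org.springframework.core.io.ClassPathResource
import org.springframework.r2dbc.connection.init.ConnectionFactoryInitializer
import org.springframework.r2dbc.connection.init.ResourceDatabasePopulator

// Hypothetical test configuration: runs an init script against the in-memory H2
// database before the repository is touched.
@TestConfiguration
class H2GisTestConfig {

    @Bean
    fun initializer(connectionFactory: ConnectionFactory): ConnectionFactoryInitializer {
        val initializer = ConnectionFactoryInitializer()
        initializer.setConnectionFactory(connectionFactory)
        // h2gis-init.sql loads the H2GIS spatial functions and creates the parking table, roughly:
        //   CREATE ALIAS IF NOT EXISTS H2GIS_SPATIAL FOR "org.h2gis.functions.factory.H2GISFunctions.load";
        //   CALL H2GIS_SPATIAL();
        //   CREATE TABLE IF NOT EXISTS parking (id UUID DEFAULT RANDOM_UUID() PRIMARY KEY, coordinates GEOMETRY(POINT, 4326));
        initializer.setDatabasePopulator(ResourceDatabasePopulator(ClassPathResource("h2gis-init.sql")))
        return initializer
    }
}

The test r2dbc URL is an in-memory one along the lines of r2dbc:h2:mem:///testdb.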

When running this test, parkingRepository.save() completes, and on parkingRepository.findAll() I get the following stack trace:

2023-11-04T14:49:04.634+02:00  INFO 18392 --- [    Test worker] edu.myapp.ParkingIntegrationTest         : Started ParkingIntegrationTest in 2.218 seconds (process running for 2.717)
Parking(id=d32deaae-682c-4bd5-a451-7b2ae90861c0, coordinates=POINT (51.5285262 -0.2664018))
2023-11-04T14:49:05.177+02:00  WARN 18392 --- [er @coroutine#1] reactor.core.Exceptions                  : throwIfFatal detected a jvm fatal exception, which is thrown and logged below:

java.lang.StackOverflowError: null
    at io.r2dbc.h2.codecs.AbstractCodec.canDecode(AbstractCodec.java:36) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.DefaultCodecs.decode(DefaultCodecs.java:56) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:22) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:6) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.AbstractCodec.decode(AbstractCodec.java:60) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.DefaultCodecs.decode(DefaultCodecs.java:57) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:22) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:6) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.AbstractCodec.decode(AbstractCodec.java:60) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.DefaultCodecs.decode(DefaultCodecs.java:57) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:22) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:6) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.AbstractCodec.decode(AbstractCodec.java:60) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.DefaultCodecs.decode(DefaultCodecs.java:57) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:22) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.ParameterCodec.doDecode(ParameterCodec.java:6) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
    at io.r2dbc.h2.codecs.AbstractCodec.decode(AbstractCodec.java:60) ~[r2dbc-h2-1.0.0.RELEASE.jar:1.0.0.RELEASE]
...

And this DefaultCodecs.decode / ParameterCodec.doDecode / AbstractCodec.decode cycle repeats until my console fills up.

These are my implementations of the reading and writing converters:

import org.locationtech.jts.geom.GeometryFactory
import org.locationtech.jts.geom.Point
import org.locationtech.jts.geom.PrecisionModel
import org.locationtech.jts.io.WKBReader
import org.springframework.core.convert.converter.Converter
import org.springframework.data.convert.ReadingConverter

@ReadingConverter
class ToPointConverter : Converter<String, Point> {
    private val geometryFactory = GeometryFactory(PrecisionModel(), 4326)
    private val reader = WKBReader(geometryFactory)

    override fun convert(source: String): Point? {
        // The database returns the geometry column as a hex-encoded WKB string, so decode
        // the hex pairs into bytes and let the JTS WKBReader parse the result.
        return reader.read(source.chunked(2).map { it.toInt(16).toByte() }.toByteArray()) as Point?
    }
}

import org.locationtech.jts.geom.Point
import org.locationtech.jts.io.WKBWriter
import org.springframework.core.convert.converter.Converter
import org.springframework.data.convert.WritingConverter

@WritingConverter
class PointToWkbConverter : Converter<Point, ByteArray> {
    // Write 2-dimensional output and include the SRID (PostGIS-style EWKB), so the
    // 4326 reference system is kept with the stored geometry.
    private val writer = WKBWriter(2, true)

    override fun convert(source: Point): ByteArray? {
        return writer.write(source)
    }
}
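
To rule out the converters themselves, they can be exercised in isolation with a quick round-trip like this (a standalone sketch, not part of the test suite):

import org.locationtech.jts.geom.Coordinate
import org.locationtech.jts.geom.GeometryFactory
import org.locationtech.jts.geom.PrecisionModel

fun main() {
    val geometryFactory = GeometryFactory(PrecisionModel(), 4326)
    val point = geometryFactory.createPoint(Coordinate(51.5285262, -0.2664018))

    // Write the point to WKB, hex-encode it the way a text column would carry it,
    // then feed the hex string back through the reading converter.
    val wkb = PointToWkbConverter().convert(point)!!
    val hex = wkb.joinToString("") { "%02x".format(it) }
    val roundTripped = ToPointConverter().convert(hex)

    println(roundTripped) // expected: POINT (51.5285262 -0.2664018)
}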

The repository is a simple CoroutineCrudRepository:

interface ParkingRepository : CoroutineCrudRepository<Parking, UUID>

And the data class:

@Table(name = "parking")
data class Parking(
    @Id
    @GeneratedValue(generator = "uuid")
    @GenericGenerator(name = "uuid", strategy = "uuid")
    val id: UUID? = null,

    val coordinates: Point,
)

My Gradle config and the dependencies used:

import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

plugins {
    id("org.springframework.boot") version "3.1.2"
    id("io.spring.dependency-management") version "1.1.2"
    id("org.jlleitschuh.gradle.ktlint") version "11.5.0"
    id("org.liquibase.gradle") version "2.0.4"

    kotlin("jvm") version "1.8.22"
    kotlin("plugin.spring") version "1.8.22"
    kotlin("kapt") version "1.9.10"
}

group = "h2.test"

java {
    sourceCompatibility = JavaVersion.VERSION_17
}

repositories {
    mavenCentral()
}

dependencies {
    implementation("org.springframework.boot:spring-boot-starter-data-r2dbc")
    implementation("org.springframework.boot:spring-boot-starter-webflux")
    implementation("org.springframework.boot:spring-boot-starter-validation")
    implementation("com.fasterxml.jackson.module:jackson-module-kotlin")
    implementation("io.projectreactor.kotlin:reactor-kotlin-extensions")
    implementation("org.jetbrains.kotlin:kotlin-reflect")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-reactor")
    implementation("org.liquibase:liquibase-core:4.23.0")
    implementation("org.postgresql:r2dbc-postgresql:1.0.0.RELEASE")
    implementation("org.springframework:spring-jdbc:6.0.11")
    implementation("com.auth0:java-jwt:4.4.0")
    implementation("org.apache.httpcomponents.client5:httpclient5:5.2.1")
    implementation("org.springdoc:springdoc-openapi-starter-webflux-ui:2.2.0")
    implementation("org.springframework.boot:spring-boot-starter-data-jpa:3.1.2")
    implementation("org.locationtech.jts:jts-core:1.19.0")
    implementation("org.orbisgis:h2gis:2.2.0")

    annotationProcessor("org.springframework.boot:spring-boot-configuration-processor:3.1.2")

    runtimeOnly("org.postgresql:postgresql")
    runtimeOnly("org.postgresql:r2dbc-postgresql")

    testImplementation("io.mockk:mockk:1.13.5")
    testImplementation("io.r2dbc:r2dbc-h2:1.0.0.RELEASE")
    testImplementation("org.springframework.boot:spring-boot-starter-test")
    testImplementation("io.projectreactor:reactor-test")
}

tasks.withType<KotlinCompile> {
    kotlinOptions {
        freeCompilerArgs += "-Xjsr305=strict"
        jvmTarget = "17"
    }
}

tasks.withType<Test> {
    useJUnitPlatform()
}