diff --git a/big/src/main/kotlin/de/darkatra/bfme2/big/BigArchive.kt b/big/src/main/kotlin/de/darkatra/bfme2/big/BigArchive.kt
index 7c45743..c13e154 100644
--- a/big/src/main/kotlin/de/darkatra/bfme2/big/BigArchive.kt
+++ b/big/src/main/kotlin/de/darkatra/bfme2/big/BigArchive.kt
@@ -1,5 +1,6 @@
 package de.darkatra.bfme2.big

+import de.darkatra.bfme2.PublicApi
 import de.darkatra.bfme2.readNullTerminatedString
 import de.darkatra.bfme2.toBigEndianBytes
 import de.darkatra.bfme2.toBigEndianUInt
@@ -22,7 +23,7 @@ import kotlin.io.path.outputStream
  * Heavily inspired by https://github.com/OpenSAGE/OpenSAGE/blob/master/src/OpenSage.FileFormats.Big/BigArchive.cs
  */
 class BigArchive(
-    @Suppress("MemberVisibilityCanBePrivate") // public api
+    @PublicApi
     val version: BigArchiveVersion,
     val path: Path
 ) {
@@ -52,7 +53,7 @@ class BigArchive(

     private val _entries: MutableList<BigArchiveEntry> = arrayListOf()

-    @Suppress("MemberVisibilityCanBePrivate") // public api
+    @PublicApi
     val entries
         get() = _entries.sortedWith(Comparator.comparing(BigArchiveEntry::name))

@@ -84,7 +85,7 @@ class BigArchive(
      *
      * @param name The name of the entry to delete.
      */
-    @Suppress("unused") // public api
+    @PublicApi
     fun deleteEntry(name: String) {
         if (name.isBlank()) {
             throw IllegalArgumentException("Name must not be blank")
@@ -97,7 +98,7 @@ class BigArchive(
     /**
      * Reads the archive from disk.
      */
-    @Suppress("MemberVisibilityCanBePrivate") // public api
+    @PublicApi
     fun readFromDisk() {
         if (!path.exists()) {
             return
diff --git a/core/src/main/kotlin/de/darkatra/bfme2/Color.kt b/core/src/main/kotlin/de/darkatra/bfme2/Color.kt
index 6b55b6e..e031c59 100644
--- a/core/src/main/kotlin/de/darkatra/bfme2/Color.kt
+++ b/core/src/main/kotlin/de/darkatra/bfme2/Color.kt
@@ -25,7 +25,7 @@ data class Color constructor(
     val blue: UInt
         get() = rgba shr 0 and 0xFFu

-    @Suppress("MemberVisibilityCanBePrivate") // public api
+    @PublicApi
     val alpha: UInt
         get() = rgba shr 24 and 0xFFu

diff --git a/core/src/main/kotlin/de/darkatra/bfme2/PublicApi.kt b/core/src/main/kotlin/de/darkatra/bfme2/PublicApi.kt
new file mode 100644
index 0000000..cc48ad2
--- /dev/null
+++ b/core/src/main/kotlin/de/darkatra/bfme2/PublicApi.kt
@@ -0,0 +1,6 @@
+package de.darkatra.bfme2
+
+@MustBeDocumented
+@Retention(AnnotationRetention.RUNTIME)
+@Target(AnnotationTarget.FUNCTION, AnnotationTarget.PROPERTY)
+annotation class PublicApi
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/MapFile.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/MapFile.kt
index f7e939a..b66489a 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/MapFile.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/MapFile.kt
@@ -15,6 +15,7 @@ import de.darkatra.bfme2.map.posteffect.PostEffects
 import de.darkatra.bfme2.map.riverarea.RiverAreas
 import de.darkatra.bfme2.map.scripting.PlayerScriptsList
 import de.darkatra.bfme2.map.serialization.MapFileSerde
+import de.darkatra.bfme2.map.serialization.SerializationOrder
 import de.darkatra.bfme2.map.serialization.Serialize
 import de.darkatra.bfme2.map.team.Teams
 import de.darkatra.bfme2.map.trigger.TriggerAreas
@@ -31,6 +32,7 @@ data class MapFile(
     val cameras: Cameras,
     val environmentData: EnvironmentData,
     val globalLighting: GlobalLighting,
+    @SerializationOrder(SerializationOrder.HIGHEST_PRECEDENCE)
     val heightMap: HeightMap,
     val libraryMapsList: LibraryMapsList,
     val multiplayerPositions: MultiplayerPositions,
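
For illustration (not part of the patch): the MapFile.kt hunk above is the only place the new @SerializationOrder annotation is applied, pushing the height map to the front of the serialization order. A minimal, self-contained sketch of the same pattern; the ExampleAsset class and its fields are invented, and the annotation is re-declared here in simplified, non-internal form:

    // Simplified stand-in for the annotation introduced later in this diff.
    @Retention(AnnotationRetention.RUNTIME)
    @Target(AnnotationTarget.VALUE_PARAMETER)
    annotation class SerializationOrder(val ordered: Int = 0) {
        companion object {
            const val HIGHEST_PRECEDENCE = Int.MIN_VALUE
        }
    }

    // Invented example: heightMap should be written before the other properties.
    data class ExampleAsset(
        val cameras: String,
        @SerializationOrder(SerializationOrder.HIGHEST_PRECEDENCE)
        val heightMap: String,
        val teams: String
    )
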
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/blendtile/BlendDescription.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/blendtile/BlendDescription.kt
index a837570..b8babb6 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/blendtile/BlendDescription.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/blendtile/BlendDescription.kt
@@ -1,6 +1,7 @@
 package de.darkatra.bfme2.map.blendtile

 import de.darkatra.bfme2.InvalidDataException
+import de.darkatra.bfme2.PublicApi
 import de.darkatra.bfme2.map.serialization.ListSerde
 import de.darkatra.bfme2.map.serialization.SerializationContext
 import de.darkatra.bfme2.map.serialization.postprocessing.PostProcess
@@ -17,7 +18,7 @@ data class BlendDescription(
     val magicValue2: UInt
 ) {

-    @Suppress("unused") // public api
+    @PublicApi
     val blendDirection: BlendDirection
         get() {
             val bytes = rawBlendDirection.toTypedArray()
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/AssetListSerde.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/AssetListSerde.kt
index dee488e..1171204 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/AssetListSerde.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/AssetListSerde.kt
@@ -3,6 +3,7 @@ package de.darkatra.bfme2.map.serialization
 import com.google.common.io.CountingInputStream
 import de.darkatra.bfme2.map.serialization.model.DataSection
 import de.darkatra.bfme2.map.serialization.model.DataSectionHolder
+import de.darkatra.bfme2.map.serialization.model.DataSectionLeaf
 import de.darkatra.bfme2.map.serialization.postprocessing.PostProcessor
 import de.darkatra.bfme2.map.serialization.preprocessing.PreProcessor
 import java.io.OutputStream
@@ -16,8 +17,11 @@ internal class AssetListSerde<T>(

     override fun calculateDataSection(data: List<T>): DataSection {
         return DataSectionHolder(
-            containingData = data.map {
-                entrySerde.calculateDataSection(it)
+            containingData = buildList {
+                data.forEach {
+                    add(DataSectionLeaf.ASSET_HEADER)
+                    add(entrySerde.calculateDataSection(it))
+                }
             }
         )
     }
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileReader.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileReader.kt
index 0473719..ba00539 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileReader.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileReader.kt
@@ -3,6 +3,7 @@ package de.darkatra.bfme2.map.serialization
 import com.google.common.io.ByteStreams
 import com.google.common.io.CountingInputStream
 import de.darkatra.bfme2.InvalidDataException
+import de.darkatra.bfme2.PublicApi
 import de.darkatra.bfme2.SkippingInputStream
 import de.darkatra.bfme2.map.MapFile
 import de.darkatra.bfme2.map.MapFileCompression
@@ -43,12 +44,16 @@ class MapFileReader {
                     startPosition = inputStream.count
                 )

+                if (serializationContext.debugMode) {
+                    println("Reading asset '${currentAsset.assetName}' with size ${currentAsset.assetSize}.")
+                }
+
                 serializationContext.push(currentAsset)
                 callback(assetName)
                 serializationContext.pop()

                 val currentEndPosition = inputStream.count
-                val expectedEndPosition = serializationContext.currentEndPosition
+                val expectedEndPosition = currentAsset.endPosition
                 if (!serializationContext.debugMode && currentEndPosition != expectedEndPosition) {
                     throw InvalidDataException("Error reading '${currentAsset.assetName}'. Expected reader to be at position $expectedEndPosition, but was at $currentEndPosition.")
                 }
@@ -56,14 +61,14 @@ class MapFileReader {
         }
     }

-    @Suppress("unused") // public api
+    @PublicApi
     fun read(file: Path): MapFile {

         if (!file.exists()) {
             throw FileNotFoundException("File '${file.absolutePathString()}' does not exist.")
         }

-        return read(file.inputStream())
+        return file.inputStream().use(this::read)
     }

     fun read(inputStream: InputStream): MapFile {
@@ -75,42 +80,41 @@ class MapFileReader {

         val inputStreamSize = getInputStreamSize(bufferedInputStream)

-        return CountingInputStream(decodeIfNecessary(bufferedInputStream)).use { countingInputStream ->
+        val countingInputStream = CountingInputStream(decodeIfNecessary(bufferedInputStream))

-            readAndValidateFourCC(countingInputStream)
+        readAndValidateFourCC(countingInputStream)

-            val serializationContext = SerializationContext(true)
-            val annotationProcessingContext = AnnotationProcessingContext(false)
-            val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)
+        val serializationContext = SerializationContext(false)
+        val annotationProcessingContext = AnnotationProcessingContext(false)
+        val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)

-            measureTime {
-                val assetNames = readAssetNames(countingInputStream)
-                serializationContext.setAssetNames(assetNames)
-            }.also { elapsedTime ->
-                if (serializationContext.debugMode) {
-                    println("Reading asset names took $elapsedTime.")
-                }
+        measureTime {
+            val assetNames = readAssetNames(countingInputStream)
+            serializationContext.setAssetNames(assetNames)
+        }.also { elapsedTime ->
+            if (serializationContext.debugMode) {
+                println("Reading asset names took $elapsedTime.")
             }
+        }

-            serializationContext.push(
-                AssetEntry(
-                    assetName = "Map",
-                    assetVersion = 0u,
-                    assetSize = inputStreamSize,
-                    startPosition = 0
-                )
+        serializationContext.push(
+            AssetEntry(
+                assetName = "Map",
+                assetVersion = 0u,
+                assetSize = inputStreamSize,
+                startPosition = 0
             )
+        )

-            val mapFileSerde = serdeFactory.getSerde(MapFile::class)
+        val mapFileSerde = serdeFactory.getSerde(MapFile::class)

-            annotationProcessingContext.invalidate()
+        annotationProcessingContext.invalidate()

-            val mapFile = mapFileSerde.deserialize(countingInputStream)
+        val mapFile = mapFileSerde.deserialize(countingInputStream)

-            serializationContext.pop()
+        serializationContext.pop()

-            mapFile
-        }
+        return mapFile
     }

     private fun getInputStreamSize(bufferedInputStream: BufferedInputStream): Long {
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileSerde.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileSerde.kt
index 23d2389..a1f9bb5 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileSerde.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileSerde.kt
@@ -4,6 +4,7 @@ import com.google.common.io.CountingInputStream
 import de.darkatra.bfme2.map.Asset
 import de.darkatra.bfme2.map.MapFile
 import de.darkatra.bfme2.map.serialization.model.DataSectionHolder
+import de.darkatra.bfme2.map.serialization.model.DataSectionLeaf
 import de.darkatra.bfme2.map.toKClass
 import java.io.OutputStream
 import kotlin.reflect.KProperty
@@ -36,34 +37,46 @@ internal class MapFileSerde(

     override fun calculateDataSection(data: MapFile): DataSectionHolder {
         return DataSectionHolder(
-            containingData = parameterToField.entries.mapIndexed { index, (p, fieldForParameter) ->
-                @Suppress("UNCHECKED_CAST")
-                val serde = serdes[index] as Serde<Any>
-                val fieldData = fieldForParameter.getter.call(data)!!
-                serde.calculateDataSection(fieldData)
-            },
-            assetName = "MapFile"
+            assetName = "MapFile",
+            containingData = buildList {
+                parameterToField.entries.mapIndexed { index, entry -> Pair(index, entry) }
+                    .sortedBy { (_, entry) -> entry.key.findAnnotation<SerializationOrder>()?.ordered ?: SerializationOrder.DEFAULT_ORDER }
+                    .forEach { (index, entry) ->
+                        val fieldForParameter = entry.value
+
+                        @Suppress("UNCHECKED_CAST")
+                        val serde = serdes[index] as Serde<Any>
+                        val fieldData = fieldForParameter.getter.call(data)!!
+                        add(DataSectionLeaf.ASSET_HEADER)
+                        add(serde.calculateDataSection(fieldData))
+                    }
+            }
         )
     }

     @OptIn(ExperimentalTime::class)
     override fun serialize(outputStream: OutputStream, data: MapFile) {

-        // TODO: check if we need to preserve the order in which we write the data
-        parameterToField.entries.forEachIndexed { index, (parameter, fieldForParameter) ->
-            @Suppress("UNCHECKED_CAST")
-            val serde = serdes[index] as Serde<Any>
-            val fieldData = fieldForParameter.getter.call(data)!!
-
-            measureTime {
-                MapFileWriter.writeAsset(outputStream, serializationContext, fieldData)
-                serde.serialize(outputStream, fieldData)
-            }.also { elapsedTime ->
-                if (serializationContext.debugMode) {
-                    println("Deserialization of '${parameter.name}' took $elapsedTime.")
+        parameterToField.entries
+            .mapIndexed { index, entry -> Pair(index, entry) }
+            .sortedBy { (_, entry) -> entry.key.findAnnotation<SerializationOrder>()?.ordered ?: SerializationOrder.DEFAULT_ORDER }
+            .forEach { (index, entry) ->
+                val parameter = entry.key
+                val fieldForParameter = entry.value
+
+                @Suppress("UNCHECKED_CAST")
+                val serde = serdes[index] as Serde<Any>
+                val fieldData = fieldForParameter.getter.call(data)!!
+
+                measureTime {
+                    MapFileWriter.writeAsset(outputStream, serializationContext, fieldData)
+                    serde.serialize(outputStream, fieldData)
+                }.also { elapsedTime ->
+                    if (serializationContext.debugMode) {
+                        println("Serialization of '${parameter.name}' took $elapsedTime.")
+                    }
                 }
             }
-        }
     }

     @OptIn(ExperimentalTime::class)
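
For illustration (not part of the patch): the MapFileSerde change sorts the constructor parameters by their @SerializationOrder value before serializing. A rough standalone sketch of that sorting step, assuming kotlin-reflect is on the classpath; the Sample class, its fields and the printed output are invented:

    import kotlin.reflect.full.findAnnotation
    import kotlin.reflect.full.primaryConstructor

    annotation class SerializationOrder(val ordered: Int = 0)

    data class Sample(
        val first: Int,
        @SerializationOrder(Int.MIN_VALUE) // mirrors HIGHEST_PRECEDENCE in the real annotation
        val second: Int
    )

    fun main() {
        val parameters = Sample::class.primaryConstructor!!.parameters
        // Parameters without the annotation fall back to a default order of 0; lower values are written first.
        val ordered = parameters.sortedBy { it.findAnnotation<SerializationOrder>()?.ordered ?: 0 }
        println(ordered.map { it.name }) // [second, first]
    }
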
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriter.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriter.kt
index 6ac1b8c..e9d4240 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriter.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriter.kt
@@ -1,6 +1,7 @@
 package de.darkatra.bfme2.map.serialization

 import com.google.common.io.CountingOutputStream
+import de.darkatra.bfme2.PublicApi
 import de.darkatra.bfme2.map.Asset
 import de.darkatra.bfme2.map.MapFile
 import de.darkatra.bfme2.map.MapFileCompression
@@ -39,14 +40,16 @@ class MapFileWriter {
         }
     }

-    @Suppress("unused") // public api
+    @PublicApi
     fun write(file: Path, mapFile: MapFile) {

         if (file.exists()) {
             throw FileAlreadyExistsException("File '${file.absolutePathString()}' already exist.")
         }

-        write(file.outputStream(), mapFile)
+        file.outputStream().use {
+            write(it, mapFile)
+        }
     }

     fun write(outputStream: OutputStream, mapFile: MapFile, compression: MapFileCompression = MapFileCompression.UNCOMPRESSED) {
@@ -56,49 +59,48 @@ class MapFileWriter {
     @OptIn(ExperimentalTime::class)
     fun write(bufferedOutputStream: BufferedOutputStream, mapFile: MapFile, compression: MapFileCompression = MapFileCompression.UNCOMPRESSED) {

-        return bufferedOutputStream.use {
-
+        if (compression != MapFileCompression.UNCOMPRESSED) {
             writeFourCC(bufferedOutputStream, compression)
+        }

-            CountingOutputStream(encodeIfNecessary(bufferedOutputStream, compression)).use { countingOutputStream ->
-
-                writeFourCC(countingOutputStream, MapFileCompression.UNCOMPRESSED)
+        val countingOutputStream = CountingOutputStream(encodeIfNecessary(bufferedOutputStream, compression))

-                val serializationContext = SerializationContext(true)
-                val annotationProcessingContext = AnnotationProcessingContext(false)
-                val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)
+        writeFourCC(countingOutputStream, MapFileCompression.UNCOMPRESSED)

-                val mapFileSerde: MapFileSerde = serdeFactory.getSerde(MapFile::class) as MapFileSerde
-                annotationProcessingContext.invalidate()
+        val serializationContext = SerializationContext(false)
+        val annotationProcessingContext = AnnotationProcessingContext(false)
+        val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)

-                val assetDataSections = mapFileSerde.calculateDataSection(mapFile).flatten()
-                    .filter { it.isAsset }
-                    .distinctBy { it.assetName }
-                serializationContext.setAssetDataSections(assetDataSections.associateBy { it.assetName!! })
+        val mapFileSerde: MapFileSerde = serdeFactory.getSerde(MapFile::class) as MapFileSerde
+        annotationProcessingContext.invalidate()

-                val assetNames = assetDataSections
-                    .reversed()
-                    .mapIndexed { index, dataSectionHolder -> Pair(index.toUInt() + 1u, dataSectionHolder.assetName!!) }
-                    .toMap()
-                serializationContext.setAssetNames(assetNames)
+        val assetDataSections = mapFileSerde.calculateDataSection(mapFile).flatten()
+            .filter { it.isAsset }
+            .distinctBy { it.assetName }
+        serializationContext.setAssetDataSections(assetDataSections.associateBy { it.assetName!! })

-                measureTime {
-                    writeAssetNames(assetNames, countingOutputStream)
-                }.also { elapsedTime ->
-                    if (serializationContext.debugMode) {
-                        println("Writing asset names took $elapsedTime.")
-                    }
-                }
+        val assetNames = assetDataSections
+            .mapIndexed { index, dataSectionHolder -> Pair(index.toUInt() + 1u, dataSectionHolder.assetName!!) }
+            .toMap()
+        serializationContext.setAssetNames(assetNames)

-                mapFileSerde.serialize(bufferedOutputStream, mapFile)
-                bufferedOutputStream.flush()
+        measureTime {
+            writeAssetNames(assetNames, countingOutputStream)
+        }.also { elapsedTime ->
+            if (serializationContext.debugMode) {
+                println("Writing asset names took $elapsedTime.")
             }
         }
+
+        mapFileSerde.serialize(bufferedOutputStream, mapFile)
+        bufferedOutputStream.flush()
     }

     private fun writeAssetNames(assetNames: Map<UInt, String>, outputStream: OutputStream) {

         val numberOfAssetStrings = assetNames.size.toUInt()
+        outputStream.writeUInt(numberOfAssetStrings)
+
         for (i in numberOfAssetStrings downTo 1u step 1) {
             outputStream.write7BitIntPrefixedString(assetNames[i]!!)
             outputStream.writeUInt(i)
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/SerializationOrder.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/SerializationOrder.kt
new file mode 100644
index 0000000..fef2ee7
--- /dev/null
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/SerializationOrder.kt
@@ -0,0 +1,18 @@
+package de.darkatra.bfme2.map.serialization
+
+/**
+ * Determines the order in which a property should be serialized. Only impacts serialization, not deserialization.
+ */
+@MustBeDocumented
+@Retention(AnnotationRetention.RUNTIME)
+@Target(AnnotationTarget.VALUE_PARAMETER)
+internal annotation class SerializationOrder(
+    val ordered: Int = DEFAULT_ORDER
+) {
+
+    companion object {
+        const val DEFAULT_ORDER = 0
+        const val HIGHEST_PRECEDENCE = Int.MIN_VALUE
+        const val LOWEST_PRECEDENCE = Int.MAX_VALUE
+    }
+}
diff --git a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/model/DataSectionHolder.kt b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/model/DataSectionHolder.kt
index 8f27588..08d086c 100644
--- a/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/model/DataSectionHolder.kt
+++ b/map/src/main/kotlin/de/darkatra/bfme2/map/serialization/model/DataSectionHolder.kt
@@ -22,6 +22,7 @@ internal data class DataSectionLeaf(
         val SHORT = DataSectionLeaf(2)
         val INT = DataSectionLeaf(4)
         val FLOAT = DataSectionLeaf(4)
+        val ASSET_HEADER = DataSectionLeaf(10)
     }
 }

@@ -36,18 +37,11 @@ internal data class DataSectionHolder(
 ) : DataSection {

     override val size: Long
-        get() = when (isVersionedAsset) {
-            // each asset has a header of 4 bytes for the assetIndex, 2 bytes for the assetVersion and 4 bytes for the assetSize
-            true -> 4 + 2 + 4 + containingData.sumOf(DataSection::size)
-            false -> containingData.sumOf(DataSection::size)
-        }
+        get() = containingData.sumOf(DataSection::size)

     internal val isAsset: Boolean
         get() = assetName != null

-    internal val isVersionedAsset: Boolean
-        get() = isAsset && assetVersion != null
-
     internal fun flatten(): List<DataSectionHolder> {
         return flatten(containingData.filterIsInstance<DataSectionHolder>())
     }
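
For illustration (not part of the patch): the new 10-byte ASSET_HEADER leaf replaces the per-asset header size that was previously folded into DataSectionHolder.size. Per the removed comment, the header is 4 bytes for the assetIndex, 2 bytes for the assetVersion and 4 bytes for the assetSize. A trivial sketch of that arithmetic (constant names are invented):

    // 4 (assetIndex) + 2 (assetVersion) + 4 (assetSize) = 10 bytes, matching DataSectionLeaf.ASSET_HEADER
    const val ASSET_INDEX_BYTES = 4L
    const val ASSET_VERSION_BYTES = 2L
    const val ASSET_SIZE_BYTES = 4L
    val assetHeaderSize: Long = ASSET_INDEX_BYTES + ASSET_VERSION_BYTES + ASSET_SIZE_BYTES // 10
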
diff --git a/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileReaderTest.kt b/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileReaderTest.kt
index 17d0fc3..c9a30a4 100644
--- a/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileReaderTest.kt
+++ b/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileReaderTest.kt
@@ -13,7 +13,7 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read map`() {

-        val map = MapFileReader().read(TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH))
+        val map = TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH).use(MapFileReader()::read)

         assertThat(map.blendTileData.numberOfTiles).isEqualTo(640u * 640u)
         assertThat(map.blendTileData.textures).hasSize(4)
@@ -59,7 +59,7 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read bfme2 map with strange road type`() {

-        val map = MapFileReader().read(TestUtils.getInputStream("/maps/bfme2-rotwk/map mp harlond.zlib"))
+        val map = TestUtils.getInputStream("/maps/bfme2-rotwk/map mp harlond.zlib").use(MapFileReader()::read)

         assertThat(map).isNotNull
         assertThat(map.objects.objects.any { it.roadType == RoadType.UNKNOWN_5 }).isTrue
@@ -68,7 +68,7 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read bfme2 map with standing wave areas`() {

-        val map = MapFileReader().read(TestUtils.getInputStream("/maps/bfme2-rotwk/map mp midgewater.zlib"))
+        val map = TestUtils.getInputStream("/maps/bfme2-rotwk/map mp midgewater.zlib").use(MapFileReader()::read)

         assertThat(map).isNotNull
         assertThat(map.standingWaveAreas.areas).hasSize(2)
@@ -77,7 +77,7 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read bfme2 map with scripts`() {

-        val map = MapFileReader().read(TestUtils.getInputStream("/maps/bfme2-rotwk/script.map"))
+        val map = TestUtils.getInputStream("/maps/bfme2-rotwk/script.map").use(MapFileReader()::read)

         assertThat(map.playerScriptsList.scriptLists).isNotEmpty
         assertThat(map.playerScriptsList.scriptLists[0].scriptListEntries).isNotEmpty
@@ -111,7 +111,7 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read bfme2 map with script that checks for active game modes`() {

-        val map = MapFileReader().read(TestUtils.getInputStream("/maps/bfme2-rotwk/map mp westmarch.zlib"))
+        val map = TestUtils.getInputStream("/maps/bfme2-rotwk/map mp westmarch.zlib").use(MapFileReader()::read)

         assertThat(map.playerScriptsList.scriptLists).isNotEmpty
         assertThat(map.playerScriptsList.scriptLists[2].scriptFolders).isNotEmpty
@@ -126,9 +126,9 @@ internal class MapFileReaderTest {
     @Test
     internal fun `should read the same map information for all compressions`() {

-        val plain = MapFileReader().read(TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH))
-        val refpack = MapFileReader().read(TestUtils.getInputStream(TestUtils.REFPACK_COMPRESSED_MAP_PATH))
-        val zlib = MapFileReader().read(TestUtils.getInputStream(TestUtils.ZLIB_COMPRESSED_MAP_PATH))
+        val plain = TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH).use(MapFileReader()::read)
+        val refpack = TestUtils.getInputStream(TestUtils.REFPACK_COMPRESSED_MAP_PATH).use(MapFileReader()::read)
+        val zlib = TestUtils.getInputStream(TestUtils.ZLIB_COMPRESSED_MAP_PATH).use(MapFileReader()::read)

         assertThat(plain).isEqualTo(refpack)
         assertThat(plain).isEqualTo(zlib)
diff --git a/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriterTest.kt b/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriterTest.kt
index 3948281..0ebf84d 100644
--- a/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriterTest.kt
+++ b/map/src/test/kotlin/de/darkatra/bfme2/map/serialization/MapFileWriterTest.kt
@@ -11,14 +11,27 @@ class MapFileWriterTest {
     @Test
     fun `should write map`() {

-        val expectedMapFileSize = ByteStreams.exhaust(TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH))
+        val expectedMapFileSize = TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH).use(ByteStreams::exhaust)
+        val parsedMapFile = TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH).use(MapFileReader()::read)

-        val inputMapFile = TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH)
-        val parsedMapFile = MapFileReader().read(inputMapFile)
-        val writtenMapFile = ByteArrayOutputStream()
+        val mapFileOutputStream = ByteArrayOutputStream().use {
+            MapFileWriter().write(it, parsedMapFile, MapFileCompression.UNCOMPRESSED)
+            it
+        }

-        MapFileWriter().write(writtenMapFile, parsedMapFile, MapFileCompression.UNCOMPRESSED)
+        assertThat(mapFileOutputStream.size()).isEqualTo(expectedMapFileSize)
+    }
+
+    @Test
+    fun `should produce identical map file when writing a parsed map`() {
+
+        val parsedMapFile = MapFileReader().read(TestUtils.getInputStream(TestUtils.UNCOMPRESSED_MAP_PATH))
+
+        val mapFileOutputStream = ByteArrayOutputStream()
+        MapFileWriter().write(mapFileOutputStream, parsedMapFile, MapFileCompression.UNCOMPRESSED)
+
+        val writtenMapFile = MapFileReader().read(mapFileOutputStream.toByteArray().inputStream())

-        assertThat(writtenMapFile.size()).isEqualTo(expectedMapFileSize)
+        assertThat(parsedMapFile.worldInfo).isEqualTo(writtenMapFile.worldInfo)
     }
 }
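
For illustration (not part of the patch): a hedged round-trip sketch based on the calls exercised by the tests above. The map path is a placeholder, and the final comparison simply relies on MapFile being a data class:

    import de.darkatra.bfme2.map.MapFileCompression
    import de.darkatra.bfme2.map.serialization.MapFileReader
    import de.darkatra.bfme2.map.serialization.MapFileWriter
    import java.io.ByteArrayOutputStream
    import java.io.FileInputStream

    fun main() {
        // Read a map the way the updated tests do: use(...) closes the input stream afterwards.
        val mapFile = FileInputStream("path/to/some.map").use(MapFileReader()::read)

        // Write it back uncompressed and re-read the result, mirroring the new round-trip test.
        val outputStream = ByteArrayOutputStream()
        MapFileWriter().write(outputStream, mapFile, MapFileCompression.UNCOMPRESSED)
        val reRead = MapFileReader().read(outputStream.toByteArray().inputStream())

        println(mapFile == reRead)
    }
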