Skip to content

Commit

Permalink
chore: WIP MapFileWriter
Browse files Browse the repository at this point in the history
  • Loading branch information
DarkAtra committed Jun 30, 2023
1 parent 65332c4 commit da3cdc5
Show file tree
Hide file tree
Showing 13 changed files with 166 additions and 108 deletions.
9 changes: 5 additions & 4 deletions big/src/main/kotlin/de/darkatra/bfme2/big/BigArchive.kt
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package de.darkatra.bfme2.big

import de.darkatra.bfme2.PublicApi
import de.darkatra.bfme2.readNullTerminatedString
import de.darkatra.bfme2.toBigEndianBytes
import de.darkatra.bfme2.toBigEndianUInt
Expand All @@ -22,7 +23,7 @@ import kotlin.io.path.outputStream
* Heavily inspired by https://github.com/OpenSAGE/OpenSAGE/blob/master/src/OpenSage.FileFormats.Big/BigArchive.cs
*/
class BigArchive(
@Suppress("MemberVisibilityCanBePrivate") // public api
@PublicApi
val version: BigArchiveVersion,
val path: Path
) {
Expand Down Expand Up @@ -52,7 +53,7 @@ class BigArchive(

private val _entries: MutableList<BigArchiveEntry> = arrayListOf()

@Suppress("MemberVisibilityCanBePrivate") // public api
@PublicApi
val entries
get() = _entries.sortedWith(Comparator.comparing(BigArchiveEntry::name))

Expand Down Expand Up @@ -84,7 +85,7 @@ class BigArchive(
*
* @param name The name of the entry to delete.
*/
@Suppress("unused") // public api
@PublicApi
fun deleteEntry(name: String) {
if (name.isBlank()) {
throw IllegalArgumentException("Name must not be blank")
Expand All @@ -97,7 +98,7 @@ class BigArchive(
/**
* Reads the archive from disk.
*/
@Suppress("MemberVisibilityCanBePrivate") // public api
@PublicApi
fun readFromDisk() {
if (!path.exists()) {
return
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/kotlin/de/darkatra/bfme2/Color.kt
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ data class Color constructor(
val blue: UInt
get() = rgba shr 0 and 0xFFu

@Suppress("MemberVisibilityCanBePrivate") // public api
@PublicApi
val alpha: UInt
get() = rgba shr 24 and 0xFFu

Expand Down
6 changes: 6 additions & 0 deletions core/src/main/kotlin/de/darkatra/bfme2/PublicApi.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
package de.darkatra.bfme2

/**
 * Marks a function or property as part of the library's public API even though it
 * has no callers inside this repository.
 *
 * Introduced in this commit to replace the per-site
 * `@Suppress("unused")` / `@Suppress("MemberVisibilityCanBePrivate")` annotations
 * (see the BigArchive, Color, BlendDescription and MapFileReader diffs).
 *
 * NOTE(review): a marker annotation does not by itself silence those IDE
 * inspections — confirm the inspection profile treats `@PublicApi` as an
 * entry point, otherwise the warnings will reappear.
 *
 * NOTE(review): RUNTIME retention keeps the marker queryable via reflection;
 * if it is only meant for readers/tooling, BINARY or SOURCE retention would
 * suffice — confirm whether reflective access is intended.
 */
@MustBeDocumented
@Retention(AnnotationRetention.RUNTIME)
@Target(AnnotationTarget.FUNCTION, AnnotationTarget.PROPERTY)
annotation class PublicApi
2 changes: 2 additions & 0 deletions map/src/main/kotlin/de/darkatra/bfme2/map/MapFile.kt
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import de.darkatra.bfme2.map.posteffect.PostEffects
import de.darkatra.bfme2.map.riverarea.RiverAreas
import de.darkatra.bfme2.map.scripting.PlayerScriptsList
import de.darkatra.bfme2.map.serialization.MapFileSerde
import de.darkatra.bfme2.map.serialization.SerializationOrder
import de.darkatra.bfme2.map.serialization.Serialize
import de.darkatra.bfme2.map.team.Teams
import de.darkatra.bfme2.map.trigger.TriggerAreas
Expand All @@ -31,6 +32,7 @@ data class MapFile(
val cameras: Cameras,
val environmentData: EnvironmentData,
val globalLighting: GlobalLighting,
@SerializationOrder(SerializationOrder.HIGHEST_PRECEDENCE)
val heightMap: HeightMap,
val libraryMapsList: LibraryMapsList,
val multiplayerPositions: MultiplayerPositions,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package de.darkatra.bfme2.map.blendtile

import de.darkatra.bfme2.InvalidDataException
import de.darkatra.bfme2.PublicApi
import de.darkatra.bfme2.map.serialization.ListSerde
import de.darkatra.bfme2.map.serialization.SerializationContext
import de.darkatra.bfme2.map.serialization.postprocessing.PostProcess
Expand All @@ -17,7 +18,7 @@ data class BlendDescription(
val magicValue2: UInt
) {

@Suppress("unused") // public api
@PublicApi
val blendDirection: BlendDirection
get() {
val bytes = rawBlendDirection.toTypedArray()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package de.darkatra.bfme2.map.serialization
import com.google.common.io.CountingInputStream
import de.darkatra.bfme2.map.serialization.model.DataSection
import de.darkatra.bfme2.map.serialization.model.DataSectionHolder
import de.darkatra.bfme2.map.serialization.model.DataSectionLeaf
import de.darkatra.bfme2.map.serialization.postprocessing.PostProcessor
import de.darkatra.bfme2.map.serialization.preprocessing.PreProcessor
import java.io.OutputStream
Expand All @@ -16,8 +17,11 @@ internal class AssetListSerde<T : Any>(

override fun calculateDataSection(data: List<T>): DataSection {
return DataSectionHolder(
containingData = data.map {
entrySerde.calculateDataSection(it)
containingData = buildList {
data.forEach {
add(DataSectionLeaf.ASSET_HEADER)
add(entrySerde.calculateDataSection(it))
}
}
)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package de.darkatra.bfme2.map.serialization
import com.google.common.io.ByteStreams
import com.google.common.io.CountingInputStream
import de.darkatra.bfme2.InvalidDataException
import de.darkatra.bfme2.PublicApi
import de.darkatra.bfme2.SkippingInputStream
import de.darkatra.bfme2.map.MapFile
import de.darkatra.bfme2.map.MapFileCompression
Expand Down Expand Up @@ -43,27 +44,31 @@ class MapFileReader {
startPosition = inputStream.count
)

if (serializationContext.debugMode) {
println("Reading asset '${currentAsset.assetName}' with size ${currentAsset.assetSize}.")
}

serializationContext.push(currentAsset)
callback(assetName)
serializationContext.pop()

val currentEndPosition = inputStream.count
val expectedEndPosition = serializationContext.currentEndPosition
val expectedEndPosition = currentAsset.endPosition
if (!serializationContext.debugMode && currentEndPosition != expectedEndPosition) {
throw InvalidDataException("Error reading '${currentAsset.assetName}'. Expected reader to be at position $expectedEndPosition, but was at $currentEndPosition.")
}
}
}
}

@Suppress("unused") // public api
@PublicApi
fun read(file: Path): MapFile {

if (!file.exists()) {
throw FileNotFoundException("File '${file.absolutePathString()}' does not exist.")
}

return read(file.inputStream())
return file.inputStream().use(this::read)
}

fun read(inputStream: InputStream): MapFile {
Expand All @@ -75,42 +80,41 @@ class MapFileReader {

val inputStreamSize = getInputStreamSize(bufferedInputStream)

return CountingInputStream(decodeIfNecessary(bufferedInputStream)).use { countingInputStream ->
val countingInputStream = CountingInputStream(decodeIfNecessary(bufferedInputStream))

readAndValidateFourCC(countingInputStream)
readAndValidateFourCC(countingInputStream)

val serializationContext = SerializationContext(true)
val annotationProcessingContext = AnnotationProcessingContext(false)
val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)
val serializationContext = SerializationContext(false)
val annotationProcessingContext = AnnotationProcessingContext(false)
val serdeFactory = SerdeFactory(annotationProcessingContext, serializationContext)

measureTime {
val assetNames = readAssetNames(countingInputStream)
serializationContext.setAssetNames(assetNames)
}.also { elapsedTime ->
if (serializationContext.debugMode) {
println("Reading asset names took $elapsedTime.")
}
measureTime {
val assetNames = readAssetNames(countingInputStream)
serializationContext.setAssetNames(assetNames)
}.also { elapsedTime ->
if (serializationContext.debugMode) {
println("Reading asset names took $elapsedTime.")
}
}

serializationContext.push(
AssetEntry(
assetName = "Map",
assetVersion = 0u,
assetSize = inputStreamSize,
startPosition = 0
)
serializationContext.push(
AssetEntry(
assetName = "Map",
assetVersion = 0u,
assetSize = inputStreamSize,
startPosition = 0
)
)

val mapFileSerde = serdeFactory.getSerde(MapFile::class)
val mapFileSerde = serdeFactory.getSerde(MapFile::class)

annotationProcessingContext.invalidate()
annotationProcessingContext.invalidate()

val mapFile = mapFileSerde.deserialize(countingInputStream)
val mapFile = mapFileSerde.deserialize(countingInputStream)

serializationContext.pop()
serializationContext.pop()

mapFile
}
return mapFile
}

private fun getInputStreamSize(bufferedInputStream: BufferedInputStream): Long {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import com.google.common.io.CountingInputStream
import de.darkatra.bfme2.map.Asset
import de.darkatra.bfme2.map.MapFile
import de.darkatra.bfme2.map.serialization.model.DataSectionHolder
import de.darkatra.bfme2.map.serialization.model.DataSectionLeaf
import de.darkatra.bfme2.map.toKClass
import java.io.OutputStream
import kotlin.reflect.KProperty
Expand Down Expand Up @@ -36,34 +37,46 @@ internal class MapFileSerde(
override fun calculateDataSection(data: MapFile): DataSectionHolder {

return DataSectionHolder(
containingData = parameterToField.entries.mapIndexed { index, (p, fieldForParameter) ->
@Suppress("UNCHECKED_CAST")
val serde = serdes[index] as Serde<Any>
val fieldData = fieldForParameter.getter.call(data)!!
serde.calculateDataSection(fieldData)
},
assetName = "MapFile"
assetName = "MapFile",
containingData = buildList {
parameterToField.entries.mapIndexed { index, entry -> Pair(index, entry) }
.sortedBy { (_, entry) -> entry.key.findAnnotation<SerializationOrder>()?.ordered ?: SerializationOrder.DEFAULT_ORDER }
.forEach { (index, entry) ->
val fieldForParameter = entry.value

@Suppress("UNCHECKED_CAST")
val serde = serdes[index] as Serde<Any>
val fieldData = fieldForParameter.getter.call(data)!!
add(DataSectionLeaf.ASSET_HEADER)
add(serde.calculateDataSection(fieldData))
}
}
)
}

@OptIn(ExperimentalTime::class)
override fun serialize(outputStream: OutputStream, data: MapFile) {

// TODO: check if we need to preserve the order in which we write the data
parameterToField.entries.forEachIndexed { index, (parameter, fieldForParameter) ->
@Suppress("UNCHECKED_CAST")
val serde = serdes[index] as Serde<Any>
val fieldData = fieldForParameter.getter.call(data)!!

measureTime {
MapFileWriter.writeAsset(outputStream, serializationContext, fieldData)
serde.serialize(outputStream, fieldData)
}.also { elapsedTime ->
if (serializationContext.debugMode) {
println("Deserialization of '${parameter.name}' took $elapsedTime.")
parameterToField.entries
.mapIndexed { index, entry -> Pair(index, entry) }
.sortedBy { (_, entry) -> entry.key.findAnnotation<SerializationOrder>()?.ordered ?: SerializationOrder.DEFAULT_ORDER }
.forEach { (index, entry) ->
val parameter = entry.key
val fieldForParameter = entry.value

@Suppress("UNCHECKED_CAST")
val serde = serdes[index] as Serde<Any>
val fieldData = fieldForParameter.getter.call(data)!!

measureTime {
MapFileWriter.writeAsset(outputStream, serializationContext, fieldData)
serde.serialize(outputStream, fieldData)
}.also { elapsedTime ->
if (serializationContext.debugMode) {
println("Serialization of '${parameter.name}' took $elapsedTime.")
}
}
}
}
}

@OptIn(ExperimentalTime::class)
Expand Down
Loading

0 comments on commit da3cdc5

Please sign in to comment.