Scala 3 Migration #209

Open · wants to merge 3 commits into base: master
Changes from 1 commit
Scala 3 Migration (#1)
Migrates the code to Scala 3 and enables cross-compilation with Scala 2.13.
LaurelineSwissBorg authored Oct 21, 2024
commit a9005b32776bae0b9c9380cf6a5b1a5c2dffdcc5
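
Most of the source-level edits in this commit are mechanical applications of a few Scala 3 idioms. The sketch below uses hypothetical `Box` and `Example` types rather than code from this repo, and assumes a Scala 3 compiler; it just collects the recurring patterns in one place.

```scala
import scala.compiletime.uninitialized

final case class Box(value: String)

class Example {
  // Scala 2 wrote default-initialized fields as `var x: T = _`; Scala 3 spells it out.
  private var cached: String = uninitialized

  // `private[this]` is deprecated in Scala 3; plain `private` has the same meaning here.
  private val names = List("a", "b")

  // Case-class companions no longer extend FunctionN, so eta-expand explicitly.
  def boxes: List[Box] = names.map(Box.apply) // was: names.map(Box)

  // Scala 3 prefers `?` over `_` for wildcard types.
  def describe(xs: List[?]): String = s"${xs.size} elements"

  // Context parameters are passed explicitly with `using` at the call site.
  def render(using indent: Int): String = " " * indent + cached
  def rendered: String = render(using 2)
}
```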
5 changes: 3 additions & 2 deletions bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala
@@ -6,6 +6,7 @@ import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import ujson.JsVisitor
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@@ -16,8 +17,8 @@ import ujson.JsVisitor
@State(Scope.Benchmark)
class MaterializerBenchmark {

private var interp: Interpreter = _
private var value: Val = _
private var interp: Interpreter = uninitialized
private var value: Val = uninitialized

@Setup
def setup(): Unit = {
10 changes: 6 additions & 4 deletions bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala
@@ -10,6 +10,7 @@ import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._

import scala.collection.mutable
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@@ -20,16 +21,17 @@ import scala.collection.mutable
@State(Scope.Benchmark)
class OptimizerBenchmark {

private var inputs: Iterable[(Expr, FileScope)] = _
private var allFiles: IndexedSeq[(Path, String)] = _
private var ev: EvalScope = _
private var inputs: Iterable[(Expr, FileScope)] = uninitialized
private var allFiles: IndexedSeq[(Path, String)] = uninitialized
private var ev: EvalScope = uninitialized

@Setup
def setup(): Unit = {
val (allFiles, ev) = MainBenchmark.findFiles()
this.inputs = allFiles.map { case (p, s) =>
fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(_)) match {
fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(using _)) match {
case Success(v, _) => v
case _ => throw new RuntimeException("Parse Failed")
}
}
this.ev = ev
9 changes: 5 additions & 4 deletions bench/src/main/scala/sjsonnet/ParserBenchmark.scala
@@ -7,6 +7,7 @@ import scala.collection.mutable.HashMap
import fastparse.Parsed.Success
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@@ -17,8 +18,8 @@ import org.openjdk.jmh.infra._
@State(Scope.Benchmark)
class ParserBenchmark {

private var allFiles: IndexedSeq[(Path, String)] = _
private var interp: Interpreter = _
private var allFiles: IndexedSeq[(Path, String)] = uninitialized
private var interp: Interpreter = uninitialized

@Setup
def setup(): Unit =
@@ -27,8 +28,8 @@ class ParserBenchmark {
@Benchmark
def main(bh: Blackhole): Unit = {
bh.consume(allFiles.foreach { case (p, s) =>
val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(_))
bh.consume(res.asInstanceOf[Success[_]])
val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(using _))
bh.consume(res.asInstanceOf[Success[?]])
})
}
}
60 changes: 47 additions & 13 deletions build.sbt
@@ -1,29 +1,55 @@
val sjsonnetVersion = "0.4.4"

scalaVersion in Global := "2.13.4"
val scala213 = "2.13.15"
val scala3 = "3.5.1"

val commonOptions: Seq[String] = Seq(
"-opt:l:inline",
"-opt-inline-from:sjsonnet.*,sjsonnet.**",
)

cancelable in Global := true
publish / skip := true

lazy val main = (project in file("sjsonnet"))
.settings(
Compile / scalacOptions ++= Seq("-opt:l:inline", "-opt-inline-from:sjsonnet.*,sjsonnet.**"),
name := "sjsonnet",

// Enable cross-compilation
scalaVersion := scala3,
crossScalaVersions := Seq(scala213, scala3),
scalacOptions ++= {
(CrossVersion.partialVersion(scalaVersion.value) match {
case Some((3, _)) =>
commonOptions ++ Seq(
// options dedicated for cross build / migration to Scala 3
"-source:3.5-migration"
)
case _ =>
commonOptions ++ Seq(
"-Xsource:3"
)
})
},


Test / fork := true,
Test / baseDirectory := (ThisBuild / baseDirectory).value,
libraryDependencies ++= Seq(
"com.lihaoyi" %% "fastparse" % "2.3.1",
"com.lihaoyi" %% "pprint" % "0.6.1",
"com.lihaoyi" %% "ujson" % "1.3.7",
"com.lihaoyi" %% "scalatags" % "0.9.3",
"com.lihaoyi" %% "os-lib" % "0.7.2",
"com.lihaoyi" %% "mainargs" % "0.2.0",
"com.lihaoyi" %% "fastparse" % "3.1.1",
"com.lihaoyi" %% "pprint" % "0.9.0",
"com.lihaoyi" %% "ujson" % "4.0.0",
"com.lihaoyi" %% "scalatags" % "0.12.0",
"com.lihaoyi" %% "os-lib" % "0.10.3",
"com.lihaoyi" %% "mainargs" % "0.7.5",
"org.lz4" % "lz4-java" % "1.8.0",
"org.json" % "json" % "20211205",
"org.scala-lang.modules" %% "scala-collection-compat" % "2.4.0",
"org.tukaani" % "xz" % "1.8",
"org.yaml" % "snakeyaml" % "1.30",
"org.json" % "json" % "20240303",
"org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0",
"org.tukaani" % "xz" % "1.9",
"org.yaml" % "snakeyaml" % "2.0",
),
libraryDependencies ++= Seq(
"com.lihaoyi" %% "utest" % "0.7.7",
"com.lihaoyi" %% "utest" % "0.8.3",
).map(_ % "test"),
testFrameworks += new TestFramework("utest.runner.Framework"),
(Compile / unmanagedSourceDirectories) := Seq(
@@ -56,4 +82,12 @@ lazy val bench = (project in file("bench"))
.enablePlugins(JmhPlugin)
.settings(
run / fork := true,
// Do not cross-compile the benchmark
scalaVersion := scala3,
)

lazy val root = (project in file("."))
.aggregate(main)
.settings(
publishArtifact := false
)
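
For context, this is roughly the cross-building shape the new build.sbt sets up, reduced to a minimal sketch with a placeholder `example` project; it is not the full sjsonnet build. Prefixing a task with `+` (for example `sbt +test`) then runs it once per version listed in `crossScalaVersions`.

```scala
// Minimal sketch of the cross-build wiring (placeholder project, not the full sjsonnet build).
val scala213 = "2.13.15"
val scala3   = "3.5.1"

lazy val example = (project in file("example"))
  .settings(
    scalaVersion       := scala3,
    crossScalaVersions := Seq(scala213, scala3),
    scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
      case Some((3, _)) => Seq("-source:3.5-migration") // Scala 3 in migration mode
      case _            => Seq("-Xsource:3")            // Scala 2.13 with Scala 3 syntax checks
    })
  )
```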
3 changes: 1 addition & 2 deletions project/plugins.sbt
@@ -1,2 +1 @@
//addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
4 changes: 2 additions & 2 deletions sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala
@@ -26,7 +26,7 @@ class CachedResolvedFile(val resolvedImportPath: OsPath, memoryLimitBytes: Long,
// Assert that the file is less than limit
assert(jFile.length() <= memoryLimitBytes, s"Resolved import path ${resolvedImportPath} is too large: ${jFile.length()} bytes > ${memoryLimitBytes} bytes")

private[this] val resolvedImportContent: StaticResolvedFile = {
private val resolvedImportContent: StaticResolvedFile = {
if (jFile.length() > cacheThresholdBytes) {
// If the file is too large, then we will just read it from disk
null
@@ -35,7 +35,7 @@ }
}
}

private[this] def readString(jFile: File): String = {
private def readString(jFile: File): String = {
new String(Files.readAllBytes(jFile.toPath), StandardCharsets.UTF_8);
}

2 changes: 1 addition & 1 deletion sjsonnet/src-jvm-native/sjsonnet/OsPath.scala
@@ -29,6 +29,6 @@ case class OsPath(p: os.Path) extends Path{
":" + Util.prettyIndex(lineStarts, offset)
}

p.relativeTo(os.pwd) + offsetStr
p.relativeTo(os.pwd).toString() + offsetStr
}
}
14 changes: 7 additions & 7 deletions sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala
@@ -64,7 +64,7 @@ object SjsonnetMain {
val doc = "usage: sjsonnet [sjsonnet-options] script-file"
val result = for{
config <- parser.constructEither(
args,
args.toIndexedSeq,
customName = name, customDoc = doc,
autoPrintHelpAndExit = None
)
@@ -113,7 +113,7 @@ object SjsonnetMain {
def writeFile(config: Config, f: os.Path, contents: String): Either[String, Unit] =
handleWriteFile(os.write.over(f, contents, createFolders = config.createDirs.value))

def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, _]): Either[String, String] = {
def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, ?]): Either[String, String] = {
config.outputFile match{
case None =>
val sw = new StringWriter
@@ -205,12 +205,12 @@ object SjsonnetMain {
importer = importer match{
case Some(i) => new Importer {
def resolve(docBase: Path, importName: String): Option[Path] =
i(docBase, importName).map(OsPath)
i(docBase, importName).map(OsPath.apply)
def read(path: Path): Option[ResolvedFile] = {
readPath(path)
}
}
case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath(_)), allowedInputs)
case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath.apply), allowedInputs)
},
parseCache,
settings = new Settings(
@@ -246,8 +246,8 @@ object SjsonnetMain {
Right(writer.toString)
}
}
relPath = os.FilePath(multiPath) / os.RelPath(f)
_ <- writeFile(config, relPath.resolveFrom(wd), rendered)
relPath = os.Path(multiPath, wd) / f
_ <- writeFile(config, relPath, rendered)
} yield relPath
}

@@ -299,7 +299,7 @@ * of caching on top of the underlying file system. Small files are read into memory, while large
* of caching on top of the underlying file system. Small files are read into memory, while large
* files are read from disk.
*/
private[this] def readPath(path: Path): Option[ResolvedFile] = {
private def readPath(path: Path): Option[ResolvedFile] = {
val osPath = path.asInstanceOf[OsPath].p
if (os.exists(osPath) && os.isFile(osPath)) {
Some(new CachedResolvedFile(path.asInstanceOf[OsPath], memoryLimitBytes = Int.MaxValue.toLong))
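
The `OsPath.apply` changes above come from a Scala 3 rule change: synthetic case-class companions no longer extend the `FunctionN` traits, so they cannot be passed directly where a function value is expected. A minimal sketch with a hypothetical `Wrapper` case class:

```scala
final case class Wrapper(s: String)

object EtaExample {
  val inputs = List("a", "b")

  // Scala 2: the synthetic companion of a case class extends Function1, so
  // `inputs.map(Wrapper)` compiles. Scala 3 companions no longer do, hence the
  // explicit `.apply` (or `Wrapper(_)`) used in SjsonnetMain above.
  val wrapped: List[Wrapper] = inputs.map(Wrapper.apply)
}
```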
17 changes: 10 additions & 7 deletions sjsonnet/src-jvm/sjsonnet/Platform.scala
@@ -2,13 +2,13 @@ package sjsonnet

import org.json.JSONObject

import java.io.{ByteArrayOutputStream, BufferedInputStream, File, FileInputStream}
import java.io.{BufferedInputStream, ByteArrayOutputStream, File, FileInputStream}
import java.util.Base64
import java.util.zip.GZIPOutputStream
import net.jpountz.xxhash.{StreamingXXHash64, XXHashFactory, XXHash64}
import net.jpountz.xxhash.{StreamingXXHash64, XXHash64, XXHashFactory}
import org.tukaani.xz.LZMA2Options
import org.tukaani.xz.XZOutputStream
import org.yaml.snakeyaml.Yaml
import org.yaml.snakeyaml.{LoaderOptions, Yaml}
import org.yaml.snakeyaml.constructor.Constructor

object Platform {
@@ -21,12 +21,13 @@ object Platform {
outputStream.close()
gzippedBase64
}

def gzipString(s: String): String = {
gzipBytes(s.getBytes())
}

/**
* Valid compression levels are 0 (no compression) to 9 (maximum compression).
* Valid compression levels are 0 (no compression) to 9 (maximum compression).
*/
def xzBytes(b: Array[Byte], compressionLevel: Option[Int]): String = {
val outputStream: ByteArrayOutputStream = new ByteArrayOutputStream(b.length)
@@ -45,17 +46,19 @@ }
}

def yamlToJson(yamlString: String): String = {
val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]])).load(yamlString)
val options = new LoaderOptions()
val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]], options)).load(yamlString)
new JSONObject(yaml).toString()
}

def md5(s: String): String = {
java.security.MessageDigest.getInstance("MD5")
.digest(s.getBytes("UTF-8"))
.map{ b => String.format("%02x", new java.lang.Integer(b & 0xff))}
.map { b => String.format("%02x", Integer.valueOf(b & 0xff)) }
.mkString
}

private[this] val xxHashFactory = XXHashFactory.fastestInstance()
private val xxHashFactory = XXHashFactory.fastestInstance()

def hashFile(file: File): String = {
val buffer = new Array[Byte](8192)
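
The `yamlToJson` change pairs with the snakeyaml bump from 1.30 to 2.0 in build.sbt; snakeyaml 2.x appears to require an explicit `LoaderOptions` argument when building a `Constructor`. A self-contained sketch of the new call shape:

```scala
import org.json.JSONObject
import org.yaml.snakeyaml.{LoaderOptions, Yaml}
import org.yaml.snakeyaml.constructor.Constructor

object YamlExample {
  // snakeyaml 2.x: Constructor takes an explicit LoaderOptions argument.
  def yamlToJson(yamlString: String): String = {
    val options = new LoaderOptions()
    val yaml: java.util.LinkedHashMap[String, Object] =
      new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]], options))
        .load(yamlString)
    new JSONObject(yaml).toString()
  }

  // e.g. yamlToJson("a: 1") should yield {"a":1}
}
```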
12 changes: 6 additions & 6 deletions sjsonnet/src/sjsonnet/BaseCharRenderer.scala
@@ -10,16 +10,16 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output]
(out: T,
indent: Int = -1,
escapeUnicode: Boolean = false) extends JsVisitor[T, T]{
protected[this] val elemBuilder = new upickle.core.CharBuilder
protected[this] val unicodeCharBuilder = new upickle.core.CharBuilder()
protected val elemBuilder = new upickle.core.CharBuilder
protected val unicodeCharBuilder = new upickle.core.CharBuilder()
def flushCharBuilder() = {
elemBuilder.writeOutToIfLongerThan(out, if (depth == 0) 0 else 1000)
}

protected[this] var depth: Int = 0
protected var depth: Int = 0


protected[this] var commaBuffered = false
protected var commaBuffered = false

def flushBuffer() = {
if (commaBuffered) {
@@ -49,7 +49,7 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output]
}
}

def visitObject(length: Int, index: Int) = new ObjVisitor[T, T] {
def visitJsonableObject(length: Int, index: Int) = new ObjVisitor[T, T] {
flushBuffer()
elemBuilder.append('{')
depth += 1
@@ -144,7 +144,7 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output]

def visitNonNullString(s: CharSequence, index: Int) = {
flushBuffer()
upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, escapeUnicode)
upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, escapeUnicode, wrapQuotes = true)
flushCharBuilder()
out
}
2 changes: 1 addition & 1 deletion sjsonnet/src/sjsonnet/BaseRenderer.scala
@@ -47,7 +47,7 @@ class BaseRenderer[T <: java.io.Writer]
}
}

def visitObject(length: Int, index: Int) = new ObjVisitor[T, T] {
def visitJsonableObject(length: Int, index: Int) = new ObjVisitor[T, T] {
flushBuffer()
out.append('{')
depth += 1
8 changes: 4 additions & 4 deletions sjsonnet/src/sjsonnet/DecimalFormat.scala
@@ -22,7 +22,7 @@ object DecimalFormat {
def leftPad(n: Long, targetWidth: Int): String = {
val sign = if (n < 0) "-" else ""
val absN = math.abs(n)
val nWidth = if (absN == 0) 1 else Math.log10(absN).toInt + 1
val nWidth = if (absN == 0) 1 else Math.log10(absN.toDouble).toInt + 1
sign + "0" * (targetWidth - nWidth) + absN
}
def rightPad(n0: Long, minWidth: Int, maxWidth: Int): String = {
@@ -31,21 +31,21 @@ val n = (n0 / Math.pow(10, trailingZeroes(n0))).toInt
val n = (n0 / Math.pow(10, trailingZeroes(n0))).toInt
assert(n == math.abs(n))
val nWidth = if (n == 0) 1 else Math.log10(n).toInt + 1
(n + "0" * (minWidth - nWidth)).take(maxWidth)
(n.toString + "0" * (minWidth - nWidth)).take(maxWidth)
}
}
def format(fracLengthOpt: Option[(Int, Int)], expLengthOpt: Option[Int], number: Double): String = {
expLengthOpt match{
case Some(expLength) =>
val roundLog10 = Math.ceil(Math.log10(Math.abs(number))).toLong
val expNum = roundLog10 - 1
val scaled = number / math.pow(10, expNum)
val scaled = number / math.pow(10, expNum.toDouble)
val prefix = scaled.toLong.toString
val expFrag = leftPad(expNum, expLength)
val fracFrag = fracLengthOpt.map{case (zeroes, hashes) =>
if (zeroes == 0 && hashes == 0) ""
else {
val divided = number / Math.pow(10, expNum - zeroes - hashes)
val divided = number / Math.pow(10, (expNum - zeroes - hashes).toDouble)
val scaledFrac = divided % Math.pow(10, zeroes + hashes)
rightPad(Math.abs(Math.round(scaledFrac)), zeroes, zeroes + hashes)
}
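
The DecimalFormat edits do not change behaviour; they spell out conversions Scala 2 applied silently. If I read the migration notes correctly, recent Scala 3 compilers warn on lossy implicit widenings such as Long to Double, and string concatenation through the old `any2stringadd` conversion is deprecated, hence the explicit `toDouble` and `toString` calls. A small sketch:

```scala
object WideningExample {
  val n: Long = 123456789L

  // Long -> Double can lose precision, so the widening is written explicitly.
  val digits: Int = Math.log10(n.toDouble).toInt + 1

  // `n + "0"` relied on the deprecated any2stringadd conversion; call toString instead.
  val padded: String = n.toString + "0" * 2
}
```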