build.sbt (2 changes: 1 addition & 1 deletion)
@@ -5,7 +5,7 @@ import org.typelevel.scalacoptions.ScalacOptions
import org.typelevel.scalacoptions.ScalaVersion
import sbtdynver.DynVerPlugin.autoImport.*

-val scala3Version = "3.6.4"
+val scala3Version = "3.7.0"
val scala212Version = "2.12.20"
val scala213Version = "2.13.16"

kyo-caliban/src/main/scala/kyo/Resolvers.scala (2 changes: 1 addition & 1 deletion)
@@ -151,7 +151,7 @@ object Resolvers:
req =>
val f = Unsafe.unsafely { runtime.unsafe.runToFuture(endpoint.logic(zioMonadError)(())(req)) }
KyoSttpMonad.async { cb =>
-f.onComplete(r => cb(r.toEither))(ExecutionContext.parasitic)
+f.onComplete(r => cb(r.toEither))(using ExecutionContext.parasitic)
Review comment (collaborator, PR author):
The compiler now warns when an implicit parameter is passed explicitly without the using keyword.
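
A minimal, self-contained sketch of the new warning and the fix; the names and values below are illustrative, not taken from this PR:

    import scala.concurrent.{Await, ExecutionContext, Future}
    import scala.concurrent.duration.*

    @main def usingDemo(): Unit =
        given ExecutionContext = ExecutionContext.global

        val f = Future(21 * 2)

        // Pre-3.7 style: the implicit argument is passed positionally.
        // Scala 3.7 now warns that implicit parameters should be passed with a using clause.
        // f.onComplete(println)(ExecutionContext.parasitic)

        // 3.7-clean style, as used throughout this PR:
        f.onComplete(println)(using ExecutionContext.parasitic)

        Await.ready(f, 1.second) // keep the demo alive long enough to print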

Canceler { () =>
val _ = f.cancel()
()
kyo-cats/shared/src/main/scala/kyo/Cats.scala (2 changes: 1 addition & 1 deletion)
@@ -23,7 +23,7 @@ object Cats:
future.onComplete {
case Success(v) => p.complete(Result.succeed(v))
case Failure(ex) => p.complete(Result.panic(ex))
-}(ExecutionContext.parasitic)
+}(using ExecutionContext.parasitic)
p.onInterrupt(_ => discard(cancel()))
p.safe.get
}
kyo-core/jvm/src/main/scala/kyo/StreamCompression.scala (20 changes: 10 additions & 10 deletions)
@@ -170,7 +170,7 @@ object StreamCompression:
Present(cont),
Chunk.empty[Byte]
)))
-case Absent -> _ =>
+case _ =>
Review comment (collaborator, PR author):
The compiler now raises an exhaustiveness warning for these matches, so the Absent cases are widened to wildcards.
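
A rough standalone reproduction of the shape of match involved; Opt below is a stand-in for kyo-data's Maybe, so treat the names as assumptions rather than the real code:

    enum Opt[+A]:
        case Present(value: A)
        case Absent

    import Opt.*

    def describe(state: (Opt[Int], String)): String =
        state match
            case Present(n) -> label => s"$label: $n"
            // In the PR, 3.7 started flagging the `Absent -> _` form of this match
            // as non-exhaustive, so the second branch was widened to a wildcard.
            case _ => "nothing yet"

    @main def exhaustivenessDemo(): Unit =
        println(describe((Present(1), "count")))
        println(describe((Absent, "count")))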

IO(deflater.finish()).andThen(Loop.continue(DeflateState.PullDeflater(
deflater,
Absent,
@@ -280,7 +280,7 @@
crc32.update(toUnboxByteArray(bytes))
deflater.setInput(toUnboxByteArray(bytes))
}.andThen(Loop.continue(GZipState.PullDeflater(deflater, crc32, Present(cont), Chunk.empty[Byte])))
-case Absent -> _ =>
+case _ =>
IO(deflater.finish()).andThen(Loop.continue(GZipState.PullDeflater(deflater, crc32, Absent, Chunk.empty[Byte])))
}
case GZipState.PullDeflater(deflater, crc32, maybeEmitFn, chunk) =>
@@ -361,7 +361,7 @@
.andThen(
Loop.continue(InflateState.PullInflater(inflater, Present(emitFn), bytes))
)
-case Absent -> _ =>
+case _ =>
IO(Loop.continue(InflateState.PullInflater(inflater, Absent, Chunk.empty)))
case InflateState.PullInflater(inflater, maybeEmitFn, bytes) =>
if inflater.finished then
@@ -517,7 +517,7 @@
Emit.runFirst(emit).map:
case Present(bytes) -> emitFn =>
Loop.continue(GunzipState.ParseHeader(accBytes.concat(bytes), headerCrc32, emitFn()))
-case Absent -> _ =>
+case _ =>
if accBytes.isEmpty then
// No data, we stop
Loop.done
@@ -563,7 +563,7 @@
commentsToSkip,
emitFn()
))
-case Absent -> _ =>
+case _ =>
Abort
.fail(new StreamCompressionException("Invalid GZip header"))
.andThen(Loop.done)
@@ -581,7 +581,7 @@
commentsToSkip,
emitFn()
))
-case Absent -> _ =>
+case _ =>
Abort
.fail(new StreamCompressionException("Invalid GZip header"))
.andThen(Loop.done)
@@ -609,7 +609,7 @@
commentsToSkip,
emitFn()
))
-case Absent -> _ =>
+case _ =>
Abort
.fail(new StreamCompressionException("Invalid GZip header"))
.andThen(Loop.done)
@@ -628,7 +628,7 @@
Emit.runFirst(emit).map:
case Present(bytes) -> emitFn =>
Loop.continue(GunzipState.CheckCrc16(accBytes.concat(bytes), headerCrc32, emitFn()))
-case Absent -> _ =>
+case _ =>
Abort
.fail(new StreamCompressionException("Invalid GZip header"))
.andThen(Loop.done)
@@ -657,7 +657,7 @@
.andThen(
Loop.continue(GunzipState.PullInflater(inflater, contentCrc32, Present(emitFn), bytes))
)
-case Absent -> _ =>
+case _ =>
Loop.continue(GunzipState.PullInflater(inflater, contentCrc32, Absent, Chunk.empty))
else
IO(inflater.setInput(toUnboxByteArray(leftOver)))
@@ -703,7 +703,7 @@
contentCrc32,
Present(nextEmitFn)
))
-case Absent -> _ =>
+case _ =>
Loop.continue(GunzipState.CheckTrailer(accBytes, inflater, contentCrc32, Absent))
case Absent =>
Abort.fail[StreamCompressionException](StreamCompressionException("Checksum error"))
kyo-core/shared/src/main/scala/kyo/Fiber.scala (26 changes: 14 additions & 12 deletions)
@@ -36,7 +36,7 @@ import scala.util.control.NoStackTrace
* @see
* [[Fiber.Unsafe]] for low-level operations requiring [[AllowUnsafe]]
*/
-opaque type Fiber[+E, +A] = IOPromise[? <: E, ? <: A]
+opaque type Fiber[+E, +A] = IOPromiseBase[E, A]
Review comment (collaborator, PR author):
The compiler crashes on [? <: E, ? <: A]. As a workaround, I've added IOPromiseBase and a new asPromise method that casts back to the concrete IOPromise.
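
The workaround reduced to a toy shape; Base, Impl, and Handle are hypothetical names standing in for IOPromiseBase, IOPromise, and Fiber:

    object Promises:
        // Covariant marker trait the opaque alias can point at directly,
        // avoiding the wildcard form that crashed the 3.7 compiler.
        sealed trait Base[+A]:
            self: Impl[A] =>

        // The invariant implementation that actually holds state.
        final class Impl[A](private var value: A) extends Base[A]:
            def read: A = value

        opaque type Handle[+A] = Base[A]

        def init[A](a: A): Handle[A] = Impl(a)

        extension [A](self: Handle[A])
            // The single place that recovers the invariant implementation,
            // playing the same role as asPromise in this diff.
            private def asImpl: Impl[A] = self.asInstanceOf[Impl[A]]
            def get: A = self.asImpl.read

Handle stays covariant at the type level while the unchecked cast is confined to asImpl, which mirrors how asPromise is used by the extension methods below.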


object Fiber extends FiberPlatformSpecific:

@@ -107,13 +107,15 @@ object Fiber extends FiberPlatformSpecific:

extension [E, A](self: Fiber[E, A])

+private[kyo] def asPromise: IOPromise[E, A] = self.asInstanceOf[IOPromise[E, A]]

/** Gets the result of the Fiber.
*
* @return
* The result of the Fiber
*/
def get(using reduce: Reducible[Abort[E]], frame: Frame): A < (reduce.SReduced & Async) =
-Async.get(self)
+Async.get(self.asPromise)

/** Uses the result of the Fiber to compute a new value.
*
Expand All @@ -123,14 +125,14 @@ object Fiber extends FiberPlatformSpecific:
* The result of applying the function to the Fiber's result
*/
def use[B, S](f: A => B < S)(using reduce: Reducible[Abort[E]], frame: Frame): B < (reduce.SReduced & Async & S) =
-Async.use(self)(f)
+Async.use(self.asPromise)(f)

/** Gets the result of the Fiber as a Result.
*
* @return
* The Result of the Fiber
*/
-def getResult(using Frame): Result[E, A] < Async = Async.getResult(self)
+def getResult(using Frame): Result[E, A] < Async = Async.getResult(self.asPromise)

/** Uses the Result of the Fiber to compute a new value.
*
Expand All @@ -139,7 +141,7 @@ object Fiber extends FiberPlatformSpecific:
* @return
* The result of applying the function to the Fiber's Result
*/
-def useResult[B, S](f: Result[E, A] => B < S)(using Frame): B < (Async & S) = Async.useResult(self)(f)
+def useResult[B, S](f: Result[E, A] => B < S)(using Frame): B < (Async & S) = Async.useResult(self.asPromise)(f)

/** Checks if the Fiber is done.
*
@@ -177,7 +179,7 @@
* The Result of the Fiber, or a Timeout error
*/
def block(timeout: Duration)(using Frame): Result[E | Timeout, A] < IO =
-Clock.deadline(timeout).map(d => self.block(d.unsafe))
+Clock.deadline(timeout).map(d => self.asPromise.block(d.unsafe))

/** Converts the Fiber to a Future.
*
@@ -267,7 +269,7 @@
* @return
* The number of waiters on this Fiber
*/
-def waiters(using Frame): Int < IO = IO(self.waiters())
+def waiters(using Frame): Int < IO = IO(self.asPromise.waiters())

/** Polls the Fiber for a result without blocking.
*
@@ -510,7 +512,7 @@
}
}

-opaque type Unsafe[+E, +A] = IOPromise[? <: E, ? <: A]
+opaque type Unsafe[+E, +A] = IOPromiseBase[E, A]

/** WARNING: Low-level API meant for integrations, libraries, and performance-sensitive code. See AllowUnsafe for more details. */
object Unsafe:
@@ -526,7 +528,7 @@
case Failure(ex) =>
completeDiscard(Result.fail(ex))

-f.onComplete(p)(ExecutionContext.parasitic)
+f.onComplete(p)(using ExecutionContext.parasitic)
p
end fromFuture

@@ -550,21 +552,21 @@
end toFuture

def map[B](f: A => B)(using AllowUnsafe): Unsafe[E, B] =
-val p = new IOPromise[E, B](interrupts = self) with (Result[E, A] => Unit):
+val p = new IOPromise[E, B](interrupts = self.asPromise) with (Result[E, A] => Unit):
def apply(v: Result[E, A]) = completeDiscard(v.map(f))
lower.onComplete(p)
p
end map

def flatMap[E2, B](f: A => Unsafe[E2, B])(using AllowUnsafe): Unsafe[E | E2, B] =
-val p = new IOPromise[E | E2, B](interrupts = self) with (Result[E, A] => Unit):
+val p = new IOPromise[E | E2, B](interrupts = self.asPromise) with (Result[E, A] => Unit):
def apply(r: Result[E, A]) = r.foldError(v => becomeDiscard(f(v).asInstanceOf[IOPromise[E | E2, B]]), completeDiscard)
lower.onComplete(p)
p
end flatMap

def mapResult[E2, B](f: Result[E, A] => Result[E2, B])(using AllowUnsafe): Unsafe[E2, B] =
-val p = new IOPromise[E2, B](interrupts = self) with (Result[E, A] => Unit):
+val p = new IOPromise[E2, B](interrupts = self.asPromise) with (Result[E, A] => Unit):
def apply(r: Result[E, A]) = completeDiscard(Result(f(r)).flatten)
lower.onComplete(p)
p
kyo-core/shared/src/main/scala/kyo/Queue.scala (1 change: 0 additions & 1 deletion)
@@ -257,7 +257,6 @@ object Queue:
object Unsafe:
extension [A](self: Unsafe[A])
def add(value: A)(using AllowUnsafe, Frame): Unit = discard(self.offer(value))
-def safe: Unbounded[A] = self
Review comment (collaborator, PR author):
The compiler now warns about extension methods that can never be selected; in this case, Unsafe already has a safe method, so the extension is removed.
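
A compact reproduction of the new warning, using throwaway names rather than Kyo's actual Queue API:

    class Box[A](val value: A):
        def safe: Box[A] = this // the member always wins at call sites

    object BoxOps:
        extension [A](self: Box[A])
            // Scala 3.7 warns here: an extension method that clashes with an existing
            // member of the receiver can never be selected, so it is dead code.
            def safe: Box[A] = self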


def init[A](access: Access = Access.MultiProducerMultiConsumer, chunkSize: Int = 8)(
using
kyo-core/shared/src/main/scala/kyo/scheduler/IOPromise.scala (5 changes: 4 additions & 1 deletion)
@@ -9,7 +9,10 @@ import kyo.kernel.internal.Safepoint
import scala.annotation.tailrec
import scala.util.control.NonFatal

-private[kyo] class IOPromise[E, A](init: State[E, A]) extends Safepoint.Interceptor with Serializable:
+sealed private[kyo] trait IOPromiseBase[+E, +A]:
+    self: IOPromise[E, A] =>
+
+private[kyo] class IOPromise[E, A](init: State[E, A]) extends Safepoint.Interceptor with Serializable with IOPromiseBase[E, A]:

@volatile private var state = init

kyo-data/shared/src/main/scala/kyo/Duration.scala (2 changes: 0 additions & 2 deletions)
@@ -146,8 +146,6 @@ object Duration:
infix def <=(that: Duration): Boolean = self.toLong <= that.toLong
infix def >(that: Duration): Boolean = self.toLong > that.toLong
infix def <(that: Duration): Boolean = self.toLong < that.toLong
-infix def ==(that: Duration): Boolean = self.toLong == that.toLong
-infix def !=(that: Duration): Boolean = self.toLong != that.toLong

infix def +(that: Duration): Duration =
val sum: Long = self.toLong + that.toLong
kyo-data/shared/src/main/scala/kyo/Maybe.scala (2 changes: 1 addition & 1 deletion)
@@ -238,7 +238,7 @@ object Maybe:
* @return
* the flattened Maybe
*/
-inline def flatten[B](using inline ev: A <:< Maybe[B]): Maybe[B] =
+def flatten[B](using ev: A <:< Maybe[B]): Maybe[B] =
Review comment (collaborator, PR author):
The compiler was crashing with this method marked inline, so the inline modifiers were removed.
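
Behaviour is unchanged by dropping inline; a quick usage check, assuming Maybe, Present, and Absent are available via import kyo.* as elsewhere in this PR:

    import kyo.*

    @main def flattenDemo(): Unit =
        val nested: Maybe[Maybe[Int]] = Present(Present(42))
        val none: Maybe[Maybe[Int]]   = Absent

        println(nested.flatten) // expected: Present(42)
        println(none.flatten)   // expected: Absent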

if isEmpty then Absent else ev(get)

/** Filters the Maybe based on a predicate.
@@ -32,6 +32,6 @@ private[kyo] trait BaseKyoDataTest:
if errors.contains(expected) && !expected.isEmpty() then assertionSuccess
else assertionFailure(frame.render(Map("expected" -> expected, "actual" -> errors)))
case Result.Panic(exception) => assertionFailure(exception.getMessage)
-case Result.Success(_) => assertionFailure("Code type-checked successfully, expected a failure")
+case _ => assertionFailure("Code type-checked successfully, expected a failure")

end BaseKyoDataTest
kyo-data/shared/src/main/scala/kyo/internal/ForSome.scala (4 changes: 2 additions & 2 deletions)
@@ -6,7 +6,7 @@ type ForSome[F[_]] = ForSome.Type[F]
object ForSome:
class Unwrap[F[_], A](val unwrap: F[A]) extends AnyVal

-opaque type Type[F[_]] <: Unwrap[F, ?] = Unwrap[F, ?]
Review comment (collaborator, PR author):
@road21 The compiler was crashing here, and removing opaque fixed it.
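
For context, a toy illustration of what removing opaque gives up; Wrapped, OpaqueAlias, and PlainAlias are made-up names, not Kyo's:

    class Wrapped[A](val unwrap: A)

    object Scoped:
        // Bounded opaque alias: outside Scoped it is a subtype of Wrapped[A]
        // but cannot be constructed from one directly.
        opaque type OpaqueAlias[A] <: Wrapped[A] = Wrapped[A]
        def init[A](a: A): OpaqueAlias[A] = Wrapped(a)

    // Plain alias, as used in the 3.7 workaround: fully interchangeable with
    // Wrapped[A], so existing call sites keep compiling; only the opacity is lost.
    type PlainAlias[A] = Wrapped[A]

    @main def aliasDemo(): Unit =
        val o: Scoped.OpaqueAlias[Int] = Scoped.init(1)
        val p: PlainAlias[Int]         = Wrapped(2)
        // val o2: Scoped.OpaqueAlias[Int] = Wrapped(3) // does not compile: alias is opaque
        println(o.unwrap + p.unwrap)

The trade-off is that Type is now transparent to its users, which appears to be accepted here in exchange for compiling on 3.7.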

+type Type[F[_]] = Unwrap[F, ?]

/** Converts value of type `F[A]` to existential form
*/
@@ -26,7 +26,7 @@ type ForSome2[F[_, _]] = ForSome2.Type[F]
object ForSome2:
class Unwrap[F[_, _], A1, A2](val unwrap: F[A1, A2]) extends AnyVal

-opaque type Type[F[_, _]] <: Unwrap[F, ?, ?] = Unwrap[F, ?, ?]
+type Type[F[_, _]] = Unwrap[F, ?, ?]

/** Converts value of type `F[A1, A2]` to existential form
*/
kyo-data/shared/src/test/scala/kyo/RecordTest.scala (4 changes: 3 additions & 1 deletion)
@@ -297,7 +297,8 @@ class RecordTest extends Test:
inline def stage[Name <: String, Value](field: Field[Name, Value]): Column[Value] =
Column[Value](field.name)(using summonInline[AsColumn[Value]])

-"build record if all inlined" in {
+"build record if all inlined" in pendingUntilFixed {
Review comment (collaborator, PR author):
@road21 This one is still crashing. I've tried a few things without success; I'll open an issue to follow up.
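
For readers unfamiliar with it, ScalaTest's pendingUntilFixed reports the test as pending while its body keeps failing, and fails the suite once the body unexpectedly starts passing, as a reminder to remove the workaround. A minimal sketch with a made-up assertion:

    import org.scalatest.freespec.AnyFreeSpec

    class PendingDemo extends AnyFreeSpec:
        "a test blocked on a known issue" in pendingUntilFixed {
            // Still failing today, so the test shows up as pending rather than red.
            assert(1 + 1 == 3)
        }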

+assertCompiles("""
type Person = "name" ~ String & "age" ~ Int

val columns = Record.stage[Person](ColumnInline)
@@ -307,6 +308,7 @@
assert(columns.name == Column[String]("name"))
assert(columns.age == Column[Int]("age"))
assert(columns == result)
+""")
}

"compile error when inlining failed" in {
kyo-direct/shared/src/main/scala/kyo/Direct.scala (1 change: 1 addition & 0 deletions)
@@ -145,6 +145,7 @@ private def impl[A: Type](body: Expr[A])(using Quotes): Expr[Any] =
qual.asExprOf[t < s2]
})
}.asTerm
+case _ => bug("unreachable")
case Apply(TypeApply(Ident("later"), List(t, s2)), List(qual)) =>
qual
}
kyo-prelude/shared/src/main/scala/kyo/Stream.scala (2 changes: 1 addition & 1 deletion)
@@ -648,7 +648,7 @@ sealed abstract class Stream[V, -S] extends Serializable:
val restEmit = if rest.isEmpty then nextEmitFn() else Emit.valueWith(rest)(nextEmitFn())
Loop.done(taken -> Stream(restEmit))
end if
-case (Absent, _) => Loop.done(curChunk -> Stream(Emit.value(Chunk.empty[V])))
+case (_, _) => Loop.done(curChunk -> Stream(Emit.value(Chunk.empty[V])))
end splitAt

/** Process with a [[Sink]] of corresponding streaming element type.
kyo-prelude/shared/src/main/scala/kyo/internal/LayerMacros.scala (17 changes: 11 additions & 6 deletions)
@@ -48,13 +48,18 @@ private[kyo] object LayerMacros:
reportErrors(errors)

val exprFold = targetLayer.fold[Expr[Layer[?, ?]]](
-{ case ('{ $left: Layer[out1, s1] }, '{ $right: Layer[out2, s2] }) =>
-'{ $left.and($right) }
+{
+case ('{ $left: Layer[out1, s1] }, '{ $right: Layer[out2, s2] }) =>
+'{ $left.and($right) }
+case _ => bug("macro expected layers")
},
-{ case ('{ $left: Layer[out1, s1] }, right) =>
-right match
-case '{ $right: Layer[out2, Env[`out1`] & s2] } =>
-'{ $left.to($right) }
+{
+case ('{ $left: Layer[out1, s1] }, right) =>
+right match
+case '{ $right: Layer[out2, Env[`out1`] & s2] } =>
+'{ $left.to($right) }
+case _ => bug("macro expected layers")
+case _ => bug("macro expected layers")
},
_.value, {
// TODO: MAke nIcEr PlZEaz
@@ -28,7 +28,7 @@ private[netty] class NettyKyoRequestBody(val createFile: ServerRequest => KyoStt
fut.onComplete { r =>
import AllowUnsafe.embrace.danger
p.unsafe.complete(Result(r.get))
-}(ExecutionContext.parasitic)
+}(using ExecutionContext.parasitic)
p.get
}

kyo-zio/shared/src/test/scala/kyo/ZIOsTest.scala (2 changes: 0 additions & 2 deletions)
@@ -389,7 +389,6 @@ class ZIOsTest extends Test:
val cause = Cause.die(exception)
cause.toError match
case Result.Panic(e) => assert(e == exception)
-case _ => fail("Expected Result.Panic")
}

"Interrupt" in runKyo {
@@ -420,7 +419,6 @@
cause.toError match
case Result.Panic(e) =>
assert(e.getMessage.startsWith("Unexpected zio.Cause.Empty at"))
-case _ => fail("Expected Result.Panic")
end match
}
}
project/plugins.sbt (2 changes: 1 addition & 1 deletion)
@@ -3,7 +3,7 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4")
addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.9.3")

addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2")
-addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.18.2")
+addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.19.0")

addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.5.7")
addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.3.2")