Constraint solving for function overloading #213

Merged: 46 commits, merged Oct 4, 2024. The diff below shows changes from 9 commits.

Commits (46)
8e2e0c0
WIP: constraints solving for function overloading
auht Jan 3, 2024
3ad402f
fix type variables being filtered out
auht Jan 3, 2024
d40c9b7
fix lcg and add constraints filtering by lowerbound
auht Jan 14, 2024
4aa30ac
fix test: single function in overload
auht Jan 15, 2024
fcc772f
Update tests & remove obsolete function
LPTK Jan 29, 2024
badd1ef
Extend Constrained with field to hold constraints
auht Feb 14, 2024
63eb500
update tests
auht Feb 15, 2024
3ed1981
wip: constraints on ST and add polarity
auht Mar 3, 2024
9615646
fix prov
auht Mar 9, 2024
fe6ed2f
modify type simplifier
auht Mar 10, 2024
74ba3f0
WIP Changes from meeting
LPTK Mar 11, 2024
f41cc1b
use LinkedHashMap preserve insertion order
auht Mar 12, 2024
c8c35df
fix duplicate occurence
auht Mar 13, 2024
a4760d0
modify lcg
auht Mar 23, 2024
9f84c57
print pol
auht Mar 24, 2024
e759d29
add lcg record
auht Mar 24, 2024
6f7f30c
Changes from meeting
LPTK Mar 25, 2024
420bd56
fix tsc tuple size mismatch
auht Mar 29, 2024
e14560f
modify lcg and update tests
auht Apr 22, 2024
13d8c8f
fix bounds before tsc
auht Apr 23, 2024
ed52d0b
modify freshenAbove and disable distributivity
auht Jun 12, 2024
e254f86
fix tuple and record subtyping
auht Jun 14, 2024
0e27171
fix type variables with multiple indices
auht Jul 2, 2024
0e110ad
fix type variable with multiple indices and add test cases
auht Jul 3, 2024
0819149
fix ambiguity test
auht Jul 3, 2024
b8f3a3a
fix polarity and modify type simplifier
auht Jul 28, 2024
c46b74f
minor changes
auht Jul 31, 2024
20e73a9
Merge remote-tracking branch 'upstream/mlscript' into overloading-con…
auht Aug 1, 2024
cf244aa
fix compilation error
auht Aug 1, 2024
db8379e
tests explanation
auht Aug 1, 2024
d2b8f2b
run tests
auht Aug 1, 2024
fc32f52
fix freshenAbove
auht Aug 23, 2024
09b5082
Changes from meeting
LPTK Aug 26, 2024
e1b7fba
fix provtype
auht Aug 27, 2024
9cf43a4
use iterator
auht Aug 29, 2024
50afcc4
add flag
auht Sep 11, 2024
a540d00
fix not a function error
auht Sep 15, 2024
14e626f
Merge branch 'mlscript' into overloading-constraints
LPTK Sep 19, 2024
7103216
Update shared/src/test/diff/fcp/Overloads.mls
auht Sep 19, 2024
eff743c
add ambiguity check to toplevel forall
auht Sep 22, 2024
bbd8f9b
Update HeungTung.mls
auht Sep 23, 2024
4f21762
minor changes
auht Sep 27, 2024
e740159
guard with flag
auht Oct 4, 2024
f00661e
Merge branch 'mlscript' into overloading-constraints
auht Oct 4, 2024
81caf7b
Update Overloads.mls
auht Oct 4, 2024
8e5118f
run tests
auht Oct 4, 2024
17 changes: 17 additions & 0 deletions shared/src/main/scala/mlscript/ConstraintSolver.scala
@@ -619,6 +619,11 @@ class ConstraintSolver extends NormalForms { self: Typer =>
recLb(ar.inner, b.inner)
rec(b.inner.ub, ar.inner.ub, false)
case (LhsRefined(S(b: ArrayBase), ts, r, _), _) => reportError()
case (LhsRefined(S(ov: Overload), ts, r, trs), RhsBases(_, S(L(f: FunctionType)), _)) =>
val tsc = TupleSetConstraints.mk(ov, f)
if (tsc.constraints.isEmpty) reportError()
// val t = TupleSetConstraints.mk(ov)
// annoying(Nil, LhsRefined(S(t), ts, r, trs), Nil, done_rs)
case (LhsRefined(S(ov: Overload), ts, r, trs), _) =>
annoying(Nil, LhsRefined(S(ov.approximatePos), ts, r, trs), Nil, done_rs) // TODO remove approx. with ambiguous constraints
case (LhsRefined(S(Without(b, ns)), ts, r, _), RhsBases(pts, N | S(L(_)), _)) =>
@@ -818,6 +823,23 @@ class ConstraintSolver extends NormalForms { self: Typer =>
val newBound = (cctx._1 ::: cctx._2.reverse).foldRight(rhs)((c, ty) =>
if (c.prov is noProv) ty else mkProxy(ty, c.prov))
lhs.upperBounds ::= newBound // update the bound
lhs.tsc.foreachEntry {
case (tsc, i) =>
tsc.updateOn(i, rhs)
if (tsc.constraints.isEmpty) reportError()
}
lhs.lowerBounds.foreach(rec(_, rhs, true)) // propagate from the bound

case (lhs, rhs: TypeVariable) if lhs.level <= rhs.level =>
println(s"NEW $rhs LB (${lhs.level})")
val newBound = (cctx._1 ::: cctx._2.reverse).foldLeft(lhs)((ty, c) =>
if (c.prov is noProv) ty else mkProxy(ty, c.prov))
rhs.lowerBounds ::= newBound // update the bound
rhs.tsc.foreachEntry {
case (tsc, i) =>
tsc.updateOn(i, lhs)
if (tsc.constraints.isEmpty) reportError()
}
rhs.upperBounds.foreach(rec(lhs, _, true)) // propagate from the bound


Expand Down Expand Up @@ -1550,6 +1565,8 @@ class ConstraintSolver extends NormalForms { self: Typer =>
freshened += tv -> v
v.lowerBounds = tv.lowerBounds.mapConserve(freshen)
v.upperBounds = tv.upperBounds.mapConserve(freshen)
v.tsc = tv.tsc // fixme
v.tsc.foreachEntry { case (tsc, i) => tsc.tvs = tsc.tvs.mapConserve(x => (x._1, freshen(x._2))) }
v
}

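Illustrative note (not part of the diff): the ConstraintSolver changes above build a TupleSetConstraints for an overloaded type and shrink its constraint set whenever a tracked type variable gets a new bound, reporting an error once no alternative can match. The following toy sketch shows that filtering idea under simplifying assumptions — CandidateSet and its updateOn are hypothetical stand-ins for TupleSetConstraints, and plain strings stand in for SimpleType.

```scala
// Toy model only: candidate rows stand in for TupleSetConstraints.constraints,
// and String stands in for the typer's SimpleType.
object OverloadFilterSketch {
  // Each overload alternative contributes one row of component types (e.g. param, result).
  final case class CandidateSet(var rows: List[List[String]])

  // A new bound fixes component `index` to `bound`: drop the rows that disagree.
  def updateOn(cs: CandidateSet, index: Int, bound: String): Unit = {
    cs.rows = cs.rows.filter(row => row(index) == bound)
    if (cs.rows.isEmpty) println("no alternative matches: report an error")
    else if (cs.rows.sizeIs == 1) println(s"resolved to ${cs.rows.head}: emit concrete constraints")
  }

  def main(args: Array[String]): Unit = {
    // Overload (Int -> Int) & (Bool -> Bool); the components tracked here are (param, result).
    val cs = CandidateSet(List(List("Int", "Int"), List("Bool", "Bool")))
    updateOn(cs, 0, "Int") // the argument is known to be Int, so only the first row survives
  }
}
```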
30 changes: 28 additions & 2 deletions shared/src/main/scala/mlscript/TypeSimplifier.scala
@@ -75,8 +75,20 @@ trait TypeSimplifier { self: Typer =>
tv.upperBounds.reverseIterator.map(process(_, S(false -> tv)))
.reduceOption(_ &- _).filterNot(_.isTop).toList
else Nil
// fixme
// nv.tsc = tv.tsc.map {
// case (tsc, i) =>
// tsc.tvs.collect { case (_, x: TV) => renewed.get(x).flatMap(_.tsc) }
// .flatten.headOption
// .fold(new TupleSetConstraints(tsc.constraints, tsc.tvs)(tsc.prov), i) {
// tsc => (tsc._1, i)
// }
// }
// nv.tsc.foreach { case (tsc, i) =>
// val (l, r) = tsc.tvs.splitAt(i)
// tsc.tvs = l ++ ((r.head._1, nv) :: r.drop(1))
// }
}

nv

case ComposedType(true, l, r) =>
@@ -544,7 +556,9 @@ trait TypeSimplifier { self: Typer =>
analyzed1.setAndIfUnset(tv -> pol(tv).getOrElse(false)) { apply(pol)(ty) }
case N =>
if (pol(tv) =/= S(false))
analyzed1.setAndIfUnset(tv -> true) { tv.lowerBounds.foreach(apply(pol.at(tv.level, true))) }
analyzed1.setAndIfUnset(tv -> true) {
tv.lowerBounds.foreach(apply(pol.at(tv.level, true)))
}
if (pol(tv) =/= S(true))
analyzed1.setAndIfUnset(tv -> false) { tv.upperBounds.foreach(apply(pol.at(tv.level, false))) }
}
@@ -1012,6 +1026,18 @@ trait TypeSimplifier { self: Typer =>
res.lowerBounds = tv.lowerBounds.map(transform(_, pol.at(tv.level, true), Set.single(tv)))
if (occNums.contains(false -> tv))
res.upperBounds = tv.upperBounds.map(transform(_, pol.at(tv.level, false), Set.single(tv)))
// TODO
// res.tsc = tv.tsc.map {
// case (tsc, i) =>
// tsc.tvs.collect { case (_, x: TV) => renewals.get(x).flatMap(_.tsc)}.flatten.headOption
// .fold(new TupleSetConstraints(tsc.constraints, tsc.tvs)(tsc.prov), i) {
// tsc => (tsc._1, i)
// }
// }
// res.tsc.foreach { case (tsc, i) =>
// val (l, r) = tsc.tvs.splitAt(i)
// tsc.tvs = l ++ ((r.head._1, res) :: r.drop(1))
// }
}
res
}()
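Illustrative note (not part of the diff): the commented-out fixme/TODO blocks in TypeSimplifier aim at re-pointing a shared constraint set at renewed type variables during simplification. A simplified model of that remapping is sketched below; Var, Tsc, and renewSlot are hypothetical names that do not exist in the codebase.

```scala
// Simplified model: when simplification replaces a type variable with a renewed copy,
// the constraint set shared by the variables it mentions must update the matching slot.
object RenewTscSketch {
  final class Var(val name: String)
  final class Tsc(var slots: Vector[Var]) // one slot per tracked type variable

  def renewSlot(tsc: Tsc, index: Int, renewed: Var): Unit =
    tsc.slots = tsc.slots.updated(index, renewed)

  def main(args: Array[String]): Unit = {
    val a = new Var("'a"); val b = new Var("'b")
    val tsc = new Tsc(Vector(a, b))
    val a1 = new Var("'a1")            // 'a gets renewed to 'a1 during simplification
    renewSlot(tsc, 0, a1)              // repoint slot 0 at the renewed variable
    println(tsc.slots.map(_.name))     // Vector('a1, 'b)
  }
}
```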
30 changes: 25 additions & 5 deletions shared/src/main/scala/mlscript/Typer.scala
@@ -578,7 +578,7 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
tv.assignedTo = S(bod)
tv
case Rem(base, fs) => Without(rec(base), fs.toSortedSet)(tyTp(ty.toLoc, "field removal type"))
case Constrained(base, tvbs, where) =>
case Constrained(base, tvbs, where, tscs) =>
val res = rec(base match {
case ty: Type => ty
case _ => die
@@ -591,6 +591,14 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
constrain(rec(lo), rec(hi))(raise,
tp(mergeOptions(lo.toLoc, hi.toLoc)(_ ++ _), "constraint specifiation"), ctx)
}
tscs.foreach { case (typevars, constrs) =>
val tvs = typevars.map(x => (x._1, rec(x._2)))
val tsc = new TupleSetConstraints(MutSet.empty ++ constrs.map(_.map(rec)), tvs)(res.prov)
tvs.zipWithIndex.foreach {
case ((_, tv: TV), i) => tv.tsc += tsc -> i
case _ => ()
}
}
res
case PolyType(vars, ty) =>
val oldLvl = ctx.lvl
@@ -1674,6 +1682,7 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
val expandType = ()

var bounds: Ls[TypeVar -> Bounds] = Nil
var tscs: Ls[Ls[(Opt[Bool], Type)] -> Ls[Ls[Type]]] = Nil

val seenVars = mutable.Set.empty[TV]

@@ -1783,6 +1792,14 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
if (l =/= Bot || u =/= Top)
bounds ::= nv -> Bounds(l, u)
}
tv.tsc.foreachEntry {
case (tsc, i) =>
if (tsc.tvs.forall { case (_, v: TV) => !seenVars(v) || v === tv; case _ => true }) {
val tvs = tsc.tvs.map(x => (x._1, go(x._2)))
val constrs = tsc.constraints.toList.map(_.map(go))
tscs ::= tvs -> constrs
}
}
}
nv
})
@@ -1832,17 +1849,20 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
case Overload(as) => as.map(go).reduce(Inter)
case PolymorphicType(lvl, bod) =>
val boundsSize = bounds.size
val tscsSize = tscs.size
val b = go(bod)

// This is not completely correct: if we've already traversed TVs as part of a previous sibling PolymorphicType,
// the bounds of these TVs won't be registered again...
// FIXME in principle we'd want to compute a transitive closure...
val newBounds = bounds.reverseIterator.drop(boundsSize).toBuffer
val newTscs = tscs.reverseIterator.drop(tscsSize).toBuffer

val qvars = bod.varsBetween(lvl, MaxLevel).iterator
val ftvs = b.freeTypeVariables ++
newBounds.iterator.map(_._1) ++
newBounds.iterator.flatMap(_._2.freeTypeVariables)
newBounds.iterator.flatMap(_._2.freeTypeVariables) ++
newTscs.iterator.flatMap(_._1)
val fvars = qvars.filter(tv => ftvs.contains(tv.asTypeVar))
if (fvars.isEmpty) b else
PolyType(fvars.map(_.asTypeVar pipe (R(_))).toList, b)
@@ -1851,16 +1871,16 @@ class Typer(var dbg: Boolean, var verbose: Bool, var explainErrors: Bool, val ne
val lbs = others1.mapValues(_.head).groupMap(_._2)(_._1).toList
val bounds = (ubs.mapValues(_.reduce(_ &- _)) ++ lbs.mapValues(_.reduce(_ | _)).map(_.swap))
val procesased = bounds.map { case (lo, hi) => Bounds(go(lo), go(hi)) }
Constrained(go(bod), Nil, procesased)
Constrained(go(bod), Nil, procesased, Nil)

// case DeclType(lvl, info) =>

}
// }(r => s"~> $r")

val res = goLike(st)(new ExpCtx(Map.empty))
if (bounds.isEmpty) res
else Constrained(res, bounds, Nil)
if (bounds.isEmpty && tscs.isEmpty) res
else Constrained(res, bounds, Nil, tscs)

// goLike(st)
}
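Illustrative note (not part of the diff): the Typer changes above extend `Constrained` with a `tscs` field of shape `Ls[Ls[(Opt[Bool], Type)] -> Ls[Ls[Type]]]`: for each entry, a list of (optional polarity, type) pairs for the tracked variables, paired with the rows of candidate types they may jointly take. The sketch below models that shape with plain strings standing in for Type; the printed "where" form only suggests one way such an entry could be read.

```scala
// Simplified model: Pol mirrors Opt[Bool] polarity; String stands in for Type.
object ConstrainedSketch {
  type Pol = Option[Boolean] // Some(true) = positive, Some(false) = negative, None = both

  // One entry of the new `tscs` field: tracked variables plus the candidate rows they range over.
  final case class TscEntry(tvs: List[(Pol, String)], rows: List[List[String]])

  def main(args: Array[String]): Unit = {
    // ('A, 'B) may only be instantiated to (Int, Int) or (Bool, Bool).
    val entry = TscEntry(
      tvs  = List(None -> "'A", None -> "'B"),
      rows = List(List("Int", "Int"), List("Bool", "Bool")))
    val vars = entry.tvs.map(_._2).mkString("(", ", ", ")")
    val sets = entry.rows.map(_.mkString("(", ", ", ")")).mkString("{", ", ", "}")
    println(s"where $vars in $sets") // where ('A, 'B) in {(Int, Int), (Bool, Bool)}
  }
}
```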
142 changes: 141 additions & 1 deletion shared/src/main/scala/mlscript/TyperDatatypes.scala
@@ -534,6 +534,8 @@ abstract class TyperDatatypes extends TyperHelpers { Typer: Typer =>
require(value.forall(_.level <= level))
_assignedTo = value
}

var tsc: MutMap[TupleSetConstraints, Int] = MutMap.empty

// * Bounds should always be disregarded when `equatedTo` is defined, as they are then irrelevant:
def lowerBounds: List[SimpleType] = { require(assignedTo.isEmpty, this); _lowerBounds }
@@ -646,5 +648,143 @@ abstract class TyperDatatypes extends TyperHelpers { Typer: Typer =>
lazy val underlying: SimpleType = tt.neg()
val prov = noProv
}


class TupleSetConstraints(val constraints: MutSet[Ls[ST]], var tvs: Ls[(Opt[Bool], ST)])(val prov: TypeProvenance) {
def updateImpl(index: Int, bound: ST)(implicit raise: Raise, ctx: Ctx) : Unit = {
val (_, tvs0, cs) = TupleSetConstraints.lcg(tvs(index)._1, bound, constraints.toList.map(x => S(x(index))))(prov, ctx)
val cs0 = tvs0.zip(cs).map {
case ((_, v: TV), c) => c
case ((S(true), lb), c) => c.map(_.flatMap { ty =>
val dnf = DNF.mk(MaxLevel, Nil, lb & ty.neg(), true)
if (dnf.isBot || dnf.cs.forall(c => !(c.vars.isEmpty && c.nvars.isEmpty)))
S(ty)
else N
})
case ((S(false), ub), c) => c.map(_.flatMap { ty =>
val dnf = DNF.mk(MaxLevel, Nil, ty & ub.neg(), true)
if (dnf.isBot || dnf.cs.forall(c => !(c.vars.isEmpty && c.nvars.isEmpty)))
S(ty)
else N
})
case ((N, b), c) => c.map(_.flatMap { ty =>
val dnf = DNF.mk(MaxLevel, Nil, ty & b.neg(), true)
val dnf0 = DNF.mk(MaxLevel, Nil, b & ty.neg(), true)
if ((dnf.isBot || dnf.cs.forall(c => !(c.vars.isEmpty && c.nvars.isEmpty)))
&& (dnf0.isBot || dnf0.cs.forall(c => !(c.vars.isEmpty && c.nvars.isEmpty))))
S(ty)
else N
})
}
val ncs = constraints.toList.zip(cs0.transpose).map {
case (u, v) => if (v.exists(_.isEmpty)) Nil else u ++ v.flatten
}.filter(_.nonEmpty)
constraints.clear()
constraints ++= ncs
tvs ++= tvs0
tvs.zipWithIndex.foreach {
case ((pol, tv: TV), i) => tv.tsc.update(this, i)
case _ => ()
}
}
def updateOn(index: Int, bound: ST)(implicit raise: Raise, ctx: Ctx) : Unit = {
println(s"TSC update: $tvs in $constraints")
updateImpl(index, bound)
println(s"TSC update: $tvs in $constraints")
if (constraints.sizeCompare(1) === 0) {
tvs.foreach {
case (pol, tv: TV) => tv.tsc.clear()
case _ => ()
}
constraints.head.zip(tvs).foreach {
case (c, (pol, t)) =>
if (pol =/= S(true)) constrain(c, t)(raise, prov, ctx)
if (pol =/= S(false)) constrain(t, c)(raise, prov, ctx)
}
}
}
}
object TupleSetConstraints {
def lcgField(pol: Opt[Bool], first: FieldType, rest: Ls[Opt[FieldType]])
(implicit prov: TypeProvenance, ctx: Ctx)
: (FieldType, Ls[(Opt[Bool], ST)], Ls[Ls[Opt[ST]]]) = {
val (ub, tvs, constrs) = lcg(pol, first.ub, rest.map(_.map(_.ub)))
if (first.lb.isEmpty && rest.flatten.forall(_.lb.isEmpty)) {
(FieldType(N, ub)(prov), tvs, constrs)
} else {
val (lb, ltvs, lconstrs) = lcg(pol.map(!_), first.lb.getOrElse(BotType), rest.map(_.map(_.lb.getOrElse(BotType))))
(FieldType(S(lb), ub)(prov), tvs ++ ltvs, constrs ++ lconstrs)
}
}
def lcg(pol: Opt[Bool], first: ST, rest: Ls[Opt[ST]])
(implicit prov: TypeProvenance, ctx: Ctx)
: (ST, Ls[(Opt[Bool], ST)], Ls[Ls[Opt[ST]]]) = first.unwrapProxies match {
case a: FunctionType =>
val (lhss, rhss) = rest.map(_.map(_.unwrapProxies) match {
case S(FunctionType(lhs, rhs)) => S(lhs) -> S(rhs)
case _ => (N, N)
}).unzip
val (lhs, ltvs, lconstrs) = lcg(pol.map(!_), a.lhs, lhss)
val (rhs, rtvs, rconstrs) = lcg(pol, a.rhs, rhss)
(FunctionType(lhs, rhs)(prov), ltvs ++ rtvs, lconstrs ++ rconstrs)
case a: ArrayType =>
val inners = rest.map(_.map(_.unwrapProxies) match {
case S(b: ArrayType) => S(b.inner)
case _ => N
})
val (t, tvs, constrs) = lcgField(pol, a.inner, inners)
(ArrayType(t)(prov), tvs, constrs)
case a: TupleType =>
val fields = rest.map(_.map(_.unwrapProxies) match {
case S(TupleType(fs)) if a.fields.sizeCompare(fs.size) === 0 =>
fs.map(x => S(x._2))
case _ => a.fields.map(_ => N)
})
val (fts, tvss, constrss) = a.fields.map(_._2).zip(fields.transpose).map { case (a, bs) => lcgField(pol, a, bs) }.unzip3
(TupleType(fts.map(N -> _))(prov), tvss.flatten, constrss.flatten)
case a: TR if rest.flatten.map(_.unwrapProxies).forall { case b: TR => a.defn === b.defn && a.targs.sizeCompare(b.targs.size) === 0; case _ => false } =>
val targs = rest.map(_.map(_.unwrapProxies) match {
case S(b: TR) => b.targs.map(S(_))
case _ => a.targs.map(_ => N)
})
val (ts, tvss, constrss) = a.targs.zip(targs.transpose).map { case (a, bs) => lcg(pol, a, bs) }.unzip3
(TypeRef(a.defn, ts)(prov), tvss.flatten, constrss.flatten)
case a: TV if rest.flatten.map(_.unwrapProxies).forall { case b: TV => a.compare(b) === 0; case _ => false } => (a, Nil, Nil)
case a if rest.flatten.forall(_.unwrapProxies === a) => (a, Nil, Nil)
case _ =>
(first, List((pol, first)), List(rest.map(_.map(_.unwrapProxies))))
// val tv = freshVar(prov, N)
// (tv, List(tv), List(first :: rest))
}
def lcgFunction(pol: Opt[Bool], first: FT, rest: Ls[FunctionType])(implicit prov: TypeProvenance, ctx: Ctx)
: (FT, Ls[(Opt[Bool], ST)], Ls[Ls[Opt[ST]]]) = {
val (lhss, rhss) = rest.map {
case FunctionType(lhs, rhs) => S(lhs) -> S(rhs)
}.unzip
val (lhs, ltvs, lconstrs) = lcg(pol.map(!_), first.lhs, lhss)
val (rhs, rtvs, rconstrs) = lcg(pol, first.rhs, rhss)
(FunctionType(lhs, rhs)(prov), ltvs ++ rtvs, lconstrs ++ rconstrs)
}
def mk(ov: Overload, f: FT)(implicit raise: Raise, ctx: Ctx): TupleSetConstraints = {
val (t, tvs, constrs) = lcgFunction(S(false), f, ov.alts)(ov.prov, ctx)
val tsc = new TupleSetConstraints(MutSet.empty ++ constrs.transpose.filter(_.forall(_.isDefined)).map(_.flatten), tvs)(ov.prov)
println(s"TSC mk: ${tsc.tvs} in ${tsc.constraints}")
tvs.zipWithIndex.foreach {
case ((pol, tv: TV), i) => tv.tsc.update(tsc, i)
case ((_, ty), i) => tsc.updateImpl(i, ty)
}
if (tsc.constraints.sizeCompare(1) === 0) {
tvs.foreach {
case (pol, tv: TV) => tv.tsc.clear()
case _ => ()
}
tsc.constraints.head.zip(tvs).foreach {
case (c, (pol, t)) =>
if (pol =/= S(true)) constrain(c, t)(raise, ov.prov, ctx)
if (pol =/= S(false)) constrain(t, c)(raise, ov.prov, ctx)
}
}
println(s"TSC mk: ${tsc.tvs} in ${tsc.constraints}")
tsc
}
}
}
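Illustrative note (not part of the diff): `TupleSetConstraints.lcg` computes a least common generalization of the overload alternatives against the expected type, keeping positions where all alternatives agree and turning disagreeing positions into tracked slots whose candidate values are recorded for later filtering. The toy version below works over a two-constructor type syntax (Fun/Base) and ignores polarity, fields, records, and type references; Hole and the helper names are hypothetical.

```scala
// Toy least-common-generalization: agreeing positions are kept, disagreeing positions
// become "holes" whose candidate types are collected for later filtering.
object LcgSketch {
  sealed trait Ty
  final case class Fun(lhs: Ty, rhs: Ty) extends Ty
  final case class Base(name: String) extends Ty
  final case class Hole(id: Int) extends Ty // a position where the alternatives disagree

  // Returns the generalized shape plus, for each hole, the list of types it ranges over.
  def lcg(first: Ty, rest: List[Ty], fresh: () => Int): (Ty, List[(Hole, List[Ty])]) =
    (first, rest) match {
      case (Fun(l, r), _) if rest.forall(_.isInstanceOf[Fun]) =>
        val funs = rest.collect { case f: Fun => f }
        val (gl, hl) = lcg(l, funs.map(_.lhs), fresh)
        val (gr, hr) = lcg(r, funs.map(_.rhs), fresh)
        (Fun(gl, gr), hl ++ hr)
      case _ if rest.forall(_ == first) =>
        (first, Nil) // all alternatives agree: keep the type as-is
      case _ =>
        val h = Hole(fresh()) // disagreement: record every alternative for later filtering
        (h, List(h -> (first :: rest)))
    }

  def main(args: Array[String]): Unit = {
    var n = 0; def fresh() = { n += 1; n }
    val alts = List(Fun(Base("Int"), Base("Int")), Fun(Base("Bool"), Base("Bool")))
    println(lcg(alts.head, alts.tail, fresh _))
    // Fun(Hole(1), Hole(2)), with each hole ranging over {Base(Int), Base(Bool)}
  }
}
```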