Skip to content

Commit

Permalink
fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
mio-19 committed Jan 22, 2025
1 parent e017c01 commit c117ef8
Show file tree
Hide file tree
Showing 6 changed files with 77 additions and 76 deletions.
4 changes: 2 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ val scalafixRules = Seq(
"CompareSameValue",
"DirectoryAndPackageName",
"DiscardValue",
"DuplicateWildcardImport",
// "DuplicateWildcardImport",
// "EitherFold",
"EitherGetOrElse",
"EitherMap",
Expand Down Expand Up @@ -77,7 +77,7 @@ val scalafixRules = Seq(
"RemoveEmptyObject",
// "RemoveIf",
"RemovePureEff",
"RemoveSamePackageImport",
// "RemoveSamePackageImport",
"RemoveStringInterpolation",
"RemoveUselessParamComments",
"RepeatedRewrite",
Expand Down
26 changes: 13 additions & 13 deletions reader/src/main/scala/chester/readerv2/Lexer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,40 +2,40 @@ package chester.readerv2

import chester.error.Pos
import chester.reader.ParseError
import Token._
import chester.readerv2.Token.*

case class LexerState(
tokens: TokenStream,
current: Token,
errors: Vector[ParseError] = Vector.empty
tokens: TokenStream,
current: Token,
errors: Vector[ParseError] = Vector.empty
)

class Lexer(tokens: TokenStream) {
def initialize: LexerState = {
tokens.headOption match {
case Some(Right(token)) => LexerState(tokens.tail, token)
case Some(Left(error)) => LexerState(tokens.tail, EOF(error.pos), Vector(error))
case None => LexerState(LazyList.empty, EOF(Pos.zero))
case Some(Left(error)) => LexerState(tokens.tail, EOF(error.pos), Vector(error))
case None => LexerState(LazyList.empty, EOF(Pos.zero))
}
}

def advance(state: LexerState): LexerState = {
state.tokens.headOption match {
case Some(Right(token)) =>
case Some(Right(token)) =>
state.copy(tokens = state.tokens.tail, current = token)
case Some(Left(error)) =>
case Some(Left(error)) =>
state.copy(
tokens = state.tokens.tail,
tokens = state.tokens.tail,
errors = state.errors :+ error
)
case None => state
}
}

def skipWhitespaceAndComments(state: LexerState): LexerState = {
state.current match {
case _: Whitespace | _: Comment => skipWhitespaceAndComments(advance(state))
case _ => state
case _ => state
}
}
}
}
28 changes: 14 additions & 14 deletions reader/src/main/scala/chester/readerv2/Token.scala
Original file line number Diff line number Diff line change
Expand Up @@ -12,43 +12,43 @@ object Token {
// One segment of a token name. Identifier.text (below) renders a name by
// concatenating the characters of each part in order.
sealed trait NamePart
// A run of identifier characters within a name.
case class IdentifierPart(value: Vector[Char]) extends NamePart
// A run of operator characters within a name.
case class OperatorPart(value: Vector[Char]) extends NamePart

// One piece of a string literal's body; StringLiteral.text re-renders these.
sealed trait StringSegment
// Plain characters, emitted verbatim.
case class StringChars(chars: Vector[Char]) extends StringSegment
// A single escaped character; rendered as a backslash followed by `char`.
case class StringEscape(char: Char) extends StringSegment
// An interpolated expression; rendered as "${...}" around the tokens' text.
case class StringInterpolation(expr: Vector[Token]) extends StringSegment

case class Identifier(parts: Vector[NamePart], pos: Pos) extends Token {
def text: String = parts.map {
case IdentifierPart(chars) => chars.mkString
case OperatorPart(chars) => chars.mkString
case OperatorPart(chars) => chars.mkString
}.mkString
}

// Integer literal token; `radix` records how the literal was written.
// NOTE(review): `text` renders ANY non-decimal radix with an "0x" hex prefix,
// and a negative value renders as "0x-..." — confirm only radix 10/16 occur
// (scanNumber appears to produce only those) and that this round-trip is intended.
case class IntegerLiteral(value: BigInt, radix: Int, pos: Pos) extends Token {
def text: String = if (radix == 10) value.toString else s"0x${value.toString(16)}"
}

// Rational (decimal) literal token; `text` is BigDecimal's default rendering,
// which may differ character-for-character from the source spelling (e.g. exponents).
case class RationalLiteral(value: BigDecimal, pos: Pos) extends Token {
def text: String = value.toString
}

case class StringLiteral(segments: Vector[StringSegment], pos: Pos) extends Token {
def text: String = {
val sb = new StringBuilder("\"")
segments.foreach {
case StringChars(chars) => sb.append(chars.mkString)
case StringEscape(c) => sb.append('\\').append(c)
case StringChars(chars) => sb.append(chars.mkString)
case StringEscape(c) => sb.append('\\').append(c)
case StringInterpolation(expr) => sb.append("${").append(expr.map(_.text).mkString).append("}")
}
sb.append("\"").toString
}
}

// Symbol literal token ('name); `text` restores the leading single quote
// that the scanner stripped.
case class SymbolLiteral(name: String, pos: Pos) extends Token {
  def text: String = "'" + name
}

// Delimiters
// Opening-parenthesis delimiter token.
case class LParen(pos: Pos) extends Token {
  def text: String = "("
}
// Closing-parenthesis delimiter token.
case class RParen(pos: Pos) extends Token {
  def text: String = ")"
}
Expand All @@ -60,17 +60,17 @@ object Token {
// "." delimiter token.
case class Dot(pos: Pos) extends Token {
  def text: String = "."
}
// "=" delimiter token.
case class Equal(pos: Pos) extends Token {
  def text: String = "="
}
// "->" delimiter token.
case class Arrow(pos: Pos) extends Token {
  def text: String = "->"
}

// Comments and Whitespace
// Line-comment token. `content` excludes the leading "//", which `text` re-adds;
// the trailing newline is not part of the comment (scanComment stops before '\n').
case class Comment(content: Vector[Char], pos: Pos) extends Token {
def text: String = s"//${content.mkString}"
}

// Whitespace token; preserves the exact characters so source can be re-rendered.
case class Whitespace(chars: Vector[Char], pos: Pos) extends Token {
def text: String = chars.mkString
}

case class EOF(pos: Pos) extends Token { def text = "" }
}

type TokenStream = LazyList[Either[ParseError, Token]]
type TokenStream = LazyList[Either[ParseError, Token]]
93 changes: 48 additions & 45 deletions reader/src/main/scala/chester/readerv2/Tokenizer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,36 +2,36 @@ package chester.readerv2

import chester.error.{Pos, Reporter}
import chester.utils.WithUTF16
import chester.syntax.IdentifierRules._
import chester.syntax.IdentifierRules.*
import chester.reader.{ParseError, SourceOffset}
import _root_.io.github.iltotore.iron._
import _root_.io.github.iltotore.iron.constraint.all.{Positive0 => IPositive0, _}
import Token._
import _root_.io.github.iltotore.iron.*
import _root_.io.github.iltotore.iron.constraint.all.{Positive0 as IPositive0, *}
import chester.readerv2.Token.*

class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]) {
private val content = sourceOffset.readContent match {
case Right(content) => content
case Left(error) =>
case Left(error) =>
reporter(error)
""
}

private case class TokenizerState(
index: Int,
line: Int :| IPositive0,
column: WithUTF16,
utf16Column: Int :| IPositive0
index: Int,
line: Int :| IPositive0,
column: WithUTF16,
utf16Column: Int :| IPositive0
)

private def currentPos(state: TokenizerState): Pos = Pos(
sourceOffset.posOffset + WithUTF16(state.index.refineUnsafe, state.utf16Column),
state.line,
state.column
)
private def peek(state: TokenizerState): Option[Char] =

private def peek(state: TokenizerState): Option[Char] =
if (state.index + 1 >= content.length) None else Some(content(state.index + 1))

private def advance(state: TokenizerState): TokenizerState = {
if (state.index < content.length) {
val c = content(state.index)
Expand Down Expand Up @@ -69,9 +69,9 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
case ',' => Comma(pos)
case '.' => Dot(pos)
case '=' => Equal(pos)
case '-' if peek(state) == Some('>') =>
case '-' if peek(state) == Some('>') =>
Arrow(pos)
case c =>
case c =>
reporter(ParseError(s"Unexpected character: $c", pos))
EOF(pos)
}
Expand All @@ -82,63 +82,65 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = state

while (current.index < content.length && content(current.index).isWhitespace) {
chars += content(current.index)
current = advance(current)
}

(current, Right(Token.Whitespace(chars.toVector, startPos)))
}

// Scans a "//" line comment starting at `state`, returning the state positioned
// at the terminating newline (or end of input) plus the Comment token.
// The Comment's content excludes the "//" marker and the newline itself.
private def scanComment(state: TokenizerState): (TokenizerState, Either[ParseError, Token]) = {
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = advance(advance(state)) // Skip //

// Collect everything up to (but not including) the end-of-line.
while (current.index < content.length && content(current.index) != '\n') {
chars += content(current.index)
current = advance(current)
}

// Never fails — an unterminated final line is still a valid comment.
(current, Right(Comment(chars.toVector, startPos)))
}

// Scans a maximal run of identifier characters starting at `state` and wraps it
// as a single-part Identifier token.
// NOTE(review): the first character is consumed under isIdentifierPart, not
// isIdentifierFirst — the dispatch in tokenize is assumed to have already
// validated the first character; confirm the two predicates agree on it.
private def scanIdentifier(state: TokenizerState): (TokenizerState, Either[ParseError, Token]) = {
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = state

// Accumulate while within bounds and the character continues an identifier.
while (current.index < content.length && isIdentifierPart(content(current.index))) {
chars += content(current.index)
current = advance(current)
}

// Never fails: returns the token plus the state positioned just past it.
(current, Right(Identifier(Vector(IdentifierPart(chars.toVector)), startPos)))
}

private def isHexDigit(c: Char): Boolean =
private def isHexDigit(c: Char): Boolean =
c.isDigit || ('a' to 'f').contains(c.toLower)

private def scanNumber(state: TokenizerState): (TokenizerState, Either[ParseError, Token]) = {
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = state
var isHex = false

if (content(current.index) == '0' && current.index + 1 < content.length && content(current.index + 1).toLower == 'x') {
isHex = true
chars += '0' += 'x'
current = advance(advance(current))
}

while (current.index < content.length &&
(content(current.index).isDigit ||
(isHex && isHexDigit(content(current.index))))) {

while (
current.index < content.length &&
(content(current.index).isDigit ||
(isHex && isHexDigit(content(current.index))))
) {
chars += content(current.index)
current = advance(current)
}

if (current.index < content.length && content(current.index) == '.' && !isHex) {
chars += '.'
current = advance(current)
Expand All @@ -149,10 +151,11 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
val value = BigDecimal(chars.mkString)
(current, Right(RationalLiteral(value, startPos)))
} else {
val value = if (isHex)
BigInt(chars.drop(2).mkString, 16)
else
BigInt(chars.mkString)
val value =
if (isHex)
BigInt(chars.drop(2).mkString, 16)
else
BigInt(chars.mkString)
(current, Right(IntegerLiteral(value, if (isHex) 16 else 10, startPos)))
}
}
Expand All @@ -161,7 +164,7 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = advance(state) // Skip opening quote

while (current.index < content.length && content(current.index) != '"') {
if (content(current.index) == '\\' && current.index + 1 < content.length) {
current = advance(current)
Expand All @@ -171,7 +174,7 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
}
current = advance(current)
}

if (current.index >= content.length) {
(current, Left(ParseError("Unterminated string literal", startPos)))
} else {
Expand All @@ -184,12 +187,12 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
val startPos = currentPos(state)
val chars = new scala.collection.mutable.ArrayBuffer[Char]()
var current = advance(state) // Skip opening quote

while (current.index < content.length && content(current.index) != '\'' && content(current.index) != '\n') {
chars += content(current.index)
current = advance(current)
}

if (current.index >= content.length || content(current.index) == '\n') {
(current, Left(ParseError("Unterminated symbol literal", startPos)))
} else {
Expand All @@ -205,18 +208,18 @@ class Tokenizer(sourceOffset: SourceOffset)(using reporter: Reporter[ParseError]
} else {
val c = content(state.index)
val (nextState, result) = c match {
case c if c.isWhitespace => scanWhitespace(state)
case c if c.isWhitespace => scanWhitespace(state)
case '/' if peek(state) == Some('/') => scanComment(state)
case c if isIdentifierFirst(c) => scanIdentifier(state)
case c if c.isDigit => scanNumber(state)
case '"' => scanString(state)
case '\'' => scanSymbol(state)
case c => singleCharToken(state, c)
case c if isIdentifierFirst(c) => scanIdentifier(state)
case c if c.isDigit => scanNumber(state)
case '"' => scanString(state)
case '\'' => scanSymbol(state)
case c => singleCharToken(state, c)
}
result #:: loop(nextState)
}
}

loop(TokenizerState(0, sourceOffset.linesOffset, sourceOffset.posOffset, sourceOffset.posOffset.utf16))
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package chester.erasure

import chester.error.*
import chester.syntax.core.*
import chester.tyck.*

trait Eraser {
def checkAndErase(term: Term, ty: Term, effects: Effects)(using context: ErasureContext, reporter: Reporter[TyckProblem]): Term
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import cats.implicits.*
import chester.error.*
import chester.syntax.{Const, ModuleRef}
import chester.syntax.concrete.*
import chester.tyck.*
import chester.utils.*

import scala.annotation.tailrec
Expand Down

0 comments on commit c117ef8

Please sign in to comment.