Skip to content

Commit

Permalink
Docs
Browse files Browse the repository at this point in the history
  • Loading branch information
Kampfkarren committed Jan 20, 2024
1 parent 6a12257 commit 2d13175
Show file tree
Hide file tree
Showing 8 changed files with 71 additions and 5 deletions.
4 changes: 4 additions & 0 deletions full-moon/src/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2167,6 +2167,7 @@ impl BinOp {
matches!(*self, BinOp::Caret(_) | BinOp::TwoDots(_))
}

/// Given a token, returns whether it is a right associative binary operator.
pub fn is_right_associative_token(token: &TokenReference) -> bool {
matches!(
token.token_type(),
Expand All @@ -2182,6 +2183,7 @@ impl BinOp {
/// Operators that require just one operand, such as #X
#[derive(Clone, Debug, Display, PartialEq, Eq, Node, Visit)]
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[allow(missing_docs)]
#[non_exhaustive]
#[display(fmt = "{}")]
pub enum UnOp {
Expand Down Expand Up @@ -2219,10 +2221,12 @@ pub struct AstError {
}

impl AstError {
/// Returns a human-readable error message describing what went wrong.
/// This is a clone of the `additional` detail string stored on this error.
pub fn error_message(&self) -> Cow<'static, str> {
self.additional.clone()
}

/// Returns the range of the error
pub fn range(&self) -> (Position, Position) {
self.range
.or_else(|| Some((self.token.start_position(), self.token.end_position())))
Expand Down
11 changes: 10 additions & 1 deletion full-moon/src/ast/parser_structs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -232,19 +232,27 @@ impl<T> ParserResult<T> {
}
}

/// A produced [`Ast`](crate::ast::Ast), along with any errors found during parsing.
/// This Ast may not be exactly the same as the input code, as reconstruction may have occurred.
/// For more information, read the documentation for [`parse_fallible`](crate::parse_fallible).
pub struct AstResult {
// The (possibly reconstructed) syntax tree produced by the parser.
ast: Ast,
// All errors encountered while parsing.
errors: Vec<crate::Error>,
}

impl AstResult {
/// Returns a reference to the [`Ast`](crate::ast::Ast) that was parsed.
/// If there were any errors, this will not be exactly the same,
/// as reconstruction will have occurred.
/// For more information, read the documentation for [`parse_fallible`](crate::parse_fallible).
/// To take ownership of the tree instead, use [`AstResult::into_ast`].
pub fn ast(&self) -> &Ast {
&self.ast
}

/// Consumes the [`Ast`](crate::ast::Ast) that was parsed.
/// If there were any errors, this will not be exactly the same,
/// as reconstruction will have occurred.
/// For more information, read the documentation for [`parse_fallible`](crate::parse_fallible).
/// Unlike [`AstResult::ast`], this takes ownership of the tree; the error list is dropped.
pub fn into_ast(self) -> Ast {
self.ast
}
Expand Down Expand Up @@ -343,6 +351,7 @@ impl AstResult {
}
}

/// Consumes this AstResult, converting it into a `Result` of the parsed
/// [`Ast`](crate::ast::Ast) or the parse errors that were collected.
/// NOTE(review): delegates to a `From`/`Into` conversion defined elsewhere —
/// presumably `Err` whenever any errors were found; confirm at that impl.
pub fn into_result(self) -> Result<Ast, Vec<crate::Error>> {
self.into()
}
Expand Down
2 changes: 2 additions & 0 deletions full-moon/src/ast/punctuated.rs
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,8 @@ impl<T> Punctuated<T> {
self.pairs.push(pair);
}

/// Pushes a new node `T` onto the sequence, with the given punctuation.
/// Will apply the punctuation to the last item, which must exist.
pub fn push_punctuated(&mut self, value: T, punctuation: TokenReference) {
let last_pair = self.pairs.pop().expect(
"push_punctuated adds the punctuation onto the last element, but there are no elements",
Expand Down
1 change: 1 addition & 0 deletions full-moon/src/ast/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -679,6 +679,7 @@ impl ExportedTypeDeclaration {
#[derive(Clone, Debug, Display, PartialEq, Eq, Node, Visit)]
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[non_exhaustive]
#[allow(missing_docs)]
#[display(fmt = "{}")]
/// Compound operators, such as X += Y or X -= Y
pub enum CompoundOp {
Expand Down
17 changes: 17 additions & 0 deletions full-moon/src/ast/versions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,82 +5,99 @@ const VERSION_LUA52: u8 = 1 << 1;
const VERSION_LUA53: u8 = 1 << 2;
const VERSION_LUA54: u8 = 1 << 3;

/// Represents the Lua version(s) to parse as.
/// Lua 5.1 is always included.
/// In order to get more Lua versions, you must include their respective features.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct LuaVersion {
// Bit set of enabled versions, one `VERSION_*` bit per version.
// A value of 0 means plain Lua 5.1, which is always supported.
bitfield: u8,
}

impl LuaVersion {
/// Creates a new LuaVersion with the default features: Luau, Lua 5.2, Lua 5.3, and Lua 5.4.
pub fn new() -> Self {
Self::default()
}

/// Creates a new LuaVersion with only Lua 5.1.
pub fn lua51() -> Self {
Self { bitfield: 0 }
}

/// Creates a LuaVersion that parses Luau alone.
#[cfg(feature = "luau")]
pub fn luau() -> Self {
    let bitfield = VERSION_LUAU;
    Self { bitfield }
}

/// Adds Luau as a version to parse for.
#[cfg(feature = "luau")]
pub fn with_luau(self) -> Self {
    // Delegate through `|` for consistency with `with_lua52`, `with_lua53`,
    // and `with_lua54`, rather than manipulating the bitfield directly.
    self | Self::luau()
}

/// Returns true if this version set includes Luau.
/// Always false when the crate is built without the `luau` feature.
pub fn has_luau(self) -> bool {
    let bit_set = self.bitfield & VERSION_LUAU != 0;
    bit_set && cfg!(feature = "luau")
}

/// Creates a LuaVersion that parses Lua 5.2 alone.
#[cfg(feature = "lua52")]
pub fn lua52() -> Self {
    let bitfield = VERSION_LUA52;
    Self { bitfield }
}

/// Adds Lua 5.2 to the set of versions to parse for.
#[cfg(feature = "lua52")]
pub fn with_lua52(self) -> Self {
    let added = Self::lua52();
    self | added
}

/// Returns true if this version set includes Lua 5.2.
/// Always false when the crate is built without the `lua52` feature.
pub fn has_lua52(self) -> bool {
    let bit_set = self.bitfield & VERSION_LUA52 != 0;
    bit_set && cfg!(feature = "lua52")
}

/// Creates a LuaVersion targeting Lua 5.3.
#[cfg(feature = "lua53")]
pub fn lua53() -> Self {
    // The Lua 5.2 bit is set alongside the 5.3 bit.
    let bitfield = VERSION_LUA52 | VERSION_LUA53;
    Self { bitfield }
}

/// Adds Lua 5.3 to the set of versions to parse for.
#[cfg(feature = "lua53")]
pub fn with_lua53(self) -> Self {
    let added = Self::lua53();
    self | added
}

/// Returns true if this version set includes Lua 5.3.
/// Always false when the crate is built without the `lua53` feature.
pub fn has_lua53(self) -> bool {
    let bit_set = self.bitfield & VERSION_LUA53 != 0;
    bit_set && cfg!(feature = "lua53")
}

/// Creates a LuaVersion targeting Lua 5.4.
#[cfg(feature = "lua54")]
pub fn lua54() -> Self {
    // The Lua 5.2 and 5.3 bits are set alongside the 5.4 bit.
    let bitfield = VERSION_LUA52 | VERSION_LUA53 | VERSION_LUA54;
    Self { bitfield }
}

/// Adds Lua 5.4 to the set of versions to parse for.
#[cfg(feature = "lua54")]
pub fn with_lua54(self) -> Self {
    let added = Self::lua54();
    self | added
}

/// Returns true if this version set includes Lua 5.4.
/// Always false when the crate is built without the `lua54` feature.
pub fn has_lua54(self) -> bool {
    let bit_set = self.bitfield & VERSION_LUA54 != 0;
    bit_set && cfg!(feature = "lua54")
}
Expand Down
4 changes: 3 additions & 1 deletion full-moon/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// #![warn(missing_docs)]
#![warn(missing_docs)]
#![allow(clippy::large_enum_variant)]
#![cfg_attr(doc_cfg, feature(doc_auto_cfg))]
//! # Full Moon
Expand Down Expand Up @@ -44,13 +44,15 @@ pub enum Error {
}

impl Error {
/// Returns a human readable error message
pub fn error_message(&self) -> Cow<'static, str> {
match self {
Error::AstError(error) => error.error_message(),
Error::TokenizerError(error) => error.to_string().into(),
}
}

/// Returns the range of the error
pub fn range(&self) -> (Position, Position) {
match self {
Error::AstError(error) => error.range(),
Expand Down
25 changes: 22 additions & 3 deletions full-moon/src/tokenizer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ use super::{
#[cfg(feature = "luau")]
use super::{interpolated_strings, InterpolatedStringKind};

/// A lexer, which will produce a stream of tokens from a source string.
/// If you just want to create an [`Ast`](crate::ast::Ast) from a string, you want to use
/// [`parse`](crate::parse) instead.
pub struct Lexer {
pub(crate) source: LexerSource,
sent_eof: bool,
Expand All @@ -17,10 +20,12 @@ pub struct Lexer {
#[cfg(feature = "luau")]
pub(crate) brace_stack: Vec<interpolated_strings::BraceType>,

/// The Lua version(s) to parse for.
pub lua_version: LuaVersion,
}

impl Lexer {
/// Creates a new Lexer from the given source string and Lua version(s).
pub fn new(source: &str, lua_version: LuaVersion) -> Self {
let mut lexer = Self::new_lazy(source, lua_version);

Expand All @@ -30,6 +35,8 @@ impl Lexer {
lexer
}

/// Creates a new Lexer from the given source string and Lua version(s), but does not process
/// the first token.
pub fn new_lazy(source: &str, lua_version: LuaVersion) -> Self {
Self {
source: LexerSource::new(source),
Expand All @@ -45,21 +52,25 @@ impl Lexer {
}
}

/// Returns a reference to the current token, or `None` if no token is queued.
pub fn current(&self) -> Option<&LexerResult<TokenReference>> {
self.next_token.as_ref()
}

/// Returns a reference to the token after the current one, without consuming anything.
pub fn peek(&self) -> Option<&LexerResult<TokenReference>> {
self.peek_token.as_ref()
}

/// Consumes the current token, advancing the lexer, and returns it.
/// Yields `None` once no tokens remain.
pub fn consume(&mut self) -> Option<LexerResult<TokenReference>> {
    let consumed = match self.next_token.take() {
        Some(token) => token,
        None => return None,
    };
    // Promote the lookahead token and lex a fresh one to replace it.
    self.next_token = self.peek_token.take();
    self.peek_token = self.process_next_with_trivia();
    Some(consumed)
}

/// Returns a vector of all tokens left in the source string.
pub fn collect(self) -> LexerResult<Vec<Token>> {
let mut tokens = Vec::new();
let mut lexer = self;
Expand Down Expand Up @@ -236,6 +247,7 @@ impl Lexer {
trailing_trivia
}

/// Processes and returns the next token in the source string, ignoring trivia.
pub fn process_next(&mut self) -> Option<LexerResult<Token>> {
let start_position = self.source.position();

Expand Down Expand Up @@ -1246,7 +1258,7 @@ fn is_identifier_start(character: char) -> bool {
matches!(character, 'a'..='z' | 'A'..='Z' | '_')
}

pub struct LexerSource {
pub(crate) struct LexerSource {
source: Vec<char>,
lexer_position: LexerPosition,
}
Expand Down Expand Up @@ -1298,7 +1310,7 @@ impl LexerSource {
}

#[derive(Clone, Copy)]
pub struct LexerPosition {
struct LexerPosition {
position: Position,
index: usize,
}
Expand All @@ -1316,37 +1328,44 @@ impl LexerPosition {
}
}

/// The result of a lexer operation.
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub enum LexerResult<T> {
/// The lexer operation was successful.
Ok(T),
/// The lexer operation was unsuccessful, and could not be recovered.
Fatal(Vec<TokenizerError>),
/// The lexer operation was unsuccessful, but some result can be extracted.
/// Holds both the recovered value and the errors hit along the way.
Recovered(T, Vec<TokenizerError>),
}

impl<T: std::fmt::Debug> LexerResult<T> {
pub fn new(value: T, errors: Vec<TokenizerError>) -> Self {
fn new(value: T, errors: Vec<TokenizerError>) -> Self {
if errors.is_empty() {
Self::Ok(value)
} else {
Self::Recovered(value, errors)
}
}

/// Unwraps the result, panicking if it is not [`LexerResult::Ok`].
pub fn unwrap(self) -> T {
    if let Self::Ok(value) = self {
        value
    } else {
        panic!("expected ok, got {self:#?}")
    }
}

/// Unwraps the errors, panicking if it is [`LexerResult::Ok`].
pub fn unwrap_errors(self) -> Vec<TokenizerError> {
match self {
Self::Fatal(errors) | Self::Recovered(_, errors) => errors,
_ => panic!("expected fatal error, got {self:#?}"),
}
}

/// Returns the errors, if there was any.
pub fn errors(self) -> Vec<TokenizerError> {
match self {
Self::Recovered(_, errors) => errors,
Expand Down
12 changes: 12 additions & 0 deletions full-moon/src/tokenizer/structs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -586,6 +586,18 @@ impl TokenReference {
TokenReference::symbol_specific_lua_version(text, LuaVersion::lua51()).unwrap()
}

/// Returns a symbol with the leading and trailing whitespace,
/// much like [`TokenReference::symbol`], but only if it's valid
/// for the given Lua version.
/// ```rust
/// # use full_moon::tokenizer::{Symbol, TokenReference, TokenType, TokenizerErrorType};
/// # use full_moon::LuaVersion;
/// # fn main() -> Result<(), Box<TokenizerErrorType>> {
/// assert!(TokenReference::symbol_specific_lua_version("goto", LuaVersion::lua51()).is_err());
/// assert!(TokenReference::symbol_specific_lua_version("goto", LuaVersion::lua52()).is_ok());
/// # Ok(())
/// # }
/// ```
pub fn symbol_specific_lua_version(
text: &str,
lua_version: LuaVersion,
Expand Down

0 comments on commit 2d13175

Please sign in to comment.