diff --git a/.github/workflows/elixir.yml b/.github/workflows/elixir.yml index c7a2cb27b..d13c11d98 100644 --- a/.github/workflows/elixir.yml +++ b/.github/workflows/elixir.yml @@ -106,7 +106,7 @@ jobs: id: set_mix_lock_hash run: | mix_lock_hash="${{ hashFiles(env.project_mix_lock) }}" - projects_hash="${{ hashFiles(env.project_ex_blob, env.projects_locks_blob) }}" + projects_hash="${{ hashFiles(env.projects_ex_blob, env.projects_locks_blob) }}" echo "mix_lock_hash=$mix_lock_hash::$projects_hash" >> "$GITHUB_OUTPUT" # Step: Define how to cache deps. Restores existing cache if present. diff --git a/apps/common/lib/future/code/typespec.ex b/apps/common/lib/future/code/typespec.ex new file mode 100644 index 000000000..ae330302e --- /dev/null +++ b/apps/common/lib/future/code/typespec.ex @@ -0,0 +1,424 @@ +# Copied from https://github.com/elixir-lang/elixir/blob/d87aadf8bd280d4ac969a6825637fcbd1e412f81/lib/elixir/lib/code/typespec.ex +defmodule Future.Code.Typespec do + @moduledoc false + + @doc """ + Converts a spec clause back to Elixir quoted expression. + """ + @spec spec_to_quoted(atom, tuple) :: {atom, keyword, [Macro.t()]} + def spec_to_quoted(name, spec) + + def spec_to_quoted(name, {:type, anno, :fun, [{:type, _, :product, args}, result]}) + when is_atom(name) do + meta = meta(anno) + body = {name, meta, Enum.map(args, &typespec_to_quoted/1)} + + vars = + for type_expr <- args ++ [result], + var <- collect_vars(type_expr), + uniq: true, + do: {var, {:var, meta, nil}} + + spec = {:"::", meta, [body, typespec_to_quoted(result)]} + + if vars == [] do + spec + else + {:when, meta, [spec, vars]} + end + end + + def spec_to_quoted(name, {:type, anno, :fun, []}) when is_atom(name) do + meta = meta(anno) + {:"::", meta, [{name, meta, []}, quote(do: term)]} + end + + def spec_to_quoted(name, {:type, anno, :bounded_fun, [type, constrs]}) when is_atom(name) do + meta = meta(anno) + {:type, _, :fun, [{:type, _, :product, args}, result]} = type + + guards = + for {:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, var}, type]]} <- constrs do + {erl_to_ex_var(var), typespec_to_quoted(type)} + end + + ignore_vars = Keyword.keys(guards) + + vars = + for type_expr <- args ++ [result], + var <- collect_vars(type_expr), + var not in ignore_vars, + uniq: true, + do: {var, {:var, meta, nil}} + + args = for arg <- args, do: typespec_to_quoted(arg) + + when_args = [ + {:"::", meta, [{name, meta, args}, typespec_to_quoted(result)]}, + guards ++ vars + ] + + {:when, meta, when_args} + end + + @doc """ + Converts a type clause back to Elixir AST. + """ + def type_to_quoted(type) + + def type_to_quoted({{:record, record}, fields, args}) when is_atom(record) do + fields = for field <- fields, do: typespec_to_quoted(field) + args = for arg <- args, do: typespec_to_quoted(arg) + type = {:{}, [], [record | fields]} + quote(do: unquote(record)(unquote_splicing(args)) :: unquote(type)) + end + + def type_to_quoted({name, type, args}) when is_atom(name) do + args = for arg <- args, do: typespec_to_quoted(arg) + quote(do: unquote(name)(unquote_splicing(args)) :: unquote(typespec_to_quoted(type))) + end + + @doc """ + Returns all types available from the module's BEAM code. + + The result is returned as a list of tuples where the first + element is the type (`:typep`, `:type` and `:opaque`). + + The module must have a corresponding BEAM file which can be + located by the runtime system. The types will be in the Erlang + Abstract Format. 
+ """ + @spec fetch_types(module | binary) :: {:ok, [tuple]} | :error + def fetch_types(module) when is_atom(module) or is_binary(module) do + case typespecs_abstract_code(module) do + {:ok, abstract_code} -> + exported_types = for {:attribute, _, :export_type, types} <- abstract_code, do: types + exported_types = List.flatten(exported_types) + + types = + for {:attribute, _, kind, {name, _, args} = type} <- abstract_code, + kind in [:opaque, :type] do + cond do + kind == :opaque -> {:opaque, type} + {name, length(args)} in exported_types -> {:type, type} + true -> {:typep, type} + end + end + + {:ok, types} + + _ -> + :error + end + end + + @doc """ + Returns all specs available from the module's BEAM code. + + The result is returned as a list of tuples where the first + element is spec name and arity and the second is the spec. + + The module must have a corresponding BEAM file which can be + located by the runtime system. The types will be in the Erlang + Abstract Format. + """ + @spec fetch_specs(module | binary) :: {:ok, [tuple]} | :error + def fetch_specs(module) when is_atom(module) or is_binary(module) do + case typespecs_abstract_code(module) do + {:ok, abstract_code} -> + {:ok, for({:attribute, _, :spec, value} <- abstract_code, do: value)} + + :error -> + :error + end + end + + @doc """ + Returns all callbacks available from the module's BEAM code. + + The result is returned as a list of tuples where the first + element is spec name and arity and the second is the spec. + + The module must have a corresponding BEAM file + which can be located by the runtime system. The types will be + in the Erlang Abstract Format. + """ + @spec fetch_callbacks(module | binary) :: {:ok, [tuple]} | :error + def fetch_callbacks(module) when is_atom(module) or is_binary(module) do + case typespecs_abstract_code(module) do + {:ok, abstract_code} -> + {:ok, for({:attribute, _, :callback, value} <- abstract_code, do: value)} + + :error -> + :error + end + end + + defp typespecs_abstract_code(module) do + with {module, binary} <- get_module_and_beam(module), + {:ok, {_, [debug_info: {:debug_info_v1, backend, data}]}} <- + :beam_lib.chunks(binary, [:debug_info]) do + case data do + {:elixir_v1, %{}, specs} -> + # Fast path to avoid translation to Erlang from Elixir. 
+ {:ok, specs} + + _ -> + case backend.debug_info(:erlang_v1, module, data, []) do + {:ok, abstract_code} -> {:ok, abstract_code} + _ -> :error + end + end + else + _ -> :error + end + end + + defp get_module_and_beam(module) when is_atom(module) do + with {^module, beam, _filename} <- :code.get_object_code(module), + {:ok, ^module} <- beam |> :beam_lib.info() |> Keyword.fetch(:module) do + {module, beam} + else + _ -> :error + end + end + + defp get_module_and_beam(beam) when is_binary(beam) do + case :beam_lib.info(beam) do + [_ | _] = info -> {info[:module], beam} + _ -> :error + end + end + + ## To AST conversion + + defp collect_vars({:ann_type, _anno, args}) when is_list(args) do + [] + end + + defp collect_vars({:type, _anno, _kind, args}) when is_list(args) do + Enum.flat_map(args, &collect_vars/1) + end + + defp collect_vars({:remote_type, _anno, args}) when is_list(args) do + Enum.flat_map(args, &collect_vars/1) + end + + defp collect_vars({:typed_record_field, _anno, type}) do + collect_vars(type) + end + + defp collect_vars({:paren_type, _anno, [type]}) do + collect_vars(type) + end + + defp collect_vars({:var, _anno, var}) do + [erl_to_ex_var(var)] + end + + defp collect_vars(_) do + [] + end + + defp typespec_to_quoted({:user_type, anno, name, args}) do + args = for arg <- args, do: typespec_to_quoted(arg) + {name, meta(anno), args} + end + + defp typespec_to_quoted({:type, anno, :tuple, :any}) do + {:tuple, meta(anno), []} + end + + defp typespec_to_quoted({:type, anno, :tuple, args}) do + args = for arg <- args, do: typespec_to_quoted(arg) + {:{}, meta(anno), args} + end + + defp typespec_to_quoted({:type, _anno, :list, [{:type, _, :union, unions} = arg]}) do + case unpack_typespec_kw(unions, []) do + {:ok, ast} -> ast + :error -> [typespec_to_quoted(arg)] + end + end + + defp typespec_to_quoted({:type, anno, :list, []}) do + {:list, meta(anno), []} + end + + defp typespec_to_quoted({:type, _anno, :list, [arg]}) do + [typespec_to_quoted(arg)] + end + + defp typespec_to_quoted({:type, anno, :nonempty_list, []}) do + [{:..., meta(anno), nil}] + end + + defp typespec_to_quoted({:type, anno, :nonempty_list, [arg]}) do + [typespec_to_quoted(arg), {:..., meta(anno), nil}] + end + + defp typespec_to_quoted({:type, anno, :map, :any}) do + {:map, meta(anno), []} + end + + defp typespec_to_quoted({:type, anno, :map, fields}) do + fields = + Enum.map(fields, fn + {:type, _, :map_field_assoc, :any} -> + {{:optional, [], [{:any, [], []}]}, {:any, [], []}} + + {:type, _, :map_field_exact, [{:atom, _, k}, v]} -> + {k, typespec_to_quoted(v)} + + {:type, _, :map_field_exact, [k, v]} -> + {{:required, [], [typespec_to_quoted(k)]}, typespec_to_quoted(v)} + + {:type, _, :map_field_assoc, [k, v]} -> + {{:optional, [], [typespec_to_quoted(k)]}, typespec_to_quoted(v)} + end) + + case List.keytake(fields, :__struct__, 0) do + {{:__struct__, struct}, fields_pruned} when is_atom(struct) and struct != nil -> + map_pruned = {:%{}, meta(anno), fields_pruned} + {:%, meta(anno), [struct, map_pruned]} + + _ -> + {:%{}, meta(anno), fields} + end + end + + defp typespec_to_quoted({:type, anno, :binary, [arg1, arg2]}) do + [arg1, arg2] = for arg <- [arg1, arg2], do: typespec_to_quoted(arg) + line = meta(anno)[:line] + + case {typespec_to_quoted(arg1), typespec_to_quoted(arg2)} do + {arg1, 0} -> + quote(line: line, do: <<_::unquote(arg1)>>) + + {0, arg2} -> + quote(line: line, do: <<_::_*unquote(arg2)>>) + + {arg1, arg2} -> + quote(line: line, do: <<_::unquote(arg1), _::_*unquote(arg2)>>) + end + end + + defp 
typespec_to_quoted({:type, anno, :union, args}) do + args = for arg <- args, do: typespec_to_quoted(arg) + Enum.reduce(Enum.reverse(args), fn arg, expr -> {:|, meta(anno), [arg, expr]} end) + end + + defp typespec_to_quoted({:type, anno, :fun, [{:type, _, :product, args}, result]}) do + args = for arg <- args, do: typespec_to_quoted(arg) + [{:->, meta(anno), [args, typespec_to_quoted(result)]}] + end + + defp typespec_to_quoted({:type, anno, :fun, [args, result]}) do + [{:->, meta(anno), [[typespec_to_quoted(args)], typespec_to_quoted(result)]}] + end + + defp typespec_to_quoted({:type, anno, :fun, []}) do + typespec_to_quoted({:type, anno, :fun, [{:type, anno, :any}, {:type, anno, :any, []}]}) + end + + defp typespec_to_quoted({:type, anno, :range, [left, right]}) do + {:.., meta(anno), [typespec_to_quoted(left), typespec_to_quoted(right)]} + end + + defp typespec_to_quoted({:type, _anno, nil, []}) do + [] + end + + defp typespec_to_quoted({:type, anno, name, args}) do + args = for arg <- args, do: typespec_to_quoted(arg) + {name, meta(anno), args} + end + + defp typespec_to_quoted({:var, anno, var}) do + {erl_to_ex_var(var), meta(anno), nil} + end + + defp typespec_to_quoted({:op, anno, op, arg}) do + {op, meta(anno), [typespec_to_quoted(arg)]} + end + + defp typespec_to_quoted({:remote_type, anno, [mod, name, args]}) do + remote_type(anno, mod, name, args) + end + + defp typespec_to_quoted({:ann_type, anno, [var, type]}) do + {:"::", meta(anno), [typespec_to_quoted(var), typespec_to_quoted(type)]} + end + + defp typespec_to_quoted( + {:typed_record_field, {:record_field, anno1, {:atom, anno2, name}}, type} + ) do + typespec_to_quoted({:ann_type, anno1, [{:var, anno2, name}, type]}) + end + + defp typespec_to_quoted({:type, _, :any}) do + quote(do: ...) 
+ end + + defp typespec_to_quoted({:paren_type, _, [type]}) do + typespec_to_quoted(type) + end + + defp typespec_to_quoted({type, _anno, atom}) when is_atom(type) do + atom + end + + defp typespec_to_quoted(other), do: other + + ## Helpers + + defp remote_type(anno, {:atom, _, :elixir}, {:atom, _, :charlist}, []) do + typespec_to_quoted({:type, anno, :charlist, []}) + end + + defp remote_type(anno, {:atom, _, :elixir}, {:atom, _, :nonempty_charlist}, []) do + typespec_to_quoted({:type, anno, :nonempty_charlist, []}) + end + + defp remote_type(anno, {:atom, _, :elixir}, {:atom, _, :struct}, []) do + typespec_to_quoted({:type, anno, :struct, []}) + end + + defp remote_type(anno, {:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]) do + typespec_to_quoted({:type, anno, :as_boolean, [arg]}) + end + + defp remote_type(anno, {:atom, _, :elixir}, {:atom, _, :keyword}, args) do + typespec_to_quoted({:type, anno, :keyword, args}) + end + + defp remote_type(anno, mod, name, args) do + args = for arg <- args, do: typespec_to_quoted(arg) + dot = {:., meta(anno), [typespec_to_quoted(mod), typespec_to_quoted(name)]} + {dot, meta(anno), args} + end + + defp erl_to_ex_var(var) do + case Atom.to_string(var) do + <<"_", c::utf8, rest::binary>> -> + String.to_atom("_#{String.downcase(<<c::utf8>>)}#{rest}") + + <<c::utf8, rest::binary>> -> + String.to_atom("#{String.downcase(<<c::utf8>>)}#{rest}") + end + end + + defp unpack_typespec_kw([{:type, _, :tuple, [{:atom, _, atom}, type]} | t], acc) do + unpack_typespec_kw(t, [{atom, typespec_to_quoted(type)} | acc]) + end + + defp unpack_typespec_kw([], acc) do + {:ok, Enum.reverse(acc)} + end + + defp unpack_typespec_kw(_, _acc) do + :error + end + + defp meta(anno), do: [line: :erl_anno.line(anno)] +end diff --git a/apps/common/lib/lexical/ast.ex b/apps/common/lib/lexical/ast.ex index 056a07967..85027d308 100644 --- a/apps/common/lib/lexical/ast.ex +++ b/apps/common/lib/lexical/ast.ex @@ -187,9 +187,40 @@ defmodule Lexical.Ast do end @doc """ - Returns the path to the cursor in the given document at a position. + Returns the path to the innermost node in the document at the given position. - May return a path even in the event of syntax errors. + This function differs from `cursor_path/2` in that it expects a valid + AST and the returned path will not contain a `:__cursor__` node. + """ + @spec path_at(Document.t(), Position.t()) :: + {:ok, [Macro.t(), ...]} | {:error, :not_found | parse_error()} + @spec path_at(Macro.t(), Position.t()) :: + {:ok, [Macro.t(), ...]} | {:error, :not_found} + def path_at(%Document{} = document, %Position{} = position) do + with {:ok, ast} <- from(document) do + path_at(ast, position) + end + end + + def path_at(ast, %Position{} = position) do + path = + Future.Macro.path(ast, fn node -> + leaf?(node) and contains_position?(node, position) + end) + + case path do + nil -> {:error, :not_found} + path -> {:ok, path} + end + end + + @doc """ + Returns the path to the cursor in a fragment of the document from the + start to the given position. + + This function differs from `path_at/2` in that it operates on an AST + fragment as opposed to a full AST and the call never fails, though it + may return an empty list. """ @spec cursor_path( Document.t() | Macro.t(), @@ -267,21 +298,49 @@ defmodule Lexical.Ast do @doc """ Returns a zipper for the document AST focused at the given position.
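To make the new `path_at/2` concrete, a small sketch; the URI, source snippet, and cursor coordinates below are invented for illustration:

```elixir
alias Lexical.Ast
alias Lexical.Document
alias Lexical.Document.Position

# Hypothetical document with the cursor inside the :ok literal on line 2.
source = "defmodule Example do\n  def go, do: :ok\nend\n"
document = Document.new("file:///example.ex", source, 1)
position = Position.new(document, 2, 16)

case Ast.path_at(document, position) do
  # The head of the path is the innermost node; the tail holds its ancestors.
  {:ok, [innermost | _ancestors]} -> innermost
  {:error, _reason} -> nil
end
```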
""" - @spec zipper_at(Document.t(), Position.t()) :: {:ok, Zipper.zipper()} | {:error, parse_error()} - def zipper_at(%Document{} = document, %Document.Position{} = position) do + @spec zipper_at(Document.t(), Position.t()) :: {:ok, Zipper.t()} | {:error, parse_error()} + def zipper_at(%Document{} = document, %Position{} = position) do with {:ok, ast} <- from(document) do zipper_at_position(ast, position) end end + @doc """ + Returns whether the given AST contains a position. + """ + @spec contains_position?(Macro.t(), Position.t()) :: boolean() + def contains_position?(ast, %Position{} = position) do + case Sourceror.get_range(ast) do + %{start: start_pos, end: end_pos} -> + on_same_line? = start_pos[:line] == end_pos[:line] and position.line == start_pos[:line] + + cond do + on_same_line? -> + position.character >= start_pos[:column] and position.character < end_pos[:column] + + position.line == start_pos[:line] -> + position.character >= start_pos[:column] + + position.line == end_pos[:line] -> + position.character < end_pos[:column] + + true -> + position.line > start_pos[:line] and position.line < end_pos[:line] + end + + nil -> + false + end + end + @doc """ Converts the document to a zipper and traverses the nodes on the given line. The given function must accept and return a (potentially modified) zipper. To maintain an accumulator, use `traverse_line/4`. """ - @spec traverse_line(Document.t(), Position.line(), (Zipper.zipper() -> Zipper.zipper())) :: - {:ok, Zipper.zipper()} | {:error, parse_error()} + @spec traverse_line(Document.t(), Position.line(), (Zipper.t() -> Zipper.t())) :: + {:ok, Zipper.t()} | {:error, parse_error()} def traverse_line(%Document{} = document, line_number, fun) when is_integer(line_number) do range = one_line_range(document, line_number) traverse_in(document, range, fun) @@ -291,9 +350,9 @@ defmodule Lexical.Ast do Document.t(), Position.line(), acc, - (Zipper.zipper(), acc -> {Zipper.zipper(), acc}) + (Zipper.t(), acc -> {Zipper.t(), acc}) ) :: - {:ok, Zipper.zipper(), acc} | {:error, parse_error()} + {:ok, Zipper.t(), acc} | {:error, parse_error()} when acc: any() def traverse_line(%Document{} = document, line_number, acc, fun) when is_integer(line_number) do range = one_line_range(document, line_number) @@ -429,7 +488,8 @@ defmodule Lexical.Ast do Code.string_to_quoted(string, literal_encoder: &{:ok, {:__block__, &2, [&1]}}, token_metadata: true, - columns: true + columns: true, + unescape: false ) end @@ -437,7 +497,8 @@ defmodule Lexical.Ast do Code.Fragment.container_cursor_to_quoted(fragment, literal_encoder: &{:ok, {:__block__, &2, [&1]}}, token_metadata: true, - columns: true + columns: true, + unescape: false ) end @@ -486,7 +547,7 @@ defmodule Lexical.Ast do # in the future, I'd like to expose functions that only traverse a section of the document, # but presently, traverse only follows a subtree, so it won't work for our purposes - defp traverse_in(%Document{} = document, %Document.Range{} = range, fun) do + defp traverse_in(%Document{} = document, %Range{} = range, fun) do ignore_acc = fn node, acc -> {fun.(node), acc} end @@ -500,11 +561,13 @@ defmodule Lexical.Ast do end end - defp traverse_in(%Document{} = document, %Document.Range{} = range, acc, fun) do + defp traverse_in(%Document{} = document, %Range{} = range, acc, fun) do with {:ok, zipper} <- zipper_at(document, range.start) do {zipper, {_position, acc}} = - Zipper.traverse_while(zipper, {{0, 0}, acc}, fn - {node, _} = zipper, {last_position, acc} -> + Zipper.traverse_while( + 
zipper, + {{0, 0}, acc}, + fn %Zipper{node: node} = zipper, {last_position, acc} -> current_position = node_position(node, last_position) if within_range?(current_position, range) do @@ -514,15 +577,16 @@ defmodule Lexical.Ast do else {:skip, zipper, {current_position, acc}} end - end) + end + ) {:ok, zipper, acc} end end - defp within_range?({current_line, current_column}, %Document.Range{} = range) do - start_pos = %Document.Position{} = range.start - end_pos = %Document.Position{} = range.end + defp within_range?({current_line, current_column}, %Range{} = range) do + start_pos = %Position{} = range.start + end_pos = %Position{} = range.end cond do current_line == start_pos.line -> @@ -536,11 +600,11 @@ defmodule Lexical.Ast do end end - defp at_or_after?(node, %Document.Position{} = position) do + defp at_or_after?(node, %Position{} = position) do line = get_line(node, 0) column = get_column(node, 0) - line >= position.line and column >= position.character + line > position.line or (line == position.line and column >= position.character) end defp one_line_range(%Document{} = document, line_number) do @@ -600,4 +664,32 @@ defmodule Lexical.Ast do zipper end end + + # data literals: + # 1, :foo, "foo" + defp leaf?(literal) when is_number(literal) or is_binary(literal) or is_atom(literal), + do: true + + # wrapped data literals: + # as above, but may contain additional token metadata, so consider the block a leaf + defp leaf?({:__block__, _, [literal]}), do: leaf?(literal) + + # unqualified calls or forms without any arguments: + # foo(), %{} + defp leaf?({form, _, []}) when is_atom(form), do: true + + # dot-calls: + # Foo.bar, baz.buzz + defp leaf?({:., _, _}), do: true + + # module aliases: + # Foo.Bar.Baz, __MODULE__.Inner + defp leaf?({:__aliases__, _, _}), do: true + + # variables: + # foo + defp leaf?({var, _, namespace}) when is_atom(var) and is_atom(namespace), do: true + + # consider all other forms branches + defp leaf?(_), do: false end diff --git a/apps/common/mix.exs b/apps/common/mix.exs index ffc27cd90..56901857a 100644 --- a/apps/common/mix.exs +++ b/apps/common/mix.exs @@ -33,7 +33,7 @@ defmodule Common.MixProject do defp deps do [ {:lexical_shared, path: "../../projects/lexical_shared"}, - {:sourceror, "~> 0.12"}, + {:sourceror, "~> 0.14.0"}, {:stream_data, "~> 0.6", only: [:test], runtime: false}, {:patch, "~> 0.12", only: [:test], optional: true, runtime: false} ] diff --git a/apps/common/test/lexical/ast/aliases_test.exs b/apps/common/test/lexical/ast/aliases_test.exs index e6ace6feb..f5d49b648 100644 --- a/apps/common/test/lexical/ast/aliases_test.exs +++ b/apps/common/test/lexical/ast/aliases_test.exs @@ -1,20 +1,14 @@ defmodule Lexical.Ast.AliasesTest do alias Lexical.Ast.Aliases - alias Lexical.Document - alias Lexical.Test.CodeSigil - alias Lexical.Test.CursorSupport - import Aliases - import CursorSupport - import CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil use ExUnit.Case def aliases_at_cursor(text) do - pos = cursor_position(text) - text = strip_cursor(text) - doc = Document.new("file:///file.ex", text, 0) - at(doc, pos) + {position, document} = pop_cursor(text, as: :document) + Aliases.at(document, position) end describe "top level aliases" do diff --git a/apps/common/test/lexical/ast/env_test.exs b/apps/common/test/lexical/ast/env_test.exs index 5be994e42..93b4467c5 100644 --- a/apps/common/test/lexical/ast/env_test.exs +++ b/apps/common/test/lexical/ast/env_test.exs @@ -1,24 +1,14 @@ defmodule Lexical.Ast.EnvTest do - alias 
Lexical.Document - alias Lexical.Test.CodeSigil - alias Lexical.Test.CursorSupport - alias Lexical.Test.Fixtures - use ExUnit.Case, async: true - import CodeSigil import Lexical.Ast.Env - import CursorSupport - import Fixtures + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.Fixtures def new_env(text) do project = project() - {line, column} = cursor_position(text) - stripped_text = strip_cursor(text) - document = Document.new("file://foo.ex", stripped_text, 0) - - position = Document.Position.new(document, line, column) - + {position, document} = pop_cursor(text, as: :document) {:ok, env} = new(project, document, position) env end diff --git a/apps/common/test/lexical/ast_test.exs b/apps/common/test/lexical/ast_test.exs index fab27f60e..a8030b1be 100644 --- a/apps/common/test/lexical/ast_test.exs +++ b/apps/common/test/lexical/ast_test.exs @@ -1,31 +1,30 @@ defmodule Lexical.AstTest do alias Lexical.Ast alias Lexical.Document - alias Lexical.Test.CodeSigil - alias Lexical.Test.CursorSupport + alias Lexical.Document.Position alias Sourceror.Zipper - import CursorSupport - import CodeSigil + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.PositionSupport + import Lexical.Test.RangeSupport use ExUnit.Case, async: true - def cursor_path(text) do - pos = cursor_position(text) - text = strip_cursor(text) - doc = Document.new("file:///file.ex", text, 0) - Ast.cursor_path(doc, pos) - end - describe "cursor_path/2" do + defp cursor_path(text) do + {position, document} = pop_cursor(text, as: :document) + Ast.cursor_path(document, position) + end + test "contains the parent AST" do text = ~q[ - defmodule Foo do - def bar do - | + defmodule Foo do + def bar do + | + end end - end - ] + ] path = cursor_path(text) @@ -35,9 +34,9 @@ defmodule Lexical.AstTest do test "returns cursor ast when is not in a container" do text = ~q[ - | - defmodule Foo do - end + | + defmodule Foo do + end ] path = cursor_path(text) @@ -48,40 +47,98 @@ defmodule Lexical.AstTest do text = ~q[ foo(bar do baz, bat| ] + path = cursor_path(text) assert path == [] end end + describe "path_at/2" do + defp path_at(text) do + {position, document} = pop_cursor(text, as: :document) + Ast.path_at(document, position) + end + + test "returns an error if the cursor cannot be found in a node" do + code = ~q[ + | + defmodule Foo do + end + ] + + assert {:error, :not_found} = path_at(code) + end + + test "returns an error if the AST cannot be parsed" do + code = ~q[ + defmodule |Foo do + ] + + assert {:error, {[line: 2, column: 1], "missing terminator: end" <> _, ""}} = path_at(code) + end + + test "returns a path to the innermost node at position" do + code = ~q[ + defmodule Foo do + def bar do + %{foo: |:ok} + end + end + ] + + assert {:ok, [{:__block__, _, [:ok]} | _]} = path_at(code) + end + + test "returns a path containing all ancestors" do + code = ~q[ + defmodule Foo do + def |bar do + :ok + end + end + ] + + assert {:ok, + [ + {:bar, _, nil}, + {:def, _, _}, + {_, _}, + [{_, _}], + {:defmodule, _, _} + ]} = path_at(code) + end + end + describe "traverse_line" do setup do text = ~q[ - line = 1 - line = 2 - line = 3 - line = 4 - "" - ]t + line = 1 + line = 2 + line = 3 + line = 4 + "" + ]t document = Document.new("file:///file.ex", text, 1) {:ok, document: document} end - defp underscore_variable({{var_name, meta, nil}, zipper_meta}) do - {{:"_#{var_name}", meta, nil}, zipper_meta} + defp underscore_variable(%Zipper{node: {var_name, meta, nil}} = zipper) do 
+ Zipper.replace(zipper, {:"_#{var_name}", meta, nil}) end defp underscore_variable(zipper), do: zipper - defp underscore_variable({{var_name, meta, nil}, zipper_meta}, acc) do - {{{:"_#{var_name}", meta, nil}, zipper_meta}, acc + 1} + defp underscore_variable(%Zipper{node: {_var_name, _meta, nil}} = zipper, acc) do + zipper = underscore_variable(zipper) + {zipper, acc + 1} end defp underscore_variable(zipper, acc), do: {zipper, acc} defp modify({:ok, zipper}) do - {ast, _} = Zipper.top(zipper) + %Zipper{node: ast} = Zipper.top(zipper) Sourceror.to_string(ast) end @@ -113,4 +170,95 @@ defmodule Lexical.AstTest do refute converted =~ "_line = 3" end end + + describe "contains_position?/2 single line node" do + setup do + {range, code} = pop_range(~q| + [ + «single_line_call(1, 2, 3») + ] + |) + + [single_line_ast] = ast(code) + + {:ok, [ast: single_line_ast, range: range]} + end + + test "at the bounds", %{ast: ast, range: range} do + assert Ast.contains_position?(ast, range.start) + assert Ast.contains_position?(ast, range.end) + end + + test "within the node", %{ast: ast, range: range} do + position = %Position{range.start | character: range.start.character + 1} + assert Ast.contains_position?(ast, position) + end + + test "outside the bounds", %{ast: ast, range: range} do + %Position{line: start_line, character: start_col} = range.start + %Position{line: end_line, character: end_col} = range.end + + refute Ast.contains_position?(ast, position(start_line, start_col - 1)) + refute Ast.contains_position?(ast, position(start_line - 1, start_col)) + refute Ast.contains_position?(ast, position(end_line, end_col + 1)) + refute Ast.contains_position?(ast, position(end_line + 1, end_col)) + end + end + + describe "contains_position?/2 multi line node" do + setup do + {range, code} = pop_range(~q| + [ + «multi_line_call( + 1, 2, 3 + ») + ] + |) + + [three_line_ast] = ast(code) + + {:ok, [ast: three_line_ast, range: range]} + end + + test "at the bounds", %{ast: ast, range: range} do + assert Ast.contains_position?(ast, range.start) + assert Ast.contains_position?(ast, range.end) + end + + test "on the first line", %{ast: ast, range: range} do + %Position{line: start_line, character: start_col} = range.start + + assert Ast.contains_position?(ast, position(start_line, start_col + 1)) + refute Ast.contains_position?(ast, position(start_line, start_col - 1)) + end + + test "on the last line", %{ast: ast, range: range} do + %Position{line: end_line, character: end_col} = range.end + + assert Ast.contains_position?(ast, position(end_line, end_col - 1)) + refute Ast.contains_position?(ast, position(end_line, end_col + 1)) + end + + test "within the lines", %{ast: ast, range: range} do + %Position{line: start_line} = range.start + + assert Ast.contains_position?(ast, position(start_line + 1, 1)) + assert Ast.contains_position?(ast, position(start_line + 1, 1_000)) + end + + test "outside the lines", %{ast: ast, range: range} do + %Position{line: start_line, character: start_col} = range.start + %Position{line: end_line, character: end_col} = range.end + + refute Ast.contains_position?(ast, position(start_line - 1, start_col)) + refute Ast.contains_position?(ast, position(end_line + 1, end_col)) + end + end + + defp ast(s) do + case Ast.from(s) do + {:ok, {:__block__, _, [node]}} -> node + {:ok, node} -> node + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/api.ex b/apps/remote_control/lib/lexical/remote_control/api.ex index 5b989b28a..309319c8f 100644 --- 
a/apps/remote_control/lib/lexical/remote_control/api.ex +++ b/apps/remote_control/lib/lexical/remote_control/api.ex @@ -72,8 +72,8 @@ defmodule Lexical.RemoteControl.Api do end @spec docs(Project.t(), module()) :: {:ok, CodeIntelligence.Docs.t()} | {:error, any()} - def docs(%Project{} = project, module) when is_atom(module) do - RemoteControl.call(project, CodeIntelligence.Docs, :for_module, [module]) + def docs(%Project{} = project, module, opts \\ []) when is_atom(module) do + RemoteControl.call(project, CodeIntelligence.Docs, :for_module, [module, opts]) end def register_listener(%Project{} = project, listener_pid, message_types) diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex index 58cc4e170..3a95bd9f6 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex @@ -4,6 +4,7 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs do """ alias Lexical.RemoteControl.CodeIntelligence.Docs.Entry + alias Lexical.RemoteControl.Modules defstruct [:module, :doc, functions_and_macros: [], callbacks: [], types: []] @@ -17,63 +18,74 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs do @doc """ Fetches known documentation for the given module. + + ## Options + + * `:exclude_hidden` - if `true`, returns `{:error, :hidden}` for + modules that have been marked as hidden using `@moduledoc false`. + Defaults to `false`. + """ - @spec for_module(module()) :: {:ok, t} | {:error, any()} - def for_module(module) when is_atom(module) do - with :ok <- ensure_ready(module), - {:docs_v1, _anno, _lang, _fmt, module_doc, _meta, docs} <- Code.fetch_docs(module) do - {:ok, normalize_docs(module, module_doc, docs)} + @spec for_module(module(), [opt]) :: {:ok, t} | {:error, any()} + when opt: {:exclude_hidden, boolean()} + def for_module(module, opts) when is_atom(module) do + exclude_hidden? = Keyword.get(opts, :exclude_hidden, false) + + with {:ok, beam} <- Modules.ensure_beam(module) do + %__MODULE__{} = docs = parse_docs(module, beam) + + if docs.doc == :hidden and exclude_hidden? 
do + {:error, :hidden} + else + {:ok, docs} + end end end - defp normalize_docs(module, module_doc, element_docs) do - elements_by_kind = Enum.group_by(element_docs, &doc_kind/1) - functions = Map.get(elements_by_kind, :function, []) - macros = Map.get(elements_by_kind, :macro, []) - callbacks = Map.get(elements_by_kind, :callback, []) - types = Map.get(elements_by_kind, :type, []) - - %__MODULE__{ - module: module, - doc: Entry.parse_doc(module_doc), - functions_and_macros: parse_doc_elements(module, functions ++ macros), - callbacks: parse_doc_elements(module, callbacks), - types: parse_doc_elements(module, types) - } - end + defp parse_docs(module, beam) do + with {:ok, {:docs_v1, _anno, _lang, _format, module_doc, _meta, entries}} <- + Modules.fetch_docs(beam) do + entries_by_kind = Enum.group_by(entries, &doc_kind/1) + function_entries = Map.get(entries_by_kind, :function, []) + macro_entries = Map.get(entries_by_kind, :macro, []) + callback_entries = Map.get(entries_by_kind, :callback, []) + type_entries = Map.get(entries_by_kind, :type, []) - defp doc_kind({{kind, _name, _arity}, _anno, _sig, _doc, _meta}) do - kind - end + spec_defs = beam |> Modules.fetch_specs() |> ok_or([]) + callback_defs = beam |> Modules.fetch_callbacks() |> ok_or([]) + type_defs = beam |> Modules.fetch_types() |> ok_or([]) - defp parse_doc_elements(module, elements) do - elements - |> Enum.map(&Entry.from_docs_v1(module, &1)) - |> Enum.group_by(& &1.name) + %__MODULE__{ + module: module, + doc: Entry.parse_doc(module_doc), + functions_and_macros: parse_entries(module, function_entries ++ macro_entries, spec_defs), + callbacks: parse_entries(module, callback_entries, callback_defs), + types: parse_entries(module, type_entries, type_defs) + } + end end - defp ensure_ready(module) do - with {:module, _} <- Code.ensure_compiled(module), - path when is_list(path) and path != [] <- :code.which(module) do - ensure_file_exists(path) - else - _ -> {:error, :not_found} - end + defp doc_kind({{kind, _name, _arity}, _anno, _sig, _doc, _meta}) do + kind end - @timeout 10 - defp ensure_file_exists(path, attempts \\ 10) + defp parse_entries(module, raw_entries, defs) do + defs_by_name_arity = + Enum.group_by( + defs, + fn {name, arity, _formatted, _quoted} -> {name, arity} end, + fn {_name, _arity, formatted, _quoted} -> formatted end + ) - defp ensure_file_exists(_, 0) do - {:error, :beam_file_timeout} + raw_entries + |> Enum.map(fn raw_entry -> + entry = Entry.from_docs_v1(module, raw_entry) + defs = Map.get(defs_by_name_arity, {entry.name, entry.arity}, []) + %Entry{entry | defs: defs} + end) + |> Enum.group_by(& &1.name) end - defp ensure_file_exists(path, attempts) do - if File.exists?(path) do - :ok - else - Process.sleep(@timeout) - ensure_file_exists(path, attempts - 1) - end - end + defp ok_or({:ok, value}, _default), do: value + defp ok_or(_, default), do: default end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs/entry.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs/entry.ex index 112ea1824..098af162e 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs/entry.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs/entry.ex @@ -10,8 +10,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs.Entry do :arity, :signature, :doc, - :metadata - # :spec + :metadata, + defs: [] ] @type t(kind) :: %__MODULE__{ @@ -21,8 +21,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs.Entry do arity: 
arity(), signature: [String.t()], doc: content(), - metadata: metadata() - # spec: String.t() | nil + metadata: metadata(), + defs: [String.t()] } @type content :: String.t() | :none | :hidden diff --git a/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex b/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex index 50ff62f3b..6ee6fa8e7 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex @@ -2,6 +2,7 @@ defmodule Lexical.RemoteControl.CodeMod.ReplaceWithUnderscore do alias Lexical.Ast alias Lexical.Document alias Lexical.Document.Changes + alias Sourceror.Zipper @spec edits(Document.t(), non_neg_integer(), String.t() | atom) :: {:ok, Changes.t()} | :error @@ -30,7 +31,7 @@ defmodule Lexical.RemoteControl.CodeMod.ReplaceWithUnderscore do result = Ast.traverse_line(document, line_number, [], fn - {{^unused_variable_name, _meta, nil} = node, _} = zipper, patches -> + %Zipper{node: {^unused_variable_name, _meta, nil} = node} = zipper, patches -> [patch] = Sourceror.Patch.rename_identifier(node, underscored_variable_name) {zipper, [patch | patches]} diff --git a/apps/remote_control/lib/lexical/remote_control/modules.ex b/apps/remote_control/lib/lexical/remote_control/modules.ex index 71d9529af..c71027cc8 100644 --- a/apps/remote_control/lib/lexical/remote_control/modules.ex +++ b/apps/remote_control/lib/lexical/remote_control/modules.ex @@ -1,7 +1,4 @@ defmodule Lexical.RemoteControl.Modules do - @moduledoc """ - Utilities for dealing with modules on the remote control node - """ defmodule Predicate.Syntax do @moduledoc """ Syntax helpers for the predicate syntax @@ -47,8 +44,135 @@ defmodule Lexical.RemoteControl.Modules do end end + @moduledoc """ + Utilities for dealing with modules on the remote control node + """ + + alias Future.Code.Typespec + + @typedoc "Module documentation record as defined by EEP-48" + @type docs_v1 :: tuple() + + @typedoc "A type, spec, or callback definition" + @type definition :: + {name :: atom(), arity :: arity(), formatted :: String.t(), quoted :: Macro.t()} + @cache_timeout Application.compile_env(:remote_control, :modules_cache_expiry, {10, :second}) + @doc """ + Ensure the given module is compiled, returning the BEAM object code if successful. + """ + @spec ensure_beam(module()) :: + {:ok, beam :: binary()} | {:error, reason} + when reason: + :embedded + | :badfile + | :nofile + | :on_load_failure + | :unavailable + | :get_object_code_failed + def ensure_beam(module) when is_atom(module) do + with {:module, _} <- Code.ensure_compiled(module), + {_module, beam, _filename} <- :code.get_object_code(module) do + {:ok, beam} + else + :error -> {:error, :get_object_code_failed} + {:error, error} -> {:error, error} + end + end + + @doc """ + Fetch the docs chunk from BEAM object code. + """ + @spec fetch_docs(beam :: binary()) :: {:ok, docs_v1()} | :error + @docs_chunk ~c"Docs" + def fetch_docs(beam) when is_binary(beam) do + case :beam_lib.chunks(beam, [@docs_chunk]) do + {:ok, {_module, [{@docs_chunk, bin}]}} -> + {:ok, :erlang.binary_to_term(bin)} + + _ -> + :error + end + end + + @doc """ + Fetch the specs from BEAM object code. 
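A brief sketch of how `ensure_beam/1` and `fetch_docs/1` compose; the choice of `Enum` as the inspected module is arbitrary:

```elixir
alias Lexical.RemoteControl.Modules

# Load the EEP-48 docs chunk for a compiled module via its object code.
with {:ok, beam} <- Modules.ensure_beam(Enum),
     {:ok, {:docs_v1, _anno, :elixir, _format, _module_doc, _meta, entries}} <-
       Modules.fetch_docs(beam) do
  length(entries)
end
```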
+ """ + @spec fetch_specs(beam :: binary()) :: {:ok, [definition()]} | :error + def fetch_specs(beam) when is_binary(beam) do + case Typespec.fetch_specs(beam) do + {:ok, specs} -> + defs = + for {{name, arity}, defs} <- specs, + def <- defs do + quoted = Typespec.spec_to_quoted(name, def) + formatted = format_definition(quoted) + + {name, arity, formatted, quoted} + end + + {:ok, defs} + + _ -> + :error + end + end + + @doc """ + Fetch the types from BEAM object code. + """ + @spec fetch_types(beam :: binary()) :: {:ok, [definition()]} | :error + def fetch_types(beam) when is_binary(beam) do + case Typespec.fetch_types(beam) do + {:ok, types} -> + defs = + for {kind, {name, _body, args} = type} <- types do + arity = length(args) + quoted_type = Typespec.type_to_quoted(type) + quoted = {:@, [], [{kind, [], [quoted_type]}]} + formatted = format_definition(quoted) + + {name, arity, formatted, quoted} + end + + {:ok, defs} + + _ -> + :error + end + end + + @doc """ + Fetch the specs from BEAM object code. + """ + @spec fetch_callbacks(beam :: binary()) :: {:ok, [definition()]} | :error + def fetch_callbacks(beam) when is_binary(beam) do + case Typespec.fetch_callbacks(beam) do + {:ok, callbacks} -> + defs = + for {{name, arity}, defs} <- callbacks, + def <- defs do + quoted = Typespec.spec_to_quoted(name, def) + formatted = format_definition(quoted) + + {name, arity, formatted, quoted} + end + + {:ok, defs} + + _ -> + :error + end + end + + defp format_definition(quoted) do + quoted + |> Future.Code.quoted_to_algebra() + |> Inspect.Algebra.format(60) + |> IO.iodata_to_binary() + end + @doc """ Returns all modules matching a prefix diff --git a/apps/remote_control/mix.exs b/apps/remote_control/mix.exs index 2a021e02c..8e72f2351 100644 --- a/apps/remote_control/mix.exs +++ b/apps/remote_control/mix.exs @@ -41,7 +41,7 @@ defmodule Lexical.RemoteControl.MixProject do {:lexical_test, path: "../../projects/lexical_test", only: :test}, {:patch, "~> 0.12", only: [:dev, :test], optional: true, runtime: false}, {:path_glob, "~> 0.2", optional: true}, - {:sourceror, "~> 0.12"} + {:sourceror, "~> 0.14.0"} ] end diff --git a/apps/remote_control/test/lexical/remote_control/completion_test.exs b/apps/remote_control/test/lexical/remote_control/completion_test.exs index 3f48713a8..483d03ef6 100644 --- a/apps/remote_control/test/lexical/remote_control/completion_test.exs +++ b/apps/remote_control/test/lexical/remote_control/completion_test.exs @@ -4,7 +4,6 @@ defmodule Lexical.RemoteControl.CompletionTest do import Lexical.Test.CursorSupport import Lexical.Test.CodeSigil - import Lexical.Test.PositionSupport use ExUnit.Case, async: true @@ -86,23 +85,9 @@ defmodule Lexical.RemoteControl.CompletionTest do end end - defp document(source) do - text = strip_cursor(source) - Document.new(file_uri(), text, 1) - end - - defp file_uri do - "file:///elixir.ex" - end - - defp position(source) do - {line, column} = cursor_position(source) - position(line, column) - end - defp struct_fields(source) do - document = document(source) - position = position(source) + {position, document} = pop_cursor(source, as: :document) + text = Document.to_string(document) Code.compile_string(text) diff --git a/apps/server/lib/lexical/server/code_intelligence/entity.ex b/apps/server/lib/lexical/server/code_intelligence/entity.ex index ec8d9ba5c..60fb157fe 100644 --- a/apps/server/lib/lexical/server/code_intelligence/entity.ex +++ b/apps/server/lib/lexical/server/code_intelligence/entity.ex @@ -10,27 +10,26 @@ defmodule 
Lexical.Server.CodeIntelligence.Entity do alias Lexical.Text require Logger + require Sourceror.Identifier - @type resolved :: {:module, module()} + @type resolved :: + {:module, module()} + | {:struct, module()} + | {:call, module(), fun_name :: atom(), arity :: non_neg_integer()} + | {:type, module(), type_name :: atom(), arity :: non_neg_integer()} + + defguardp is_call(form) when Sourceror.Identifier.is_call(form) and elem(form, 0) != :. @doc """ Attempts to resolve the entity at the given position in the document. - ## Return values - - Returns `{:ok, resolved, range}` if successful and `{:error, error}` - otherwise. The `range` includes the resolved node and the - Resolved entities are one of: - - * `{:module, module}` - + Returns `{:ok, resolved, range}` if successful, `{:error, error}` otherwise. """ @spec resolve(Document.t(), Position.t()) :: {:ok, resolved, Range.t()} | {:error, term()} def resolve(%Document{} = document, %Position{} = position) do - with {:ok, %{context: context, begin: begin_pos, end: end_pos}} <- - Ast.surround_context(document, position), - {:ok, resolved, {begin_pos, end_pos}} <- - resolve(context, {begin_pos, end_pos}, document, position) do + with {:ok, surround_context} <- Ast.surround_context(document, position), + {:ok, resolved, {begin_pos, end_pos}} <- resolve(surround_context, document, position) do + Logger.info("Resolved entity: #{inspect(resolved)}") {:ok, resolved, to_range(document, begin_pos, end_pos)} else {:error, :surround_context} -> {:error, :not_found} @@ -38,67 +37,198 @@ defmodule Lexical.Server.CodeIntelligence.Entity do end end + defp resolve(%{context: context, begin: begin_pos, end: end_pos}, document, position) do + resolve(context, {begin_pos, end_pos}, document, position) + end + defp resolve({:alias, charlist}, node_range, document, position) do - resolve_module(charlist, node_range, document, position) + resolve_alias(charlist, node_range, document, position) end defp resolve({:alias, {:local_or_var, prefix}, charlist}, node_range, document, position) do - resolve_module(prefix ++ [?.] ++ charlist, node_range, document, position) + resolve_alias(prefix ++ [?.] 
++ charlist, node_range, document, position) end - defp resolve({:local_or_var, ~c"__MODULE__"}, node_range, document, position) do - resolve_module(~c"__MODULE__", node_range, document, position) + defp resolve({:local_or_var, ~c"__MODULE__" = chars}, node_range, document, position) do + resolve_alias(chars, node_range, document, position) end - defp resolve(context, _node_range, _document, _position) do - unsupported_context(context) + defp resolve({:struct, charlist}, {{start_line, start_col}, end_pos}, document, position) do + # exclude the leading % from the node range so that it can be + # resolved like a normal module alias + node_range = {{start_line, start_col + 1}, end_pos} + + case resolve_alias(charlist, node_range, document, position) do + {:ok, {_, struct}, range} -> {:ok, {:struct, struct}, range} + :error -> {:error, :not_found} + end end - defp unsupported_context(context) do + defp resolve({:dot, alias_node, fun_chars}, node_range, document, position) do + fun = List.to_atom(fun_chars) + + with {:ok, module} <- expand_alias(alias_node, document, position) do + case Ast.path_at(document, position) do + {:ok, path} -> + arity = arity_at_position(path, position) + kind = kind_of_call(path, position) + {:ok, {kind, module, fun, arity}, node_range} + + _ -> + {:ok, {:call, module, fun, 0}, node_range} + end + end + end + + defp resolve(context, _node_range, _document, _position) do {:error, {:unsupported, context}} end - # Modules on a single line, e.g. "Foo.Bar.Baz" - defp resolve_module(charlist, {{line, column}, {line, _}}, document, position) - when is_list(charlist) do - # Take only the segments at and before the cursor, e.g. - # Foo|.Bar.Baz -> Foo - # Foo.|Bar.Baz -> Foo.Bar - module_string = - charlist - |> Enum.with_index(column) - |> Enum.take_while(fn {char, column} -> - column < position.character or char != ?. - end) - |> Enum.map(&elem(&1, 0)) - |> List.to_string() + defp resolve_alias(charlist, node_range, document, position) do + with {:ok, path} <- Ast.path_at(document, position), + :struct <- kind_of_alias(path) do + resolve_struct(charlist, node_range, document, position) + else + _ -> resolve_module(charlist, node_range, document, position) + end + end - expanded = - [module_string] - |> Module.concat() - |> Ast.expand_aliases(document, position) + defp resolve_struct(charlist, node_range, document, %Position{} = position) do + with {:ok, struct} <- expand_alias(charlist, document, position) do + {:ok, {:struct, struct}, node_range} + end + end + + # Modules on a single line, e.g. "Foo.Bar.Baz" + defp resolve_module(charlist, {{line, column}, {line, _}}, document, %Position{} = position) do + module_string = module_before_position(charlist, column, position) - with {:ok, module} <- expanded do - {:ok, {:module, module}, {{line, column}, {line, column + String.length(module_string)}}} + with {:ok, module} <- expand_alias(module_string, document, position) do + end_column = column + String.length(module_string) + {:ok, {:module, module}, {{line, column}, {line, end_column}}} end end # Modules on multiple lines, e.g. "Foo.\n Bar.\n Baz" # Since we no longer have formatting information at this point, we # just return the entire module for now. 
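For orientation, a hypothetical call showing the tuple shapes produced by the resolution clauses above; the source text and cursor coordinates are invented, and only the shapes mirror the `resolved` type:

```elixir
alias Lexical.Document
alias Lexical.Document.Position
alias Lexical.Server.CodeIntelligence.Entity

# Hypothetical module body with the cursor on the %MyStruct{} alias.
source = "defmodule Example do\n  def new, do: %MyStruct{}\nend\n"
document = Document.new("file:///example.ex", source, 1)
position = Position.new(document, 2, 18)

case Entity.resolve(document, position) do
  {:ok, {:struct, module}, _range} -> {:struct, module}
  {:ok, {:module, module}, _range} -> {:module, module}
  {:ok, {:call, mod, fun, arity}, _range} -> {:call, mod, fun, arity}
  {:ok, {:type, mod, type, arity}, _range} -> {:type, mod, type, arity}
  {:error, _} = error -> error
end
```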
- defp resolve_module(charlist, node_range, document, position) do - module_string = List.to_string(charlist) + defp resolve_module(charlist, node_range, document, %Position{} = position) do + with {:ok, module} <- expand_alias(charlist, document, position) do + {:ok, {:module, module}, node_range} + end + end + + # Take only the segments at and before the cursor, e.g. + # Foo|.Bar.Baz -> Foo + # Foo.|Bar.Baz -> Foo.Bar + defp module_before_position(charlist, start_column, %Position{} = position) + when is_list(charlist) do + charlist + |> List.to_string() + |> module_before_position(position.character - start_column) + end - expanded = - [module_string] - |> Module.concat() - |> Ast.expand_aliases(document, position) + defp module_before_position(string, index) when is_binary(string) do + {prefix, suffix} = String.split_at(string, index) - with {:ok, module} <- expanded do - {:ok, {:module, module}, node_range} + case String.split(suffix, ".", parts: 2) do + [before_dot, _after_dot] -> prefix <> before_dot + [before_dot] -> prefix <> before_dot end end + defp expand_alias({:alias, {:local_or_var, prefix}, charlist}, document, %Position{} = position) do + expand_alias(prefix ++ [?.] ++ charlist, document, position) + end + + defp expand_alias({:alias, charlist}, document, %Position{} = position) do + expand_alias(charlist, document, position) + end + + defp expand_alias(charlist, document, %Position{} = position) when is_list(charlist) do + charlist + |> List.to_string() + |> expand_alias(document, position) + end + + defp expand_alias(module, document, %Position{} = position) when is_binary(module) do + [module] + |> Module.concat() + |> Ast.expand_aliases(document, position) + end + + defp expand_alias(_, _document, _position), do: :error + + # Pipes: + defp arity_at_position([{:|>, _, _} = pipe | _], %Position{} = position) do + {_call, _, args} = + pipe + |> Macro.unpipe() + |> Enum.find_value(fn {ast, _arg_position} -> + if Ast.contains_position?(ast, position) do + ast + end + end) + + length(args) + 1 + end + + # Calls inside of a pipe: + # |> MyModule.some_function(1, 2) + defp arity_at_position([{_, _, args} = call, {:|>, _, _} | _], _position) when is_call(call) do + length(args) + 1 + end + + # Calls not inside of a pipe: + # MyModule.some_function(1, 2) + # some_function.(1, 2) + defp arity_at_position([{_, _, args} = call | _], _position) when is_call(call) do + length(args) + end + + defp arity_at_position([_non_call | rest], %Position{} = position) do + arity_at_position(rest, position) + end + + defp arity_at_position([], _position), do: 0 + + # Walk up the path to see whether we're in the right-hand argument of + # a `::` type operator, which would make the kind a `:type`, not a call. + # Calls that occur on the right of a `::` type operator have kind `:type` + defp kind_of_call([{:"::", _, [_, right_arg]} | rest], %Position{} = position) do + if Ast.contains_position?(right_arg, position) do + :type + else + kind_of_call(rest, position) + end + end + + defp kind_of_call([_ | rest], %Position{} = position) do + kind_of_call(rest, position) + end + + defp kind_of_call([], _position), do: :call + + # There is a fixed set of situations where an alias is being used as + # a `:struct`, otherwise resolve as a `:module`. 
+ defp kind_of_alias(path) + + # %|Foo{} + # %|Foo.Bar{} + # %__MODULE__.|Foo{} + defp kind_of_alias([{:__aliases__, _, _}, {:%, _, _} | _]), do: :struct + + # %|__MODULE__{} + defp kind_of_alias([{:__MODULE__, _, nil}, {:%, _, _} | _]), do: :struct + + # %|__MODULE__.Foo{} + defp kind_of_alias([head_of_aliases, {:__aliases__, _, [head_of_aliases | _]}, {:%, _, _} | _]) do + :struct + end + + # Catch-all: + defp kind_of_alias(_), do: :module + @doc """ Returns the source location of the entity at the given position in the document. """ diff --git a/apps/server/lib/lexical/server/provider/handlers/hover.ex b/apps/server/lib/lexical/server/provider/handlers/hover.ex index e8d56b797..fed58e9e6 100644 --- a/apps/server/lib/lexical/server/provider/handlers/hover.ex +++ b/apps/server/lib/lexical/server/provider/handlers/hover.ex @@ -3,18 +3,20 @@ defmodule Lexical.Server.Provider.Handlers.Hover do alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.Protocol.Types.Hover - alias Lexical.Protocol.Types.Markup alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeIntelligence.Docs alias Lexical.Server.CodeIntelligence.Entity alias Lexical.Server.Provider.Env + alias Lexical.Server.Provider.Markdown require Logger def handle(%Requests.Hover{} = request, %Env{} = env) do maybe_hover = - with {:ok, entity, _elixir_range} <- Entity.resolve(request.document, request.position), - {:ok, content} <- hover_content(entity, env) do - %Hover{contents: %Markup.Content{kind: :markdown, value: content}} + with {:ok, entity, range} <- Entity.resolve(request.document, request.position), + {:ok, markdown} <- hover_content(entity, env) do + content = Markdown.to_content(markdown) + %Hover{contents: content, range: range} else error -> Logger.warning("Could not resolve hover request, got: #{inspect(error)}") @@ -24,21 +26,171 @@ defmodule Lexical.Server.Provider.Handlers.Hover do {:reply, Responses.Hover.new(request.id, maybe_hover)} end - defp hover_content({:module, module}, env) do - with {:ok, module_docs} <- RemoteControl.Api.docs(env.project, module) do - doc_content = module_doc_content(module_docs.doc) + defp hover_content({kind, module}, env) when kind in [:module, :struct] do + case RemoteControl.Api.docs(env.project, module, exclude_hidden: false) do + {:ok, %Docs{} = module_docs} -> + header = module_header(kind, module_docs) + types = module_header_types(kind, module_docs) - content = """ - ### #{Ast.Module.name(module)} + additional_sections = [ + module_doc(module_docs.doc), + module_footer(kind, module_docs) + ] - #{doc_content}\ - """ + if Enum.all?([types | additional_sections], &empty?/1) do + {:error, :no_doc} + else + header_block = "#{header}\n\n#{types}" |> String.trim() |> Markdown.code_block() + {:ok, Markdown.join_sections([header_block | additional_sections])} + end - {:ok, content} + _ -> + {:error, :no_doc} end end - defp module_doc_content(s) when is_binary(s), do: s - defp module_doc_content(:none), do: "*This module is undocumented.*\n" - defp module_doc_content(:hidden), do: "*This module is private.*\n" + defp hover_content({:call, module, fun, arity}, env) do + with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(env.project, module), + {:ok, entries} <- Map.fetch(module_docs.functions_and_macros, fun) do + sections = + entries + |> Enum.sort_by(& &1.arity) + |> Enum.filter(&(&1.arity >= arity)) + |> Enum.map(&entry_content/1) + + {:ok, Markdown.join_sections(sections, Markdown.separator())} + end + end + + defp hover_content({:type, module, 
type, arity}, env) do + with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(env.project, module), + {:ok, entries} <- Map.fetch(module_docs.types, type) do + case Enum.find(entries, &(&1.arity == arity)) do + %Docs.Entry{} = entry -> + {:ok, entry_content(entry)} + + _ -> + {:error, :no_type} + end + end + end + + defp module_header(:module, %Docs{module: module}) do + Ast.Module.name(module) + end + + defp module_header(:struct, %Docs{module: module}) do + "%#{Ast.Module.name(module)}{}" + end + + defp module_header_types(:module, %Docs{}), do: "" + + defp module_header_types(:struct, %Docs{} = docs) do + docs.types + |> Map.get(:t, []) + |> sort_entries() + |> Enum.flat_map(& &1.defs) + |> Enum.join("\n\n") + end + + defp module_doc(s) when is_binary(s), do: s + defp module_doc(_), do: nil + + defp module_footer(:module, docs) do + callbacks = format_callbacks(docs.callbacks) + + unless empty?(callbacks) do + Markdown.section(callbacks, header: "Callbacks") + end + end + + defp module_footer(:struct, _docs), do: nil + + defp entry_content(%Docs.Entry{kind: fn_or_macro} = entry) + when fn_or_macro in [:function, :macro] do + with {:ok, call_header} <- call_header(entry) do + specs = Enum.map_join(entry.defs, "\n", &("@spec " <> &1)) + + header = + [call_header, specs] + |> Markdown.join_sections() + |> String.trim() + |> Markdown.code_block() + + Markdown.join_sections([header, entry_doc_content(entry.doc)]) + end + end + + defp entry_content(%Docs.Entry{kind: :type} = entry) do + module_name = Ast.Module.name(entry.module) + + header = + Markdown.code_block(""" + #{module_name}.#{entry.name}/#{entry.arity} + + #{type_defs(entry)}\ + """) + + Markdown.join_sections([header, entry_doc_content(entry.doc)]) + end + + defp call_header(%Docs.Entry{kind: maybe_macro} = entry) do + with [signature | _] <- entry.signature do + module_name = Ast.Module.name(entry.module) + + macro_prefix = + if maybe_macro == :macro do + "(macro) " + else + "" + end + + {:ok, "#{macro_prefix}#{module_name}.#{signature}"} + end + end + + defp type_defs(%Docs.Entry{metadata: %{opaque: true}} = entry) do + Enum.map_join(entry.defs, "\n", fn def -> + def + |> String.split("::", parts: 2) + |> List.first() + |> String.trim() + end) + end + + defp type_defs(%Docs.Entry{} = entry) do + Enum.join(entry.defs, "\n") + end + + defp format_callbacks(callbacks) do + callbacks + |> Map.values() + |> List.flatten() + |> sort_entries() + |> Enum.map_join("\n", fn %Docs.Entry{} = entry -> + header = + entry.defs + |> Enum.map_join("\n", &("@callback " <> &1)) + |> Markdown.code_block() + + if is_binary(entry.doc) do + """ + #{header} + #{entry_doc_content(entry.doc)} + """ + else + header + end + end) + end + + defp entry_doc_content(s) when is_binary(s), do: String.trim(s) + defp entry_doc_content(_), do: nil + + defp sort_entries(entries) do + Enum.sort_by(entries, &{&1.name, &1.arity}) + end + + defp empty?(empty) when empty in [nil, "", []], do: true + defp empty?(_), do: false end diff --git a/apps/server/lib/lexical/server/provider/markdown.ex b/apps/server/lib/lexical/server/provider/markdown.ex new file mode 100644 index 000000000..51879b8b0 --- /dev/null +++ b/apps/server/lib/lexical/server/provider/markdown.ex @@ -0,0 +1,84 @@ +defmodule Lexical.Server.Provider.Markdown do + @moduledoc """ + Utilities for formatting Markdown content. + """ + + alias Lexical.Protocol.Types.Markup + + @type markdown :: String.t() + + @doc """ + Converts a string of Markdown into LSP markup content. 
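A short composition sketch for these Markdown helpers; the signature and doc strings are placeholders:

```elixir
alias Lexical.Server.Provider.Markdown

# Build a hover body: a code-fenced signature followed by a prose section.
signature_block = Markdown.code_block("Enum.map(enumerable, fun)")
doc = "Returns a list where each element is the result of invoking `fun`."

[signature_block, Markdown.section(doc, header: "Summary")]
|> Markdown.join_sections()
|> Markdown.to_content()
```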
+ """ + @spec to_content(markdown) :: Markup.Content.t() + def to_content(markdown) when is_binary(markdown) do + %Markup.Content{kind: :markdown, value: markdown} + end + + @doc """ + Wraps the content inside a Markdown code block. + + ## Options + + * `:lang` - The language for the block. Defaults to `"elixir"`. + + """ + @spec code_block(String.t(), [opt]) :: markdown + when opt: {:lang, String.t()} + def code_block(content, opts \\ []) do + lang = Keyword.get(opts, :lang, "elixir") + + """ + ```#{lang} + #{content} + ``` + """ + end + + @doc """ + Creates a Markdown section with a header. + + ## Options + + * `:header` (required) - The section title. + * `:header_level` - Defaults to `2`. + + """ + @spec section(markdown, [opt]) :: markdown + when opt: {:header, markdown} | {:header_level, pos_integer()} + def section(content, opts) do + header = Keyword.fetch!(opts, :header) + header_level = Keyword.get(opts, :header_level, 2) + + """ + #{String.duplicate("#", header_level)} #{header} + + #{content} + """ + end + + @doc """ + Joins multiple Markdown sections. + """ + @spec join_sections([markdown | nil]) :: markdown + def join_sections(sections, joiner \\ "\n\n") when is_list(sections) do + with_rules = + sections + |> Stream.filter(&(is_binary(&1) and &1 != "")) + |> Stream.map(&String.trim(&1)) + |> Enum.intersperse(joiner) + + case with_rules do + [] -> "" + _ -> IO.iodata_to_binary([with_rules, "\n"]) + end + end + + @doc """ + Returns a string that can be used to join sections with a horizontal rule. + """ + @spec separator() :: markdown + def separator do + "\n\n---\n\n" + end +end diff --git a/apps/server/mix.exs b/apps/server/mix.exs index 1e6451e9d..d962e225f 100644 --- a/apps/server/mix.exs +++ b/apps/server/mix.exs @@ -41,7 +41,7 @@ defmodule Lexical.Server.MixProject do defp deps do [ {:lexical_shared, path: "../../projects/lexical_shared", override: true}, - {:lexical_test, path: "../../projects/lexical_test", only: :test}, + {:lexical_test, path: "../../projects/lexical_test", only: [:dev, :test]}, {:common, in_umbrella: true}, {:elixir_sense, github: "elixir-lsp/elixir_sense"}, {:jason, "~> 1.4"}, @@ -50,7 +50,7 @@ defmodule Lexical.Server.MixProject do {:path_glob, "~> 0.2"}, {:protocol, in_umbrella: true}, {:remote_control, in_umbrella: true, runtime: false}, - {:sourceror, "~> 0.12"} + {:sourceror, "~> 0.14.0"} ] end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs index 286838770..0460db742 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs @@ -1,27 +1,17 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do alias Lexical.Ast.Env - alias Lexical.Document alias Lexical.Protocol.Types.Completion.Item, as: CompletionItem - alias Lexical.Server.CodeIntelligence.Completion - alias Lexical.Test.CodeSigil - alias Lexical.Test.CursorSupport - alias Lexical.Test.Fixtures use ExUnit.Case, async: true - import CodeSigil - import Completion.Builder - import CursorSupport - import Fixtures + import Lexical.Server.CodeIntelligence.Completion.Builder + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.Fixtures def new_env(text) do project = project() - {line, column} = cursor_position(text) - stripped_text = strip_cursor(text) - document = Document.new("file://foo.ex", 
stripped_text, 0) - - position = Document.Position.new(document, line, column) - + {position, document} = pop_cursor(text, as: :document) {:ok, env} = Env.new(project, document, position) env end diff --git a/apps/server/test/lexical/server/code_intelligence/entity_test.exs b/apps/server/test/lexical/server/code_intelligence/entity_test.exs index ec827ffcb..d7d242746 100644 --- a/apps/server/test/lexical/server/code_intelligence/entity_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/entity_test.exs @@ -2,18 +2,16 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do alias Lexical.Document alias Lexical.Document.Location alias Lexical.RemoteControl - alias Lexical.RemoteControl.Api.Messages alias Lexical.RemoteControl.ProjectNodeSupervisor alias Lexical.Server.CodeIntelligence.Entity + import Lexical.RemoteControl.Api.Messages import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport import Lexical.Test.Fixtures import Lexical.Test.RangeSupport - import Messages use ExUnit.Case, async: false - use Lexical.Test.PositionSupport defp with_referenced_file(%{project: project}) do uri = @@ -73,7 +71,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do MyDefinition.greet|("World") end end - ] + ] assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ def «greet»(name) do] @@ -88,7 +86,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do MyDefinition|.greet("World") end end - ] + ] assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[defmodule «MyDefinition» do] @@ -103,7 +101,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do MyDefinition.print_hello|() end end - ] + ] assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ defmacro «print_hello» do] @@ -139,7 +137,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do MultiArity.sum|(1, 2, 3) end end - ] + ] {:ok, referenced_uri, definition_line} = definition(project, subject_module) @@ -160,7 +158,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do greet|("World") end end - ] + ] assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ def «greet»(name) do] @@ -176,7 +174,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do print_hello|() end end - ] + ] assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ defmacro «print_hello» do] @@ -195,7 +193,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do greet|("World") end end - ] + ] + assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ def «greet»(name) do] end @@ -219,7 +218,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do hello_func_in_using|() end end - ] + ] + assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) assert definition_line == ~S[ def «hello_func_in_using» do] end @@ -253,7 +253,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do @|b end end - ] + ] {:ok, referenced_uri, definition_line} = definition(project, subject_module) @@ -272,7 +272,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do end end end - ] + ] {:ok, referenced_uri, definition_line} = definition(project, subject_module) @@ -290,7 +290,7 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do %{project: project} do 
subject_module = ~q[ String.to_integer|("1") - ] + ] {:ok, uri, definition_line} = definition(project, subject_module) @@ -301,7 +301,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "find the definition when calling a erlang module", %{project: project} do subject_module = ~q[ :erlang.binary_to_atom|("1") - ] + ] + {:ok, uri, definition_line} = definition(project, subject_module) assert uri =~ "/src/erlang.erl" @@ -346,8 +347,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "resolves module segments at and before the cursor", %{project: project} do code = ~q[ - In.|The.Middle - ] + In.|The.Middle + ] assert {:ok, {:module, In.The}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[«In.The».Middle] @@ -355,8 +356,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "excludes trailing module segments with the cursor is on a period", %{project: project} do code = ~q[ - AAA.BBB.CCC.DDD|.EEE - ] + AAA.BBB.CCC.DDD|.EEE + ] assert {:ok, {:module, AAA.BBB.CCC.DDD}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[«AAA.BBB.CCC.DDD».EEE] @@ -364,9 +365,9 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "succeeds for modules within a multi-line node", %{project: project} do code = ~q[ - foo = - On.Another.Lin|e - ] + foo = + On.Another.Lin|e + ] assert {:ok, {:module, On.Another.Line}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «On.Another.Line»] @@ -374,10 +375,10 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "resolves the entire module for multi-line modules", %{project: project} do code = ~q[ - On. - |Multiple. - Lines - ] + On. + |Multiple. + Lines + ] assert {:ok, {:module, On.Multiple.Lines}, resolved_range} = resolve(project, code) @@ -390,8 +391,8 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "succeeds in single line calls", %{project: project} do code = ~q[ - |Enum.map(1..10, & &1 + 1) - ] + |Enum.map(1..10, & &1 + 1) + ] assert {:ok, {:module, Enum}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[«Enum».map(1..10, & &1 + 1)] @@ -399,10 +400,10 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "succeeds in multi-line calls", %{project: project} do code = ~q[ - |Enum.map(1..10, fn i -> - i + 1 - end) - ] + |Enum.map(1..10, fn i -> + i + 1 + end) + ] assert {:ok, {:module, Enum}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[«Enum».map(1..10, fn i ->] @@ -410,11 +411,11 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "expands top-level aliases", %{project: project} do code = ~q[ - defmodule Example do - alias Long.Aliased.Module - Modul|e - end - ] + defmodule Example do + alias Long.Aliased.Module + Modul|e + end + ] assert {:ok, {:module, Long.Aliased.Module}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «Module»] @@ -422,11 +423,11 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "ignores top-level aliases made after the cursor", %{project: project} do code = ~q[ - defmodule Example do - Modul|e - alias Long.Aliased.Module - end - ] + defmodule Example do + Modul|e + alias Long.Aliased.Module + end + ] assert {:ok, {:module, Module}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «Module»] @@ -434,13 +435,13 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "resolves implicit aliases", %{project: project} do code = ~q[ - defmodule Example do - defmodule Inner do - end + defmodule 
Example do + defmodule Inner do + end - Inne|r - end - ] + Inne|r + end + ] assert {:ok, {:module, Example.Inner}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «Inner»] @@ -448,10 +449,10 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "expands current module", %{project: project} do code = ~q[ - defmodule Example do - |__MODULE__ - end - ] + defmodule Example do + |__MODULE__ + end + ] assert {:ok, {:module, Example}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «__MODULE__»] @@ -459,10 +460,10 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "expands current module used in alias", %{project: project} do code = ~q[ - defmodule Example do - |__MODULE__.Nested - end - ] + defmodule Example do + |__MODULE__.Nested + end + ] assert {:ok, {:module, Example}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «__MODULE__».Nested] @@ -470,40 +471,304 @@ defmodule Lexical.Server.CodeIntelligence.EntityTest do test "expands alias following current module", %{project: project} do code = ~q[ - defmodule Example do - __MODULE__.|Nested - end - ] + defmodule Example do + __MODULE__.|Nested + end + ] assert {:ok, {:module, Example.Nested}, resolved_range} = resolve(project, code) assert resolved_range =~ ~S[ «__MODULE__.Nested»] end end + describe "struct resolve/2" do + test "succeeds when the cursor is on the %", %{project: project} do + code = ~q[ + |%MyStruct{} + ] + + assert {:ok, {:struct, MyStruct}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[%«MyStruct»{}] + end + + test "succeeds when the cursor is in an alias", %{project: project} do + code = ~q[ + %My|Struct{} + ] + + assert {:ok, {:struct, MyStruct}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[%«MyStruct»{}] + end + + test "succeeds when the cursor is on the opening bracket", %{project: project} do + code = ~q[ + %MyStruct|{} + ] + + assert {:ok, {:struct, MyStruct}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[%«MyStruct»{}] + end + + test "succeeds when the struct fields span multiple lines", %{project: project} do + code = ~q[ + %MyStruct.|Nested{ + foo: 1, + bar: 2 + } + ] + + assert {:ok, {:struct, MyStruct.Nested}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[%«MyStruct.Nested»{] + end + + test "succeeds when the struct spans multiple lines", %{project: project} do + code = ~q[ + %On. + |Multiple. + Lines{} + ] + + assert {:ok, {:struct, On.Multiple.Lines}, resolved_range} = resolve(project, code) + + assert resolved_range =~ """ + %«On. + Multiple. 
+ Lines»{}\ + """ + end + + test "includes trailing module segments", %{project: project} do + code = ~q[ + %My|Struct.Nested{} + ] + + assert {:ok, {:struct, MyStruct.Nested}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[%«MyStruct.Nested»{}] + end + + test "expands current module", %{project: project} do + code = ~q[ + defmodule Example do + %|__MODULE__{} + end + ] + + assert {:ok, {:struct, Example}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ %«__MODULE__»{}] + end + + test "succeeds for implicitly aliased module", %{project: project} do + code = ~q< + defmodule Example do + defmodule Inner do + defstruct [] + end + + %|Inner{} + end + > + + assert {:ok, {:struct, Example.Inner}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ %«Inner»{}] + end + + test "succeeds for explicitly aliased module", %{project: project} do + code = ~q[ + defmodule Example do + alias Something.Example + %Example.|Inner{} + end + ] + + assert {:ok, {:struct, Something.Example.Inner}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ %«Example.Inner»{}] + end + + test "succeeds for module nested inside current module", %{project: project} do + code = ~q[ + defmodule Example do + %__MODULE__.|Inner{} + end + ] + + assert {:ok, {:struct, Example.Inner}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ %«__MODULE__.Inner»{}] + end + end + + describe "call resolve/2" do + test "qualified call", %{project: project} do + code = ~q[ + def example do + MyModule.|some_function(1, 2, 3) + end + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[«MyModule.some_function»(1, 2, 3)] + end + + test "qualified call without parens", %{project: project} do + code = ~q[ + MyModule.|some_function 1, 2, 3 + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[«MyModule.some_function» 1, 2, 3] + end + + test "qualified call with nested alias", %{project: project} do + code = ~q[ + MyModule.Nested.|some_function(1, 2, 3) + ] + + assert {:ok, {:call, MyModule.Nested, :some_function, 3}, resolved_range} = + resolve(project, code) + + assert resolved_range =~ ~S[«MyModule.Nested.some_function»(1, 2, 3)] + end + + test "multi-line qualified call", %{project: project} do + code = ~q[ + MyModule.|some_function( + 1, 2, 3 + ) + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[«MyModule.some_function»(] + end + + test "qualified call at start of pipe", %{project: project} do + code = ~q[ + 1 + |> MyModule.|some_function(2, 3) + |> other() + |> other() + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[|> «MyModule.some_function»(2, 3)] + end + + test "qualified call at end of pipe", %{project: project} do + code = ~q[ + 1 + |> other() + |> other() + |> MyModule.|some_function(2, 3) + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[|> «MyModule.some_function»(2, 3)] + end + + test "qualified call nested in a pipe", %{project: project} do + code = ~q[ + 1 + |> other() + |> MyModule.|some_function(2, 3) + |> other() + ] + + assert {:ok, {:call, MyModule, :some_function, 3}, resolved_range} = resolve(project, code) + assert resolved_range =~ 
~S[|> «MyModule.some_function»(2, 3)] + end + + test "qualified call inside another call", %{project: project} do + code = ~q[ + foo(1, 2, MyModule.|some_function(3)) + ] + + assert {:ok, {:call, MyModule, :some_function, 1}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[foo(1, 2, «MyModule.some_function»(3))] + end + + test "qualified call on same line as a string with newlines", %{project: project} do + code = ~q[ + Enum.map_join(list, "\n\n---\n\n", &String.tri|m(&1)) <> "\n" + ] + + assert {:ok, {:call, String, :trim, 1}, _} = resolve(project, code) + end + + test "qualified call within a block", %{project: project} do + code = ~q/ + if true do + MyModule.some_|function(bar) + :ok + end + / + + assert {:ok, {:call, MyModule, :some_function, 1}, _} = resolve(project, code) + end + + test "qualified call on left of type operator", %{project: project} do + code = ~q[ + my_dsl do + MyModule.|my_fun() :: MyModule.t() + end + ] + + assert {:ok, {:call, MyModule, :my_fun, 0}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ «MyModule.my_fun»() :: MyModule.t()] + end + end + + describe "type resolve/2" do + test "qualified types in @type", %{project: project} do + code = ~q[ + @type my_type :: MyModule.|t() + ] + + assert {:ok, {:type, MyModule, :t, 0}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[@type my_type :: «MyModule.t»()] + end + + test "qualified types in @spec", %{project: project} do + code = ~q[ + @spec my_fun() :: MyModule.|t() + ] + + assert {:ok, {:type, MyModule, :t, 0}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[@spec my_fun() :: «MyModule.t»()] + end + + test "qualified types in DSL", %{project: project} do + code = ~q[ + my_dsl do + my_fun() :: MyModule.|t() + end + ] + + assert {:ok, {:type, MyModule, :t, 0}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[ my_fun() :: «MyModule.t»()] + end + + test "qualified types in nested structure", %{project: project} do + code = ~q[ + @type my_type :: %{foo: MyModule.|t()} + ] + + assert {:ok, {:type, MyModule, :t, 0}, resolved_range} = resolve(project, code) + assert resolved_range =~ ~S[@type my_type :: %{foo: «MyModule.t»()}] + end + end + defp resolve(project, code) do - with {position, code} <- pop_position(code), + with {position, code} <- pop_cursor(code), {:ok, document} <- subject_module(project, code), {:ok, resolved, range} <- Entity.resolve(document, position) do {:ok, resolved, decorate(document, range)} end end - defp definition(project, subject_module) do - with {position, subject_module} <- pop_position(subject_module), - {:ok, subject_module_doc} <- subject_module(project, subject_module), + defp definition(project, code) do + with {position, code} <- pop_cursor(code), + {:ok, document} <- subject_module(project, code), {:ok, %Location{} = location} <- - Entity.definition(project, subject_module_doc, position) do + Entity.definition(project, document, position) do {:ok, location.document.uri, decorate(location.document, location.range)} end end - - defp pop_position(subject_module) do - position = caller_position(subject_module) - {position, strip_cursor(subject_module)} - end - - defp caller_position(subject_module) do - {line, character} = cursor_position(subject_module) - position(line, character) - end end diff --git a/apps/server/test/lexical/server/provider/handlers/hover_test.exs b/apps/server/test/lexical/server/provider/handlers/hover_test.exs index ce2e4dfd6..82a2cc712 100644 --- 
a/apps/server/test/lexical/server/provider/handlers/hover_test.exs +++ b/apps/server/test/lexical/server/provider/handlers/hover_test.exs @@ -14,11 +14,11 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport + import Lexical.Test.RangeSupport require Messages use ExUnit.Case, async: false - use Lexical.Test.PositionSupport setup_all do project = Fixtures.project() @@ -81,7 +81,7 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do end describe "module hover" do - test "replies with public module doc", %{project: project} do + test "with @moduledoc", %{project: project} do code = ~q[ defmodule HoverWithDoc do @moduledoc """ @@ -90,67 +90,590 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do end ] - with_compiled_in(project, code, fn -> - assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, "|HoverWithDoc") - assert result.contents.kind == :markdown + hovered = "|HoverWithDoc" - assert result.contents.value == """ - ### HoverWithDoc + expected = """ + ```elixir + HoverWithDoc + ``` - This module has a moduledoc. - """ + This module has a moduledoc. + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«HoverWithDoc»" = hovered |> strip_cursor() |> decorate(result.range) end) end - test "notes private modules", %{project: project} do + test "with @moduledoc false", %{project: project} do code = ~q[ defmodule HoverPrivate do @moduledoc false end ] + hovered = "|HoverPrivate" + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: nil}} = hover(project, hovered) + end) + end + + test "without @moduledoc", %{project: project} do + code = ~q[ + defmodule HoverNoDocs do + end + ] + + hovered = "|HoverNoDocs" + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: nil}} = hover(project, hovered) + end) + end + + test "behaviour callbacks", %{project: project} do + code = ~q[ + defmodule HoverBehaviour do + @moduledoc "This is a custom behaviour." + + @type custom_type :: term() + + @callback foo(integer(), float()) :: custom_type + @callback bar(term()) :: {:ok, custom_type} + end + ] + + hovered = "|HoverBehaviour" + + expected = """ + ```elixir + HoverBehaviour + ``` + + This is a custom behaviour. + + ## Callbacks + + ```elixir + @callback bar(term()) :: {:ok, custom_type()} + ``` + + ```elixir + @callback foo(integer(), float()) :: custom_type() + ``` + """ + with_compiled_in(project, code, fn -> - assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, "|HoverPrivate") + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«HoverBehaviour»" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "behaviour callbacks with docs", %{project: project} do + code = ~q[ + defmodule HoverBehaviour do + @moduledoc "This is a custom behaviour." + + @type custom_type :: term() + + @doc """ + This is the doc for `foo/2`. + """ + @callback foo(integer(), float()) :: custom_type + + @doc """ + This is the doc for `bar/1`. + """ + @callback bar(term()) :: {:ok, custom_type} + + @callback baz(term()) :: :ok + end + ] + + hovered = "|HoverBehaviour" + + expected = """ + ```elixir + HoverBehaviour + ``` + + This is a custom behaviour. 
+ + ## Callbacks - assert result.contents.value == """ - ### HoverPrivate + ```elixir + @callback bar(term()) :: {:ok, custom_type()} + ``` - *This module is private.* - """ + This is the doc for `bar/1`. + + ```elixir + @callback baz(term()) :: :ok + ``` + + ```elixir + @callback foo(integer(), float()) :: custom_type() + ``` + + This is the doc for `foo/2`. + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«HoverBehaviour»" = hovered |> strip_cursor() |> decorate(result.range) end) end - test "notes modules without docs", %{project: project} do + test "struct with @moduledoc includes t/0 type", %{project: project} do code = ~q[ - defmodule HoverNoDocs do + defmodule StructWithDoc do + @moduledoc """ + This module has a moduledoc. + """ + + defstruct foo: nil, bar: nil, baz: nil + @type t :: %__MODULE__{ + foo: String.t(), + bar: integer(), + baz: {boolean(), reference()} + } + end + ] + + hovered = "%|StructWithDoc{}" + + expected = """ + ```elixir + %StructWithDoc{} + + @type t() :: %StructWithDoc{ + bar: integer(), + baz: {boolean(), reference()}, + foo: String.t() + } + ``` + + This module has a moduledoc. + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "%«StructWithDoc»{}" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "struct with @moduledoc includes all t types", %{project: project} do + code = ~q[ + defmodule StructWithDoc do + @moduledoc """ + This module has a moduledoc. + """ + + defstruct foo: nil + @type t :: %__MODULE__{foo: String.t()} + @type t(kind) :: %__MODULE__{foo: kind} + @type t(kind1, kind2) :: %__MODULE__{foo: {kind1, kind2}} end ] + hovered = "%|StructWithDoc{}" + + expected = """ + ```elixir + %StructWithDoc{} + + @type t() :: %StructWithDoc{foo: String.t()} + + @type t(kind) :: %StructWithDoc{foo: kind} + + @type t(kind1, kind2) :: %StructWithDoc{foo: {kind1, kind2}} + ``` + + This module has a moduledoc. + """ + with_compiled_in(project, code, fn -> - assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, "|HoverNoDocs") + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "%«StructWithDoc»{}" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "struct with @moduledoc without type", %{project: project} do + code = ~q[ + defmodule StructWithDoc do + @moduledoc """ + This module has a moduledoc. + """ + + defstruct foo: nil + end + ] + + hovered = "%|StructWithDoc{}" - assert result.contents.value == """ - ### HoverNoDocs + expected = """ + ```elixir + %StructWithDoc{} + ``` - *This module is undocumented.* - """ + This module has a moduledoc. 
+ """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "%«StructWithDoc»{}" = hovered |> strip_cursor() |> decorate(result.range) end) end end - defp hover(project, code) do - with {position, code} <- pop_position(code), - {:ok, document} <- document_with_content(project, code), - {:ok, request} <- hover_request(document.uri, position) do - Handlers.Hover.handle(request, %Env{project: project}) + describe "call hover" do + test "public function with @doc and @spec", %{project: project} do + code = ~q[ + defmodule CallHover do + @doc """ + This function has docs. + """ + @spec my_fun(integer(), integer()) :: integer() + def my_fun(x, y), do: x + y + end + ] + + hovered = "CallHover.|my_fun(1, 2)" + + expected = """ + ```elixir + CallHover.my_fun(x, y) + + @spec my_fun(integer(), integer()) :: integer() + ``` + + This function has docs. + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«CallHover.my_fun»(1, 2)" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "public function with multiple @spec", %{project: project} do + code = ~q[ + defmodule CallHover do + @spec my_fun(integer(), integer()) :: integer() + @spec my_fun(float(), float()) :: float() + def my_fun(x, y), do: x + y + end + ] + + hovered = "CallHover.|my_fun(1, 2)" + + expected = """ + ```elixir + CallHover.my_fun(x, y) + + @spec my_fun(integer(), integer()) :: integer() + @spec my_fun(float(), float()) :: float() + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«CallHover.my_fun»(1, 2)" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "public function with multiple arities and @spec", %{project: project} do + code = ~q[ + defmodule CallHover do + @spec my_fun(integer()) :: integer() + def my_fun(x), do: x + 1 + + @spec my_fun(integer(), integer()) :: integer() + def my_fun(x, y), do: x + y + + @spec my_fun(integer(), integer(), integer()) :: integer() + def my_fun(x, y, z), do: x + y + z + end + ] + + hovered = "CallHover.|my_fun(1, 2)" + + expected = """ + ```elixir + CallHover.my_fun(x, y) + + @spec my_fun(integer(), integer()) :: integer() + ``` + + --- + + ```elixir + CallHover.my_fun(x, y, z) + + @spec my_fun(integer(), integer(), integer()) :: integer() + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«CallHover.my_fun»(1, 2)" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "hovering a public function without parens", %{project: project} do + code = ~q[ + defmodule CallHover do + @doc "Function doc" + def my_fun(x), do: x + 1 + end + ] + + hovered = "CallHover.|my_fun" + + expected = """ + ```elixir + CallHover.my_fun(x) + ``` + + Function doc + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == 
expected + assert "«CallHover.my_fun»" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "private function", %{project: project} do + code = ~q[ + defmodule CallHover do + @spec my_fun(integer()) :: integer() + defp my_fun(x), do: x + 1 + + def my_other_fun(x, y), do: my_fun(x) + my_fun(y) + end + ] + + hovered = "CallHover.|my_fun(1)" + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: nil}} = hover(project, hovered) + end) + end + + test "private function with public function of same name", %{project: project} do + code = ~q[ + defmodule CallHover do + @spec my_fun(integer()) :: integer() + defp my_fun(x), do: x + 1 + + def my_fun(x, y), do: my_fun(x) + my_fun(y) + end + ] + + hovered = "CallHover.|my_fun(1)" + + expected = """ + ```elixir + CallHover.my_fun(x, y) + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«CallHover.my_fun»(1)" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "public macro with @doc", %{project: project} do + code = ~q[ + defmodule MacroHover do + @doc "This is a macro." + defmacro my_macro(expr) do + {:ok, expr} + end + end + ] + + hovered = "MacroHover.|my_macro(:foo)" + + expected = """ + ```elixir + (macro) MacroHover.my_macro(expr) + ``` + + This is a macro. + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + assert "«MacroHover.my_macro»(:foo)" = hovered |> strip_cursor() |> decorate(result.range) + end) + end + end + + describe "type hover" do + test "with @typedoc", %{project: project} do + code = ~q[ + defmodule TypeHover do + @typedoc """ + This type has docs. + """ + @type my_type() :: integer() + end + ] + + hovered = "@type foo :: TypeHover.|my_type()" + + expected = """ + ```elixir + TypeHover.my_type/0 + + @type my_type() :: integer() + ``` + + This type has docs. 
+ """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + + assert "@type foo :: «TypeHover.my_type»()" = + hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "without @typedoc", %{project: project} do + code = ~q[ + defmodule TypeHover do + @type my_type() :: integer() + end + ] + + hovered = "@type foo :: TypeHover.|my_type()" + + expected = """ + ```elixir + TypeHover.my_type/0 + + @type my_type() :: integer() + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + + assert "@type foo :: «TypeHover.my_type»()" = + hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "with var", %{project: project} do + code = ~q[ + defmodule TypeHover do + @type my_type(var) :: {integer(), var} + end + ] + + hovered = "@type foo :: TypeHover.|my_type(:foo)" + + expected = """ + ```elixir + TypeHover.my_type/1 + + @type my_type(var) :: {integer(), var} + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + + assert "@type foo :: «TypeHover.my_type»(:foo)" = + hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "opaque with var", %{project: project} do + code = ~q[ + defmodule TypeHover do + @opaque my_type(var) :: {integer(), var} + end + ] + + hovered = "@type foo :: TypeHover.|my_type(:foo)" + + expected = """ + ```elixir + TypeHover.my_type/1 + + @opaque my_type(var) + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + + assert "@type foo :: «TypeHover.my_type»(:foo)" = + hovered |> strip_cursor() |> decorate(result.range) + end) + end + + test "private type", %{project: project} do + code = ~q[ + defmodule TypeHover do + @typep my_type() :: integer() + @type other() :: my_type() + end + ] + + hovered = "@type foo :: TypeHover.|my_type()" + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: nil}} = hover(project, hovered) + end) end end - defp pop_position(code) do - {line, character} = cursor_position(code) - {position(line, character), strip_cursor(code)} + defp hover(project, hovered) do + with {position, hovered} <- pop_cursor(hovered), + {:ok, document} <- document_with_content(project, hovered), + {:ok, request} <- hover_request(document.uri, position) do + Handlers.Hover.handle(request, %Env{project: project}) + end end defp document_with_content(project, content) do @@ -179,9 +702,10 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do defp hover_request(path, line, char) do uri = Document.Path.ensure_uri(path) + # convert line and char to zero-based params = [ - text_document: [uri: uri], - position: [line: line, character: char] + position: [line: line - 1, character: char - 1], + text_document: [uri: uri] ] with {:ok, _} <- Document.Store.open_temporary(uri), diff --git a/apps/server/test/support/lexical/test/completion_case.ex b/apps/server/test/support/lexical/test/completion_case.ex index ce39ce844..06098e881 100644 --- 
a/apps/server/test/support/lexical/test/completion_case.ex +++ b/apps/server/test/support/lexical/test/completion_case.ex @@ -41,10 +41,6 @@ defmodule Lexical.Test.Server.CompletionCase do def complete(project, text, opts \\ []) do trigger_character = Keyword.get(opts, :trigger_character) - {line, column} = cursor_position(text) - - text = strip_cursor(text) - root_path = Project.root_path(project) file_path = @@ -61,12 +57,7 @@ defmodule Lexical.Test.Server.CompletionCase do Path.join([root_path, "lib", "file.ex"]) end - document = - file_path - |> Document.Path.ensure_uri() - |> Document.new(text, 0) - - position = %Document.Position{line: line, character: column} + {position, document} = pop_cursor(text, document: file_path) context = if is_binary(trigger_character) do diff --git a/mix.lock b/mix.lock index 8fb876199..e070de734 100644 --- a/mix.lock +++ b/mix.lock @@ -15,6 +15,6 @@ "nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"}, "patch": {:hex, :patch, "0.12.0", "2da8967d382bade20344a3e89d618bfba563b12d4ac93955468e830777f816b0", [:mix], [], "hexpm", "ffd0e9a7f2ad5054f37af84067ee88b1ad337308a1cb227e181e3967127b0235"}, "path_glob": {:hex, :path_glob, "0.2.0", "b9e34b5045cac5ecb76ef1aa55281a52bf603bf7009002085de40958064ca312", [:mix], [{:nimble_parsec, "~> 1.2.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "be2594cb4553169a1a189f95193d910115f64f15f0d689454bb4e8cfae2e7ebc"}, - "sourceror": {:hex, :sourceror, "0.12.3", "a2ad3a1a4554b486d8a113ae7adad5646f938cad99bf8bfcef26dc0c88e8fade", [:mix], [], "hexpm", "4d4e78010ca046524e8194ffc4683422f34a96f6b82901abbb45acc79ace0316"}, + "sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"}, "stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"}, } diff --git a/projects/lexical_test/lib/lexical/test/cursor_support.ex b/projects/lexical_test/lib/lexical/test/cursor_support.ex index 1bfa3c7f2..227450e14 100644 --- a/projects/lexical_test/lib/lexical/test/cursor_support.ex +++ b/projects/lexical_test/lib/lexical/test/cursor_support.ex @@ -1,49 +1,98 @@ defmodule Lexical.Test.CursorSupport do - def cursor_position(text) do - text - |> String.graphemes() - |> Enum.chunk_every(2, 1, [""]) - |> Enum.reduce_while({starting_line(), starting_column()}, fn - ["|", ">"], {line, column} -> - {:cont, {line, column + 1}} + @moduledoc """ + Utilities for extracting cursor position in code fragments and documents. 
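+
+  A typical call with the default `"|"` cursor (see `pop_cursor/2` below for
+  the available options and exact return shapes):
+
+      pop_cursor("foo|bar")
+      #=> {%Position{line: 1, column: 4}, "foobar"}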
+ """ - ["|", _], position -> - {:halt, position} + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Test.PositionSupport - ["\n", _], {line, _column} -> - {:cont, {line + 1, starting_column()}} + @default_cursor "|" + @starting_line 1 + @starting_column 1 - _, {line, column} -> - {:cont, {line, column + 1}} - end) - end + @type cursor_position :: {pos_integer(), pos_integer()} - def context_before_cursor(text) do - text - |> String.graphemes() - |> Enum.chunk_every(2, 1, [""]) - |> Enum.reduce_while([], fn - ["|", ">"], iodata -> - {:cont, [iodata, "|"]} + @doc """ + Finds a cursor in `text` and returns a tuple of the cursor position and + the text with the cursor stripped out. - ["|", _lookahead], iodata -> - {:halt, iodata} + ## Options - [c, _], iodata -> - {:cont, [iodata, c]} - end) - |> IO.iodata_to_binary() + * `:cursor` - the cursor string to be found. Defaults to + `#{inspect(@default_cursor)}`. + + * `:as` - one of `:text` (default) or `:document`. If `:document`, + wraps the text in a `Lexical.Document` using the URI `"file:///file.ex"`. + + * `:document` - the document path or URI. Setting this option implies + `as: :document`. + + ## Examples + + iex> code = \""" + ...> defmodule MyModule do + ...> alias Foo| + ...> end + ...> \""" + + iex> pop_cursor(code) + { + %Position{line: 2, column: 12}, + \""" + defmodule MyModule do + alias Foo + end + \""" + } + + iex> pop_cursor(code, as: :document) + { + %Position{line: 2, column: 12}, + %Document{uri: "file:///file.ex", ...} + } + + iex> pop_cursor(code, document: "my_doc.ex") + { + %Position{line: 2, column: 12}, + %Document{uri: "file:///my_doc.ex", ...} + } + + """ + @spec pop_cursor(text :: String.t(), [opt]) :: {Position.t(), String.t() | Document.t()} + when opt: {:cursor, String.t()} | {:as, :text | :document} | {:document, String.t()} + def pop_cursor(text, opts \\ []) do + cursor = Keyword.get(opts, :cursor, @default_cursor) + as_document? = opts[:as] == :document or is_binary(opts[:document]) + + {line, column} = cursor_position(text, cursor) + stripped_text = strip_cursor(text, cursor) + + if as_document? do + uri = opts |> Keyword.get(:document, "file:///file.ex") |> Document.Path.ensure_uri() + document = Document.new(uri, stripped_text, 0) + position = Position.new(document, line, column) + {position, document} + else + position = PositionSupport.position(line, column) + {position, stripped_text} + end end - def strip_cursor(text) do + @doc """ + Strips all instances of `cursor` from `text`. 
+ """ + @spec strip_cursor(text :: String.t(), cursor :: String.t()) :: String.t() + def strip_cursor(text, cursor \\ @default_cursor) do text |> String.graphemes() |> Enum.chunk_every(2, 1, [""]) |> Enum.reduce([], fn + # don't strip the pipe in a `|>` operator when using the default cursor ["|", ">"], iodata -> [iodata, "|"] - ["|", _lookahead], iodata -> + [^cursor, _lookahead], iodata -> iodata [c, _], iodata -> @@ -52,11 +101,23 @@ defmodule Lexical.Test.CursorSupport do |> IO.iodata_to_binary() end - defp starting_line do - 1 - end + defp cursor_position(text, cursor) do + text + |> String.graphemes() + |> Enum.chunk_every(2, 1, [""]) + |> Enum.reduce_while({@starting_line, @starting_column}, fn + # don't consider the pipe in a `|>` operator when using the default cursor + ["|", ">"], {line, column} -> + {:cont, {line, column + 1}} + + [^cursor, _], position -> + {:halt, position} - defp starting_column do - 1 + ["\n", _], {line, _column} -> + {:cont, {line + 1, @starting_column}} + + _, {line, column} -> + {:cont, {line, column + 1}} + end) end end diff --git a/projects/lexical_test/lib/lexical/test/range_support.ex b/projects/lexical_test/lib/lexical/test/range_support.ex index c4c1ead2b..ea9b10697 100644 --- a/projects/lexical_test/lib/lexical/test/range_support.ex +++ b/projects/lexical_test/lib/lexical/test/range_support.ex @@ -1,11 +1,24 @@ defmodule Lexical.Test.RangeSupport do alias Lexical.Document alias Lexical.Document.Range + alias Lexical.Test.CursorSupport + import Lexical.Document.Line, only: [line: 1] @range_start_marker "«" @range_end_marker "»" + @doc """ + Finds range markers in `text` and returns a tuple containing the range + and the text with the markers stripped out. + """ + @spec pop_range(text :: String.t()) :: {Range.t(), String.t()} + def pop_range(text) do + {start_position, text} = CursorSupport.pop_cursor(text, cursor: @range_start_marker) + {end_position, text} = CursorSupport.pop_cursor(text, cursor: @range_end_marker) + {Range.new(start_position, end_position), text} + end + def decorate(%Document{} = document, %Range{} = range) do index_range = (range.start.line - 1)..(range.end.line - 1)