diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a268e32f8b..6b28fc6fa1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,9 +41,9 @@ jobs: unison-core/.stack-work yaks/easytest/.stack-work # Main cache key: commit hash. This should always result in a cache miss... - key: stack-work-0_${{matrix.os}}-${{github.sha}} + key: stack-work-2_${{matrix.os}}-${{github.sha}} # ...but then fall back on the latest cache stored (on this branch) - restore-keys: stack-work-0_${{matrix.os}}- + restore-keys: stack-work-2_${{matrix.os}}- # Install stack by downloading the binary from GitHub. The installation process is different for Linux and macOS, # so this is split into two steps, only one of which will run on any particular build. diff --git a/.gitignore b/.gitignore index bf9b574196..434b5b4646 100644 --- a/.gitignore +++ b/.gitignore @@ -8,5 +8,8 @@ scratch.u .stack-work stack.yaml.lock +# Cabal +dist-newstyle + # GHC *.hie diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index 9a83c94d11..6df1e3285b 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -52,3 +52,8 @@ The format for this list: name, GitHub handle, and then optional blurb about wha * Nigel Farrelly (@nini-faroux) * Johannes Huster (@JohannesHuster) * Joseph Morag (@jmorag) +* Tavish Pegram (@tapegram) +* Javier Neira (@jneira) +* Simon Højberg (@hojberg) +* David Smith (@shmish111) +* Chris Penner (@ChrisPenner) diff --git a/README.md b/README.md index c9b8cfab2d..ba1dc55da1 100644 --- a/README.md +++ b/README.md @@ -36,6 +36,8 @@ $ stack --version # we'll want to know this version if you run into trouble $ stack build && stack exec tests && stack exec unison ``` +To run a local codebase-ui while building from source, you can use the `/dev-ui-install.sh` script. It will download the latest release of the codebase-ui and put it in the expected location for the unison executable created by `stack build`. 
When you start unison, you'll see a URL where the codebase-ui is running.
a/codebase2/core/unison-core.cabal b/codebase2/core/unison-core.cabal index 023dc77a63..8436916748 100644 --- a/codebase2/core/unison-core.cabal +++ b/codebase2/core/unison-core.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 0bf63b27f45cff424f3fe8db50af886c47b614dee705f88e4f95d96a91efe6b7 +-- hash: f696d0f997ab3afba6494f5932ac232d27e77073a331ab64d3c8b7929d1deb3a name: unison-core version: 0.0.0 @@ -27,8 +27,6 @@ library build-depends: base , containers - , prelude-extras - , text , unison-util , vector default-language: Haskell2010 diff --git a/codebase2/util-serialization/package.yaml b/codebase2/util-serialization/package.yaml index ca5306fee8..51b20adb52 100644 --- a/codebase2/util-serialization/package.yaml +++ b/codebase2/util-serialization/package.yaml @@ -8,10 +8,8 @@ dependencies: - bytes - bytestring - containers - - extra - filepath - text - text-short - unliftio - vector - - unison-util diff --git a/codebase2/util-serialization/unison-util-serialization.cabal b/codebase2/util-serialization/unison-util-serialization.cabal index 8d292fbef6..be2566b468 100644 --- a/codebase2/util-serialization/unison-util-serialization.cabal +++ b/codebase2/util-serialization/unison-util-serialization.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: a3d15e9e483a852609dc7b9c2177cdb29eca32e730651dbbbaac7247b615774b +-- hash: 0dd31d457a6c3bc41017d163374947c1c31752279ce7ce77bc5772150101afe1 name: unison-util-serialization version: 0.0.0 @@ -22,11 +22,9 @@ library , bytes , bytestring , containers - , extra , filepath , text , text-short - , unison-util , unliftio , vector default-language: Haskell2010 diff --git a/codebase2/util/package.yaml b/codebase2/util/package.yaml index 1d0b3cfce3..70828313c7 100644 --- a/codebase2/util/package.yaml +++ b/codebase2/util/package.yaml @@ -9,7 +9,6 @@ dependencies: - bytestring - containers - cryptonite - - extra - lens - memory - safe diff --git 
a/codebase2/util/unison-util.cabal b/codebase2/util/unison-util.cabal index 97514d14a9..4517c23e9b 100644 --- a/codebase2/util/unison-util.cabal +++ b/codebase2/util/unison-util.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: bc7f7157a2b554df63f114447a2a6798a365df3d483cec8e3b87c0770992ac36 +-- hash: a8102baaf0e3fe04c94f0f13427e916237c029cf7554b4ca709684c48ff0b3b0 name: unison-util version: 0.0.0 @@ -41,7 +41,6 @@ library , bytestring , containers , cryptonite - , extra , lens , memory , safe diff --git a/contrib/cabal.project b/contrib/cabal.project index 244fef94aa..d12a7d8c06 100644 --- a/contrib/cabal.project +++ b/contrib/cabal.project @@ -1,10 +1,62 @@ -packages: unison-core/ parser-typechecker/ yaks/easytest/ yaks/haskeline/ +packages: + yaks/easytest + parser-typechecker + unison-core + codebase2/codebase + codebase2/codebase-sqlite + codebase2/codebase-sync + codebase2/core + codebase2/util + codebase2/util-serialization + codebase2/util-term -allow-newer: base, directory, filepath +source-repository-package + type: git + location: https://github.com/unisonweb/configurator.git + tag: e47e9e9fe1f576f8c835183b9def52d73c01327a -package unison-core - ghc-options: -Werror -Wno-type-defaults +source-repository-package + type: git + location: https://github.com/unisonweb/haskeline.git + tag: 2944b11d19ee034c48276edc991736105c9d6143 -package unison-parser-typechecker - ghc-options: -Werror -Wno-type-defaults +source-repository-package + type: git + location: https://github.com/unisonweb/megaparsec.git + tag: c4463124c578e8d1074c04518779b5ce5957af6b +allow-newer: + haskeline:base + +-- For now there is no way to apply ghc-options for all local packages +-- See https://cabal.readthedocs.io/en/latest/cabal-project.html#package-configuration-options +package easytest + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + 
+package parser-typechecker + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package codebase + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package codebase-sqlite + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package codebase-sync + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package core + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package util + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package util-serialization + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +package util-term + ghc-options: -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info + +-- This options are applied to all packages, local ones and also external dependencies. +package * + ghc-options: -haddock diff --git a/development.markdown b/development.markdown index a2f012c2a6..1d9babb2cf 100644 --- a/development.markdown +++ b/development.markdown @@ -36,16 +36,21 @@ That will generate a `.prof` plain text file with profiling dat ## Building with cabal -Unison can also be built/installed with cabal. You'll need ghc 8.6.x to -successfully build its dependencies. The provided project file is also in -contrib/ so you'll need to specify its location on the command line. 
- -* To configure the build, you can use e.g. `cabal v2-configure - --project-file=contrib/cabal.project --with-ghc=ghc-8.6.5` if you have - multiple versions of GHC installed -* To build all projects use `cabal v2-build --project-file=contrib/cabal.project all` -* Tests can be run with e.g. `cabal v2-run --project-file=contrib/cabal.project - parser-typechecker:tests` -* The executable can be installed with `cabal v2-install - --project-file=contrib/cabal.project unison` the install directory can be - modified by setting `installdir: ...` in `.cabal/config` +Unison can also be built/installed with cabal. You'll need the same ghc +used by `stack.yaml` to successfully build its dependencies. +The provided project file is also in contrib/ so you'll need to specify +its location on the command line. + +* To build all projects use + + `cabal v2-build --project-file=contrib/cabal.project all` + +* Tests can be run with e.g. + + `cabal v2-test --project-file=contrib/cabal.project all` + +* The executable can be installed with + + `cabal v2-install --project-file=contrib/cabal.project unison` + +* The install directory can be modified with the option `--installdir: ...` diff --git a/docs/LanguageReference.md b/docs/LanguageReference.md deleted file mode 100644 index 30cb108868..0000000000 --- a/docs/LanguageReference.md +++ /dev/null @@ -1,897 +0,0 @@ -# Unison Language Reference - -## (Unison version 1.0.M1) - - -This document is an informal reference for the Unison language, meant as an aid for Unison programmers as well as authors of implementations of the language. - -* This language reference, like the language it describes, is a work in progress and will be improved over time ([GitHub link](https://github.com/unisonweb/unison/blob/master/docs/LanguageReference.md)). Contributions and corrections are welcome! - -A formal specification of Unison is outside the scope of this document, but links are provided to resources that describe the language’s formal semantics. 
- -## Table of contents - - * [A note on syntax](#a-note-on-syntax) - * [Top-Level declarations](#top-level-declarations) - + [Term Bindings](#term-bindings) - - [Type signature](#type-signature) - - [Term definition](#term-definition) - + [User-defined data types](#user-defined-data-types) - + [User-defined abilities](#user-defined-abilities) - + [Unique types](#unique-types) - + [Use clauses](#use-clauses) - * [Unison expressions](#unison-expressions) - + [Identifiers](#identifiers) - - [Namespace-qualified identifiers](#namespace-qualified-identifiers) - - [Absolutely qualified identifiers](#absolutely-qualified-identifiers) - - [Hash-qualified identifiers](#hash-qualified-identifiers) - + [Reserved words](#reserved-words) - + [Blocks and statements](#blocks-and-statements) - - [The Lexical Syntax of Blocks](#the-lexical-syntax-of-blocks) - + [Literals](#literals) - - [Escape sequences](#escape-sequences) - + [Comments](#comments) - + [Type annotations](#type-annotations) - + [Parenthesized expressions](#parenthesized-expressions) - + [Function application](#function-application) - + [Boolean expressions](#boolean-expressions) - - [Conditional expressions](#conditional-expressions) - - [Boolean conjunction and disjunction](#boolean-conjunction-and-disjunction) - + [Delayed computations](#delayed-computations) - + [Case expressions and pattern matching](#case-expressions-and-pattern-matching) - - [Pattern matching](#pattern-matching) - * [Blank patterns](#blank-patterns) - * [Literal patterns](#literal-patterns) - * [Variable patterns](#variable-patterns) - * [As-patterns](#as-patterns) - * [Constructor patterns](#constructor-patterns) - * [List patterns](#list-patterns) - * [Tuple patterns](#tuple-patterns) - * [Ability patterns](#ability-patterns) - * [Guard patterns](#guard-patterns) - * [Hashes](#hashes) - + [Literal hash references](#literal-hash-references) - + [Short hashes](#short-hashes) - * [Unison types](#unison-types) - + [Type 
variables](#type-variables) - + [Polymorphic types](#polymorphic-types) - + [Scoped type variables](#scoped-type-variables) - + [Type constructors](#type-constructors) - - [Kinds of Types](#kinds-of-types) - + [Type application](#type-application) - + [Function types](#function-types) - + [Tuple types](#tuple-types) - + [Built-in types](#built-in-types) - + [Built-in type constructors](#built-in-type-constructors) - * [Abilities and ability handlers](#abilities-and-ability-handlers) - + [Abilities in function types](#abilities-in-function-types) - + [User-defined abilities](#user-defined-abilities-1) - + [Ability handlers](#ability-handlers) - - [Pattern matching on ability constructors](#pattern-matching-on-ability-constructors) - * [Name resolution and the environment](#name-resolution-and-the-environment) - + [Type-directed name resolution](#type-directed-name-resolution) - - -## A note on syntax -Unison is a language in which _programs are not text_. That is, the source of truth for a program is not its textual representation as source code, but its structured representation as an abstract syntax tree. - -This document describes Unison in terms of its default (and currently, only) textual rendering into source code. - -## Top-Level declarations -This section describes the syntactic structure and informal semantics of Unison declarations. - -A top-level declaration can appear at the _top level_ or outermost scope of a Unison File. It can be a [term binding](#term-bindings), a [user-defined data type](#user-defined-data-types), a [user-defined ability](#user-defined-abilities), or a [use clause](#use-clauses). - -### Term Bindings - -A Unison term binding consists of an optional [type signature](#type-signature), and a [term definition](#term-definition). For example: - -``` haskell -timesTwo : Nat -> Nat -timesTwo x = x * 2 -``` - -The first line in the above is a type signature. 
The type signature `timesTwo : Nat -> Nat` declares that the term named `timesTwo` is a function accepting an argument of type `Nat` and computes a value of type `Nat`. The type `Nat` is the type of 64-bit natural numbers starting from zero. See [Unison types](#unison-types) for details. - -The second line is the term definition. The `=` sign splits the definition into a _left-hand side_, which is the term being defined, and the _right-hand side_, which is the definition of the term. - -The general form of a term binding is: - -``` haskell -name : Type -name p_1 p_2 … p_n = expression -``` - -#### Type signature -`name : Type` is a type signature, where `name` is the name of the term being defined and `Type` is a [type](#unison-types) for that term. The `name` given in the type signature and the `name` given in the definition must be the same. - -Type signatures are optional. In the absence of a type signature, Unison will automatically infer the type of a term declaration. If a type signature is present, Unison will verify that the term has the type given in the signature. - -#### Term definition -A term definition has the form `f p_1 p_2 … p_n = e` where `f` is the name of the term being defined. The parameters `p_1` through `p_n` are the names of parameters, if any (if the term is a function), separated by spaces. The right-hand side of the `=` sign is any [Unison expression](#unison-expressions). - -The names of the parameters as well as the name of the term are bound as local variables in the expression on the right-hand side (also known as the _body_ of the function). When the function is called, the parameter names are bound to any arguments passed in the call. See [function application](#function-application) for details on the call semantics of functions. - -If the term takes no arguments, the term has the value of the fully evaluated expression on the right-hand side and is not a function. 
- -The expression comprising the right-hand side can refer to the name given to the definition in the left-hand side. In that case, it’s a recursive definition. For example: - -``` Haskell -sumUpTo : Nat -> Nat -sumUpTo n = - if n < 2 then n - else n + sumUpto (drop n 1) -``` - -The above defines a function `sumUpTo` that recursively sums all the natural numbers less than some number `n`. As an example, `sumUpTo 3` is `1 + 2 + 3`, which is `6`. - -Note: The expression `drop n 1` on line 4 above subtracts one from the natural number `n`. Since the natural numbers are not closed under subtraction (`n - 1` is an `Int`), we use the operation `drop` which has the convention that `drop n 0 = 0` for all natural numbers `n`. Unison's type system saves us from having to deal with negative numbers here. - -#### Operator definitions -[Operator identifiers](#identifiers) are valid names for Unison definitions, but the syntax for defining them is slightly different. For example, we could define a binary operator `?`: - -``` Haskell -(?) x y = if x == 0 then y else x -``` - -Or we could define it using infix notation: - -``` haskell -x ? y = if x == 0 then y else x -``` - -If we want to give the operator a qualified name, we put the qualifier inside the parentheses: - -``` Haskell -(MyNamespace.?) x y = if x == 0 then y else x -``` - -Or if defining it infix: - -``` haskell -x MyNamespace.? y = if x == 0 then y else x -``` - -The operator can be applied using either notation, no matter which way it's defined. See [function application](#function-application) for details. - -### User-defined data types - -A user-defined data type is introduced with the `type` keyword. - -For example: - -``` haskell -type Optional a = None | Some a -``` - -The `=` sign splits the definition into a _left-hand side_ and a _right-hand side_, much like term definitions. - -The left-hand side is the data type being defined. 
It gives a name for the data type and declares a new _type constructor_ with that name (here it’s named `Optional`), followed by names for any type arguments (here there is one and it’s called `a`). These names are bound as type variables in the right-hand side. The right-hand side may also refer to the name given to the type in the left-hand side, in which case it is a recursive type declaration. Note that the fully saturated type construction `Optional Nat` is a type, whereas `Optional` by itself is a type constructor, not a type (it requires a type argument in order to construct a type). - -The right-hand side consists of zero or more data constructors separated by `|`. These are _data constructors_ for the type, or ways in which values of the type can be constructed. Each case declares a name for a data constructor (here the data constructors are `None` and `Some`), followed by the **types** of the arguments to the constructor. - -When Unison compiles a type definition, it generates a term for each data constructor. Here they are the terms `Some : a -> Optional a`, and `None : Optional a`. It also generates _patterns_ for matching on data -(see [Pattern Matching](#pattern-matching)). - -The general form of a type declaration is as follows: - -``` haskell -]?>?> type TypeConstructor p1 p2 … pn - = DataConstructor_1 - | DataConstructor_2 - .. - | DataConstructor_n -``` - -The optional `unique` keyword introduces a [unique type](#unique-types), explained in the next section. - -#### Unique types - -A type declaration gives a name to a type, but Unison does not uniquely identify a type by its name. Rather, the [hash](#hashes) of a type's definition identifies the type. The hash is based on the _structure_ of the type definition, with all identifiers removed. 
- -For example, Unison considers these type declarations to declare _the exact same type_, even though they give different names to both the type constructor and the data constructors: - -``` haskell -type Optional a = Some a | None - -type Maybe a = Just a | Nothing -``` - -So a value `Some 10` and a value `Just 10` are in fact the same value and these two expressions have the same type. Even though one nominally has the type `Optional Nat` and the other `Maybe Nat`, Unison understands that as the type `#5isltsdct9fhcrvu ##Nat`. - -This is not always what you want. Sometimes you want to give meaning to a type that is more than just its structure. For example, it might be confusing that these two types are identical: - -``` haskell -type Suit = Hearts | Spades | Diamonds | Clubs - -type Direction = North | South | East | West -``` - -Unison will consider every unary type constructor with four nullary data constructors as identical to these declarations. So Unison will not stop us providing a `Direction` where a `Suit` is expected. - -The `unique` keyword solves this problem: - -``` haskell -unique type Suit = Hearts | Spades | Diamonds | Clubs - -unique type Direction = North | South | East | West -``` - -When compiling these declarations, Unison will generate a [universally unique identifier](https://en.wikipedia.org/wiki/Universally_unique_identifier) for the type and use that identifier when generating the hash for the type. As a result, the type gets a hash that is universally unique. - -You can supply a unique identifier yourself if you want the hash to be completely determined by the source code. The optional identifier goes in square brackets after the `unique` keyword: - -``` haskell -unique[suit_CBtSxLszHECvjClJpNtxYw] type - Suit = Hearts | Spades | Diamonds | Clubs - -unique[direction_PWOxXSDnUkKOJttYCZTQ3Q] type - Direction = North | South | East | West -``` - -The unique identifier must be a valid [regular identifier](#identifiers). 
It's a good idea to use a UUID generator to generate these for you to ensure that they are unique. - -### User-defined abilities - -A user-defined _ability_ declaration has the following general form: - -``` haskell -ability A p_1 p_2 … p_n where - Request_1 : Type_1 - Request_2 : Type_2 - Request_n : Type_n -``` - -This declares an _ability type constructor_ `A` with type parameters `p_1` through `p_n`, and _request constructors_ `Request_1` through `Request_n`. - -See [Abilities and Ability Handlers](#abilities-and-ability-handlers) for more on user-defined abilities. - -### Use clauses - -A _use clause_ tells Unison to allow [identifiers](#identifiers) from a given [namespace](#namespace-qualified-identifiers) to be used [unqualified](#namespace-qualified-identifiers) in the lexical scope where the use clause appears. - -In this example, the `use .base.List` clause allows the definition that follows it to refer to `.base.List.take` as simply `take`: - -``` haskell -use .base.List - -oneTwo = take 2 [1,2,3] -``` - -The general form of `use` clauses is as follows: - -``` haskell -use namespace name_1 name_2 .. name_n -``` - -Where `namespace` is the namespace from which we want to use names unqualified, and `name_1` through `name_n` are the names we want to use. If no names are given in the `use` clause, Unison allows all the names from the namespace to be used unqualified. There's no performance penalty for this, as `use` clauses are purely a syntactic convenience. When rendering code as text, Unison will insert precise `use` clauses that mention exactly the names it uses, even if the programmer omitted the list of names. - -See the section on [identifiers](#identifiers) for more on namespaces as well as qualified and unqualified names. - -## Unison expressions -This section describes the syntax and informal semantics of Unison expressions. 
- -Unison’s evaluation strategy for expressions is [Applicative Order Call-by-Value](https://en.wikipedia.org/wiki/Evaluation_strategy#Applicative_order). See [Function application](#function-application) for details. - -### Identifiers -Unison identifiers come in two flavors: - -1. _Regular identifiers_ start with an alphabetic unicode character, emoji (which is any unicode character between 1F400 and 1FAFF inclusive), or underscore (`_`), followed by any number of alphanumeric characters, emoji, or the characters `_`, `!`, or `'`. For example, `foo`, `_bar4`, `qux'`, and `set!` are valid regular identifiers. -2. _Operators_ consist entirely of the characters `!$%^&*-=+<>.~\\/|:`. For example, `+`, `*`, `<>`, and `>>=` are valid operators. - -#### Namespace-qualified identifiers -The above describes _unqualified_ identifiers. An identifier can also be _qualified_. A qualified identifier consists of a _qualifier_ or _namespace_, followed by a `.`, followed by either a regular identifier or an operator. The qualifier is one or more regular identifiers separated by `.`. For example `Foo.Bar.baz` is a qualified identifier where `Foo.Bar` is the qualifier. - -#### Absolutely qualified identifiers -Namespace-qualified identifiers described above are relative to a “current” namespace, which the programmer can set (and defaults to the root of the global namespace). To ignore the current namespace, an identifier can have an _absolute qualifier_. An absolutely qualified name begins with a `.`. For example, the name `.base.List` always refers to the name `.base.List`, regardless of the current namespace, whereas the name `base.List` will refer to `foo.base.List` if the current namespace is `foo`. - -Note that [operator identifiers](#identifiers) may contain the character `.`. In order for this to not create ambiguity, the rule is as follows: - -1. `.` by itself is always an operator. -2. Any other identifier beginning with `.` is an absolutely qualified identifier. -3. 
A `.` immediately following a namespace is always a namespace separator. -4. Otherwise a `.` is treated as part of an operator identifier. - -if `.` is followed by whitespace or another operator character, the `.` is treated like an operator character. If it's followed by a [regular identifier](#identifiers) character, it's treated as a namespace separator. - -#### Hash-qualified identifiers -Any identifier, including a namespace-qualified one, can appear _hash-qualified_. A hash-qualified identifier has the form `x#h` where `x` is an identifier and `#h` is a [hash literal](#hashes). The hash disambiguates names that may refer to more than one thing. - -### Reserved words -The following names are reserved by Unison and cannot be used as identifiers: `=`, `:`, `->`, `if`, `then`, `else`, `forall`, `handle`, `in`, `unique`, `where`, `use`, `and`, `or`, `true`, `false`, `type`, `ability`, `alias`, `let`, `namespace`, `case`, `of`, `with`. - -### Blocks and statements -A block is an expression that has the general form: - -``` haskell -statement_1 -statement_2 -... -statement_n -expression -``` - -A block can have zero or more statements, and the value of the whole block is the value of the final `expression`. A statement is either: - -1. A [term definition](#term-definition) which defines a term within the scope of the block. The definition is not visible outside this scope, and is bound to a local name. Unlike top-level definitions, a block-level definition does not result in a hash, and cannot be referenced with a [hash literal](#hashes). -2. A [Unison expression](#unison-expressions). In particular, blocks often contain _action expressions_, which are expressions evaluated solely for their effects. An action expression has type `{A} T` for some ability `A` (see [Abilities and Ability Handlers](#abilities-and-ability-handlers)) and some type `T`. -3. A [`use` clause](#use-clauses). 
- -An example of a block (this evaluates to `16`): - -``` haskell -x = 4 -y = x + 2 -f a = a + y -f 10 -``` - -A number of language constructs introduce blocks. These are detailed in the relevant sections of this reference. Wherever Unison expects an expression, a block can be introduced with the `let` keyword: - -``` haskell -let -``` - -Where `` denotes a block as described above. - -#### The Lexical Syntax of Blocks -The standard syntax expects statements to appear in a line-oriented layout, where whitespace is significant. - -The opening keyword (`let`, `if`, `then`, or `else`, for example) introduces the block, and the position of the first character of the first statement in the block determines the top-left corner of the block. The beginning of each statement in the block must be lined up exactly with the left edge of the block. The first non-whitespace character that appears to the left of that edge (i.e. outdented) ends the block. Certain keywords also end blocks. For example, `then` ends the block introduced by `if`. - -A statement or expression in a block can continue for more than one line as long as each line of the statement is indented further than the first character of the statement or expression. - -For example, these are valid indentations for a block: - -``` haskell -let - x = 1 - y = 2 - x + y - - -let x = 1 - y = 2 - x + y -``` - -Whereas these are incorrect: - -``` haskell -let x = 1 - y = 2 - x + y - -let x = 1 - y = 2 - x + y -``` - -### Literals -A literal expression is a basic form of Unison expression. Unison has the following types of literals: - -* A _64-bit unsigned integer_ of type `.base.Nat` (which stands for _natural number_) consists of digits from 0 to 9. The smallest `Nat` is `0` and the largest is `18446744073709551615`. -* A _64-bit signed integer_ of type `.base.Int` consists of a natural number immediately preceded by either `+` or `-`. For example, `4` is a `Nat`, whereas `+4` is an `Int`. 
The smallest `Int` is `-9223372036854775808` and the largest is `+9223372036854775807`. -* A _64-bit floating point number_ of type `.base.Float` consists of an optional sign (`+`/`-`), followed by two natural numbers separated by `.`. Floating point literals in Unison are [IEEE 754-1985](https://en.wikipedia.org/wiki/IEEE_754-1985) double-precision numbers. For example `1.6777216` is a valid floating point literal. -* A _text literal_ of type `.base.Text` is any sequence of Unicode characters between pairs of `"`. The escape character is `\`, so a `"` can be included in a text literal with the escape sequence `\"`. The full list of escape sequences is given in the [Escape Sequences](#escape-sequences) section below. For example, `"Hello, World!"` is a text literal. A text literal can span multiple lines. Newlines do not terminate text literals, but become part of the literal text. -* There are two _Boolean literals_: `true` and `false`, and they have type `Boolean`. -* A _hash literal_ begins with the character `#`. See the section **Hashes** for details on the lexical form of hash literals. A hash literal is a reference to a term or type. The type or term that it references must have a definition whose hash digest matches the hash in the literal. The type of a hash literal is the same as the type of its referent. `#a0v829` is an example of a hash literal. -* A _literal list_ has the general form `[v1, v2, ... vn]` where `v1` through `vn` are expressions. A literal list may be empty. For example, `[]`, `[x]`, and `[1,2,3]` are list literals. The expressions that form the elements of the list all must have the same type. If that type is `T`, then the type of the list literal is `.base.List T` or `[T]`. -* A _function literal_ or _lambda_ has the form `p1 p2 ... pn -> e`, where `p1` through `pn` are [regular identifiers](#identifiers) and `e` is a Unison expression (the _body_ of the lambda). 
The variables `p1` through `pn` are local variables in `e`, and they are bound to any values passed as arguments to the function when it’s called (see the section [Function Application](#function-application) for details on call semantics). For example `x -> x + 2` is a function literal. -* A _tuple literal_ has the form `(v1,v2, ..., vn)` where `v1` through `vn` are expressions. A value `(a,b)` has type `(A,B)` if `a` has type `A` and `b` has type `B`. The expression `(a)` is the same as the expression `a`. The nullary tuple `()` (pronounced “unit”) is of the trivial type `()`. See [tuple types](#tuple-types) for details on these types and more ways of constructing tuples. - -#### Escape sequences -Text literals can include the following escape sequences: - -* `\0` = null character -* `\a` = alert (bell) -* `\b` = backspace -* `\f` = form feed -* `\n` = new line -* `\r` = carriage return -* `\t` = horizontal tab -* `\v` = vertical tab -* `\\` = literal `\` character -* `\'` = literal `'` character -* `\"` = literal `"` character - -### Comments -A line comment starts with `--` and is followed by any sequence of characters. A line that contains a comment can’t contain anything other than a comment and whitespace. Line comments are currently ignored by Unison. - -A line starting with `---` and containing no other characters is a _fold_. Any text below the fold is ignored by Unison. - -Unison does not currently support block comments. A comment can span multiple lines by adding `--` to the front of each line of the comment. - -### Type annotations -A type annotation has the form `e:T` where `e` is an expression and `T` is a type. This tells Unison that `e` should be of type `T` (or a subtype of type `T`), and Unison will check whether this is true. It's a type error for the actual type of `e` to be anything other than a type that conforms to `T`. 
- -### Parenthesized expressions -Any expression can appear in parentheses, and an expression `(e)` is the same as the expression `e`. Parentheses can be used to delimit where an expression begins and ends. For example `(f : P -> Q) y` is an application of the function `f` of type `P -> Q` to the argument `y`. The parentheses are needed to tell Unison that `y` is an argument to `f`, not a part of the type annotation expression. - -### Function application -A function application `f a1 a2 an` applies the function `f` to the arguments `a1` through `an`. - -The above syntax is valid where `f` is a [regular identifier](#identifiers). If the function name is an operator such as `*`, then the syntax for application is infix : `a1 * a2`. Any operator can be used in prefix position by surrounding it in parentheses: `(*) a1 a2`. Any [regular identifier](#identifiers) can be used infix by surrounding it in backticks: ``a1 `f` a2`` - -All Unison functions are of arity 1. That is, they take exactly one argument. An n-ary function is modeled either as a unary function that returns a further function (a partially applied function) which accepts the rest of the arguments, or as a unary function that accepts a tuple. - -Function application associates to the left, so the expression `f a b` is the same as `(f a) b`. If `f` has type `T1 -> T2 -> Tn` then `f a` is well typed only if `a` has type `T1`. The type of `f a` is then `T2 -> Tn`. The type constructor of function types, `->`, associates to the right. So `T1 -> T2 -> Tn` parenthesizes as `T1 -> (T2 -> TN)`. - -The evaluation semantics of function application is applicative order [Call-by-Value](https://en.wikipedia.org/wiki/Evaluation_strategy#Call_by_value). In the expression `f x y`, `x` and `y` are fully evaluated in left-to-right order, then `f` is fully evaluated, then `x` and `y` are substituted into the body of `f`, and lastly the body is evaluated. 
- -An exception to the evaluation semantics is [Boolean expressions](#boolean-expressions), which have non-strict semantics. - -Unison performs [tail call elimination](https://en.wikipedia.org/wiki/Tail_call) at compile-time. - -### Boolean expressions -A Boolean expression has type `Boolean` which has two values, `true` and `false`. - -#### Conditional expressions -A _conditional expression_ has the form `if c then t else f`, where `c` is an expression of type `Boolean`, and `t` and `f` are expressions of any type, but `t` and `f` must have the same type. - -Evaluation of conditional expressions is non-strict. The evaluation semantics of `if c then t else f` are: -* The condition `c` is always evaluated. -* If `c` evaluates to `true`, the expression `t` is evaluated and `f` remains unevaluated. The whole expression reduces to the value of `t`. -* If `c` evaluates to `false`, the expression `f` is evaluated and `t` remains unevaluated. The whole expression reduces to the value of `f`. - -The keywords `if`, `then`, and `else` each introduce a [Block](#blocks-and-statements) as follows: - -``` haskell -if - -then - -else - -``` - -#### Boolean conjunction and disjunction -A _Boolean conjunction expression_ is a `Boolean` expression of the form `and a b` where `a` and `b` are `Boolean` expressions. Note that `and` is not a function, but built-in syntax. - -The evaluation semantics of `and a b` are equivalent to `if a then b else false`. - -A _Boolean disjunction expression_ is a `Boolean` expression of the form `or a b` where `a` and `b` are `Boolean` expressions. Note that `or` is not a function, but built-in syntax. - -The evaluation semantics of `or a b` are equivalent to `if a then true else b`. - -### Delayed computations -An expression can appear _delayed_ as `'e`, which is the same as `_ -> e`. If `e` has type `T`, then `'e` has type `() -> T`. - -If `c` is a delayed computation, it can be _forced_ with `!c`, which is the same as `c ()`. 
The expression `c` must have a type `() -> t` for some type `t`, in which case `!c` has type `t`. - -Delayed computations are important for writing expressions that require [abilities](#abilities-and-ability-handlers). For example: - -``` haskell -use io - -program : '{IO} () -program = 'let - printLine "What is your name?" - name = !readLine - printLine ("Hello, " ++ name) -``` - -This example defines a small I/O program. The type `{IO} ()` by itself is not allowed as the type of a top-level definition, since the `IO` ability must be provided by a handler, see [abilities and ability handlers](#abilities-and-ability-handlers)). Instead, `program` has the type `'{IO} ()` (note the `'` indicating a delayed computation). Inside a handler for `IO`, this computation can be forced with `!program`. - -Inside the program, `!readLine` has to be forced, as the type of `io.readLine` is `'{IO} Text`, a delayed computation which, when forced, reads a line from standard input. - -### Case expressions and pattern matching - -A _case expression_ has the general form: - -``` Haskell -case e of - pattern_1 -> block_1 - pattern_2 -> block_2 - ... - pattern_n -> block_n -``` - -Where `e` is an expression, called the _scrutinee_ of the case expression, and each _case_ has a [pattern to match against the value of the scrutinee](#pattern-matching) and a [block](#blocks-and-statements) to evaluate in case it matches. - -The evaluation semantics of case expressions are as follows: -1. The scrutinee is evaluated. -2. The first pattern is evaluated and matched against the value of the scrutinee. -3. If the pattern matches, any variables in the pattern are substituted into the block to the right of its `->` (called the _match body_) and the block is evaluated. If the pattern doesn’t match then the next pattern is tried and so on. 
- -It's possible for Unison to actually evaluate cases in a different order, but such evaluation should always have the same observable behavior as trying the patterns in sequence. - -It is an error if none of the patterns match. In this version of Unison, the error occurs at runtime. In a future version, this should be a compile-time error. - -#### Pattern matching -A _pattern_ has one of the following forms: - -##### Blank patterns -A _blank pattern_ has the form `_`. It matches any expression without creating a variable binding. - -For example: - -``` haskell -case 42 of - _ -> "Always matches" -``` - -##### Literal patterns -A _literal pattern_ is a literal `Boolean`, `Nat`, `Int`, `Float`, or `Text`. A literal pattern matches if the scrutinee has that exact value. - -For example: - -``` haskell -case 2 + 2 of - 4 -> "Matches" - _ -> "Doesn't match" -``` - -##### Variable patterns -A _variable pattern_ is a [regular identifier](#identifiers) and matches any expression. The expression that it matches will be bound to that identifier as a variable in the match body. - -For example, this expression evaluates to `3`: - -``` haskell -case 1 + 1 of - x -> x + 1 -``` - -##### As-patterns -An _as-pattern_ has the form `v@p` where `v` is a [regular identifier](#identifiers) and `p` is a pattern. This pattern matches if `p` matches, and the variable `v` will be bound in the body to the value matching `p`. - -For example, this expression evaluates to `3`: - -``` haskell -case 1 + 1 of - x@4 -> x * 2 - y@2 -> y + 1 - _ -> 22 -``` - -##### Constructor patterns -A _constructor pattern_ has the form `C p1 p2 ... pn` where `C` is the name of a data constructor in scope, and `p1` through `pn` are patterns such that `n` is the arity of `C`. Note that `n` may be zero. This pattern matches if the scrutinee reduces to a fully applied invocation of the data constructor `C` and the patterns `p1` through `pn` match the arguments to the constructor. 
- -For example, this expression uses `Some` and `None`, the constructors of the `Optional` type, to return the 3rd element of the list `xs` if present or `0` if there was no 3rd element. - -``` haskell -case List.at 3 xs of - None -> 0 - Some x -> x -``` - -##### List patterns - -A _list pattern_ matches a `List t` for some type `t` and has one of three forms: - -1. `head +: tail` matches a list with at least one element. The pattern `head` is matched against the first element of the list and `tail` is matched against the suffix of the list with the first element removed. -2. `init :+ last` matches a list with at least one element. The pattern `init` is matched against the prefix of the list with the last element removed, and `last` is matched against the last element of the list. -3. A _literal list pattern_ has the form `[p1, p2, ... pn]` where `p1` through `pn` are patterns. The patterns `p1` through `pn` are matched against the elements of the list. This pattern only matches if the length of the scrutinee is the same as the number of elements in the pattern. The pattern `[]` matches the empty list. - -Examples: - -``` haskell -first : [a] -> Optional a -first as = case as of - h +: _ -> Some h - [] -> None - -last : [a] -> Optional a -last as = case as of - _ :+ l -> Some l - [] -> None - -exactlyOne : [a] -> Boolean -exactlyOne a = case a of - [_] -> true - _ -> false -``` - -##### Tuple patterns - A _tuple pattern_ has the form `(p1, p2, ... pn)` where `p1` through `pn` are patterns. The pattern matches if the scrutinee is a tuple of the same arity as the pattern and `p1` through `pn` match against the elements of the tuple. The pattern `(p)` is the same as the pattern `p`, and the pattern `()` matches the literal value `()` of the trivial type `()` (both pronounced “unit”). 
- -For example, this expression evaluates to `4`: - -``` haskell -case (1,2,3) of - (a,_,c) -> a + c -``` - -##### Ability patterns -An _ability pattern_ only appears in an _ability handler_ and has one of two forms (see [Abilities and ability handlers](#abilities-and-ability-handlers) for details): - -1. `{C p1 p2 ... pn -> k}` where `C` is the name of an ability constructor in scope, and `p1` through `pn` are patterns such that `n` is the arity of `C`. Note that `n` may be zero. This pattern matches if the scrutinee reduces to a fully applied invocation of the ability constructor `C` and the patterns `p1` through `pn` match the arguments to the constructor. The scrutinee must be of type `Request A T` for some ability `{A}` and type `T`. The variable `k` will be bound to the continuation of the program. If the scrutinee has type `Request A T` and `C` has type `X ->{A} Y`, then `k` has type `Y -> {A} T`. -2. `{p}` where `p` is a pattern. This matches the case where the computation is _pure_ (the value of type `Request A T` calls none of the constructors of the ability `{A}`). A pattern match on an `Request` is not complete unless this case is handled. - -See the section on [abilities and ability handlers](#abilities-and-ability-handlers) for examples of ability patterns. - -##### Guard patterns -A _guard pattern_ has the form `p | g` where `p` is a pattern and `g` is a Boolean expression that may reference any variables bound in `p`. The pattern matches if `p` matches and `g` evaluates to `true`. - -For example, the following expression evaluates to 6: - -``` haskell -case 1 + 2 of - x | x == 4 -> 0 - x | x + 1 == 4 -> 6 - _ -> 42 -``` - -## Hashes -A _hash_ in Unison is a 512-bit SHA3 digest of a term or a type's internal structure, excluding all names. The textual representation of a hash is its [base32Hex](https://github.com/multiformats/multibase#multibase-table-v100-rc-semver) Unicode encoding. 
- -Unison attributes a hash to every term and type declaration, and the hash may be used to unambiguously refer to that term or type in all contexts. As far as Unison is concerned, the hash of a term or type is its _true name_. - -### Literal hash references - -A term, type, data constructor, or ability constructor may be unambiguously referenced by hash. Literal hash references have the following structure: - -* A _term definition_ has a hash of the form `#x` where `x` is the base32Hex encoding of the hash of the term. For example `#a0v829`. -* A term or type definition that’s part of a _cycle of mutually recursive definitions_ hashes to the form `#x.n` where `x` is the hash of the cycle and `n` is the term or type’s index in its cycle. A cycle has a canonical order determined by sorting all the members of the cycle by their individual hashes (with the cycle removed). -* A data constructor hashes to the form `#x#c` where `x` is the hash of the data type definition and `c` is the index of that data constructor in the type definition. -* A data constructor in a cyclic type definition hashes to the form `#x.n#c` where `#x.n` is the hash of the data type and `c` is the data constructor’s index in the type definition. -* A _built-in reference_ to a Unison built-in term or type `n` has a hash of the form `##n`. `##Nat` is an example of a built-in reference. - -### Short hashes -A hash literal may use a prefix of the base32Hex encoded SHA3 digest instead of the whole thing. For example the programmer may use a short hash like `#r1mtr0` instead of the much longer 104-character representation of the full 512-bit hash. If the short hash is long enough to be unambiguous given the [environment](#name-resolution-and-the-environment), Unison will substitute the full hash at compile time. When rendering code as text, Unison may calculate the minimum disambiguating hash length before rendering a hash. 
- -## Unison types -This section describes informally the structure of types in Unison. - -Formally, Unison’s type system is an implementation of the system described by Joshua Dunfield and Neelakantan R. Krishnaswami in their 2013 paper [Complete and Easy Bidirectional Typechecking for Higher-Rank Polymorphism](https://arxiv.org/abs/1306.6032). - -Unison extends that type system with, [pattern matching](#pattern-matching), [scoped type variables](#scoped-type-variables), _ability types_ (also known as _algebraic effects_). See the section titled [Abilities and Ability Handlers](#abilities-and-ability-handlers) for details on ability types. - -### Types in Unison - -Unison attributes a type to every valid expression. For example: - -* `4 < 5` has type `Boolean` -* `42 + 3` has type `Nat`, -* `"hello"` has type `Text` -* the list `[1,2,3]` has type `[Nat]` -* the function `(x -> x)` has type `forall a. a -> a` - -The meanings of these types and more are explained in the sections below. - -A full treatise on types is beyond the scope of this document. In short, types help enforce that Unison programs make logical sense. Every expression must be well typed, or Unison will give a compile-time type error. For example: - -* `[1,2,3]` is well typed, since lists require all elements to be of the same type. -* `42 + "hello"` is not well typed, since the type of `+` disallows adding numbers and text together. -* `printLine "Hello, World!"` is well typed in some contexts and not others. It's a type error for instance to use I/O functions where an `IO` [ability](#abilities-and-ability-handlers) is not provided. - -Types are of the following general forms. - -### Type variables -Type variables are [regular identifiers](#identifiers) beginning with a lowercase letter. For example `a`, `x0`, and `foo` are valid type variables. - -### Polymorphic types -A _universally quantified_ or _polymorphic_ type has the form `forall v1 v2 vn. t`, where `t` is a type. 
The type `t` may involve the variables `v1` through `vn`. - -The symbol `∀` is an alias for `forall`. - -A type like `forall x. F x` can be written simply as `F x` (the `forall x` is implied) as long as `x` is free in `F x` (it is not bound by an outer scope; see [Scoped Type Variables](#scoped-type-variables) below). - -A polymorphic type may be _instantiated_ at any given type. For example, the empty list `[]` has type `forall x. [x]`. So it's a type-polymorphic value. Its type can be instantiated at `Int`, for example, which binds `x` to `Int` resulting in `[Int]` which is also a valid type for the empty list. In fact, we can say that the empty list `[]` is a value of type `[x]` _for all_ choices of element type `e`, hence the type `forall x. [x]`. - -Likewise the identity function `(x -> x)`, which simply returns its argument, has a polymorphic type `forall t. t -> t`. It has type `t -> t` for all choices of `t`. - -### Scoped type variables -Type variables introduced by a type signature for a term remain in scope throughout the definition of that term. - -For example in the following snippet, the type annotation `temp:x` is telling Unison that `temp` has the type `x` which is bound in the type signature, so `temp` and `a` have the same type. - -``` haskell -ex1 : x -> y -> x -ex1 a b = - -- refers to the type x in the outer scope - temp : x - temp = a - a -``` - -To explicitly shadow a type variable in scope, the variable can be reintroduced in the inner scope by a `forall` binder, as follows: - -``` haskell -ex2 : x -> y -> x -ex2 a b = - -- doesn’t refer to x in outer scope - id : ∀ x . x -> x - id v = v - temp = id 42 - id a -``` - -Note that here the type variable `x` in the type of `id` gets instantiated to two different types. First `id 42` instantiates it to `Nat`, then `id a`, instantiates it to the outer scope's type `x`. - -### Type constructors -Just as values are built using data constructors, types are built from _type constructors_. 
Nullary type constructors like `Nat`, `Int`, `Float` are already types, but other type constructors like `List` and `->` (see [built-in type constructors](#built-in-type-constructors)) take type parameters in order to yield types. `List` is a unary type constructor, so it takes one type (the type of the list elements), and `->` is a binary type constructor. `List Nat` is a type and `Nat -> Int` is a type. - -#### Kinds of Types -Types are to values as _kinds_ are to type constructors. Unison attributes a kind to every type constructor, which is determined by its number of type parameters and the kinds of those type parameters. - -A type must be well kinded, just like an expression must be well typed, and for the same reason. However, there is currently no syntax for kinds and they do not appear in Unison programs (this will certainly change in a future version of Unison). - -Unison’s kinds have the following forms: - -* A nullary type constructor or ordinary type has kind `Type`. -* A type constructor has kind `k1 -> k2` where `k1` and `k2` are kinds. - -For example `List`, a unary type constructor, has kind `Type -> Type` as it takes a type and yields a type. A binary type constructor like `->` has kind `Type -> Type -> Type`, as it takes two types (it actually takes a type and yields a partially applied unary type constructor that takes the other type). A type constructor of kind `(Type -> Type) -> Type` is a _higher-order_ type constructor (it takes a unary type constructor and yields a type). - -### Type application -A type constructor is applied to a type or another type constructor, depending on its kind, similarly to how functions are applied to arguments at the term level. `C T` applies the type constructor `C` to the type `T`. Type application associates to the left, so the type `A B C` is the same as the type `(A B) C`. - -### Function types -The type `X -> Y` is a type for functions that take arguments of type `X` and yield results of type `Y`. 
Application of the binary type constructor `->` associates to the right, so the type `X -> Y -> Z` is the same as the type `X -> (Y -> Z)`. - -### Tuple types -The type `(A,B)` is a type for binary tuples (pairs) of values, one of type `A` and another of type `B`. The type `(A,B,C)` is a triple, and so on. - -The type `(A)` is the same as the type `A` and is not considered a tuple. - -The nullary tuple type `()` is the type of the unique value also written `()` and is pronouced “unit”. - -In the standard Unison syntax, tuples of arity 2 and higher are actually of a type `Tuple a b` for some types `a` and `b`. For example, `(X,Y)` is syntactic shorthand for the type `Tuple X (Tuple Y ())`. - -Tuples are either constructed with the syntactic shorthand `(a,b)` (see [tuple literals](#tuple-literals)) or with the built-in `Tuple.Cons` data constructor: `Tuple.Cons a (Tuple.Cons b ())`. - -### Built-in types -Unison provides the following built-in types: - -* `.base.Nat` is the type of 64-bit natural numbers, also known as unsigned integers. They range from 0 to 18,446,744,073,709,551,615. -* `.base.Int` is the type of 64-bit signed integers. They range from -9,223,372,036,854,775,808 to +9,223,372,036,854,775,807. -* `.base.Float` is the type of [IEEE 754-1985](https://en.wikipedia.org/wiki/IEEE_754-1985) double-precision floating point numbers. -* `.base.Boolean` is the type of Boolean expressions whose value is `true` or `false`. -* `.base.Bytes` is the type of arbitrary-length 8-bit byte sequences. -* `.base.Text` is the type of arbitrary-length strings of Unicode text. -* The trivial type `()` (pronounced “unit”) is the type of the nullary tuple. There is a single data constructor of type `()` and it’s also written `()`. - -See [literals](#literals) for more on how values of some of these types are constructed. - -### Built-in type constructors -Unison has the following built-in type constructors. - -* `(->)` is the constructor of function types. 
A type `X -> Y` is the type of functions from `X` to `Y`. -* `base.Tuple` is the constructor of tuple types. See [tuple types](#tuple-types) for details on tuples. -* `.base.List` is the constructor of list types. A type `List T` is the type of arbitrary-length sequences of values of type `T`. The type `[T]` is an alias for `List T`. -* `.base.Request` is the constructor of requests for abilities. A type `Request A T` is the type of values received by ability handlers for the ability `A` where current continuation requires a value of type `T`. - -## Abilities and ability handlers -Unison provides a system of _abilities_ and _ability handlers_ as a means of modeling computational effects in a purely functional language. - -Unison is a purely functional language, so no expressions are allowed to have _side effects_, meaning they are evaluated to a result and nothing else. But we still need to be able to write programs that have _effects_, for example writing to disk, communicating over a network, generating randomness, looking at the clock, and so on. Ability types are Unison's way of allowing an expression to request effects it would like to have. Handlers then interpret those requests, often by translating them in turn to a computation that uses the built-in `IO` ability. Unison has a built-in handler for the `IO` ability which cannot be invoked in Unison programs (it can only be invoked by the Unison runtime). This allows Unison to provide I/O effects in a purely functional setting. See [input and output](#input-and-output) for details on the `IO` ability. - -Unison's system of abilities is based on the Frank language by Sam Lindley, Conor McBride, and Craig McLaughlin (https://arxiv.org/pdf/1611.09259.pdf). Unison diverges slightly from the scheme detailed in this paper. In particular, Unison's ability polymorphism is provided by ordinary polymorphic types, and a Unison type with an empty ability set explicitly disallows any abilities. 
In Frank, the empty ability set implies an ability-polymorphic type. - -### Abilities in function types - -The general form for a function type in Unison is `I ->{A} O`, where `I` is the input type of the function, `O` is the output type, and `A` is the set of _abilities_ that the function requires. - -A function type in Unison like `A -> B` is really syntactic sugar for a type `A ->{e} B` where `e` is some set of abilities, possibly empty. A function that definitely requires no abilities has a type like `A ->{} B` (it has an empty set of abilities). - -If a function `f` calls in its implementation another function requiring ability set `{A}`, then `f` will require `A` in its ability set as well. If `f` also calls a function requiring abilities `{B}`, then `f` will require abilities `{A,B}`. - -Stated the other way around, `f` can only be called in contexts where the abilities `{A,B}` are available. Abilities are provided by `handle` blocks. See the [Ability Handlers](f) section below. The only exception to abilities being provided by handlers is the built-in provider of the `IO` ability in the Unison runtime. - -### User-defined abilities - -A user-defined ability is declared with an `ability` declaration such as: - -``` haskell -ability Store v where - get : v - put : v -> () -``` - -This results in a new ability type constructor `Store` which takes a type argument `v`. It also creates two value-level constructors named `get` and `put`. The idea is that `get` provides the ability to "get" a value of type `v` from somewhere, and `put` allows "putting" a value of type `v` somewhere. Where exactly these values of type `v` will be kept depends on the handler. - -The `Store` constructors `get` and `put` have the following types: - -* `get : forall v. {Store v} v` -* `put : forall v. 
v ->{Store v} ()` - -The type `{Store v}` means that the computation which results in that type requires a `Store v` ability and cannot be executed except in the context of an _ability handler_ that provides the ability. - -### Ability handlers - -A constructor `{A} T` for some ability `A` and some type `T` (or a function which uses such a constructor), can only be used in a scope where the ability `A` is provided. Abilities are provided by `handle` expressions: - -``` haskell -handle h in x -``` - -This expression gives `x` access to abilities handled by the function `h` which must have the type `Request A T -> T` if `x` has type `{A} T`. The type constructor `Request` is a special builtin provided by Unison which will pass arguments of type `Request A T` to a handler for the ability `A`. - -The examples in the next section should help clarify how ability handlers work. - -#### Pattern matching on ability constructors - -Each constructor of an ability corresponds with a _pattern_ that can be used for pattern matching in ability handlers. The general form of such a pattern is: - -``` haskell -{A.c p_1 p_2 p_n -> k} -``` - -Where `A` is the name of the ability, `c` is the name of the constructor, `p_1` through `p_n` are patterns matching the arguments to the constructor, and `k` is a _continuation_ for the program. If the value matching the pattern has type `Request A T` and the constructor of that value had type `X ->{A} Y`, then `k` has type `Y -> {A} T`. - -The continuation will always be a function accepting the return value of the ability constructor, and the body of this function is the remainder of the `handle .. in` block immediately following the call to the constructor. See below for an example. - -A handler can choose to call the continuation or not, or to call it multiple times. 
For example, a handler can ignore the continuation in order to handle an ability that aborts the execution of the program: - -``` haskell -ability Abort where - aborting : () - --- Returns `a` immediately if the program `e` calls `abort` -abortHandler : a -> Request Abort a -> a -abortHandler a e = case e of - { Abort.aborting -> _ } -> a - { x } -> x - -p : Nat -p = handle abortHandler 0 in - x = 4 - Abort.aborting - x + 2 - -``` - -The program `p` evaluates to `0`. If we remove the `Abort.aborting` call, it evaluates to `6`. - -Note that although the ability constructor is given the signature `aborting : ()`, its actual type is `{Abort} ()`. - -The pattern `{ Abort.aborting -> _ }` matches when the `Abort.aborting` call in `p` occurs. This pattern ignores its continuation since it will not invoke it (which is how it aborts the program). The continuation at this point is the expression `_ -> x + 2`. - -The pattern `{ x }` matches the case where the computation is pure (makes no further requests for the `Abort` ability and the continuation is empty). A pattern match on a `Request` is not complete unless this case is handled. - -When a handler calls the continuation, it needs describe how the ability is provided in the continuation of the program, usually with a recursive call, like this: - -``` haskell -use .base Request - -ability Store v where - get : v - put : v -> () - -storeHandler : v -> Request (Store v) a -> a -storeHandler storedValue s = case s of - {Store.get -> k} -> - handle storeHandler storedValue in k storedValue - {Store.put v -> k} -> - handle storeHandler v in k () - {a} -> a -``` - -Note that the `storeHandler` has a `handle` clause that uses `storeHandler` itself to handle the `Requests`s made by the continuation. So it’s a recursive definition. 
The initial "stored value" of type `v` is given to the handler in its argument named `storedValue`, and the changing value is captured by the fact that different values are passed to each recursive invocation of the handler. - -In the pattern for `Store.get`, the continuation `k` expects a `v`, since the return type of `get` is `v`. In the pattern for `Store.put`, the continuation `k` expects `()`, which is the return type of `put`. - -It's worth noting that this is a mutual recursion between `storeHandler` and the various continuations (all named `k`). This is no cause for concern, as they call each other in tail position and the Unison compiler performs [tail call elimination](#function-application). - -An example use of the above handler: - -``` haskell -modifyStore : (v -> v) ->{Store v} () -modifyStore f = - v = Store.get - Store.put (f v) -``` - -Here, when the handler receives `Store.get`, the continuation is `v -> Store.put (f v)`. When the handler receives `Store.put`, the continuation is `_ -> ()`. - -## Name resolution and the environment -During typechecking, Unison substitutes free variables in an expression by looking them up in an environment populated from a _codebase_ of available definitions. A Unison codebase is a database of term and type definitions, indexed by [hashes](#hashes) and names. - -A name in the environment can refer to either terms or types, or both (a type name can never be confused with a term name). If a name is unambiguous (refers to only one term and/or type in the environment), Unison substitutes that name in the expression with a reference to the definition. - -[Hash literals](#hashes) in the program are substituted with references to the definitions in the environment whose hashes they match. 
- -If a free term variable in the program cannot be found in the environment and is not the name of another term in scope in the program itself, or if an free variable matches more than one name (it’s ambiguous), Unison tries _type-directed name resolution_. - -### Type-directed name resolution -During typechecking, if Unison encounters a free term variable that is not a term name in the environment, Unison attempts _type-directed name resolution_, which: - -1. Finds term definitions in the environment whose _unqualified_ name is the same as the free variable. -2. If exactly one of those terms has a type that conforms to the expected type of the variable (the type system has always inferred this type already at this point), perform that substitution and resume typechecking. - -If name resolution is unable to find the definition of a name, or is unable to disambiguate an ambiguous name, Unison reports an error. - - - - diff --git a/docs/repoformats/v2.markdown b/docs/repoformats/v2.markdown index dfaa71c445..b29541b2e2 100644 --- a/docs/repoformats/v2.markdown +++ b/docs/repoformats/v2.markdown @@ -76,7 +76,7 @@ where the former references objects that must exist in the database and the latt ### Serialization formats for codebase objects -Each of the object blob formats begins with a varint format id. For a given object type, different format ids indicate different ways of representing the same object -- not a way of representing a slightly different type of object. This enables us to make different storage-lager efficiency tradeoffs based on an object's contents or a user's access patterns. +Each of the object blob formats begins with a varint format id. For a given object type, different format ids indicate different ways of representing the same object -- not a way of representing a slightly different type of object. This enables us to make different storage-layer efficiency tradeoffs based on an object's contents or a user's access patterns. 
These different formats are enumerated in `U.Codebase.Sqlite.{Term,Decl,Branch,Patch}.Format.hs` respectively, and collected below: @@ -247,4 +247,4 @@ data BranchLocalIds = LocalIds `ObjectId`s are used to reference types and terms in the namespace. Note that they are not `HashId`s, because the namespace shouldn't be able to refer to definitions that aren't in the database. `PatchObjectIds` reference the object ids of patch objects, as you might imagine. -`branchChildLookup` contains two fields: a `CausalHashId` which points to the history of the child, and the `BranchObjectId` which proves that the relevant namespace slice is also present. In general, a codebase may not have the namespace slice corresponding to every causal id, but it ought to have them for the children of another namespace slice it does have (thus, the `BranchObjectId` is used). The causal relationship stored relationally rather than as blobs, and the `CausalHashId` is a useful index into the `causal_parents` table. \ No newline at end of file +`branchChildLookup` contains two fields: a `CausalHashId` which points to the history of the child, and the `BranchObjectId` which proves that the relevant namespace slice is also present. In general, a codebase may not have the namespace slice corresponding to every causal id, but it ought to have them for the children of another namespace slice it does have (thus, the `BranchObjectId` is used). The causal relationship is stored relationally rather than as blobs, and the `CausalHashId` is a useful index into the `causal_parents` table. 
diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index 4fa7d38941..ab978bbfc8 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -37,29 +37,22 @@ library: - base16 >= 0.2.1.0 - base64-bytestring - basement - - bifunctors - bytes - bytestring - cereal - containers >= 0.6.3 - - comonad - - concurrent-supply - configurator - cryptonite - data-default - directory - either - fuzzyfind - - guid - data-memocombinators - - edit-distance - errors - exceptions - extra - filepath - - filepattern - fingertree - - free - fsnotify - generic-monoid - hashable @@ -67,23 +60,20 @@ library: - haskeline - http-types - http-media - - io-streams - lens - ListLike - megaparsec >= 5.0.0 && < 7.0.0 - memory - mmorph - - monad-loops - monad-validate - mtl - - murmur-hash - mutable-containers - natural-transformation - network - network-simple - nonempty-containers + - optparse-applicative >= 0.16.1.0 - openapi3 - - optparse-applicative - pem - process - primitive @@ -97,9 +87,7 @@ library: - servant-docs - servant-openapi3 - servant-server - - servant-auth-server - shellmet - - split - stm - strings - sqlite-simple @@ -111,11 +99,8 @@ library: - tls - transformers - unliftio - - unliftio-core - utf8-string - - util - unicode-show - - validation - vector - wai - warp @@ -129,6 +114,8 @@ library: - unison-core - unison-core1 - unison-util + - open-browser + - uri-encode executables: unison: @@ -146,6 +133,7 @@ executables: - lens - megaparsec - mtl + - optparse-applicative >= 0.16.1.0 - safe - shellmet - template-haskell diff --git a/parser-typechecker/src/Unison/Builtin.hs b/parser-typechecker/src/Unison/Builtin.hs index 1a480ac744..efdd0ef2f3 100644 --- a/parser-typechecker/src/Unison/Builtin.hs +++ b/parser-typechecker/src/Unison/Builtin.hs @@ -16,6 +16,7 @@ module Unison.Builtin ,intrinsicTermReferences ,intrinsicTypeReferences ,isBuiltinType + ,typeOf ,typeLookup ,termRefTypes ) where @@ -247,6 +248,9 @@ termRefTypes = 
foldl' go mempty builtinsSrc where D r t -> Map.insert (R.Builtin r) t m _ -> m +typeOf :: Var v => a -> (Type v -> a) -> R.Reference -> a +typeOf a f r = maybe a f (Map.lookup r termRefTypes) + builtinsSrc :: Var v => [BuiltinDSL v] builtinsSrc = [ B "Int.+" $ int --> int --> int @@ -278,6 +282,8 @@ builtinsSrc = , B "Int.toFloat" $ int --> float , B "Int.trailingZeros" $ int --> nat , B "Int.popCount" $ int --> nat + , B "Int.fromRepresentation" $ nat --> int + , B "Int.toRepresentation" $ int --> nat , B "Nat.*" $ nat --> nat --> nat , B "Nat.+" $ nat --> nat --> nat @@ -308,6 +314,20 @@ builtinsSrc = , B "Nat.trailingZeros" $ nat --> nat , B "Nat.popCount" $ nat --> nat + , B "Bytes.decodeNat64be" $ bytes --> optionalt (tuple [nat, bytes]) + , B "Bytes.decodeNat64le" $ bytes --> optionalt (tuple [nat, bytes]) + , B "Bytes.decodeNat32be" $ bytes --> optionalt (tuple [nat, bytes]) + , B "Bytes.decodeNat32le" $ bytes --> optionalt (tuple [nat, bytes]) + , B "Bytes.decodeNat16be" $ bytes --> optionalt (tuple [nat, bytes]) + , B "Bytes.decodeNat16le" $ bytes --> optionalt (tuple [nat, bytes]) + + , B "Bytes.encodeNat64be" $ nat --> bytes + , B "Bytes.encodeNat64le" $ nat --> bytes + , B "Bytes.encodeNat32be" $ nat --> bytes + , B "Bytes.encodeNat32le" $ nat --> bytes + , B "Bytes.encodeNat16be" $ nat --> bytes + , B "Bytes.encodeNat16le" $ nat --> bytes + , B "Float.+" $ float --> float --> float , B "Float.-" $ float --> float --> float , B "Float.*" $ float --> float --> float @@ -317,6 +337,8 @@ builtinsSrc = , B "Float.<=" $ float --> float --> boolean , B "Float.>=" $ float --> float --> boolean , B "Float.==" $ float --> float --> boolean + , B "Float.fromRepresentation" $ nat --> float + , B "Float.toRepresentation" $ float --> nat -- Trigonmetric Functions , B "Float.acos" $ float --> float @@ -517,10 +539,12 @@ ioBuiltins = , ("IO.isSeekable.impl.v3", handle --> iof boolean) , ("IO.seekHandle.impl.v3", handle --> smode --> int --> iof unit) , 
("IO.handlePosition.impl.v3", handle --> iof nat) + , ("IO.getEnv.impl.v1", text --> iof text) , ("IO.getBuffering.impl.v3", handle --> iof bmode) , ("IO.setBuffering.impl.v3", handle --> bmode --> iof unit) , ("IO.getBytes.impl.v3", handle --> nat --> iof bytes) , ("IO.putBytes.impl.v3", handle --> bytes --> iof unit) + , ("IO.getLine.impl.v1", handle --> iof text) , ("IO.systemTime.impl.v3", unit --> iof nat) , ("IO.getTempDirectory.impl.v3", unit --> iof text) , ("IO.createTempDirectory.impl.v3", text --> iof text) @@ -531,6 +555,7 @@ ioBuiltins = , ("IO.createDirectory.impl.v3", text --> iof unit) , ("IO.removeDirectory.impl.v3", text --> iof unit) , ("IO.renameDirectory.impl.v3", text --> text --> iof unit) + , ("IO.directoryContents.impl.v3", text --> iof (list text)) , ("IO.removeFile.impl.v3", text --> iof unit) , ("IO.renameFile.impl.v3", text --> text --> iof unit) , ("IO.getFileTimestamp.impl.v3", text --> iof nat) diff --git a/parser-typechecker/src/Unison/Builtin/Decls.hs b/parser-typechecker/src/Unison/Builtin/Decls.hs index 9c3488e51a..e30989ed4b 100644 --- a/parser-typechecker/src/Unison/Builtin/Decls.hs +++ b/parser-typechecker/src/Unison/Builtin/Decls.hs @@ -4,6 +4,7 @@ module Unison.Builtin.Decls where +import Control.Lens (_3,over) import Data.List (elemIndex, find) import qualified Data.Map as Map import Data.Text (Text, unpack) @@ -30,10 +31,17 @@ import qualified Unison.Var as Var lookupDeclRef :: Text -> Reference lookupDeclRef str - | [(_, d, _)] <- filter (\(v, _, _) -> v == Var.named str) decls = Reference.DerivedId d + | [(_, d)] <- filter (\(v, _) -> v == Var.named str) decls = Reference.DerivedId d | otherwise = error $ "lookupDeclRef: missing \"" ++ unpack str ++ "\"" where - decls = builtinDataDecls @Symbol + decls = [ (a,b) | (a,b,_) <- builtinDataDecls @Symbol ] + +lookupEffectRef :: Text -> Reference +lookupEffectRef str + | [(_, d)] <- filter (\(v, _) -> v == Var.named str) decls = Reference.DerivedId d + | otherwise = error $ 
"lookupEffectRef: missing \"" ++ unpack str ++ "\"" + where + decls = [ (a,b) | (a,b,_) <- builtinEffectDecls @Symbol ] unitRef, pairRef, optionalRef, eitherRef :: Reference unitRef = lookupDeclRef "Unit" @@ -43,7 +51,7 @@ eitherRef = lookupDeclRef "Either" testResultRef, linkRef, docRef, ioErrorRef, stdHandleRef :: Reference failureRef, ioFailureRef, tlsFailureRef :: Reference -tlsSignedCertRef, tlsPrivateKeyRef :: Reference +exceptionRef, tlsSignedCertRef, tlsPrivateKeyRef :: Reference isPropagatedRef, isTestRef :: Reference isPropagatedRef = lookupDeclRef "IsPropagated" @@ -54,6 +62,7 @@ docRef = lookupDeclRef "Doc" ioErrorRef = lookupDeclRef "io2.IOError" stdHandleRef = lookupDeclRef "io2.StdHandle" failureRef = lookupDeclRef "io2.Failure" +exceptionRef = lookupEffectRef "Exception" ioFailureRef = lookupDeclRef "io2.IOFailure" tlsFailureRef = lookupDeclRef "io2.TlsFailure" tlsSignedCertRef = lookupDeclRef "io2.Tls.SignedCert" @@ -75,8 +84,10 @@ constructorId ref name = do (_,_,dd) <- find (\(_,r,_) -> Reference.DerivedId r == ref) (builtinDataDecls @Symbol) elemIndex name $ DD.constructorNames dd -okConstructorId, failConstructorId, docBlobId, docLinkId, docSignatureId, docSourceId, docEvaluateId, docJoinId, linkTermId, linkTypeId, eitherRightId, eitherLeftId :: ConstructorId +noneId, someId, okConstructorId, failConstructorId, docBlobId, docLinkId, docSignatureId, docSourceId, docEvaluateId, docJoinId, linkTermId, linkTypeId, eitherRightId, eitherLeftId :: ConstructorId isPropagatedConstructorId, isTestConstructorId, bufferModeNoBufferingId, bufferModeLineBufferingId, bufferModeBlockBufferingId, bufferModeSizedBlockBufferingId :: ConstructorId +Just noneId = constructorId optionalRef "Optional.None" +Just someId = constructorId optionalRef "Optional.Some" Just isPropagatedConstructorId = constructorId isPropagatedRef "IsPropagated.IsPropagated" Just isTestConstructorId = constructorId isTestRef "IsTest.IsTest" Just okConstructorId = constructorId testResultRef 
"Test.Result.Ok" @@ -293,13 +304,29 @@ builtinDataDecls = rs1 ++ rs , ((), v "Link.Type", Type.typeLink () `arr` var "Link") ] -builtinEffectDecls :: [(v, Reference.Id, DD.EffectDeclaration v ())] -builtinEffectDecls = [] +builtinEffectDecls :: Var v => [(v, Reference.Id, DD.EffectDeclaration v ())] +builtinEffectDecls = + case hashDecls $ Map.fromList [ (v "Exception", exception) ] of + Right a -> over _3 DD.EffectDeclaration <$> a + Left e -> error $ "builtinEffectDecls: " <> show e + where + v = Var.named + var name = Type.var () (v name) + arr = Type.arrow' + self t = Type.cleanupAbilityLists $ Type.effect () [var "Exception"] t + exception = DataDeclaration + Structural + () + [] + [ ((), v "Exception.raise", Type.forall () (v "x") (failureType () `arr` self (var "x"))) + ] pattern UnitRef <- (unUnitRef -> True) pattern PairRef <- (unPairRef -> True) pattern EitherRef <- ((==) eitherRef -> True) pattern OptionalRef <- (unOptionalRef -> True) +pattern OptionalNone' <- Term.Constructor' OptionalRef ((==) noneId -> True) +pattern OptionalSome' d <- Term.App' (Term.Constructor' OptionalRef ((==) someId -> True)) d pattern TupleType' ts <- (unTupleType -> Just ts) pattern TupleTerm' xs <- (unTupleTerm -> Just xs) pattern TuplePattern ps <- (unTuplePattern -> Just ps) @@ -343,7 +370,7 @@ pattern LinkType ty <- Term.App' (Term.Constructor' LinkRef LinkTypeId) ty unitType, pairType, optionalType, testResultType, eitherType, ioErrorType, fileModeType, filePathType, bufferModeType, seekModeType, - stdHandleType, failureType + stdHandleType, failureType, exceptionType :: Ord v => a -> Type v a unitType a = Type.ref a unitRef pairType a = Type.ref a pairRef @@ -357,6 +384,7 @@ bufferModeType a = Type.ref a bufferModeRef seekModeType a = Type.ref a seekModeRef stdHandleType a = Type.ref a stdHandleRef failureType a = Type.ref a failureRef +exceptionType a = Type.ref a exceptionRef tlsSignedCertType :: Var v => a -> Type v a tlsSignedCertType a = Type.ref a tlsSignedCertRef 
diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index 9e858a0ea9..8a7453e587 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -7,6 +7,9 @@ import Control.Lens ((%=), _1, _2) import Control.Monad.Except (ExceptT (ExceptT), runExceptT) import Control.Monad.State (State, evalState, get) import Data.Bifunctor (bimap) +import Control.Error.Util (hush) +import Data.Maybe as Maybe +import Data.List as List import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT @@ -16,7 +19,7 @@ import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Branch as Branch import qualified Unison.Codebase.CodeLookup as CL import Unison.Codebase.Editor.Git (withStatus) -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace, RemoteRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo) import Unison.Codebase.GitError (GitError) import Unison.Codebase.Patch (Patch) import qualified Unison.Codebase.Reflog as Reflog @@ -71,9 +74,9 @@ data Codebase m v a = , putTypeDeclaration :: Reference.Id -> Decl v a -> m () , getRootBranch :: m (Either GetRootBranchError (Branch m)) - , putRootBranch :: Branch m -> m () - , rootBranchUpdates :: m (IO (), IO (Set Branch.Hash)) - , getBranchForHash :: Branch.Hash -> m (Maybe (Branch m)) + , putRootBranch :: Branch m -> m () + , rootBranchUpdates :: m (IO (), IO (Set Branch.Hash)) + , getBranchForHashImpl :: Branch.Hash -> m (Maybe (Branch m)) , putBranch :: Branch m -> m () , branchExists :: Branch.Hash -> m Bool @@ -86,8 +89,8 @@ data Codebase m v a = , syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m () -- This copies all the dependencies of `b` from this Codebase , syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m () - , viewRemoteBranch' :: RemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath)) - , pushGitRootBranch :: Branch 
m -> RemoteRepo -> SyncMode -> m (Either GitError ()) + , viewRemoteBranch' :: ReadRemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath)) + , pushGitRootBranch :: Branch m -> WriteRepo -> SyncMode -> m (Either GitError ()) -- Watch expressions are part of the codebase, the `Reference.Id` is -- the hash of the source of the watch expression, and the `Term v a` @@ -126,6 +129,25 @@ data Codebase m v a = , beforeImpl :: Maybe (Branch.Hash -> Branch.Hash -> m Bool) } +-- Attempt to find the Branch in the current codebase cache and root up to 3 levels deep +-- If not found, attempt to find it in the Codebase (sqlite) +getBranchForHash :: Monad m => Codebase m v a -> Branch.Hash -> m (Maybe (Branch m)) +getBranchForHash codebase h = + let + nestedChildrenForDepth depth b = + if depth == 0 then [] + else + b : (Map.elems (Branch._children (Branch.head b)) >>= nestedChildrenForDepth (depth - 1)) + + headHashEq = (h ==) . Branch.headHash + + find rb = List.find headHashEq (nestedChildrenForDepth 3 rb) + in do + rootBranch <- hush <$> getRootBranch codebase + case rootBranch of + Just rb -> maybe (getBranchForHashImpl codebase h) (pure . Just) (find rb) + Nothing -> getBranchForHashImpl codebase h + lca :: Monad m => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m)) lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = case lcaImpl code of Nothing -> Branch.lca b1 b2 @@ -203,6 +225,12 @@ getTypeOfConstructor codebase (Reference.DerivedId r) cid = do getTypeOfConstructor _ r cid = error $ "Don't know how to getTypeOfConstructor " ++ show r ++ " " ++ show cid +lookupWatchCache :: (Monad m) => Codebase m v a -> Reference -> m (Maybe (Term v a)) +lookupWatchCache codebase (Reference.DerivedId h) = do + m1 <- getWatch codebase UF.RegularWatch h + maybe (getWatch codebase UF.TestWatch h) (pure . 
Just) m1 +lookupWatchCache _ Reference.Builtin{} = pure Nothing + typeLookupForDependencies :: (Monad m, Var v, BuiltinAnnotation a) => Codebase m v a -> Set Reference -> m (TL.TypeLookup v a) @@ -323,6 +351,11 @@ getTypeOfTerm c r = case r of pure $ fmap (const builtinAnnotation) <$> Map.lookup r Builtin.termRefTypes +getTypeOfReferent :: (BuiltinAnnotation a, Var v, Monad m) + => Codebase m v a -> Referent.Referent -> m (Maybe (Type v a)) +getTypeOfReferent c (Referent.Ref r) = getTypeOfTerm c r +getTypeOfReferent c (Referent.Con r cid _) = + getTypeOfConstructor c r cid -- The dependents of a builtin type is the set of builtin terms which -- mention that type. @@ -373,7 +406,7 @@ importRemoteBranch :: forall m v a. MonadIO m => Codebase m v a -> - RemoteNamespace -> + ReadRemoteNamespace -> SyncMode -> m (Either GitError (Branch m)) importRemoteBranch codebase ns mode = runExceptT do @@ -392,7 +425,7 @@ importRemoteBranch codebase ns mode = runExceptT do viewRemoteBranch :: MonadIO m => Codebase m v a -> - RemoteNamespace -> + ReadRemoteNamespace -> m (Either GitError (m (), Branch m)) viewRemoteBranch codebase ns = runExceptT do (cleanup, branch, _) <- ExceptT $ viewRemoteBranch' codebase ns diff --git a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs b/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs index 1de51313a8..8ef7a60c87 100644 --- a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs +++ b/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs @@ -295,7 +295,7 @@ checkTermComponent t h n = do typeDeps = Type.dependencies typ let checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.Derived h' _ n' -> getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError TermMissingDependencies @@ -303,10 +303,10 @@ checkTermComponent t h n = do checkTerm = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) + 
Reference.Derived h' _ _ | h == h' -> pure () -- ignore self-references - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.Derived h' _ n' -> getTermStatus h' >>= \case Just TermOk -> pure () Just _ -> Except.throwError TermMissingDependencies @@ -330,7 +330,7 @@ checkWatchComponent t k r@(Reference.Id h _ _) = do let deps = Term.labeledDependencies watchResult let checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.Derived h' _ n' -> getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError WatchMissingDependencies @@ -338,10 +338,10 @@ checkWatchComponent t k r@(Reference.Id h _ _) = do checkTerm = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) + Reference.Derived h' _ _ | h == h' -> pure () -- ignore self-references - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.Derived h' _ n' -> getTermStatus h' >>= \case Just TermOk -> pure () Just _ -> Except.throwError WatchMissingDependencies @@ -366,8 +366,8 @@ checkDeclComponent t h n = do let deps = DD.declDependencies decl checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) | h == h' -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.Derived h' _ _ | h == h' -> pure () + Reference.Derived h' _ n' -> getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError DeclMissingDependencies @@ -450,14 +450,14 @@ repairPatch (Patch termEdits typeEdits) = do -- reference to it. See Sync22.syncPatchLocalIds helpTermEdit = \case Reference.Builtin _ -> pure True - Reference.DerivedId (Reference.Id h _ n) -> + Reference.Derived h _ n -> getTermStatus h >>= \case Nothing -> Validate.refute . 
Set.singleton $ T h n Just TermOk -> pure True Just _ -> pure False helpTypeEdit = \case Reference.Builtin _ -> pure True - Reference.DerivedId (Reference.Id h _ n) -> + Reference.Derived h _ n -> getDeclStatus h >>= \case Nothing -> Validate.refute . Set.singleton $ D h n Just DeclOk -> pure True @@ -506,7 +506,7 @@ validateTermReferent = \case validateTermReference :: (S m n, V m n) => Reference.Reference -> n Bool validateTermReference = \case Reference.Builtin {} -> pure True - Reference.DerivedId (Reference.Id h _i n) -> + Reference.Derived h _i n -> getTermStatus h >>= \case Nothing -> Validate.refute . Set.singleton $ T h n Just TermOk -> pure True @@ -515,7 +515,7 @@ validateTermReference = \case validateTypeReference :: (S m n, V m n) => Reference.Reference -> n Bool validateTypeReference = \case Reference.Builtin {} -> pure True - Reference.DerivedId (Reference.Id h _i n) -> + Reference.Derived h _i n -> getDeclStatus h >>= \case Nothing -> Validate.refute . Set.singleton $ D h n Just DeclOk -> pure True diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs index 265e2bd451..3feccc057f 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs @@ -78,6 +78,8 @@ data Command m i v a where -- Escape hatch. Eval :: m a -> Command m i v a + UI :: Command m i v () + HQNameQuery :: Maybe Path -> Branch m @@ -177,13 +179,13 @@ data Command m i v a where Merge :: Branch.MergeMode -> Branch m -> Branch m -> Command m i v (Branch m) ViewRemoteBranch :: - RemoteNamespace -> Command m i v (Either GitError (m (), Branch m)) + ReadRemoteNamespace -> Command m i v (Either GitError (m (), Branch m)) -- we want to import as little as possible, so we pass the SBH/path as part -- of the `RemoteNamespace`. 
The Branch that's returned should be fully -- imported and not retain any resources from the remote codebase ImportRemoteBranch :: - RemoteNamespace -> SyncMode -> Command m i v (Either GitError (Branch m)) + ReadRemoteNamespace -> SyncMode -> Command m i v (Either GitError (Branch m)) -- Syncs the Branch to some codebase and updates the head to the head of this causal. -- Any definitions in the head of the supplied branch that aren't in the target @@ -191,7 +193,7 @@ data Command m i v a where SyncLocalRootBranch :: Branch m -> Command m i v () SyncRemoteRootBranch :: - RemoteRepo -> Branch m -> SyncMode -> Command m i v (Either GitError ()) + WriteRepo -> Branch m -> SyncMode -> Command m i v (Either GitError ()) AppendToReflog :: Text -> Branch m -> Branch m -> Command m i v () @@ -246,6 +248,7 @@ lookupEvalResult v (_, m) = view _5 <$> Map.lookup v m commandName :: Command m i v a -> String commandName = \case Eval{} -> "Eval" + UI -> "UI" ConfigLookup{} -> "ConfigLookup" Input -> "Input" Notify{} -> "Notify" diff --git a/parser-typechecker/src/Unison/Codebase/Editor/DisplayObject.hs b/parser-typechecker/src/Unison/Codebase/Editor/DisplayObject.hs index 4d67212852..df07f20dbf 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/DisplayObject.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/DisplayObject.hs @@ -4,11 +4,17 @@ module Unison.Codebase.Editor.DisplayObject where import Unison.Prelude import Unison.ShortHash +import Data.Bifunctor -data DisplayObject a = BuiltinObject | MissingObject ShortHash | UserObject a +data DisplayObject b a = BuiltinObject b | MissingObject ShortHash | UserObject a deriving (Eq, Ord, Show, Functor, Generic) -toMaybe :: DisplayObject a -> Maybe a +instance Bifunctor DisplayObject where + bimap _ _ (MissingObject sh) = MissingObject sh + bimap f _ (BuiltinObject b) = BuiltinObject (f b) + bimap _ f (UserObject a) = UserObject (f a) + +toMaybe :: DisplayObject b a -> Maybe a toMaybe = \case UserObject a -> Just a _ -> 
Nothing diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs index 4d434e7c44..7184c5d95a 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs @@ -10,7 +10,7 @@ import Control.Monad.Except (MonadError, throwError) import qualified Data.Text as Text import Shellmet (($?), ($^), ($|)) import System.FilePath (()) -import Unison.Codebase.Editor.RemoteRepo (RemoteRepo (GitRepo)) +import Unison.Codebase.Editor.RemoteRepo (ReadRepo (ReadGitRepo)) import Unison.Codebase.GitError (GitError) import qualified Unison.Codebase.GitError as GitError import qualified Unison.Util.Exception as Ex @@ -56,12 +56,8 @@ withStatus str ma = do -- | Given a remote git repo url, and branch/commit hash (currently -- not allowed): checks for git, clones or updates a cached copy of the repo -pullBranch :: (MonadIO m, MonadCatch m, MonadError GitError m) => RemoteRepo -> m CodebasePath -pullBranch (GitRepo _uri (Just t)) = error $ - "Pulling a specific commit isn't fully implemented or tested yet.\n" ++ - "InputPatterns.parseUri was expected to have prevented you " ++ - "from supplying the git treeish `" ++ Text.unpack t ++ "`!" 
-pullBranch repo@(GitRepo uri Nothing) = do +pullBranch :: (MonadIO m, MonadCatch m, MonadError GitError m) => ReadRepo -> m CodebasePath +pullBranch repo@(ReadGitRepo uri) = do checkForGit localPath <- tempGitDir uri ifM (doesDirectoryExist localPath) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs index f88e6c70c4..2c4c8dcf9a 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs @@ -32,6 +32,7 @@ import qualified Unison.Parsers as Parsers import qualified Unison.Reference as Reference import qualified Unison.Codebase.Runtime as Runtime import Unison.Codebase.Runtime (Runtime) +import qualified Unison.Server.CodebaseServer as Server import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF import Unison.Util.Free ( Free ) @@ -45,6 +46,7 @@ import qualified Unison.PrettyPrintEnv as PPE import Unison.Term (Term) import Unison.Type (Type) import qualified Unison.Codebase.Editor.AuthorInfo as AuthorInfo +import Web.Browser (openBrowser) typecheck :: (Monad m, Var v) @@ -84,17 +86,22 @@ commandLine -> (NumberedOutput v -> IO NumberedArgs) -> (SourceName -> IO LoadSourceResult) -> Codebase IO v Ann + -> Maybe Server.BaseUrl -> (Int -> IO gen) -> Free (Command IO i v) a -> IO a -commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSource codebase rngGen = +commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSource codebase serverBaseUrl rngGen = flip State.evalStateT 0 . Free.fold go where go :: forall x . Command IO i v x -> State.StateT Int IO x go x = case x of -- Wait until we get either user input or a unison file update - Eval m -> lift $ m - Input -> lift $ awaitInput + Eval m -> lift m + UI -> + case serverBaseUrl of + Just url -> lift . 
void $ openBrowser (Server.urlFor Server.UI url) + Nothing -> lift (return ()) + Input -> lift awaitInput Notify output -> lift $ notifyUser output NotifyNumbered output -> lift $ notifyNumbered output ConfigLookup name -> diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs index d087a5ba18..1549128e3d 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs @@ -35,7 +35,7 @@ import Unison.Codebase.Editor.SlurpResult (SlurpResult(..)) import qualified Unison.Codebase.Editor.SlurpResult as Slurp import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace, printNamespace) +import Unison.Codebase.Editor.RemoteRepo (printNamespace, WriteRemotePath, writeToRead, writePathToRead) import qualified Unison.CommandLine.InputPattern as InputPattern import qualified Unison.CommandLine.InputPatterns as InputPatterns @@ -135,6 +135,7 @@ import qualified Control.Error.Util as ErrorUtil import Unison.Util.Monoid (intercalateMap) import qualified Unison.Util.Star3 as Star3 import qualified Unison.Util.Pretty as P +import qualified Unison.Util.Relation as Relation import Data.List.NonEmpty (NonEmpty) import qualified Data.List.NonEmpty as Nel import Unison.Codebase.Editor.AuthorInfo (AuthorInfo(..)) @@ -283,7 +284,7 @@ loop = do ParseErrors text [ err | Result.Parsing err <- toList notes ] Just (Left errNames) -> do ns <- makeShadowedPrintNamesFromHQ hqs errNames - ppe <- prettyPrintEnv (Names3.suffixify ns) + ppe <- suffixifiedPPE ns let tes = [ err | Result.TypeError err <- toList notes ] cbs = [ bug | Result.CompilerBug (Result.TypecheckerBug bug) @@ -397,20 +398,17 @@ loop = do DeleteTypeI def -> "delete.type " <> hqs' def DeleteBranchI opath -> "delete.namespace " <> ops' opath 
DeletePatchI path -> "delete.patch " <> ps' path - ReplaceTermI src target p -> - "replace.term " <> HQ.toText src <> " " - <> HQ.toText target <> " " - <> opatch p - ReplaceTypeI src target p -> - "replace.type " <> HQ.toText src <> " " - <> HQ.toText target <> " " - <> opatch p + ReplaceI src target p -> + "replace " <> HQ.toText src <> " " + <> HQ.toText target <> " " + <> opatch p ResolveTermNameI path -> "resolve.termName " <> hqs' path ResolveTypeNameI path -> "resolve.typeName " <> hqs' path AddI _selection -> "add" UpdateI p _selection -> "update " <> opatch p PropagatePatchI p scope -> "patch " <> ps' p <> " " <> p' scope UndoI{} -> "undo" + UiI -> "ui" ExecuteI s -> "execute " <> Text.pack s IOTestI hq -> "io.test " <> HQ.toText hq LinkI md defs -> @@ -466,6 +464,7 @@ loop = do DebugBranchHistoryI{} -> wat DebugTypecheckedUnisonFileI{} -> wat DebugDumpNamespacesI{} -> wat + DebugDumpNamespaceSimpleI{} -> wat DebugClearWatchI {} -> wat QuitI{} -> wat DeprecateTermI{} -> undefined @@ -502,7 +501,7 @@ loop = do handleFailedDelete failed failedDependents = do failed <- loadSearchResults $ SR.fromNames failed failedDependents <- loadSearchResults $ SR.fromNames failedDependents - ppe <- prettyPrintEnv =<< makePrintNamesFromLabeled' + ppe <- fqnPPE =<< makePrintNamesFromLabeled' (foldMap SR'.labeledDependencies $ failed <> failedDependents) respond $ CantDelete ppe failed failedDependents saveAndApplyPatch patchPath'' patchName patch' = do @@ -640,9 +639,7 @@ loop = do case uf of Nothing -> do let parseNames0 = (`Names3.Names` mempty) basicPrettyPrintNames0 - -- use suffixed names for resolving the argument to display - parseNames = Names3.suffixify parseNames0 - results = Names3.lookupHQTerm hq parseNames + results = Names3.lookupHQTerm hq parseNames0 if Set.null results then respond $ SearchTermsNotFound [hq] else if Set.size results > 1 then @@ -910,6 +907,8 @@ loop = do diffHelper (Branch.head prev) (Branch.head root') >>= respondNumbered . 
uncurry Output.ShowDiffAfterUndo + UiI -> eval UI + AliasTermI src dest -> do referents <- resolveHHQS'Referents src case (toList referents, toList (getTerms dest)) of @@ -1008,44 +1007,18 @@ loop = do fixupOutput = fmap Path.toName . HQ'.toHQ . Path.unsplitHQ NamesI thing -> do - parseNames0 <- Names3.suffixify0 <$> basicParseNames0 - let filtered = case thing of - HQ.HashOnly shortHash -> - Names.filterBySHs (Set.singleton shortHash) parseNames0 - HQ.HashQualified n sh -> - Names.filterByHQs (Set.singleton $ HQ'.HashQualified n sh) parseNames0 - HQ.NameOnly n -> - Names.filterByHQs (Set.singleton $ HQ'.NameOnly n) parseNames0 - let printNames0 = basicPrettyPrintNames0 - printNames = Names printNames0 mempty - terms' ::Set (Referent, Set (HQ'.HashQualified Name)) - terms' = (`Set.map` Names.termReferents filtered) $ - \r -> (r, Names3.termName hqLength r printNames) + ns0 <- basicParseNames0 + let ns = Names ns0 mempty + terms = Names3.lookupHQTerm thing ns + types = Names3.lookupHQType thing ns + printNames = Names basicPrettyPrintNames0 mempty + terms' :: Set (Referent, Set (HQ'.HashQualified Name)) + terms' = Set.map go terms where + go r = (r, Names3.termName hqLength r printNames) types' :: Set (Reference, Set (HQ'.HashQualified Name)) - types' = (`Set.map` Names.typeReferences filtered) $ - \r -> (r, Names3.typeName hqLength r printNames) + types' = Set.map go types where + go r = (r, Names3.typeName hqLength r printNames) respond $ ListNames hqLength (toList types') (toList terms') --- let (p, hq) = p0 --- namePortion = HQ'.toName hq --- case hq of --- HQ'.NameOnly _ -> --- respond $ uncurry ListNames (results p namePortion) --- HQ'.HashQualified _ sh -> let --- (terms, types) = results p namePortion --- -- filter terms and types based on `sh : ShortHash` --- terms' = filter (Reference.isPrefixOf sh . Referent.toReference . fst) terms --- types' = filter (Reference.isPrefixOf sh . 
fst) types --- in respond $ ListNames terms' types' --- where --- results p namePortion = let --- name = Path.toName . Path.unprefix currentPath' . Path.snoc' p --- $ namePortion --- ns = prettyPrintNames0 --- terms = [ (r, Names.namesForReferent ns r) --- | r <- toList $ Names.termsNamed ns name ] --- types = [ (r, Names.namesForReference ns r) --- | r <- toList $ Names.typesNamed ns name ] --- in (terms, types) LinkI mdValue srcs -> do manageLinks False srcs [mdValue] Metadata.insert @@ -1081,7 +1054,7 @@ loop = do fileByName = do ns <- maybe mempty UF.typecheckedToNames0 <$> use latestTypecheckedFile - fnames <- pure $ Names3.suffixify (Names3.Names ns mempty) + fnames <- pure $ Names3.Names ns mempty case Names3.lookupHQTerm dotDoc fnames of s | Set.size s == 1 -> do -- the displayI command expects full term names, so we resolve @@ -1107,7 +1080,7 @@ loop = do respond $ ListOfLinks ppe out codebaseByName = do - parseNames <- Names3.suffixify0 <$> basicParseNames0 + parseNames <- basicParseNames0 case Names3.lookupHQTerm dotDoc (Names3.Names parseNames mempty) of s | Set.size s == 1 -> displayI ConsoleLocation dotDoc | Set.size s == 0 -> respond $ ListOfLinks mempty [] @@ -1269,7 +1242,7 @@ loop = do lift do numberedArgs .= fmap searchResultToHQString results results' <- loadSearchResults results - ppe <- prettyPrintEnv . Names3.suffixify =<< + ppe <- suffixifiedPPE =<< makePrintNamesFromLabeled' (foldMap SR'.labeledDependencies results') respond $ ListOfDefinitions ppe isVerbose results' @@ -1291,22 +1264,41 @@ loop = do BranchUtil.makeDeleteTermName (resolveSplit' (HQ'.toName <$> hq)) go r = stepManyAt . fmap makeDelete . toList . 
Set.delete r $ conflicted - ReplaceTermI from to patchPath -> do + ReplaceI from to patchPath -> do let patchPath' = fromMaybe defaultPatchPath patchPath patch <- getPatchAt patchPath' QueryResult fromMisses' fromHits <- hqNameQuery [from] QueryResult toMisses' toHits <- hqNameQuery [to] - let fromRefs = termReferences fromHits - toRefs = termReferences toHits + let termsFromRefs = termReferences fromHits + termsToRefs = termReferences toHits + typesFromRefs = typeReferences fromHits + typesToRefs = typeReferences toHits + --- Here are all the kinds of misses + --- [X] [X] + --- [Type] [Term] + --- [Term] [Type] + --- [Type] [X] + --- [Term] [X] + --- [X] [Type] + --- [X] [Term] -- Type hits are term misses - fromMisses = fromMisses' - <> (HQ'.toHQ . SR.typeName <$> typeResults fromHits) - toMisses = toMisses' + termFromMisses = fromMisses' <> (HQ'.toHQ . SR.typeName <$> typeResults fromHits) - go :: Reference + termToMisses = toMisses' + <> (HQ'.toHQ . SR.typeName <$> typeResults toHits) + -- Term hits are type misses + typeFromMisses = fromMisses' + <> (HQ'.toHQ . SR.termName <$> termResults fromHits) + typeToMisses = toMisses' + <> (HQ'.toHQ . SR.termName <$> termResults toHits) + + termMisses = termFromMisses <> termToMisses + typeMisses = typeFromMisses <> typeToMisses + + replaceTerms :: Reference -> Reference -> Action m (Either Event Input) v () - go fr tr = do + replaceTerms fr tr = do mft <- eval $ LoadTypeOfTerm fr mtt <- eval $ LoadTypeOfTerm tr let termNotFound = respond . TermNotFound' @@ -1325,61 +1317,46 @@ loop = do patch (patchPath'', patchName) = resolveSplit' patchPath' saveAndApplyPatch patchPath'' patchName patch' - misses = fromMisses <> toMisses - ambiguous t rs = - let rs' = Set.map Referent.Ref $ Set.fromList rs - in case t of - HQ.HashOnly h -> - hashConflicted h rs' - (Path.parseHQSplit' . HQ.toString -> Right n) -> - termConflicted n rs' - _ -> respond . 
BadName $ HQ.toString t - unless (null misses) $ - respond $ SearchTermsNotFound misses - case (fromRefs, toRefs) of - ([fr], [tr]) -> go fr tr - ([_], tos) -> ambiguous to tos - (frs, _) -> ambiguous from frs - ReplaceTypeI from to patchPath -> do - let patchPath' = fromMaybe defaultPatchPath patchPath - QueryResult fromMisses' fromHits <- hqNameQuery [from] - QueryResult toMisses' toHits <- hqNameQuery [to] - patch <- getPatchAt patchPath' - let fromRefs = typeReferences fromHits - toRefs = typeReferences toHits - -- Term hits are type misses - fromMisses = fromMisses' - <> (HQ'.toHQ . SR.termName <$> termResults fromHits) - toMisses = toMisses' - <> (HQ'.toHQ . SR.termName <$> termResults fromHits) - go :: Reference + + replaceTypes :: Reference -> Reference -> Action m (Either Event Input) v () - go fr tr = do + replaceTypes fr tr = do let patch' = -- The modified patch over Patch.typeEdits (R.insert fr (TypeEdit.Replace tr) . R.deleteDom fr) patch (patchPath'', patchName) = resolveSplit' patchPath' saveAndApplyPatch patchPath'' patchName patch' - misses = fromMisses <> toMisses + ambiguous t rs = let rs' = Set.map Referent.Ref $ Set.fromList rs in case t of HQ.HashOnly h -> hashConflicted h rs' (Path.parseHQSplit' . HQ.toString -> Right n) -> - typeConflicted n $ Set.fromList rs - -- This is unlikely to happen, as t has to be a parsed - -- hash-qualified name already. - -- Still, the types say we need to handle this case. + termConflicted n rs' _ -> respond . 
BadName $ HQ.toString t - unless (null misses) $ - respond $ SearchTermsNotFound misses - case (fromRefs, toRefs) of - ([fr], [tr]) -> go fr tr - ([_], tos) -> ambiguous to tos - (frs, _) -> ambiguous from frs + + mismatch typeName termName = respond $ TypeTermMismatch typeName termName + + + case (termsFromRefs, termsToRefs, typesFromRefs, typesToRefs) of + ([], [], [], []) -> respond $ SearchTermsNotFound termMisses + ([_], [], [], [_]) -> mismatch to from + ([], [_], [_], []) -> mismatch from to + ([_], [], _, _) -> respond $ SearchTermsNotFound termMisses + ([], [_], _, _) -> respond $ SearchTermsNotFound termMisses + (_, _, [_], []) -> respond $ SearchTermsNotFound typeMisses + (_, _, [], [_]) -> respond $ SearchTermsNotFound typeMisses + ([fr], [tr], [], []) -> replaceTerms fr tr + ([], [], [fr], [tr]) -> replaceTypes fr tr + (froms, [_], [], []) -> ambiguous from froms + ([], [], froms, [_]) -> ambiguous from froms + ([_], tos, [], []) -> ambiguous to tos + ([], [], [_], tos) -> ambiguous to tos + (_, _, _, _) -> error "unpossible" + LoadI maybePath -> case maybePath <|> (fst <$> latestFile') of Nothing -> respond NoUnisonFile @@ -1544,7 +1521,7 @@ loop = do names <- makePrintNamesFromLabeled' $ LD.referents testTerms <> LD.referents [ DD.okConstructorReferent, DD.failConstructorReferent ] - ppe <- prettyPrintEnv names + ppe <- fqnPPE names respond $ TestResults stats ppe showOk showFail (oks cachedTests) (fails cachedTests) let toCompute = Set.difference testRefs (Map.keysSet cachedTests) @@ -1593,13 +1570,11 @@ loop = do ExecuteI main -> addRunMain main uf >>= \case NoTermWithThatName -> do - let names0 = basicPrettyPrintNames0 - ppe <- prettyPrintEnv (Names3.Names names0 mempty) + ppe <- suffixifiedPPE (Names3.Names basicPrettyPrintNames0 mempty) mainType <- eval RuntimeMain respond $ NoMainFunction main ppe [mainType] TermHasBadType ty -> do - let names0 = Names3.suffixify0 basicPrettyPrintNames0 - ppe <- prettyPrintEnv (Names3.Names names0 mempty) + ppe 
<- suffixifiedPPE (Names3.Names basicPrettyPrintNames0 mempty) mainType <- eval RuntimeMain respond $ BadMainFunction main ty ppe [mainType] RunMainSuccess unisonFile -> do @@ -1611,13 +1586,12 @@ loop = do Right _ -> pure () -- TODO IOTestI main -> do + -- todo - allow this to run tests from scratch file, using addRunMain testType <- eval RuntimeTest - parseNames0 <- (`Names3.Names` mempty) <$> basicPrettyPrintNames0A - ppe <- prettyPrintEnv parseNames0 + parseNames <- (`Names3.Names` mempty) <$> basicPrettyPrintNames0A + ppe <- suffixifiedPPE parseNames -- use suffixed names for resolving the argument to display let - parseNames = Names3.suffixify parseNames0 - oks results = [ (r, msg) | (r, Term.List' ts) <- results @@ -1634,7 +1608,7 @@ loop = do [Referent.Ref ref] -> do typ <- loadTypeOfTerm (Referent.Ref ref) case typ of - Just typ | Typechecker.isSubtype testType typ -> do + Just typ | Typechecker.isSubtype typ testType -> do let a = ABT.annotation tm tm = DD.forceTerm a a (Term.ref a ref) in do -- v Don't cache IO tests @@ -1643,8 +1617,8 @@ loop = do Left e -> respond (EvaluationFailure e) Right tm' -> respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')]) - _ -> respond $ NoMainFunction "main" ppe [testType] - _ -> respond $ NoMainFunction "main" ppe [testType] + _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] + _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] -- UpdateBuiltinsI -> do -- stepAt updateBuiltins @@ -1692,12 +1666,12 @@ loop = do (Path.toAbsoluteSplit currentPath') maybePath patch <- eval . Eval . Branch.getPatch seg . 
Branch.head =<< getAt p - ppe <- prettyPrintEnv =<< + ppe <- suffixifiedPPE =<< makePrintNamesFromLabeled' (Patch.labeledDependencies patch) respond $ ListEdits patch ppe PullRemoteBranchI mayRepo path syncMode -> unlessError do - ns <- resolveConfiguredGitUrl Pull path mayRepo + ns <- maybe (writePathToRead <$> resolveConfiguredGitUrl Pull path) pure mayRepo lift $ unlessGitError do b <- importRemoteBranch ns syncMode let msg = Just $ PullAlreadyUpToDate ns path @@ -1707,26 +1681,19 @@ loop = do PushRemoteBranchI mayRepo path syncMode -> do let srcAbs = resolveToAbsolute path srcb <- getAt srcAbs - let expandRepo (r, rp) = (r, Nothing, rp) unlessError do - (repo, sbh, remotePath) <- - resolveConfiguredGitUrl Push path (fmap expandRepo mayRepo) - case sbh of - Nothing -> lift $ unlessGitError do - (cleanup, remoteRoot) <- unsafeTime "Push viewRemoteBranch" $ - viewRemoteBranch (repo, Nothing, Path.empty) - -- We don't merge `srcb` with the remote namespace, `r`, we just - -- replace it. The push will be rejected if this rewinds time - -- or misses any new updates in `r` that aren't in `srcb` already. - let newRemoteRoot = Branch.modifyAt remotePath (const srcb) remoteRoot - unsafeTime "Push syncRemoteRootBranch" $ - syncRemoteRootBranch repo newRemoteRoot syncMode - lift . eval $ Eval cleanup - lift $ respond Success - Just{} -> - error $ "impossible match, resolveConfiguredGitUrl shouldn't return" - <> " `Just` unless it was passed `Just`; and here it is passed" - <> " `Nothing` by `expandRepo`." + (repo, remotePath) <- maybe (resolveConfiguredGitUrl Push path) pure mayRepo + lift $ unlessGitError do + (cleanup, remoteRoot) <- unsafeTime "Push viewRemoteBranch" $ + viewRemoteBranch (writeToRead repo, Nothing, Path.empty) + -- We don't merge `srcb` with the remote namespace, `r`, we just + -- replace it. The push will be rejected if this rewinds time + -- or misses any new updates in `r` that aren't in `srcb` already. 
+ let newRemoteRoot = Branch.modifyAt remotePath (const srcb) remoteRoot + unsafeTime "Push syncRemoteRootBranch" $ + syncRemoteRootBranch repo newRemoteRoot syncMode + lift . eval $ Eval cleanup + lift $ respond Success ListDependentsI hq -> -- todo: add flag to handle transitive efficiently resolveHQToLabeledDependencies hq >>= \lds -> if null lds @@ -1823,6 +1790,11 @@ loop = do prettyDefn renderR (r, (Foldable.toList -> names, Foldable.toList -> links)) = P.lines (P.shown <$> if null names then [NameSegment ""] else names) <> P.newline <> prettyLinks renderR r links void . eval . Eval . flip State.execStateT mempty $ goCausal [getCausal root'] + DebugDumpNamespaceSimpleI -> do + for_ (Relation.toList . Branch.deepTypes . Branch.head $ root') \(r, name) -> + traceM $ show name ++ ",Type," ++ Text.unpack (Reference.toText r) + for_ (Relation.toList . Branch.deepTerms . Branch.head $ root') \(r, name) -> + traceM $ show name ++ ",Term," ++ Text.unpack (Referent.toText r) DebugClearWatchI {} -> eval ClearWatchCache DeprecateTermI {} -> notImplemented DeprecateTypeI {} -> notImplemented @@ -1860,26 +1832,20 @@ loop = do resolveConfiguredGitUrl :: PushPull -> Path' - -> Maybe RemoteNamespace - -> ExceptT (Output v) (Action' m v) RemoteNamespace - resolveConfiguredGitUrl pushPull destPath' = \case - Just ns -> pure ns - Nothing -> ExceptT do - let destPath = resolveToAbsolute destPath' - let configKey = gitUrlKey destPath - (eval . ConfigLookup) configKey >>= \case - Just url -> - case P.parse UriParser.repoPath (Text.unpack configKey) url of - Left e -> - pure . Left $ - ConfiguredGitUrlParseError pushPull destPath' url (show e) - Right (repo, Just sbh, remotePath) -> - pure . Left $ - ConfiguredGitUrlIncludesShortBranchHash pushPull repo sbh remotePath - Right ns -> - pure . Right $ ns - Nothing -> - pure . 
Left $ NoConfiguredGitUrl pushPull destPath' + -> ExceptT (Output v) (Action' m v) WriteRemotePath + resolveConfiguredGitUrl pushPull destPath' = ExceptT do + let destPath = resolveToAbsolute destPath' + let configKey = gitUrlKey destPath + (eval . ConfigLookup) configKey >>= \case + Just url -> + case P.parse UriParser.writeRepoPath (Text.unpack configKey) url of + Left e -> + pure . Left $ + ConfiguredGitUrlParseError pushPull destPath' url (show e) + Right ns -> + pure . Right $ ns + Nothing -> + pure . Left $ NoConfiguredGitUrl pushPull destPath' gitUrlKey = configKey "GitUrl" @@ -1891,10 +1857,10 @@ loop = do resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency) resolveHQToLabeledDependencies = \case HQ.NameOnly n -> do - parseNames <- Names3.suffixify0 <$> basicParseNames0 + parseNames <- basicParseNames0 let terms, types :: Set LabeledDependency - terms = Set.map LD.referent . R.lookupDom n $ Names3.terms0 parseNames - types = Set.map LD.typeRef . R.lookupDom n $ Names3.types0 parseNames + terms = Set.map LD.referent . Name.searchBySuffix n $ Names3.terms0 parseNames + types = Set.map LD.typeRef . Name.searchBySuffix n $ Names3.types0 parseNames pure $ terms <> types -- rationale: the hash should be unique enough that the name never helps HQ.HashQualified _n sh -> resolveHashOnly sh @@ -1993,16 +1959,21 @@ propagatePatchNoSync => Patch -> Path.Absolute -> Action' m v Bool -propagatePatchNoSync patch scopePath = stepAtMNoSync' - (Path.unabsolute scopePath, lift . lift . Propagate.propagateAndApply patch) +propagatePatchNoSync patch scopePath = do + r <- use root + let nroot = Branch.toNames0 (Branch.head r) + stepAtMNoSync' (Path.unabsolute scopePath, + lift . lift . Propagate.propagateAndApply nroot patch) -- Returns True if the operation changed the namespace, False otherwise. 
propagatePatch :: (Monad m, Var v) => InputDescription -> Patch -> Path.Absolute -> Action' m v Bool -propagatePatch inputDescription patch scopePath = +propagatePatch inputDescription patch scopePath = do + r <- use root + let nroot = Branch.toNames0 (Branch.head r) stepAtM' (inputDescription <> " (applying patch)") (Path.unabsolute scopePath, - lift . lift . Propagate.propagateAndApply patch) + lift . lift . Propagate.propagateAndApply nroot patch) -- | Create the args needed for showTodoOutput and call it doShowTodoOutput :: Monad m => Patch -> Path.Absolute -> Action' m v () @@ -2196,6 +2167,8 @@ handleBackendError = \case Backend.NoBranchForHash h -> do sbhLength <- eval BranchHashLength respond . NoBranchWithHash $ SBH.fromHash sbhLength h + Backend.CouldntLoadBranch h -> do + respond . CouldntLoadBranch $ h Backend.CouldntExpandBranchHash sbh -> respond $ NoBranchWithHash sbh Backend.AmbiguousBranchHash h hashes -> respond $ BranchHashAmbiguous h hashes @@ -2666,7 +2639,7 @@ doSlurpUpdates typeEdits termEdits deprecated b0 = loadDisplayInfo :: Set Reference -> Action m i v ([(Reference, Maybe (Type v Ann))] - ,[(Reference, DisplayObject (DD.Decl v Ann))]) + ,[(Reference, DisplayObject () (DD.Decl v Ann))]) loadDisplayInfo refs = do termRefs <- filterM (eval . IsTerm) (toList refs) typeRefs <- filterM (eval . 
IsType) (toList refs) @@ -2698,9 +2671,9 @@ makeHistoricalParsingNames lexedHQs = do fixupNamesRelative currentPath rawHistoricalNames) loadTypeDisplayObject - :: Reference -> Action m i v (DisplayObject (DD.Decl v Ann)) + :: Reference -> Action m i v (DisplayObject () (DD.Decl v Ann)) loadTypeDisplayObject = \case - Reference.Builtin _ -> pure BuiltinObject + Reference.Builtin _ -> pure (BuiltinObject ()) Reference.DerivedId id -> maybe (MissingObject $ Reference.idToShortHash id) UserObject <$> eval (LoadType id) @@ -2718,8 +2691,11 @@ lexedSource name src = do parseNames <- makeHistoricalParsingNames hqs pure (parseNames, (src, tokens)) -prettyPrintEnv :: Names -> Action' m v PPE.PrettyPrintEnv -prettyPrintEnv ns = eval CodebaseHashLength <&> (`PPE.fromNames` ns) +suffixifiedPPE :: Names -> Action' m v PPE.PrettyPrintEnv +suffixifiedPPE ns = eval CodebaseHashLength <&> (`PPE.fromSuffixNames` ns) + +fqnPPE :: Names -> Action' m v PPE.PrettyPrintEnv +fqnPPE ns = eval CodebaseHashLength <&> (`PPE.fromNames` ns) parseSearchType :: (Monad m, Var v) => Input -> String -> Action' m v (Either (Output v) (Type v Ann)) @@ -2730,7 +2706,7 @@ parseType :: (Monad m, Var v) parseType input src = do -- `show Input` is the name of the "file" being lexed (names0, lexed) <- lexedSource (Text.pack $ show input) (Text.pack src) - parseNames <- Names3.suffixify0 <$> basicParseNames0 + parseNames <- basicParseNames0 let names = Names3.push (Names3.currentNames names0) (Names3.Names parseNames (Names3.oldNames names0)) e <- eval $ ParseType names lexed @@ -2904,7 +2880,7 @@ executePPE => TypecheckedUnisonFile v a -> Action' m v PPE.PrettyPrintEnv executePPE unisonFile = - prettyPrintEnv =<< displayNames unisonFile + suffixifiedPPE =<< displayNames unisonFile -- Produce a `Names` needed to display all the hashes used in the given file. displayNames :: (Var v, Monad m) @@ -2951,3 +2927,4 @@ declOrBuiltin r = case r of pure . 
fmap DD.Builtin $ Map.lookup r Builtin.builtinConstructorType Reference.DerivedId id -> fmap DD.Decl <$> eval (LoadType id) + diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Input.hs b/parser-typechecker/src/Unison/Codebase/Editor/Input.hs index 49538295dd..7477f4985f 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Input.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Input.hs @@ -50,10 +50,10 @@ data Input | MergeLocalBranchI Path' Path' Branch.MergeMode | PreviewMergeLocalBranchI Path' Path' | DiffNamespaceI Path' Path' -- old new - | PullRemoteBranchI (Maybe RemoteNamespace) Path' SyncMode - | PushRemoteBranchI (Maybe RemoteHead) Path' SyncMode - | CreatePullRequestI RemoteNamespace RemoteNamespace - | LoadPullRequestI RemoteNamespace RemoteNamespace Path' + | PullRemoteBranchI (Maybe ReadRemoteNamespace) Path' SyncMode + | PushRemoteBranchI (Maybe WriteRemotePath) Path' SyncMode + | CreatePullRequestI ReadRemoteNamespace ReadRemoteNamespace + | LoadPullRequestI ReadRemoteNamespace ReadRemoteNamespace Path' | ResetRootI (Either ShortBranchHash Path') -- todo: Q: Does it make sense to publish to not-the-root of a Github repo? -- Does it make sense to fork from not-the-root of a Github repo? 
@@ -98,8 +98,7 @@ data Input -- -- create and remove update directives | DeprecateTermI PatchPath Path.HQSplit' | DeprecateTypeI PatchPath Path.HQSplit' - | ReplaceTermI (HQ.HashQualified Name) (HQ.HashQualified Name) (Maybe PatchPath) - | ReplaceTypeI (HQ.HashQualified Name) (HQ.HashQualified Name) (Maybe PatchPath) + | ReplaceI (HQ.HashQualified Name) (HQ.HashQualified Name) (Maybe PatchPath) | RemoveTermReplacementI (HQ.HashQualified Name) (Maybe PatchPath) | RemoveTypeReplacementI (HQ.HashQualified Name) (Maybe PatchPath) | UndoI @@ -137,8 +136,10 @@ data Input | DebugBranchHistoryI | DebugTypecheckedUnisonFileI | DebugDumpNamespacesI + | DebugDumpNamespaceSimpleI | DebugClearWatchI | QuitI + | UiI deriving (Eq, Show) -- Some commands, like `view`, can dump output to either console or a file. diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Output.hs b/parser-typechecker/src/Unison/Codebase/Editor/Output.hs index be7848c873..0aef180970 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Output.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Output.hs @@ -22,7 +22,7 @@ import Unison.Codebase.Editor.Input import Unison.Codebase (GetRootBranchError) import Unison.Codebase.Editor.SlurpResult (SlurpResult(..)) import Unison.Codebase.GitError -import Unison.Codebase.Path (Path', Path) +import Unison.Codebase.Path (Path') import Unison.Codebase.Patch (Patch) import Unison.Name ( Name ) import Unison.Names2 ( Names ) @@ -79,7 +79,7 @@ data NumberedOutput v | ShowDiffAfterMergePropagate Path.Path' Path.Absolute Path.Path' PPE.PrettyPrintEnv (BranchDiffOutput v Ann) | ShowDiffAfterMergePreview Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) | ShowDiffAfterPull Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) - | ShowDiffAfterCreatePR RemoteNamespace RemoteNamespace PPE.PrettyPrintEnv (BranchDiffOutput v Ann) + | ShowDiffAfterCreatePR ReadRemoteNamespace ReadRemoteNamespace PPE.PrettyPrintEnv 
(BranchDiffOutput v Ann) -- | ShowDiffAfterCreateAuthor NameSegment Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) @@ -98,7 +98,7 @@ data Output v | BadMainFunction String (Type v Ann) PPE.PrettyPrintEnv [Type v Ann] | BranchEmpty (Either ShortBranchHash Path') | BranchNotEmpty Path' - | LoadPullRequest RemoteNamespace RemoteNamespace Path' Path' Path' Path' + | LoadPullRequest ReadRemoteNamespace ReadRemoteNamespace Path' Path' Path' Path' | CreatedNewBranch Path.Absolute | BranchAlreadyExists Path' | PatchAlreadyExists Path.Split' @@ -121,6 +121,7 @@ data Output v | TermNotFound Path.HQSplit' | TypeNotFound' ShortHash | TermNotFound' ShortHash + | TypeTermMismatch (HQ.HashQualified Name) (HQ.HashQualified Name) | SearchTermsNotFound [HQ.HashQualified Name] -- ask confirmation before deleting the last branch that contains some defns -- `Path` is one of the paths the user has requested to delete, and is paired @@ -157,8 +158,8 @@ data Output v -- "display" definitions, possibly to a FilePath on disk (e.g. editing) | DisplayDefinitions (Maybe FilePath) PPE.PrettyPrintEnvDecl - (Map Reference (DisplayObject (Decl v Ann))) - (Map Reference (DisplayObject (Term v Ann))) + (Map Reference (DisplayObject () (Decl v Ann))) + (Map Reference (DisplayObject (Type v Ann) (Term v Ann))) -- | Invariant: there's at least one conflict or edit in the TodoOutput. 
| TodoOutput PPE.PrettyPrintEnvDecl (TO.TodoOutput v Ann) | TestIncrementalOutputStart PPE.PrettyPrintEnv (Int,Int) Reference (Term v Ann) @@ -178,10 +179,9 @@ data Output v | ConfiguredMetadataParseError Path' String (P.Pretty P.ColorText) | NoConfiguredGitUrl PushPull Path' | ConfiguredGitUrlParseError PushPull Path' Text String - | ConfiguredGitUrlIncludesShortBranchHash PushPull RemoteRepo ShortBranchHash Path | DisplayLinks PPE.PrettyPrintEnvDecl Metadata.Metadata - (Map Reference (DisplayObject (Decl v Ann))) - (Map Reference (DisplayObject (Term v Ann))) + (Map Reference (DisplayObject () (Decl v Ann))) + (Map Reference (DisplayObject (Type v Ann) (Term v Ann))) | MetadataMissingType PPE.PrettyPrintEnv Referent | TermMissingType Reference | MetadataAmbiguous (HQ.HashQualified Name) PPE.PrettyPrintEnv [Referent] @@ -193,7 +193,7 @@ data Output v | StartOfCurrentPathHistory | History (Maybe Int) [(ShortBranchHash, Names.Diff)] HistoryTail | ShowReflog [ReflogEntry] - | PullAlreadyUpToDate RemoteNamespace Path' + | PullAlreadyUpToDate ReadRemoteNamespace Path' | MergeAlreadyUpToDate Path' Path' | PreviewMergeAlreadyUpToDate Path' Path' -- | No conflicts or edits remain for the current patch. 
@@ -208,6 +208,7 @@ data Output v | BadName String | DefaultMetadataNotification | BadRootBranch GetRootBranchError + | CouldntLoadBranch Branch.Hash | NoOp deriving (Show) @@ -238,6 +239,7 @@ isFailure :: Ord v => Output v -> Bool isFailure o = case o of Success{} -> False BadRootBranch{} -> True + CouldntLoadBranch{} -> True NoUnisonFile{} -> True InvalidSourceName{} -> True SourceLoadFailed{} -> True @@ -267,6 +269,7 @@ isFailure o = case o of TypeNotFound'{} -> True TermNotFound{} -> True TermNotFound'{} -> True + TypeTermMismatch{} -> True SearchTermsNotFound ts -> not (null ts) DeleteBranchConfirmation{} -> False CantDelete{} -> True @@ -286,7 +289,7 @@ isFailure o = case o of Typechecked{} -> False DisplayDefinitions _ _ m1 m2 -> null m1 && null m2 DisplayRendered{} -> False - TodoOutput _ todo -> TO.todoScore todo /= 0 && not (TO.noConflicts todo) + TodoOutput _ todo -> TO.todoScore todo > 0 || not (TO.noConflicts todo) TestIncrementalOutputStart{} -> False TestIncrementalOutputEnd{} -> False TestResults _ _ _ _ _ fails -> not (null fails) @@ -297,7 +300,6 @@ isFailure o = case o of ConfiguredMetadataParseError{} -> True NoConfiguredGitUrl{} -> True ConfiguredGitUrlParseError{} -> True - ConfiguredGitUrlIncludesShortBranchHash{} -> True DisplayLinks{} -> False MetadataMissingType{} -> True MetadataAmbiguous{} -> True diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index 9ec10150f8..096e45fc32 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -1,9 +1,11 @@ {-# OPTIONS_GHC -Wno-partial-type-signatures #-} +{-# LANGUAGE BangPatterns #-} {-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} -module Unison.Codebase.Editor.Propagate where +module Unison.Codebase.Editor.Propagate (propagateAndApply) where 
import Control.Error.Util ( hush ) import Control.Lens @@ -20,11 +22,13 @@ import Unison.Codebase.Patch ( Patch(..) ) import qualified Unison.Codebase.Patch as Patch import Unison.DataDeclaration ( Decl ) import qualified Unison.DataDeclaration as Decl +import qualified Unison.Name as Name import Unison.Names3 ( Names0 ) import qualified Unison.Names2 as Names import Unison.Parser ( Ann(..) ) import Unison.Reference ( Reference(..) ) import qualified Unison.Reference as Reference +import Unison.Referent ( Referent ) import qualified Unison.Referent as Referent import qualified Unison.Result as Result import qualified Unison.Term as Term @@ -46,7 +50,6 @@ import qualified Unison.Util.Star3 as Star3 import Unison.Type ( Type ) import qualified Unison.Type as Type import qualified Unison.Typechecker as Typechecker -import Unison.ConstructorType ( ConstructorType ) import qualified Unison.Runtime.IOSource as IOSource type F m i v = Free (Command m i v) @@ -54,13 +57,12 @@ type F m i v = Free (Command m i v) data Edits v = Edits { termEdits :: Map Reference TermEdit -- same info as `termEdits` but in more efficient form for calling `Term.updateDependencies` - , termReplacements :: Map Reference Reference + , termReplacements :: Map Referent Referent , newTerms :: Map Reference (Term v Ann, Type v Ann) , typeEdits :: Map Reference TypeEdit , typeReplacements :: Map Reference Reference , newTypes :: Map Reference (Decl v Ann) - , constructorReplacements :: Map (Reference, Int, ConstructorType) - (Reference, Int, ConstructorType) + , constructorReplacements :: Map Referent Referent } deriving (Eq, Show) noEdits :: Edits v @@ -69,35 +71,124 @@ noEdits = Edits mempty mempty mempty mempty mempty mempty mempty propagateAndApply :: forall m i v . 
(Applicative m, Var v) - => Patch + => Names0 + -> Patch -> Branch0 m -> F m i v (Branch0 m) -propagateAndApply patch branch = do - edits <- propagate patch branch +propagateAndApply rootNames patch branch = do + edits <- propagate rootNames patch branch f <- applyPropagate patch edits (pure . f . applyDeprecations patch) branch --- Creates a mapping from old data constructors to new data constructors --- by looking at the original names for the data constructors which are --- embedded in the Decl object because we carefully planned that. -generateConstructorMapping - :: Eq v - => Map v (Reference, Decl v _) - -> Map v (Reference, Decl.DataDeclaration v _) - -> Map - (Reference, Int, ConstructorType) - (Reference, Int, ConstructorType) -generateConstructorMapping oldComponent newComponent = Map.fromList - [ let t = Decl.constructorType oldDecl in ((oldR, oldC, t), (newR, newC, t)) - | (v1, (oldR, oldDecl)) <- Map.toList oldComponent - , (v2, (newR, newDecl)) <- Map.toList newComponent - , v1 == v2 - , (oldC, (_, oldName, _)) <- zip [0 ..] - $ Decl.constructors' (Decl.asDataDecl oldDecl) - , (newC, (_, newName, _)) <- zip [0 ..] $ Decl.constructors' newDecl - , oldName == newName - ] +-- This function produces constructor mappings for propagated type updates. +-- +-- For instance in `type Foo = Blah Bar | Zoink Nat`, if `Bar` is updated +-- from `Bar#old` to `Bar#new`, `Foo` will be a "propagated update" and +-- we want to map the `Foo#old.Blah` constructor to `Foo#new.Blah`. +-- +-- The function works by aligning same-named types and same-named constructors, +-- using the names of the types provided by the two maps and the names +-- of constructors embedded in the data decls themselves. +-- +-- This is correct, and relies only on the type and constructor names coming +-- out of the codebase and Decl.unhashComponent being unique, which they are. 
+-- +-- What happens is that the declaration component is pulled out of the codebase, +-- references are converted back to variables, substitutions are made in +-- constructor type signatures, and then the component is rehashed, which +-- re-canonicalizes the constructor orders in a possibly different way. +-- +-- The unique names for the types and constructors are just carried through +-- unchanged through this process, so their being the same establishes that they +-- had the same role in the two versions of the cycle. +propagateCtorMapping + :: (Var v, Show a) + => Map v (Reference, Decl v a) + -> Map v (Reference, Decl.DataDeclaration v a) + -> Map Referent Referent +propagateCtorMapping oldComponent newComponent = let + singletons = Map.size oldComponent == 1 && Map.size newComponent == 1 + isSingleton c = null . drop 1 $ Decl.constructors' c + r = Map.fromList + [ (oldCon, newCon) + | (v1, (oldR, oldDecl)) <- Map.toList oldComponent + , (v2, (newR, newDecl)) <- Map.toList newComponent + , v1 == v2 || singletons + , let t = Decl.constructorType oldDecl + , (oldC, (_,ol'Name,_)) <- zip [0 ..] $ Decl.constructors' (Decl.asDataDecl oldDecl) + , (newC, (_,newName,_)) <- zip [0 ..] $ Decl.constructors' newDecl + , ol'Name == newName || (isSingleton (Decl.asDataDecl oldDecl) && isSingleton newDecl) + , oldR /= newR + , let oldCon = Referent.Con oldR oldC t + newCon = Referent.Con newR newC t + ] + in if debugMode then traceShow ("constructorMappings", r) r else r + + +-- TODO: Use of this function will go away soon, once constructor mappings can be +-- added directly to the patch. +-- +-- Given a set of type replacements, this creates a mapping from the constructors +-- of the old type(s) to the constructors of the new types. +-- +-- Constructors for the same-unqualified-named type with a same-unqualified-name +-- constructor are mapped to each other. 
+--
+-- If the cycle is size 1 for old and new, then the type names need not be the same,
+-- and if the number of constructors is 1, then the constructor names need not
+-- be the same.
+genInitialCtorMapping ::
+  forall v m i . Var v => Names0 -> Map Reference Reference -> F m i v (Map Referent Referent)
+genInitialCtorMapping rootNames initialTypeReplacements = do
+  let mappings :: (Reference,Reference) -> _ (Map Referent Referent)
+      mappings (old,new) = do
+        old <- unhashTypeComponent old
+        new <- fmap (over _2 (either Decl.toDataDecl id)) <$> unhashTypeComponent new
+        pure $ ctorMapping old new
+  Map.unions <$> traverse mappings (Map.toList initialTypeReplacements)
+  where
+    -- True if the unqualified versions of the names in the two sets overlap
+    -- ex: {foo.bar, foo.baz} matches the set {blah.bar}.
+    unqualifiedNamesMatch :: Set Name.Name -> Set Name.Name -> Bool
+    unqualifiedNamesMatch n1 n2 | debugMode && traceShow ("namesMatch", n1, n2) False = undefined
+    unqualifiedNamesMatch n1 n2 =
+      (not . Set.null) (Set.intersection (Set.map Name.unqualified n1)
+                                         (Set.map Name.unqualified n2))
+    ctorNamesMatch oldR newR =
+      unqualifiedNamesMatch (Names.namesForReferent rootNames oldR)
+                            (Names.namesForReferent rootNames newR)
+
+    typeNamesMatch typeMapping oldType newType =
+      Map.lookup oldType typeMapping == Just newType ||
+      unqualifiedNamesMatch (Names.namesForReference rootNames oldType)
+                            (Names.namesForReference rootNames newType)
+
+    ctorMapping
+      :: Map v (Reference, Decl v a)
+      -> Map v (Reference, Decl.DataDeclaration v a)
+      -> Map Referent Referent
+    ctorMapping oldComponent newComponent = let
+      singletons = Map.size oldComponent == 1 && Map.size newComponent == 1
+      isSingleton c = null . 
drop 1 $ Decl.constructors' c + r = Map.fromList + [ (oldCon, newCon) + | (_, (oldR, oldDecl)) <- Map.toList oldComponent + , (_, (newR, newDecl)) <- Map.toList newComponent + , typeNamesMatch initialTypeReplacements oldR newR || singletons + , let t = Decl.constructorType oldDecl + , (oldC, _) <- zip [0 ..] $ Decl.constructors' (Decl.asDataDecl oldDecl) + , (newC, _) <- zip [0 ..] $ Decl.constructors' newDecl + , let oldCon = Referent.Con oldR oldC t + newCon = Referent.Con newR newC t + , ctorNamesMatch oldCon newCon + || (isSingleton (Decl.asDataDecl oldDecl) && isSingleton newDecl) + , oldR /= newR + ] + in if debugMode then traceShow ("constructorMappings", r) r else r + +debugMode :: Bool +debugMode = False -- Note: this function adds definitions to the codebase as it propagates. -- Description: @@ -132,10 +223,12 @@ generateConstructorMapping oldComponent newComponent = Map.fromList propagate :: forall m i v . (Applicative m, Var v) - => Patch + => Names0 -- TODO: this argument can be removed once patches have term replacement + -- of type `Referent -> Referent` + -> Patch -> Branch0 m -> F m i v (Edits v) -propagate patch b = case validatePatch patch of +propagate rootNames patch b = case validatePatch patch of Nothing -> do eval $ Notify PatchNeedsToBeConflictFree pure noEdits @@ -146,8 +239,29 @@ propagate patch b = case validatePatch patch of (Set.fromList [ r | Referent.Ref r <- Set.toList $ Branch.deepReferents b ] ) - initialDirty <- - R.dom <$> computeFrontier (eval . GetDependents) patch names0 + + -- TODO: these are just used for tracing, could be deleted if we don't care + -- about printing meaningful names for definitions during propagation, or if + -- we want to just remove the tracing. 
+ refName r = -- could just become show r if we don't care + let rns = Names.namesForReferent rootNames (Referent.Ref r) + <> Names.namesForReference rootNames r + in case toList rns of + [] -> show r + n : _ -> show n + -- this could also become show r if we're removing the dependency on Names0 + referentName r = case toList (Names.namesForReferent rootNames r) of + [] -> Referent.toString r + n : _ -> show n + + initialDirty <- R.dom <$> computeFrontier (eval . GetDependents) patch names0 + + let initialTypeReplacements = Map.mapMaybe TypeEdit.toReference initialTypeEdits + -- TODO: once patches can directly contain constructor replacements, this + -- line can turn into a pure function that takes the subset of the term replacements + -- in the patch which have a `Referent.Con` as their LHS. + initialCtorMappings <- genInitialCtorMapping rootNames initialTypeReplacements + order <- sortDependentsGraph initialDirty entireBranch let @@ -166,12 +280,14 @@ propagate patch b = case validatePatch patch of Reference.Builtin _ -> collectEdits es seen todo Reference.DerivedId _ -> go r todo where + debugCtors = + unlines [ referentName old <> " -> " <> referentName new + | (old,new) <- Map.toList constructorReplacements ] + go r _ | debugMode && traceShow ("Rewriting: ", refName r) False = undefined + go _ _ | debugMode && trace ("** Constructor replacements:\n\n" <> debugCtors) False = undefined go r todo = - if Map.member r termEdits - || Map.member r typeEdits - || Set.member r seen - then - collectEdits es seen todo + if Map.member r termEdits || Set.member r seen || Map.member r typeEdits + then collectEdits es seen todo else do haveType <- eval $ IsType r @@ -194,8 +310,10 @@ propagate patch b = case validatePatch patch of $ Reference.componentFor r let todo' = todo <> getOrdered dependents collectEdits edits' seen' todo' + doType :: Reference -> F m i v (Maybe (Edits v), Set Reference) doType r = do + when debugMode $ traceM ("Rewriting type: " <> refName r) 
componentMap <- unhashTypeComponent r let componentMap' = over _2 (Decl.updateDependencies typeReplacements) @@ -238,22 +356,24 @@ propagate patch b = case validatePatch patch of seen' = seen <> Set.fromList (view _1 . view _2 <$> joinedStuff) writeTypes = traverse_ (\(Reference.DerivedId id, tp) -> eval $ PutDecl id tp) - constructorMapping = - constructorReplacements - <> generateConstructorMapping componentMap hashedComponents' + !newCtorMappings = let + r = propagateCtorMapping componentMap hashedComponents' + in if debugMode then traceShow ("constructorMappings: ", r) r else r + constructorReplacements' = constructorReplacements <> newCtorMappings writeTypes $ Map.toList newNewTypes pure ( Just $ Edits termEdits - termReplacements + (newCtorMappings <> termReplacements) newTerms typeEdits' typeReplacements' newTypes' - constructorMapping + constructorReplacements' , seen' ) doTerm :: Reference -> F m i v (Maybe (Edits v), Set Reference) doTerm r = do + when debugMode (traceM $ "Rewriting term: " <> show r) componentMap <- unhashTermComponent r let componentMap' = over @@ -263,7 +383,9 @@ propagate patch b = case validatePatch patch of seen' = seen <> Set.fromList (view _1 <$> Map.elems componentMap) mayComponent <- verifyTermComponent componentMap' es case mayComponent of - Nothing -> pure (Nothing, seen') + Nothing -> do + when debugMode (traceM $ refName r <> " did not typecheck after substitutions") + pure (Nothing, seen') Just componentMap'' -> do let joinedStuff = @@ -282,7 +404,7 @@ propagate patch b = case validatePatch patch of (r, TermEdit.Replace r' $ TermEdit.typing newType oldType) termReplacements' = termReplacements <> (Map.fromList . fmap toReplacement) joinedStuff - toReplacement (r, r', _, _, _) = (r, r') + toReplacement (r, r', _, _, _) = (Referent.Ref r, Referent.Ref r') newTerms' = newTerms <> (Map.fromList . 
fmap toNewTerm) joinedStuff toNewTerm (_, r', tm, _, tp) = (r', (tm, tp)) @@ -305,16 +427,18 @@ propagate patch b = case validatePatch patch of ) collectEdits (Edits initialTermEdits - (Map.mapMaybe TermEdit.toReference initialTermEdits) + (initialTermReplacements initialCtorMappings initialTermEdits) mempty initialTypeEdits - (Map.mapMaybe TypeEdit.toReference initialTypeEdits) - mempty + initialTypeReplacements mempty + initialCtorMappings ) mempty -- things to skip (getOrdered initialDirty) where + initialTermReplacements ctors es = ctors <> + (Map.mapKeys Referent.Ref . fmap Referent.Ref . Map.mapMaybe TermEdit.toReference) es sortDependentsGraph :: Set Reference -> Set Reference -> _ (Map Reference Int) sortDependentsGraph dependencies restrictTo = do closure <- transitiveClosure @@ -365,27 +489,6 @@ propagate patch b = case validatePatch patch of in Map.fromList [ (v, (r, tm, tp)) | (r, (v, tm, tp)) <- Map.toList m' ] unhash . Map.fromList . catMaybes <$> traverse termInfo (toList component) - unhashTypeComponent - :: forall m v - . (Applicative m, Var v) - => Reference - -> F m i v (Map v (Reference, Decl v _)) - unhashTypeComponent ref = do - let - component = Reference.members $ Reference.componentFor ref - typeInfo :: Reference -> F m i v (Maybe (Reference, Decl v Ann)) - typeInfo typeRef = case typeRef of - Reference.DerivedId id -> do - declm <- eval $ LoadType id - decl <- maybe (error $ "Missing type declaration " <> show typeRef) - pure - declm - pure $ Just (typeRef, decl) - Reference.Builtin{} -> pure Nothing - unhash = - Map.fromList . map reshuffle . Map.toList . Decl.unhashComponent - where reshuffle (r, (v, decl)) = (v, (r, decl)) - unhash . Map.fromList . 
catMaybes <$> traverse typeInfo (toList component) verifyTermComponent :: Map v (Reference, Term v _, a) -> Edits v @@ -418,6 +521,24 @@ propagate patch b = case validatePatch patch of $ runIdentity (Result.toMaybe typecheckResult) >>= hush +unhashTypeComponent :: Var v => Reference -> F m i v (Map v (Reference, Decl v Ann)) +unhashTypeComponent ref = do + let + component = Reference.members $ Reference.componentFor ref + typeInfo :: Reference -> F m i v (Maybe (Reference, Decl v Ann)) + typeInfo typeRef = case typeRef of + Reference.DerivedId id -> do + declm <- eval $ LoadType id + decl <- maybe (error $ "Missing type declaration " <> show typeRef) + pure + declm + pure $ Just (typeRef, decl) + Reference.Builtin{} -> pure Nothing + unhash = + Map.fromList . map reshuffle . Map.toList . Decl.unhashComponent + where reshuffle (r, (v, decl)) = (v, (r, decl)) + unhash . Map.fromList . catMaybes <$> traverse typeInfo (toList component) + applyDeprecations :: Applicative m => Patch -> Branch0 m -> Branch0 m applyDeprecations patch = deleteDeprecatedTerms deprecatedTerms . deleteDeprecatedTypes deprecatedTypes @@ -439,51 +560,59 @@ applyDeprecations patch = deleteDeprecatedTerms deprecatedTerms applyPropagate :: Var v => Applicative m => Patch -> Edits v -> F m i v (Branch0 m -> Branch0 m) applyPropagate patch Edits {..} = do - let termRefs = Map.mapMaybe TermEdit.toReference termEdits - typeRefs = Map.mapMaybe TypeEdit.toReference typeEdits - termTypes = Map.map (Type.toReference . snd) newTerms + let termTypes = Map.map (Type.toReference . snd) newTerms -- recursively update names and delete deprecated definitions - pure $ Branch.stepEverywhere (updateLevel termRefs typeRefs termTypes) + pure $ Branch.stepEverywhere (updateLevel termReplacements typeReplacements termTypes) where + isPropagated r = Set.notMember r allPatchTargets + allPatchTargets = Patch.allReferenceTargets patch + propagatedMd :: forall r . 
r -> (r, Metadata.Type, Metadata.Value) + propagatedMd r = (r, IOSource.isPropagatedReference, IOSource.isPropagatedValue) + updateLevel - :: Map Reference Reference + :: Map Referent Referent -> Map Reference Reference -> Map Reference Reference -> Branch0 m -> Branch0 m updateLevel termEdits typeEdits termTypes Branch0 {..} = - Branch.branch0 termsWithCons types _children _edits + Branch.branch0 terms types _children _edits where - isPropagated = (`Set.notMember` allPatchTargets) where - allPatchTargets = Patch.allReferenceTargets patch - - terms = foldl' replaceTerm _terms (Map.toList termEdits) - types = foldl' replaceType _types (Map.toList typeEdits) - - updateMetadata r r' (tp, v) = if v == r then (typeOf r' tp, r') else (tp, v) - where typeOf r t = fromMaybe t $ Map.lookup r termTypes - - propagatedMd :: r -> (r, Metadata.Type, Metadata.Value) - propagatedMd r = (r, IOSource.isPropagatedReference, IOSource.isPropagatedValue) - termsWithCons = - foldl' replaceConstructor terms (Map.toList constructorReplacements) - replaceTerm s (r, r') = - (if isPropagated r' - then Metadata.insert (propagatedMd (Referent.Ref r')) - else Metadata.delete (propagatedMd (Referent.Ref r'))) . - Star3.replaceFact (Referent.Ref r) (Referent.Ref r') $ - Star3.mapD3 (updateMetadata r r') s - - replaceConstructor s ((oldr, oldc, oldt), (newr, newc, newt)) = - -- always insert the metadata since patches can't contain ctor mappings (yet) - Metadata.insert (propagatedMd con') . 
- Star3.replaceFact (Referent.Con oldr oldc oldt) con' $ s + isPropagatedReferent (Referent.Con _ _ _) = True + isPropagatedReferent (Referent.Ref r) = isPropagated r + + terms0 = Star3.replaceFacts replaceConstructor constructorReplacements _terms + terms = updateMetadatas Referent.Ref + $ Star3.replaceFacts replaceTerm termEdits terms0 + types = updateMetadatas id + $ Star3.replaceFacts replaceType typeEdits _types + + updateMetadatas ref s = clearPropagated $ Star3.mapD3 go s where - con' = Referent.Con newr newc newt - replaceType s (r, r') = + clearPropagated s = foldl' go s allPatchTargets where + go s r = Metadata.delete (propagatedMd $ ref r) s + go (tp,v) = case Map.lookup (Referent.Ref v) termEdits of + Just (Referent.Ref r) -> (typeOf r tp, r) + _ -> (tp,v) + typeOf r t = fromMaybe t $ Map.lookup r termTypes + + replaceTerm :: Referent -> Referent -> _ -> _ + replaceTerm r r' s = + (if isPropagatedReferent r' + then Metadata.insert (propagatedMd r') . Metadata.delete (propagatedMd r) + else Metadata.delete (propagatedMd r')) $ s + + replaceConstructor :: Referent -> Referent -> _ -> _ + replaceConstructor (Referent.Con _ _ _) !new s = + -- TODO: revisit this once patches have constructor mappings + -- at the moment, all constructor replacements are autopropagated + -- rather than added manually + Metadata.insert (propagatedMd new) $ s + replaceConstructor _ _ s = s + + replaceType _ r' s = (if isPropagated r' then Metadata.insert (propagatedMd r') - else Metadata.delete (propagatedMd r')) . 
- Star3.replaceFact r r' $ s + else Metadata.delete (propagatedMd r')) $ s -- typePreservingTermEdits :: Patch -> Patch -- typePreservingTermEdits Patch {..} = Patch termEdits mempty diff --git a/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs b/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs index 9648b398f0..146a97736e 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs @@ -3,30 +3,38 @@ module Unison.Codebase.Editor.RemoteRepo where import Unison.Prelude -import Unison.Util.Monoid as Monoid -import Data.Text as Text import qualified Unison.Codebase.Path as Path import Unison.Codebase.Path (Path) import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.Codebase.ShortBranchHash as SBH -data RemoteRepo = GitRepo { url :: Text, commit :: Maybe Text } - deriving (Eq, Ord, Show) +data ReadRepo = ReadGitRepo { url :: Text {-, commitish :: Maybe Text -}} deriving (Eq, Ord, Show) +data WriteRepo = WriteGitRepo { url' :: Text {-, branch :: Maybe Text -}} deriving (Eq, Ord, Show) -printRepo :: RemoteRepo -> Text -printRepo GitRepo{..} = url <> Monoid.fromMaybe (Text.cons ':' <$> commit) +writeToRead :: WriteRepo -> ReadRepo +writeToRead (WriteGitRepo url) = ReadGitRepo url -printNamespace :: RemoteRepo -> Maybe ShortBranchHash -> Path -> Text +writePathToRead :: WriteRemotePath -> ReadRemoteNamespace +writePathToRead (w, p) = (writeToRead w, Nothing, p) + +printReadRepo :: ReadRepo -> Text +printReadRepo ReadGitRepo{..} = url -- <> Monoid.fromMaybe (Text.cons ':' <$> commit) +printWriteRepo :: WriteRepo -> Text +printWriteRepo WriteGitRepo{..} = url' -- <> Monoid.fromMaybe (Text.cons ':' <$> branch) + +printNamespace :: ReadRepo -> Maybe ShortBranchHash -> Path -> Text printNamespace repo sbh path = - printRepo repo <> case sbh of + printReadRepo repo <> case sbh of Nothing -> if path == Path.empty then mempty else ":." 
<> Path.toText path Just sbh -> ":#" <> SBH.toText sbh <> if path == Path.empty then mempty else "." <> Path.toText path - -printHead :: RemoteRepo -> Path -> Text -printHead repo path = printNamespace repo Nothing path -type RemoteNamespace = (RemoteRepo, Maybe ShortBranchHash, Path) -type RemoteHead = (RemoteRepo, Path) +printHead :: WriteRepo -> Path -> Text +printHead repo path = + printWriteRepo repo + <> if path == Path.empty then mempty else ":." <> Path.toText path + +type ReadRemoteNamespace = (ReadRepo, Maybe ShortBranchHash, Path) +type WriteRemotePath = (WriteRepo, Path) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs b/parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs index fd8af80b7b..9d8f61b0e4 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs @@ -24,10 +24,10 @@ data TodoOutput v a = TodoOutput { todoScore :: Score , todoFrontier :: ( [(Reference, Maybe (Type v a))] - , [(Reference, DisplayObject (Decl v a))]) + , [(Reference, DisplayObject () (Decl v a))]) , todoFrontierDependents :: ( [(Score, Reference, Maybe (Type v a))] - , [(Score, Reference, DisplayObject (Decl v a))]) + , [(Score, Reference, DisplayObject () (Decl v a))]) , nameConflicts :: Names0 , editConflicts :: Patch } deriving (Show) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs b/parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs index 99f4c12642..2aa6fdc5dc 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs @@ -1,6 +1,6 @@ {-# LANGUAGE OverloadedStrings #-} -module Unison.Codebase.Editor.UriParser (repoPath) where +module Unison.Codebase.Editor.UriParser (repoPath,writeRepo,writeRepoPath) where import qualified Text.Megaparsec as P import qualified Text.Megaparsec.Char.Lexer as L @@ -9,7 +9,7 @@ import Data.Text as Text import 
Unison.Codebase.Path (Path(..)) import qualified Unison.Codebase.Path as Path -import Unison.Codebase.Editor.RemoteRepo (RemoteRepo(..), RemoteNamespace) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, ReadRepo (ReadGitRepo), WriteRemotePath, WriteRepo (WriteGitRepo)) import Unison.Codebase.ShortBranchHash (ShortBranchHash(..)) import Unison.Prelude import qualified Unison.Hash as Hash @@ -31,16 +31,29 @@ type P = P.Parsec () Text -- SSH Protocol -- $ git clone ssh://[user@]server/project.git[:treeish][:[#hash][.path]] -- $ git clone [user@]server:project.git[:treeish][:[#hash][.path]] --- Git Protocol (obsolete) -repoPath :: P RemoteNamespace +repoPath :: P ReadRemoteNamespace repoPath = P.label "generic git repo" $ do protocol <- parseProtocol treeish <- P.optional treeishSuffix - let repo = GitRepo (printProtocol protocol) treeish - nshashPath <- P.optional (C.char ':' *> namespaceHashPath) - case nshashPath of - Nothing -> pure (repo, Nothing, Path.empty) - Just (sbh, p) -> pure (repo, sbh, p) + case treeish of + Just t -> fail $ "Specifying a git 'commit-ish' (" ++ Text.unpack t ++ ") is not currently supported." + ++ " " ++ "If you need this, add your 2¢ at https://github.com/unisonweb/unison/issues/1436." + Nothing -> do + let repo = ReadGitRepo (printProtocol protocol) + nshashPath <- P.optional (C.char ':' *> namespaceHashPath) + case nshashPath of + Nothing -> pure (repo, Nothing, Path.empty) + Just (sbh, p) -> pure (repo, sbh, p) + +writeRepo :: P WriteRepo +writeRepo = P.label "repo root for writing" $ do + WriteGitRepo . printProtocol <$> parseProtocol + +writeRepoPath :: P WriteRemotePath +writeRepoPath = P.label "generic git repo" $ do + repo <- writeRepo + path <- P.optional (C.char ':' *> absolutePath) + pure (repo, fromMaybe Path.empty path) -- does this not exist somewhere in megaparsec? 
yes in 7.0 symbol :: Text -> P Text @@ -141,16 +154,17 @@ parseProtocol = P.label "parseProtocol" $ namespaceHashPath :: P (Maybe ShortBranchHash, Path) namespaceHashPath = do sbh <- P.optional shortBranchHash - p <- P.optional $ do - void $ C.char '.' + p <- P.optional absolutePath + pure (sbh, fromMaybe Path.empty p) + +absolutePath :: P Path +absolutePath = do + void $ C.char '.' + Path . Seq.fromList . fmap (NameSegment . Text.pack) <$> P.sepBy1 ((:) <$> C.satisfy Unison.Lexer.wordyIdStartChar <*> P.many (C.satisfy Unison.Lexer.wordyIdChar)) (C.char '.') - case p of - Nothing -> pure (sbh, Path.empty) - Just p -> pure (sbh, makePath p) - where makePath = Path . Seq.fromList . fmap (NameSegment . Text.pack) treeishSuffix :: P Text treeishSuffix = P.label "git treeish" . P.try $ do diff --git a/parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs b/parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs index 3e638cda85..0ac0a7d473 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs @@ -13,7 +13,7 @@ import Data.Void (Void) -- |"release/M1j.2" -> "releases._M1j" -- "devel/*" -> "trunk" -defaultBaseLib :: Parsec Void Text RemoteNamespace +defaultBaseLib :: Parsec Void Text ReadRemoteNamespace defaultBaseLib = fmap makeNS $ devel <|> release where devel, release, version :: Parsec Void Text Text @@ -21,7 +21,7 @@ defaultBaseLib = fmap makeNS $ devel <|> release release = fmap ("releases._" <>) $ "release/" *> version <* eof version = fmap Text.pack $ try (someTill anyChar "." 
<* many anyChar) <|> many anyChar - makeNS :: Text -> RemoteNamespace - makeNS t = ( GitRepo "https://github.com/unisonweb/base" Nothing + makeNS :: Text -> ReadRemoteNamespace + makeNS t = ( ReadGitRepo "https://github.com/unisonweb/base" , Nothing , Path.fromText t) diff --git a/parser-typechecker/src/Unison/Codebase/FileCodebase.hs b/parser-typechecker/src/Unison/Codebase/FileCodebase.hs index 691010bb2a..3e44ee25de 100644 --- a/parser-typechecker/src/Unison/Codebase/FileCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/FileCodebase.hs @@ -31,7 +31,7 @@ import qualified U.Util.Cache as Cache import qualified Unison.Codebase.Init as Codebase import Unison.Codebase.Branch (headHash) import Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch, withIOError, withStatus) -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace, RemoteRepo (GitRepo), printRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), writeToRead, printWriteRepo) import Unison.Codebase.FileCodebase.Common ( Err (CantParseBranchHead), branchFromFiles, @@ -263,7 +263,7 @@ branchHeadUpdates root = do -- * Git stuff viewRemoteBranch' :: forall m. 
(MonadIO m, MonadCatch m) - => Branch.Cache m -> RemoteNamespace -> ExceptT GitError m (Branch m, CodebasePath) + => Branch.Cache m -> ReadRemoteNamespace -> ExceptT GitError m (Branch m, CodebasePath) viewRemoteBranch' cache (repo, sbh, path) = do -- set up the cache dir remotePath <- time "Git fetch" $ pullBranch repo @@ -295,12 +295,12 @@ pushGitRootBranch => Codebase.SyncToDir m -> Branch.Cache m -> Branch m - -> RemoteRepo + -> WriteRepo -> SyncMode -> ExceptT GitError m () pushGitRootBranch syncToDirectory cache branch repo syncMode = do -- Pull the remote repo into a staging directory - (remoteRoot, remotePath) <- viewRemoteBranch' cache (repo, Nothing, Path.empty) + (remoteRoot, remotePath) <- viewRemoteBranch' cache (writeToRead repo, Nothing, Path.empty) ifM (pure (remoteRoot == Branch.empty) ||^ lift (remoteRoot `Branch.before` branch)) -- ours is newer 👍, meaning this is a fast-forward push, @@ -309,7 +309,7 @@ pushGitRootBranch syncToDirectory cache branch repo syncMode = do (throwError $ GitError.PushDestinationHasNewStuff repo) where stageAndPush remotePath = do - let repoString = Text.unpack $ printRepo repo + let repoString = Text.unpack $ printWriteRepo repo withStatus ("Staging files for upload to " ++ repoString ++ " ...") $ lift (syncToDirectory remotePath syncMode branch) updateCausalHead (branchHeadDir remotePath) (Branch._history branch) @@ -320,8 +320,8 @@ pushGitRootBranch syncToDirectory cache branch repo syncMode = do `withIOError` (throwError . GitError.PushException repo . show)) (throwError $ GitError.PushNoOp repo) -- Commit our changes - push :: CodebasePath -> RemoteRepo -> IO Bool -- withIOError needs IO - push remotePath (GitRepo url gitbranch) = do + push :: CodebasePath -> WriteRepo -> IO Bool -- withIOError needs IO + push remotePath (WriteGitRepo url) = do -- has anything changed? 
status <- gitTextIn remotePath ["status", "--short"] if Text.null status then @@ -331,11 +331,5 @@ pushGitRootBranch syncToDirectory cache branch repo syncMode = do gitIn remotePath ["commit", "-q", "-m", "Sync branch " <> Text.pack (show $ headHash branch)] -- Push our changes to the repo - case gitbranch of - Nothing -> gitIn remotePath ["push", "--quiet", url] - Just gitbranch -> error $ - "Pushing to a specific branch isn't fully implemented or tested yet.\n" - ++ "InputPatterns.parseUri was expected to have prevented you " - ++ "from supplying the git treeish `" ++ Text.unpack gitbranch ++ "`!" - -- gitIn remotePath ["push", "--quiet", url, gitbranch] + gitIn remotePath ["push", "--quiet", url] pure True diff --git a/parser-typechecker/src/Unison/Codebase/GitError.hs b/parser-typechecker/src/Unison/Codebase/GitError.hs index 20a01d70a1..dee76d036d 100644 --- a/parser-typechecker/src/Unison/Codebase/GitError.hs +++ b/parser-typechecker/src/Unison/Codebase/GitError.hs @@ -4,7 +4,7 @@ import Unison.Prelude import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.Codebase.Branch as Branch -import Unison.Codebase.Editor.RemoteRepo (RemoteRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo) import U.Codebase.Sqlite.DbId (SchemaVersion) type CodebasePath = FilePath @@ -12,17 +12,17 @@ type CodebasePath = FilePath data GitError = NoGit | UnrecognizableCacheDir Text CodebasePath | UnrecognizableCheckoutDir Text CodebasePath - | CloneException RemoteRepo String - | PushException RemoteRepo String - | PushNoOp RemoteRepo + | CloneException ReadRepo String + | PushException WriteRepo String + | PushNoOp WriteRepo -- url commit Diff of what would change on merge with remote - | PushDestinationHasNewStuff RemoteRepo - | NoRemoteNamespaceWithHash RemoteRepo ShortBranchHash - | RemoteNamespaceHashAmbiguous RemoteRepo ShortBranchHash (Set Branch.Hash) - | CouldntLoadRootBranch RemoteRepo Branch.Hash - | CouldntParseRootBranch RemoteRepo 
String - | CouldntOpenCodebase RemoteRepo CodebasePath - | UnrecognizedSchemaVersion RemoteRepo CodebasePath SchemaVersion + | PushDestinationHasNewStuff WriteRepo + | NoRemoteNamespaceWithHash ReadRepo ShortBranchHash + | RemoteNamespaceHashAmbiguous ReadRepo ShortBranchHash (Set Branch.Hash) + | CouldntLoadRootBranch ReadRepo Branch.Hash + | CouldntParseRootBranch ReadRepo String + | CouldntOpenCodebase ReadRepo CodebasePath + | UnrecognizedSchemaVersion ReadRepo CodebasePath SchemaVersion | SomeOtherError String | CouldntLoadSyncedBranch Branch.Hash deriving Show diff --git a/parser-typechecker/src/Unison/Codebase/MainTerm.hs b/parser-typechecker/src/Unison/Codebase/MainTerm.hs index 3bd514b97c..fda02c3772 100644 --- a/parser-typechecker/src/Unison/Codebase/MainTerm.hs +++ b/parser-typechecker/src/Unison/Codebase/MainTerm.hs @@ -54,17 +54,17 @@ getMainTerm loadTypeOfTerm parseNames0 mainName mainType = _ -> pure (BadType mainName Nothing) _ -> pure (NotFound mainName) --- '{io2.IO} () +-- '{io2.IO, Exception} () builtinMain :: Var v => a -> Type.Type v a builtinMain a = Type.arrow a (Type.ref a DD.unitRef) io - where io = Type.effect1 a (Type.builtinIO a) (Type.ref a DD.unitRef) + where io = Type.effect a [Type.builtinIO a, DD.exceptionType a] (Type.ref a DD.unitRef) -- [Result] resultArr :: Ord v => a -> Type.Type v a resultArr a = Type.app a (Type.ref a Type.listRef) (Type.ref a DD.testResultRef) builtinResultArr :: Ord v => a -> Type.Type v a -builtinResultArr a = Type.effect1 a (Type.builtinIO a) (resultArr a) +builtinResultArr a = Type.effect a [Type.builtinIO a, DD.exceptionType a] (resultArr a) -- '{io2.IO} [Result] builtinTest :: Ord v => a -> Type.Type v a diff --git a/parser-typechecker/src/Unison/Codebase/Serialization/V1.hs b/parser-typechecker/src/Unison/Codebase/Serialization/V1.hs index ecb7b0754a..7793eff071 100644 --- a/parser-typechecker/src/Unison/Codebase/Serialization/V1.hs +++ b/parser-typechecker/src/Unison/Codebase/Serialization/V1.hs @@ 
-239,7 +239,6 @@ putReference r = case r of putHash hash putLength i putLength n - _ -> error "unpossible" getReference :: MonadGet m => m Reference getReference = do diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 6cab43e216..1c8eca35d7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -69,7 +69,7 @@ import Unison.Codebase.Branch (Branch (..)) import qualified Unison.Codebase.Branch as Branch import qualified Unison.Codebase.Causal as Causal import Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch) -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace, RemoteRepo (GitRepo), printRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), writeToRead, printWriteRepo) import Unison.Codebase.GitError (GitError) import qualified Unison.Codebase.GitError as GitError import qualified Unison.Codebase.Init as Codebase @@ -992,7 +992,7 @@ syncProgress = Sync.Progress need done warn allDone viewRemoteBranch' :: forall m. (MonadIO m, MonadCatch m) => - RemoteNamespace -> + ReadRemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath)) viewRemoteBranch' (repo, sbh, path) = runExceptT do -- set up the cache dir @@ -1036,7 +1036,7 @@ pushGitRootBranch :: (MonadIO m, MonadCatch m) => Connection -> Branch m -> - RemoteRepo -> + WriteRepo -> m (Either GitError ()) pushGitRootBranch srcConn branch repo = runExceptT @GitError do -- pull the remote repo to the staging directory @@ -1048,7 +1048,7 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do -- if it fails, rollback to the savepoint and clean up. 
-- set up the cache dir - remotePath <- time "Git fetch" $ pullBranch repo + remotePath <- time "Git fetch" $ pullBranch (writeToRead repo) destConn <- openOrCreateCodebaseConnection "push.dest" remotePath flip runReaderT destConn $ Q.savepoint "push" @@ -1085,7 +1085,7 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do shutdownConnection destConn void $ push remotePath repo where - repoString = Text.unpack $ printRepo repo + repoString = Text.unpack $ printWriteRepo repo setRepoRoot :: Q.DB m => Branch.Hash -> m () setRepoRoot h = do let h2 = Cv.causalHash1to2 h @@ -1132,8 +1132,8 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do hasDeleteShm = any isShmDelete statusLines -- Commit our changes - push :: CodebasePath -> RemoteRepo -> IO Bool -- withIOError needs IO - push remotePath (GitRepo url gitbranch) = time "SqliteCodebase.pushGitRootBranch.push" $ do + push :: CodebasePath -> WriteRepo -> IO Bool -- withIOError needs IO + push remotePath (WriteGitRepo url) = time "SqliteCodebase.pushGitRootBranch.push" $ do -- has anything changed? -- note: -uall recursively shows status for all files in untracked directories -- we want this so that we see @@ -1159,14 +1159,5 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do remotePath ["commit", "-q", "-m", "Sync branch " <> Text.pack (show $ Branch.headHash branch)] -- Push our changes to the repo - case gitbranch of - Nothing -> gitIn remotePath ["push", "--quiet", url] - Just gitbranch -> - error $ - "Pushing to a specific branch isn't fully implemented or tested yet.\n" - ++ "InputPatterns.parseUri was expected to have prevented you " - ++ "from supplying the git treeish `" - ++ Text.unpack gitbranch - ++ "`!" 
- -- gitIn remotePath ["push", "--quiet", url, gitbranch] + gitIn remotePath ["push", "--quiet", url] pure True diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 34e9abae35..0f029232c1 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -242,8 +242,7 @@ symbol2to1 :: V2.Symbol -> V1.Symbol symbol2to1 (V2.Symbol i t) = V1.Symbol i (Var.User t) symbol1to2 :: V1.Symbol -> V2.Symbol -symbol1to2 (V1.Symbol i (Var.User t)) = V2.Symbol i t -symbol1to2 x = error $ "unimplemented: symbol1to2 " ++ show x +symbol1to2 (V1.Symbol i varType) = V2.Symbol i (Var.rawName varType) shortHashSuffix1to2 :: Text -> V1.Reference.Pos shortHashSuffix1to2 = diff --git a/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs b/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs index dc0af1dd8d..99ae297906 100644 --- a/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs +++ b/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs @@ -298,6 +298,7 @@ run dir configFile stanzas codebase = do printNumbered loadPreviousUnisonBlock codebase + Nothing rng free case o of diff --git a/parser-typechecker/src/Unison/Codecs.hs b/parser-typechecker/src/Unison/Codecs.hs deleted file mode 100644 index f1dceddc1e..0000000000 --- a/parser-typechecker/src/Unison/Codecs.hs +++ /dev/null @@ -1,340 +0,0 @@ -{-# LANGUAGE PatternSynonyms #-} - -module Unison.Codecs where - --- A format for encoding runtime values, with sharing for compiled nodes. 
- -import Unison.Prelude - -import Control.Arrow (second) -import Control.Monad.State -import Data.Bits (Bits) -import qualified Data.Bytes.Serial as BS -import Data.Bytes.Signed (Unsigned) -import Data.Bytes.VarInt (VarInt(..)) -import qualified Data.ByteString as B -import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString) -import qualified Data.ByteString.Lazy as BL -import Data.Bytes.Put -import qualified Unison.ABT as ABT -import qualified Unison.Blank as Blank -import qualified Unison.DataDeclaration as DD -import qualified Unison.Hash as Hash -import Unison.Reference (Reference, pattern Builtin, pattern Derived) -import qualified Unison.Referent as Referent -import qualified Unison.ConstructorType as ConstructorType -import Unison.Term -import Unison.UnisonFile (UnisonFile, pattern UnisonFile) -import qualified Unison.UnisonFile as UF -import Unison.Var (Var) -import qualified Unison.Var as Var -import Unison.Pattern (Pattern) -import qualified Unison.Pattern as Pattern - -type Pos = Word64 - -serializeTerm :: (MonadPut m, MonadState Pos m, Var v) - => Term v a - -> m Pos -serializeTerm x = do - let putTag = do putWord8 111; putWord8 0 - let incPosition = do pos <- get; modify' (+1); pure pos - case ABT.out x of - ABT.Var v -> do - putTag - putWord8 0 - lengthEncode $ Var.name v - incPosition - ABT.Abs v body -> do - pbody <- serializeTerm body - putTag - putWord8 1 - lengthEncode $ Var.name v - putBackref pbody - incPosition - ABT.Cycle body -> do - pbody <- serializeTerm body - putTag - putWord8 10 - putBackref pbody - incPosition - ABT.Tm f -> case f of - Ann e _ -> do - serializeTerm e -- ignore types (todo: revisit) - Ref ref -> do - putTag - putWord8 2 - serializeReference ref - incPosition - Constructor ref id -> do - putTag - putWord8 3 - serializeReference ref - putWord32be $ fromIntegral id - incPosition - Request ref id -> do - putTag - putWord8 4 - serializeReference ref - putWord32be $ fromIntegral id - incPosition - Text text -> do - 
putTag - putWord8 5 - lengthEncode text - incPosition - Int n -> do - putTag - putWord8 6 - serializeInt n - incPosition - Nat n -> do - putTag - putWord8 6 - serializeNat n - incPosition - Float n -> do - putTag - putWord8 6 - serializeFloat n - incPosition - Boolean b -> do - putTag - putWord8 6 - serializeBoolean b - incPosition - List v -> do - elementPositions <- traverse serializeTerm v - putTag - putWord8 7 - putLength $ length elementPositions - traverse_ putBackref elementPositions - incPosition - Lam body -> do - pos <- serializeTerm body - putTag - putWord8 8 - putBackref pos - incPosition - App fn arg -> do - posf <- serializeTerm fn - posarg <- serializeTerm arg - putTag - putWord8 9 - putBackref posf - putLength (1 :: Int) - putBackref posarg - incPosition - Let _ binding body -> do - posbind <- serializeTerm binding - posbod <- serializeTerm body - putTag - putWord8 11 - putBackref posbind - putBackref posbod - incPosition - If c t f -> do - posc <- serializeTerm c - post <- serializeTerm t - posf <- serializeTerm f - putTag - putWord8 12 - putBackref posc - putBackref post - putBackref posf - incPosition - And x y -> do - posx <- serializeTerm x - posy <- serializeTerm y - putTag - putWord8 13 - putBackref posx - putBackref posy - incPosition - Or x y -> do - posx <- serializeTerm x - posy <- serializeTerm y - putTag - putWord8 14 - putBackref posx - putBackref posy - incPosition - Match scrutinee cases -> do - poss <- serializeTerm scrutinee - casePositions <- traverse serializeCase1 cases - putTag - putWord8 15 - putBackref poss - putLength $ length casePositions - traverse_ serializeCase2 casePositions - incPosition - Blank b -> error $ "cannot serialize program with blank " ++ - fromMaybe "" (Blank.nameb b) - Handle h body -> do - hpos <- serializeTerm h - bpos <- serializeTerm body - putTag - putWord8 16 - putBackref hpos - putBackref bpos - incPosition - LetRec _ bs body -> do - positions <- traverse serializeTerm bs - pbody <- serializeTerm 
body - putTag - putWord8 19 - putLength $ length positions - traverse_ putBackref positions - putBackref pbody - incPosition - Char c -> do - putTag - putWord8 20 - putWord64be $ fromIntegral $ fromEnum c - incPosition - TermLink ref -> do - putTag - putWord8 21 - serializeReferent ref - incPosition - TypeLink ref -> do - putTag - putWord8 22 - serializeReference ref - incPosition - -serializePattern :: MonadPut m => Pattern a -> m () -serializePattern p = case p of - -- note: the putWord8 0 is the tag before any unboxed pattern - Pattern.Boolean _ b -> putWord8 0 *> serializeBoolean b - Pattern.Int _ n -> putWord8 0 *> serializeInt n - Pattern.Nat _ n -> putWord8 0 *> serializeNat n - Pattern.Float _ n -> putWord8 0 *> serializeFloat n - Pattern.Var _ -> putWord8 1 - Pattern.Unbound _ -> putWord8 2 - Pattern.Constructor _ r cid ps -> do - putWord8 3 - serializeReference r - putWord32be $ fromIntegral cid - putLength (length ps) - traverse_ serializePattern ps - Pattern.As _ p -> do - putWord8 4 - serializePattern p - Pattern.EffectPure _ p -> do - putWord8 5 - serializePattern p - Pattern.EffectBind _ r cid ps k -> do - putWord8 6 - serializeReference r - putWord32be $ fromIntegral cid - putLength (length ps) - traverse_ serializePattern ps - serializePattern k - _ -> error "todo: delete me after deleting PatternP - serializePattern match failure" - -serializeFloat :: MonadPut m => Double -> m () -serializeFloat n = do - putByteString . BL.toStrict . toLazyByteString $ doubleBE n - putWord8 3 - -serializeNat :: MonadPut m => Word64 -> m () -serializeNat n = do - putWord64be n - putWord8 2 - -serializeInt :: MonadPut m => Int64 -> m () -serializeInt n = do - putByteString . BL.toStrict . 
toLazyByteString $ int64BE n - putWord8 1 - -serializeBoolean :: MonadPut m => Bool -> m () -serializeBoolean False = putWord64be 0 *> putWord8 0 -serializeBoolean True = putWord64be 1 *> putWord8 0 - -serializeCase2 :: MonadPut m => MatchCase loc Pos -> m () -serializeCase2 (MatchCase p guard body) = do - serializePattern p - serializeMaybe putBackref guard - putBackref body - -serializeCase1 :: (Var v, MonadPut m, MonadState Pos m) - => MatchCase p (Term v a) -> m (MatchCase p Pos) -serializeCase1 (MatchCase p guard body) = do - posg <- traverse serializeTerm guard - posb <- serializeTerm body - pure $ MatchCase p posg posb - -putBackref :: MonadPut m => Pos -> m () -putBackref = BS.serialize . VarInt - -putLength :: (MonadPut m, Integral n, Integral (Unsigned n), - Bits n, Bits (Unsigned n)) - => n -> m () -putLength = BS.serialize . VarInt - -serializeMaybe :: (MonadPut m) => (a -> m ()) -> Maybe a -> m () -serializeMaybe f b = case b of - Nothing -> putWord8 0 - Just x -> putWord8 1 *> f x - -lengthEncode :: MonadPut m => Text -> m () -lengthEncode text = do - let bs = encodeUtf8 text - putLength $ B.length bs - putByteString bs - -serializeFoldable :: (MonadPut m, Foldable f) => (a -> m ()) -> f a -> m () -serializeFoldable f fa = do - putLength $ length fa - traverse_ f fa - -serializeReferent :: MonadPut m => Referent.Referent -> m () -serializeReferent r = case r of - Referent.Ref r -> putWord8 0 *> serializeReference r - Referent.Con r cid ct -> do - putWord8 1 - serializeReference r - putLength cid - serializeConstructorType ct - -serializeConstructorType :: MonadPut m => ConstructorType.ConstructorType -> m () -serializeConstructorType ct = case ct of - ConstructorType.Data -> putWord8 0 - ConstructorType.Effect -> putWord8 1 - -serializeReference :: MonadPut m => Reference -> m () -serializeReference ref = case ref of - Builtin text -> do - putWord8 0 - lengthEncode text - Derived hash i n -> do - putWord8 1 - let bs = Hash.toBytes hash - putLength $ 
B.length bs - putByteString bs - putLength i - putLength n - _ -> error "impossible" - -serializeConstructorArities :: MonadPut m => Reference -> [Int] -> m () -serializeConstructorArities r constructorArities = do - serializeReference r - serializeFoldable (putWord32be . fromIntegral) constructorArities - -serializeFile - :: (MonadPut m, MonadState Pos m, Monoid a, Var v) - => UnisonFile v a -> Term v a -> m () -serializeFile uf@(UnisonFile dataDecls effectDecls _ _) tm = do - let body = UF.uberTerm' uf tm - let dataDecls' = second DD.constructorArities <$> toList dataDecls - let effectDecls' = - second (DD.constructorArities . DD.toDataDecl) <$> toList effectDecls - -- traceM $ show effectDecls' - serializeFoldable (uncurry serializeConstructorArities) dataDecls' - serializeFoldable (uncurry serializeConstructorArities) effectDecls' - -- NB: we rewrite the term to minimize away let rec cycles, as let rec - -- blocks aren't allowed to have effects - pos <- serializeTerm body - putWord8 0 - putBackref pos diff --git a/parser-typechecker/src/Unison/CommandLine/DisplayValues.hs b/parser-typechecker/src/Unison/CommandLine/DisplayValues.hs index 153fc7fc5b..5d6ccf1f19 100644 --- a/parser-typechecker/src/Unison/CommandLine/DisplayValues.hs +++ b/parser-typechecker/src/Unison/CommandLine/DisplayValues.hs @@ -30,6 +30,7 @@ import qualified Unison.Util.SyntaxText as S import qualified Unison.Codebase.Editor.DisplayObject as DO import qualified Unison.CommandLine.OutputMessages as OutputMessages import qualified Unison.ConstructorType as CT +import qualified Unison.Builtin as Builtin type Pretty = P.Pretty P.ColorText @@ -132,19 +133,18 @@ displayPretty pped terms typeOf eval types tm = go tm tms = [ ref | DD.TupleTerm' [DD.EitherRight' (DD.Doc2Term (toRef -> Just ref)),_anns] <- toList es ] typeMap <- let -- todo: populate the variable names / kind once BuiltinObject supports that - go ref@(Reference.Builtin _) = pure (ref, DO.BuiltinObject) + go ref@(Reference.Builtin _) 
= pure (ref, DO.BuiltinObject ()) go ref = (ref,) <$> do decl <- types ref let missing = DO.MissingObject (SH.unsafeFromText $ Reference.toText ref) pure $ maybe missing DO.UserObject decl in Map.fromList <$> traverse go tys termMap <- let - -- todo: populate the type signature once BuiltinObject supports that - go ref@(Reference.Builtin _) = pure (ref, DO.BuiltinObject) - go ref = (ref,) <$> do - tm <- terms ref - let missing = DO.MissingObject (SH.unsafeFromText $ Reference.toText ref) - pure $ maybe missing DO.UserObject tm + go ref = (ref,) <$> case ref of + Reference.Builtin _ -> pure $ Builtin.typeOf missing DO.BuiltinObject ref + _ -> maybe missing DO.UserObject <$> terms ref + where + missing = DO.MissingObject (SH.unsafeFromText $ Reference.toText ref) in Map.fromList <$> traverse go tms -- in docs, we use suffixed names everywhere let pped' = pped { PPE.unsuffixifiedPPE = PPE.suffixifiedPPE pped } diff --git a/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs b/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs index 9943307529..1b26833767 100644 --- a/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs +++ b/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs @@ -7,7 +7,6 @@ module Unison.CommandLine.InputPatterns where import Unison.Prelude import qualified Control.Lens.Cons as Cons -import qualified Control.Lens as Lens import Data.Bifunctor (first) import Data.List (intercalate, isPrefixOf) import Data.List.Extra (nubOrdOn) @@ -41,7 +40,7 @@ import qualified Unison.Util.Pretty as P import qualified Unison.Util.Relation as R import qualified Unison.Codebase.Editor.SlurpResult as SR import qualified Unison.Codebase.Editor.UriParser as UriParser -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo) import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo import Data.Tuple.Extra (uncurry3) @@ -309,6 +308,11 @@ docs = 
InputPattern "docs" [] [(Required, definitionQueryArg)] [s] -> first fromString $ Input.DocsI <$> Path.parseHQSplit' s _ -> Left (I.help docs)) +ui :: InputPattern +ui = InputPattern "ui" [] [] + "`ui` opens the Codebase UI in the default browser." + (const $ pure Input.UiI) + undo :: InputPattern undo = InputPattern "undo" [] [] "`undo` reverts the most recent change to the codebase." @@ -798,9 +802,7 @@ push = InputPattern [] -> Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' SyncMode.ShortCircuit url : rest -> do - (repo, sbh, path) <- parseUri "url" url - when (isJust sbh) - $ Left "Can't push to a particular remote namespace hash." + (repo, path) <- parsePushPath "url" url p <- case rest of [] -> Right Path.relativeEmpty' [path] -> first fromString $ Path.parsePath' path @@ -825,9 +827,7 @@ pushExhaustive = InputPattern [] -> Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' SyncMode.Complete url : rest -> do - (repo, sbh, path) <- parseUri "url" url - when (isJust sbh) - $ Left "Can't push to a particular remote namespace hash." + (repo, path) <- parsePushPath "url" url p <- case rest of [] -> Right Path.relativeEmpty' [path] -> first fromString $ Path.parsePath' path @@ -879,17 +879,20 @@ loadPullRequest = InputPattern "pull-request.load" ["pr.load"] pure $ Input.LoadPullRequestI baseRepo headRepo destPath _ -> Left (I.help loadPullRequest) ) -parseUri :: String -> String -> Either (P.Pretty P.ColorText) RemoteNamespace +parseUri :: String -> String -> Either (P.Pretty P.ColorText) ReadRemoteNamespace parseUri label input = do - ns <- first (fromString . show) -- turn any parsing errors into a Pretty. + first (fromString . show) -- turn any parsing errors into a Pretty. (P.parse UriParser.repoPath label (Text.pack input)) - case (RemoteRepo.commit . Lens.view Lens._1) ns of - Nothing -> pure ns - Just commit -> Left . P.wrap $ - "I don't totally know how to address specific git commits (e.g. 
" - <> P.group (P.text commit <> ")") <> " yet." - <> "If you need this, add your 2¢ at" - <> P.backticked "https://github.com/unisonweb/unison/issues/1436" + +parseWriteRepo :: String -> String -> Either (P.Pretty P.ColorText) WriteRepo +parseWriteRepo label input = do + first (fromString . show) -- turn any parsing errors into a Pretty. + (P.parse UriParser.writeRepo label (Text.pack input)) + +parsePushPath :: String -> String -> Either (P.Pretty P.ColorText) WriteRemotePath +parsePushPath label input = do + first (fromString . show) -- turn any parsing errors into a Pretty. + (P.parse UriParser.writeRepoPath label (Text.pack input)) squashMerge :: InputPattern squashMerge = @@ -928,12 +931,16 @@ diffNamespace :: InputPattern diffNamespace = InputPattern "diff.namespace" [] - [(Required, pathArg), (Required, pathArg)] + [(Required, pathArg), (Optional, pathArg)] (P.column2 [ ( "`diff.namespace before after`" , P.wrap "shows how the namespace `after` differs from the namespace `before`" ) + , ( "`diff.namespace before`" + , P.wrap + "shows how the current namespace differs from the namespace `before`" + ) ] ) (\case @@ -941,6 +948,9 @@ diffNamespace = InputPattern before <- Path.parsePath' before after <- Path.parsePath' after pure $ Input.DiffNamespaceI before after + [before] -> first fromString $ do + before <- Path.parsePath' before + pure $ Input.DiffNamespaceI before Path.currentPath _ -> Left $ I.help diffNamespace ) @@ -975,12 +985,11 @@ replaceEdit -> Maybe Input.PatchPath -> Input ) - -> String -> InputPattern -replaceEdit f s = self +replaceEdit f = self where self = InputPattern - ("replace." <> s) + "replace" [] [ (Required, definitionQueryArg) , (Required, definitionQueryArg) @@ -988,17 +997,10 @@ replaceEdit f s = self ] (P.wrapColumn2 [ ( makeExample self ["", "", ""] - , "Replace the " - <> P.string s - <> " in the given patch " - <> "with the " - <> P.string s - <> " ." + , "Replace the term/type in the given patch with the term/type ." 
) , ( makeExample self ["", ""] - , "Replace the " - <> P.string s - <> " with in the default patch." + , "Replace the term/type with in the default patch." ) ] ) @@ -1014,11 +1016,8 @@ replaceEdit f s = self _ -> Left $ I.help self ) -replaceType :: InputPattern -replaceType = replaceEdit Input.ReplaceTypeI "type" - -replaceTerm :: InputPattern -replaceTerm = replaceEdit Input.ReplaceTermI "term" +replace :: InputPattern +replace = replaceEdit Input.ReplaceI viewReflog :: InputPattern viewReflog = InputPattern @@ -1309,6 +1308,12 @@ debugDumpNamespace = InputPattern "Dump the namespace to a text file" (const $ Right Input.DebugDumpNamespacesI) +debugDumpNamespaceSimple :: InputPattern +debugDumpNamespaceSimple = InputPattern + "debug.dump-namespace-simple" [] [(Required, noCompletions)] + "Dump the namespace to a text file" + (const $ Right Input.DebugDumpNamespaceSimpleI) + debugClearWatchCache :: InputPattern debugClearWatchCache = InputPattern "debug.clear-cache" [] [(Required, noCompletions)] @@ -1406,6 +1411,7 @@ validInputs = , view , display , displayTo + , ui , docs , findPatch , viewPatch @@ -1425,8 +1431,7 @@ validInputs = , unlink , links , createAuthor - , replaceTerm - , replaceType + , replace , deleteTermReplacement , deleteTypeReplacement , test @@ -1443,6 +1448,7 @@ validInputs = , debugBranchHistory , debugFileHashes , debugDumpNamespace + , debugDumpNamespaceSimple , debugClearWatchCache ] diff --git a/parser-typechecker/src/Unison/CommandLine/Main.hs b/parser-typechecker/src/Unison/CommandLine/Main.hs index 8c5a01868e..dd8babf80f 100644 --- a/parser-typechecker/src/Unison/CommandLine/Main.hs +++ b/parser-typechecker/src/Unison/CommandLine/Main.hs @@ -17,10 +17,11 @@ import System.IO.Error (isDoesNotExistError) import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Branch as Branch import Unison.Codebase.Editor.Input (Input (..), Event) +import qualified Unison.Server.CodebaseServer as Server import qualified 
Unison.Codebase.Editor.HandleInput as HandleInput import qualified Unison.Codebase.Editor.HandleCommand as HandleCommand import Unison.Codebase.Editor.Command (LoadSourceResult(..)) -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace, printNamespace) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, printNamespace) import Unison.Codebase (Codebase) import Unison.CommandLine import Unison.PrettyTerminal @@ -39,7 +40,6 @@ import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Runtime as Runtime import qualified Unison.Codebase as Codebase import qualified Unison.CommandLine.InputPattern as IP -import qualified Unison.Runtime.Interface as RTI import qualified Unison.Util.Pretty as P import qualified Unison.Util.TQueue as Q import Text.Regex.TDFA @@ -144,7 +144,7 @@ welcomeMessage dir version = , P.wrap ("Type " <> P.hiBlue "help" <> " to get help. 😎") ] -hintFreshCodebase :: RemoteNamespace -> P.Pretty P.ColorText +hintFreshCodebase :: ReadRemoteNamespace -> P.Pretty P.ColorText hintFreshCodebase ns = P.wrap $ "Enter " <> (P.hiBlue . P.group) @@ -153,14 +153,16 @@ hintFreshCodebase ns = main :: FilePath - -> Maybe RemoteNamespace + -> Maybe ReadRemoteNamespace -> Path.Absolute -> (Config, IO ()) -> [Either Event Input] + -> Runtime.Runtime Symbol -> Codebase IO Symbol Ann -> String + -> Maybe Server.BaseUrl -> IO () -main dir defaultBaseLib initialPath (config,cancelConfig) initialInputs codebase version = do +main dir defaultBaseLib initialPath (config, cancelConfig) initialInputs runtime codebase version serverBaseUrl = do dir' <- shortenDirectory dir root <- fromMaybe Branch.empty . 
rightMay <$> Codebase.getRootBranch codebase putPrettyLn $ case defaultBaseLib of @@ -169,7 +171,6 @@ main dir defaultBaseLib initialPath (config,cancelConfig) initialInputs codebase _ -> welcomeMessage dir' version eventQueue <- Q.newIO do - runtime <- RTI.startRuntime -- we watch for root branch tip changes, but want to ignore ones we expect. rootRef <- newIORef root pathRef <- newIORef initialPath @@ -241,6 +242,7 @@ main dir defaultBaseLib initialPath (config,cancelConfig) initialInputs codebase putPrettyNonempty p $> args) loadSourceFile codebase + serverBaseUrl (const Random.getSystemDRG) free case o of diff --git a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs b/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs index ec50e14427..9def8de13e 100644 --- a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs +++ b/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs @@ -106,12 +106,11 @@ import qualified Unison.Hash as Hash import qualified Unison.Codebase.Causal as Causal import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo import qualified Unison.Util.List as List -import qualified Unison.Util.Monoid as Monoid import Data.Tuple (swap) import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.ShortHash as SH import Unison.LabeledDependency as LD -import Unison.Codebase.Editor.RemoteRepo (RemoteRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo) import U.Codebase.Sqlite.DbId (SchemaVersion(SchemaVersion)) type Pretty = P.Pretty P.ColorText @@ -243,7 +242,7 @@ notifyNumbered o = case o of <> "or" <> IP.makeExample' IP.viewReflog <> "to undo this change." -prettyRemoteNamespace :: (RemoteRepo.RemoteRepo, +prettyRemoteNamespace :: (RemoteRepo.ReadRepo, Maybe ShortBranchHash, Path.Path) -> P.Pretty P.ColorText prettyRemoteNamespace = @@ -267,7 +266,10 @@ notifyUser dir o = case o of $ "I couldn't find a root namespace with the hash " <> prettySBH (SBH.fullFromHash h) <> "." 
- + CouldntLoadBranch h -> + pure . P.fatalCallout . P.wrap $ "I have reason to believe that" + <> P.shown h <> "exists in the codebase, but there was a failure" + <> "when I tried to load it." WarnIncomingRootBranch current hashes -> pure $ if null hashes then P.wrap $ "Please let someone know I generated an empty IncomingRootBranch" @@ -395,6 +397,13 @@ notifyUser dir o = case o of ] EvaluationFailure err -> pure err + TypeTermMismatch typeName termName -> + pure + $ P.warnCallout "I was expecting either two types or two terms but was given a type " + <> P.syntaxToColor (prettyHashQualified typeName) + <> " and a term " + <> P.syntaxToColor (prettyHashQualified termName) + <> "." SearchTermsNotFound hqs | null hqs -> pure mempty SearchTermsNotFound hqs -> pure @@ -664,26 +673,26 @@ notifyUser dir o = case o of TodoOutput names todo -> pure (todoOutput names todo) GitError input e -> pure $ case e of CouldntOpenCodebase repo localPath -> P.wrap $ "I couldn't open the repository at" - <> prettyRepoBranch repo <> "in the cache directory at" + <> prettyReadRepo repo <> "in the cache directory at" <> P.backticked' (P.string localPath) "." UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> P.wrap $ "I don't know how to interpret schema version " <> P.shown v - <> "in the repository at" <> prettyRepoBranch repo + <> "in the repository at" <> prettyReadRepo repo <> "in the cache directory at" <> P.backticked' (P.string localPath) "." CouldntParseRootBranch repo s -> P.wrap $ "I couldn't parse the string" <> P.red (P.string s) <> "into a namespace hash, when opening the repository at" - <> P.group (prettyRepoBranch repo <> ".") + <> P.group (prettyReadRepo repo <> ".") CouldntLoadSyncedBranch h -> P.wrap $ "I just finished importing the branch" <> P.red (P.shown h) <> "but now I can't find it." NoGit -> P.wrap $ "I couldn't find git. Make sure it's installed and on your path." 
CloneException repo msg -> P.wrap $ - "I couldn't clone the repository at" <> prettyRepoBranch repo <> ";" + "I couldn't clone the repository at" <> prettyReadRepo repo <> ";" <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg PushNoOp repo -> P.wrap $ - "The repository at" <> prettyRepoBranch repo <> "is already up-to-date." + "The repository at" <> prettyWriteRepo repo <> "is already up-to-date." PushException repo msg -> P.wrap $ - "I couldn't push to the repository at" <> prettyRepoRevision repo <> ";" + "I couldn't push to the repository at" <> prettyWriteRepo repo <> ";" <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg UnrecognizableCacheDir uri localPath -> P.wrap $ "A cache directory for" <> P.backticked (P.text uri) <> "already exists at" @@ -695,7 +704,7 @@ notifyUser dir o = case o of <> "result as a git repository, so I'm not sure what to do next." PushDestinationHasNewStuff repo -> P.callout "⏸" . P.lines $ [ - P.wrap $ "The repository at" <> prettyRepoRevision repo + P.wrap $ "The repository at" <> prettyWriteRepo repo <> "has some changes I don't know about.", "", P.wrap $ "If you want to " <> push <> "you can do:", "", @@ -710,14 +719,14 @@ notifyUser dir o = case o of CouldntLoadRootBranch repo hash -> P.wrap $ "I couldn't load the designated root hash" <> P.group ("(" <> fromString (Hash.showBase32Hex hash) <> ")") - <> "from the repository at" <> prettyRepoRevision repo + <> "from the repository at" <> prettyReadRepo repo NoRemoteNamespaceWithHash repo sbh -> P.wrap - $ "The repository at" <> prettyRepoRevision repo + $ "The repository at" <> prettyReadRepo repo <> "doesn't contain a namespace with the hash prefix" <> (P.blue . P.text . SBH.toText) sbh RemoteNamespaceHashAmbiguous repo sbh hashes -> P.lines [ P.wrap $ "The namespace hash" <> prettySBH sbh - <> "at" <> prettyRepoRevision repo + <> "at" <> prettyReadRepo repo <> "is ambiguous." 
<> "Did you mean one of these hashes?", "", @@ -831,30 +840,6 @@ notifyUser dir o = case o of , P.wrap $ "Type" <> P.backticked ("help " <> pushPull "push" "pull" pp) <> "for more information." ] --- | ConfiguredGitUrlIncludesShortBranchHash ShortBranchHash - ConfiguredGitUrlIncludesShortBranchHash pp repo sbh remotePath -> - pure . P.lines $ - [ P.wrap - $ "The `GitUrl.` entry in .unisonConfig for the current path has the value" - <> (P.group . (<>",") . P.blue . P.text) - (RemoteRepo.printNamespace repo (Just sbh) remotePath) - <> "which specifies a namespace hash" - <> P.group (P.blue (prettySBH sbh) <> ".") - , "" - , P.wrap $ - pushPull "I can't push to a specific hash, because it's immutable." - ("It's no use for repeated pulls," - <> "because you would just get the same immutable namespace each time.") - pp - , "" - , P.wrap $ "You can use" - <> P.backticked ( - pushPull "push" "pull" pp - <> " " - <> P.text (RemoteRepo.printNamespace repo Nothing remotePath)) - <> "if you want to" <> pushPull "push onto" "pull from" pp - <> "the latest." - ] NoBranchWithHash _h -> pure . P.callout "😶" $ P.wrap $ "I don't know of a namespace with that hash." NotImplemented -> pure $ P.wrap "That's not implemented yet. Sorry! 
😬" @@ -1134,8 +1119,8 @@ formatMissingStuff terms types = displayDefinitions' :: Var v => Ord a1 => PPE.PrettyPrintEnvDecl - -> Map Reference.Reference (DisplayObject (DD.Decl v a1)) - -> Map Reference.Reference (DisplayObject (Term v a1)) + -> Map Reference.Reference (DisplayObject () (DD.Decl v a1)) + -> Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> Pretty displayDefinitions' ppe0 types terms = P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) where @@ -1149,12 +1134,14 @@ displayDefinitions' ppe0 types terms = P.syntaxToColor $ P.sep "\n\n" (prettyTyp go ((n, r), dt) = case dt of MissingObject r -> missing n r - BuiltinObject -> builtin n + BuiltinObject typ -> + P.hang ("builtin " <> prettyHashQualified n <> " :") + (TypePrinter.prettySyntax (ppeBody r) typ) UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm go2 ((n, r), dt) = case dt of MissingObject r -> missing n r - BuiltinObject -> builtin n + BuiltinObject _ -> builtin n UserObject decl -> case decl of Left d -> DeclPrinter.prettyEffectDecl (ppeBody r) r n d Right d -> DeclPrinter.prettyDataDecl (ppeBody r) r n d @@ -1192,8 +1179,8 @@ displayRendered outputLoc pp = displayDefinitions :: Var v => Ord a1 => Maybe FilePath -> PPE.PrettyPrintEnvDecl - -> Map Reference.Reference (DisplayObject (DD.Decl v a1)) - -> Map Reference.Reference (DisplayObject (Term v a1)) + -> Map Reference.Reference (DisplayObject () (DD.Decl v a1)) + -> Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> IO Pretty displayDefinitions _outputLoc _ppe types terms | Map.null types && Map.null terms = pure $ P.callout "😶" "No results to display." 
@@ -1300,17 +1287,17 @@ prettyTypeResultHeaderFull' (SR'.TypeResult' (HQ'.toHQ -> name) dt r (Set.map HQ where greyHash = styleHashQualified' id P.hiBlack prettyDeclTriple :: Var v => - (HQ.HashQualified Name, Reference.Reference, DisplayObject (DD.Decl v a)) + (HQ.HashQualified Name, Reference.Reference, DisplayObject () (DD.Decl v a)) -> Pretty prettyDeclTriple (name, _, displayDecl) = case displayDecl of - BuiltinObject -> P.hiBlack "builtin " <> P.hiBlue "type " <> P.blue (P.syntaxToColor $ prettyHashQualified name) + BuiltinObject _ -> P.hiBlack "builtin " <> P.hiBlue "type " <> P.blue (P.syntaxToColor $ prettyHashQualified name) MissingObject _ -> mempty -- these need to be handled elsewhere UserObject decl -> case decl of Left ed -> P.syntaxToColor $ DeclPrinter.prettyEffectHeader name ed Right dd -> P.syntaxToColor $ DeclPrinter.prettyDataHeader name dd prettyDeclPair :: Var v => - PPE.PrettyPrintEnv -> (Reference, DisplayObject (DD.Decl v a)) + PPE.PrettyPrintEnv -> (Reference, DisplayObject () (DD.Decl v a)) -> Pretty prettyDeclPair ppe (r, dt) = prettyDeclTriple (PPE.typeName ppe r, r, dt) @@ -1321,7 +1308,7 @@ renderNameConflicts conflictedTypeNames conflictedTermNames = showConflictedNames "terms" conflictedTermNames, tip $ "This occurs when merging branches that both independently introduce the same name. Use " <> makeExample IP.view (prettyName <$> take 3 allNames) - <> "to see the conflicting defintions, then use " + <> "to see the conflicting definitions, then use " <> makeExample' (if (not . null) conflictedTypeNames then IP.renameType else IP.renameTerm) <> "to resolve the conflicts." 
@@ -2027,21 +2014,11 @@ prettyTermName :: PPE.PrettyPrintEnv -> Referent -> Pretty prettyTermName ppe r = P.syntaxToColor $ prettyHashQualified (PPE.termName ppe r) -prettyRepoRevision :: RemoteRepo -> Pretty -prettyRepoRevision (RemoteRepo.GitRepo url treeish) = - P.blue (P.text url) <> prettyRevision treeish - where - prettyRevision treeish = - Monoid.fromMaybe $ - treeish <&> \treeish -> "at revision" <> P.blue (P.text treeish) +prettyReadRepo :: ReadRepo -> Pretty +prettyReadRepo (RemoteRepo.ReadGitRepo url) = P.blue (P.text url) -prettyRepoBranch :: RemoteRepo -> Pretty -prettyRepoBranch (RemoteRepo.GitRepo url treeish) = - P.blue (P.text url) <> prettyRevision treeish - where - prettyRevision treeish = - Monoid.fromMaybe $ - treeish <&> \treeish -> "at branch" <> P.blue (P.text treeish) +prettyWriteRepo :: WriteRepo -> Pretty +prettyWriteRepo (RemoteRepo.WriteGitRepo url) = P.blue (P.text url) isTestOk :: Term v Ann -> Bool isTestOk tm = case tm of diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index 3c956fc4c2..e0d98f1f24 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -50,36 +50,40 @@ prettyEffectDecl -> HashQualified Name -> EffectDeclaration v a -> Pretty SyntaxText -prettyEffectDecl ppe r name = prettyGADT ppe r name . toDataDecl +prettyEffectDecl ppe r name = prettyGADT ppe CT.Effect r name . toDataDecl prettyGADT :: Var v => PrettyPrintEnv + -> CT.ConstructorType -> Reference -> HashQualified Name -> DataDeclaration v a -> Pretty SyntaxText -prettyGADT env r name dd = P.hang header . P.lines $ constructor <$> zip +prettyGADT env ctorType r name dd = P.hang header . P.lines $ constructor <$> zip [0 ..] 
(DD.constructors' dd) where constructor (n, (_, _, t)) = - prettyPattern env r name n + prettyPattern env ctorType r name n <> (fmt S.TypeAscriptionColon " :") `P.hang` TypePrinter.pretty0 env Map.empty (-1) t header = prettyEffectHeader name (DD.EffectDeclaration dd) <> (fmt S.ControlKeyword " where") prettyPattern :: PrettyPrintEnv + -> CT.ConstructorType -> Reference -> HashQualified Name -> Int -> Pretty SyntaxText -prettyPattern env r namespace n = styleHashQualified'' - (fmt S.Constructor) +prettyPattern env ctorType ref namespace cid = styleHashQualified'' + (fmt (S.Referent conRef)) ( HQ.stripNamespace (fromMaybe "" $ Name.toText <$> HQ.toName namespace) - $ PPE.patternName env r n + $ PPE.termName env conRef ) + where + conRef = Referent.Con ref cid ctorType prettyDataDecl :: Var v @@ -96,15 +100,15 @@ prettyDataDecl env r name dd = constructor (n, (_, _, (Type.ForallsNamed' _ t))) = constructor' n t constructor (n, (_, _, t) ) = constructor' n t constructor' n t = case Type.unArrows t of - Nothing -> prettyPattern env r name n + Nothing -> prettyPattern env CT.Data r name n Just ts -> case fieldNames env r name dd of - Nothing -> P.group . P.hang' (prettyPattern env r name n) " " + Nothing -> P.group . 
P.hang' (prettyPattern env CT.Data r name n) " " $ P.spaced (TypePrinter.prettyRaw env Map.empty 10 <$> init ts) Just fs -> P.group $ (fmt S.DelimiterChar "{ ") <> P.sep ((fmt S.DelimiterChar ",") <> " " `P.orElse` "\n ") (field <$> zip fs (init ts)) <> (fmt S.DelimiterChar " }") - field (fname, typ) = P.group $ styleHashQualified'' (fmt S.Constructor) fname <> + field (fname, typ) = P.group $ styleHashQualified'' (fmt (S.Reference r)) fname <> (fmt S.TypeAscriptionColon " :") `P.hang` TypePrinter.prettyRaw env Map.empty (-1) typ header = prettyDataHeader name dd <> (fmt S.DelimiterChar (" = " `P.orElse` "\n = ")) diff --git a/parser-typechecker/src/Unison/FileParser.hs b/parser-typechecker/src/Unison/FileParser.hs index baf09329a7..e81a102f08 100644 --- a/parser-typechecker/src/Unison/FileParser.hs +++ b/parser-typechecker/src/Unison/FileParser.hs @@ -52,9 +52,9 @@ file = do -- -- There's some more complicated logic below to have suffix-based name resolution -- make use of _terms_ from the local file. 
- local (\e -> e { names = Names.push (Names.suffixify0 locals) namesStart }) $ do + local (\e -> e { names = Names.push locals namesStart }) $ do names <- asks names - stanzas0 <- local (\e -> e { names = names }) $ sepBy semi stanza + stanzas0 <- sepBy semi stanza let stanzas = fmap (TermParser.substImports names imports) <$> stanzas0 _ <- closeBlock let (termsr, watchesr) = foldl' go ([], []) stanzas @@ -68,7 +68,9 @@ file = do let (terms, watches) = (reverse termsr, reverse watchesr) -- suffixified local term bindings shadow any same-named thing from the outer codebase scope -- example: `foo.bar` in local file scope will shadow `foo.bar` and `bar` in codebase scope - let (curNames, resolveLocals) = (Names.deleteTerms0 locals (Names.currentNames names), resolveLocals) + let (curNames, resolveLocals) = + ( Names.shadowSuffixedTerms0 locals (Names.currentNames names) + , resolveLocals ) where -- All locally declared term variables, running example: -- [foo.alice, bar.alice, zonk.bob] diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index 7d5ea2cc98..586ba25ccf 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -105,6 +105,7 @@ data Error v | DuplicateTypeNames [(v, [Ann])] | DuplicateTermNames [(v, [Ann])] | PatternArityMismatch Int Int Ann -- PatternArityMismatch expectedArity actualArity location + | FloatPattern Ann deriving (Show, Eq, Ord) data Ann @@ -244,8 +245,7 @@ run' p s name env = then L.lexer name (trace (L.debugLex''' "lexer receives" s) s) else L.lexer name s pTraced = traceRemainingTokens "parser receives" *> p - env' = env { names = Names.suffixify (names env) } - in runParserT pTraced name (Input lex) env' + in runParserT pTraced name (Input lex) env run :: Ord v => P v a -> String -> ParsingEnv -> Either (Err v) a run p s = run' p s "" diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv.hs b/parser-typechecker/src/Unison/PrettyPrintEnv.hs index 
e01c10c48c..3b1f0cc2c3 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv.hs @@ -17,7 +17,6 @@ import qualified Unison.Names3 as Names import qualified Unison.Reference as Reference import qualified Unison.Referent as Referent import qualified Unison.ConstructorType as CT -import qualified Unison.HashQualified' as HQ' import qualified Data.Set as Set data PrettyPrintEnv = PrettyPrintEnv { @@ -35,12 +34,14 @@ instance Show PrettyPrintEnv where fromNames :: Int -> Names -> PrettyPrintEnv fromNames len names = PrettyPrintEnv terms' types' where - terms' r = shortestName . Set.map HQ'.toHQ $ Names.termName len r names - types' r = shortestName . Set.map HQ'.toHQ $ Names.typeName len r names + terms' r = shortestName . Set.map Name.convert $ Names.termName len r names + types' r = shortestName . Set.map Name.convert $ Names.typeName len r names shortestName ns = safeHead $ HQ.sortByLength (toList ns) fromSuffixNames :: Int -> Names -> PrettyPrintEnv -fromSuffixNames len names = fromNames len (Names.suffixify names) +fromSuffixNames len names = PrettyPrintEnv terms' types' where + terms' r = safeHead $ Names.suffixedTermName len r names + types' r = safeHead $ Names.suffixedTypeName len r names fromNamesDecl :: Int -> Names -> PrettyPrintEnvDecl fromNamesDecl len names = diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 9a37d99a8c..e2c219b0e1 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -177,9 +177,9 @@ renderTypeError e env src = case e of <> style ErrorSite "if" <> "-expression has to be" AndMismatch -> - "The arguments to " <> style ErrorSite "and" <> " have to be" + "The arguments to " <> style ErrorSite "&&" <> " have to be" OrMismatch -> - "The arguments to " <> style ErrorSite "or" <> " have to be" + "The arguments to " <> style ErrorSite "||" <> " have to be" GuardMismatch -> "The 
guard expression for a " <> style ErrorSite "match" @@ -359,6 +359,26 @@ renderTypeError e env src = case e of ] , debugSummary note ] + AbilityCheckFailure {..} + | C.InSubtype{} :<| _ <- C.path note -> mconcat + [ "The expression " + , describeStyle ErrorSite + , "\n\n" + , " needs the abilities: {" + , commas (renderType' env) requested + , "}\n" + , " but was assumed to only require: {" + , commas (renderType' env) ambient + , "}" + , "\n\n" + , "This is likely a result of using an un-annotated " + , "function as an argument with concrete abilities. " + , "Try adding an annotation to the function definition whose " + , "body is red." + , "\n\n" + , annotatedAsErrorSite src abilityCheckFailureSite + , debugSummary note + ] AbilityCheckFailure {..} -> mconcat [ "The expression " , describeStyle ErrorSite @@ -418,7 +438,7 @@ renderTypeError e env src = case e of , "\n\n" , annotatedAsErrorSite src termSite , case expectedType of - Type.Var' (TypeVar.Existential _ _) -> "\nThere are no constraints on its type." + Type.Var' (TypeVar.Existential{}) -> "\nThere are no constraints on its type." _ -> "\nWhatever it is, it has a type that conforms to " <> style Type1 (renderType' env expectedType) @@ -762,7 +782,7 @@ renderContext env ctx@(C.Context es) = " Γ\n " -> Pretty (AnnotatedText a) showElem _ctx (C.Var v) = case v of TypeVar.Universal x -> "@" <> renderVar x - TypeVar.Existential _ x -> "'" <> renderVar x + e -> Pr.shown e showElem ctx (C.Solved _ v (Type.Monotype t)) = "'" <> shortName v <> " = " <> renderType' env (C.apply ctx t) showElem ctx (C.Ann v t) = @@ -1089,6 +1109,14 @@ prettyParseError s = \case <> "but this one has " <> Pr.hiRed (Pr.shown actual) <> "arguments:", annotatedAsErrorSite s loc ] + go (Parser.FloatPattern loc) = msg where + msg = Pr.indentN 2 . Pr.callout "😶" $ Pr.lines + [ Pr.wrap + $ "Floating point pattern matching is disallowed. 
Instead," + <> "it is recommended to test that a value is within" + <> "an acceptable error bound of the expected value." + , annotatedAsErrorSite s loc + ] go (Parser.UseEmpty tok) = msg where msg = Pr.indentN 2 . Pr.callout "😶" $ Pr.lines [ Pr.wrap $ "I was expecting something after the " <> Pr.hiRed "use" <> "keyword", "", diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index 0b9a58e7da..cc0f4cf458 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -42,6 +42,7 @@ module Unison.Runtime.ANF , float , lamLift , inlineAlias + , addDefaultCases , ANormalF(.., AApv, ACom, ACon, AKon, AReq, APrm, AFOp) , ANormal , RTag @@ -94,7 +95,7 @@ import qualified Data.Text as Text import qualified Unison.ABT as ABT import qualified Unison.ABT.Normalized as ABTN import qualified Unison.Type as Ty -import qualified Unison.Builtin.Decls as Ty (unitRef,seqViewRef) +import qualified Unison.Builtin.Decls as Ty import qualified Unison.Var as Var import Unison.Typechecker.Components (minimize') import Unison.Pattern (SeqOp(..)) @@ -354,6 +355,24 @@ saturate dat = ABT.visitPure $ \case fvs = foldMap freeVars args args' = saturate dat <$> args +addDefaultCases :: Var v => Monoid a => String -> Term v a -> Term v a +addDefaultCases = ABT.visitPure . defaultCaseVisitor + +defaultCaseVisitor + :: Var v => Monoid a => String -> Term v a -> Maybe (Term v a) +defaultCaseVisitor func m@(Match' scrut cases) + | scrut <- addDefaultCases func scrut + , cases <- fmap (addDefaultCases func) <$> cases + = Just $ match a scrut (cases ++ [dflt]) + where + a = ABT.annotation m + v = Var.freshIn mempty $ typed Var.Blank + txt = "pattern match failure in function `" <> func <> "`" + dflt = MatchCase (P.Var a) Nothing + . 
ABT.abs' a v + $ apps' (placeholder a txt) [var a v] +defaultCaseVisitor _ _ = Nothing + inlineAlias :: Var v => Monoid a => Term v a -> Term v a inlineAlias = ABT.visitPure $ \case Let1Named' v b@(Var' _) e -> Just . inlineAlias $ ABT.subst v b e @@ -1030,11 +1049,23 @@ anfBlock (Match' scrut cas) = do mdf AccumEmpty -> pure (sctx <> cx, pure $ TMatch v MatchEmpty) anfBlock (Let1Named' v b e) - = anfBlock b >>= \(bctx, (d0, cb)) -> bindLocal [v] $ do - (ectx, ce) <- anfBlock e - d <- bindDirection d0 - let octx = bctx <> directed [ST1 d v BX cb] <> ectx - pure (octx, ce) + = anfBlock b >>= \case + (bctx, (Direct, TVar u)) -> do + (ectx, ce) <- anfBlock e + pure (bctx <> ectx, ABTN.rename v u <$> ce) + (bctx, (d0, cb)) -> bindLocal [v] $ do + (ectx, ce) <- anfBlock e + d <- bindDirection d0 + let octx = bctx <> directed [ST1 d v BX cb] <> ectx + pure (octx, ce) +anfBlock (Apps' (Blank' b) args) = do + nm <- fresh + (actx, cas) <- anfArgs args + pure ( actx <> pure [ST1 Direct nm BX (TLit (T msg))] + , pure $ TPrm EROR (nm : cas) + ) + where + msg = Text.pack . 
fromMaybe "blank expression" $ nameb b anfBlock (Apps' f args) = do (fctx, (d, cf)) <- anfFunc f (actx, cas) <- anfArgs args diff --git a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs index 1eac557f34..741d5a57ea 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs @@ -1,34 +1,53 @@ {-# language LambdaCase #-} {-# language BangPatterns #-} +{-# language PatternSynonyms #-} module Unison.Runtime.ANF.Serialize where +import Prelude hiding (putChar, getChar) + +import Basement.Block (Block) + +import Control.Applicative (liftA2) import Control.Monad +import Data.Bits (Bits) import Data.Bytes.Put import Data.Bytes.Get hiding (getBytes) +import qualified Data.Bytes.Get as Ser import Data.Bytes.VarInt import Data.Bytes.Serial +import Data.Bytes.Signed (Unsigned) import Data.ByteString (ByteString) +import qualified Data.ByteString as B import Data.Foldable (traverse_) import Data.Functor ((<&>)) -import Data.Map as Map (Map, fromList, lookup) +import Data.Map as Map (Map, fromList, lookup, toList) import Data.Serialize.Put (runPutLazy) +import Data.Text (Text) +import Data.Text.Encoding (encodeUtf8, decodeUtf8) import Data.Word (Word8, Word16, Word64) +import Data.Int (Int64) +import qualified Data.ByteArray as BA import qualified Data.Sequence as Seq import qualified Data.ByteString.Lazy as L import GHC.Stack -import Unison.Codebase.Serialization.V1 as V1 +import Unison.Hash (Hash) import Unison.Util.EnumContainers as EC -import Unison.Reference (Reference) +import Unison.Reference (Reference(..), pattern Derived, Id(..)) +import Unison.Referent (Referent, pattern Ref, pattern Con) import Unison.ABT.Normalized (Term(..)) import Unison.Runtime.Exception import Unison.Runtime.ANF as ANF hiding (Tag) import Unison.Var (Var(..), Type(ANFBlank)) +import qualified Unison.Util.Bytes as Bytes +import qualified Unison.Hash as Hash +import 
qualified Unison.ConstructorType as CT + data TmTag = VarT | ForceT | AppT | HandleT | ShiftT | MatchT | LitT @@ -49,6 +68,9 @@ data BLTag = TextT | ListT | TmLinkT | TyLinkT | BytesT data VaTag = PartialT | DataT | ContT | BLitT data CoTag = KET | MarkT | PushT +unknownTag :: String -> a +unknownTag t = exn $ "unknown " ++ t ++ " word" + class Tag t where tag2word :: t -> Word8 word2tag :: Word8 -> t @@ -78,7 +100,7 @@ instance Tag TmTag where 9 -> NameVarT 10 -> LetDirT 11 -> LetIndT - _ -> exn "unknown TmTag word" + _ -> unknownTag "TmTag" instance Tag FnTag where tag2word = \case @@ -96,7 +118,7 @@ instance Tag FnTag where 3 -> FConT 4 -> FReqT 5 -> FPrimT - _ -> exn "unknown FnTag word" + _ -> unknownTag "FnTag" instance Tag MtTag where tag2word = \case @@ -114,7 +136,7 @@ instance Tag MtTag where 3 -> MEmptyT 4 -> MDataT 5 -> MSumT - _ -> exn "unknown MtTag word" + _ -> unknownTag "MtTag" instance Tag LtTag where tag2word = \case @@ -134,7 +156,7 @@ instance Tag LtTag where 4 -> CT 5 -> LMT 6 -> LYT - _ -> exn "unknown LtTag word" + _ -> unknownTag "LtTag" instance Tag BLTag where tag2word = \case @@ -150,7 +172,7 @@ instance Tag BLTag where 2 -> TmLinkT 3 -> TyLinkT 4 -> BytesT - _ -> exn "unknown BLTag word" + t -> unknownTag "BLTag" t instance Tag VaTag where tag2word = \case @@ -164,7 +186,7 @@ instance Tag VaTag where 1 -> DataT 2 -> ContT 3 -> BLitT - _ -> exn "unknown VaTag word" + t -> unknownTag "VaTag" t instance Tag CoTag where tag2word = \case @@ -175,7 +197,7 @@ instance Tag CoTag where 0 -> KET 1 -> MarkT 2 -> PushT - _ -> exn "unknown CoTag word" + t -> unknownTag "CoTag" t putTag :: MonadPut m => Tag t => t -> m () putTag = putWord8 . 
tag2word @@ -402,7 +424,7 @@ putLit (I i) = putTag IT *> putInt i putLit (N n) = putTag NT *> putNat n putLit (F f) = putTag FT *> putFloat f putLit (T t) = putTag TT *> putText t -putLit (C c) = putTag CT *> V1.putChar c +putLit (C c) = putTag CT *> putChar c putLit (LM r) = putTag LMT *> putReferent r putLit (LY r) = putTag LYT *> putReference r @@ -412,7 +434,7 @@ getLit = getTag >>= \case NT -> N <$> getNat FT -> F <$> getFloat TT -> T <$> getText - CT -> C <$> V1.getChar + CT -> C <$> getChar LMT -> LM <$> getReferent LYT -> LY <$> getReference @@ -605,3 +627,157 @@ serializeValue v = runPutS (putVersion *> putValue v) serializeValueLazy :: Value -> L.ByteString serializeValueLazy v = runPutLazy (putVersion *> putValue v) where putVersion = putWord32be 1 + +-- Some basics, moved over from V1 serialization +putChar :: MonadPut m => Char -> m () +putChar = serialize . VarInt . fromEnum + +getChar :: MonadGet m => m Char +getChar = toEnum . unVarInt <$> deserialize + +putFloat :: MonadPut m => Double -> m () +putFloat = serializeBE + +getFloat :: MonadGet m => m Double +getFloat = deserializeBE + +putNat :: MonadPut m => Word64 -> m () +putNat = putWord64be + +getNat :: MonadGet m => m Word64 +getNat = getWord64be + +putInt :: MonadPut m => Int64 -> m () +putInt = serializeBE + +getInt :: MonadGet m => m Int64 +getInt = deserializeBE + +putLength :: + (MonadPut m, Integral n, Integral (Unsigned n), + Bits n, Bits (Unsigned n)) + => n -> m () +putLength = serialize . 
VarInt + +getLength :: + (MonadGet m, Integral n, Integral (Unsigned n), + Bits n, Bits (Unsigned n)) + => m n +getLength = unVarInt <$> deserialize + +putFoldable + :: (Foldable f, MonadPut m) => (a -> m ()) -> f a -> m () +putFoldable putA as = do + putLength (length as) + traverse_ putA as + +putMap :: MonadPut m => (a -> m ()) -> (b -> m ()) -> Map a b -> m () +putMap putA putB m = putFoldable (putPair putA putB) (Map.toList m) + +getList :: MonadGet m => m a -> m [a] +getList a = getLength >>= (`replicateM` a) + +getMap :: (MonadGet m, Ord a) => m a -> m b -> m (Map a b) +getMap getA getB = Map.fromList <$> getList (getPair getA getB) + +putMaybe :: MonadPut m => Maybe a -> (a -> m ()) -> m () +putMaybe Nothing _ = putWord8 0 +putMaybe (Just a) putA = putWord8 1 *> putA a + +getMaybe :: MonadGet m => m a -> m (Maybe a) +getMaybe getA = getWord8 >>= \tag -> case tag of + 0 -> pure Nothing + 1 -> Just <$> getA + _ -> unknownTag "Maybe" tag + +putPair :: MonadPut m => (a -> m ()) -> (b -> m ()) -> (a,b) -> m () +putPair putA putB (a,b) = putA a *> putB b + +getPair :: MonadGet m => m a -> m b -> m (a,b) +getPair = liftA2 (,) + +getBytes :: MonadGet m => m Bytes.Bytes +getBytes = Bytes.fromChunks <$> getList getBlock + +putBytes :: MonadPut m => Bytes.Bytes -> m () +putBytes = putFoldable putBlock . Bytes.chunks + +getBlock :: MonadGet m => m (Bytes.View (Block Word8)) +getBlock = getLength >>= fmap (Bytes.view . BA.convert) . 
getByteString + +putBlock :: MonadPut m => Bytes.View (Block Word8) -> m () +putBlock b = putLength (BA.length b) *> putByteString (BA.convert b) + +putHash :: MonadPut m => Hash -> m () +putHash h = do + let bs = Hash.toBytes h + putLength (B.length bs) + putByteString bs + +getHash :: MonadGet m => m Hash +getHash = do + len <- getLength + bs <- B.copy <$> Ser.getBytes len + pure $ Hash.fromBytes bs + +putReferent :: MonadPut m => Referent -> m () +putReferent = \case + Ref r -> do + putWord8 0 + putReference r + Con r i ct -> do + putWord8 1 + putReference r + putLength i + putConstructorType ct + +getReferent :: MonadGet m => m Referent +getReferent = do + tag <- getWord8 + case tag of + 0 -> Ref <$> getReference + 1 -> Con <$> getReference <*> getLength <*> getConstructorType + _ -> unknownTag "getReferent" tag + +getConstructorType :: MonadGet m => m CT.ConstructorType +getConstructorType = getWord8 >>= \case + 0 -> pure CT.Data + 1 -> pure CT.Effect + t -> unknownTag "getConstructorType" t + +putConstructorType :: MonadPut m => CT.ConstructorType -> m () +putConstructorType = \case + CT.Data -> putWord8 0 + CT.Effect -> putWord8 1 + +putText :: MonadPut m => Text -> m () +putText text = do + let bs = encodeUtf8 text + putLength $ B.length bs + putByteString bs + +getText :: MonadGet m => m Text +getText = do + len <- getLength + bs <- B.copy <$> Ser.getBytes len + pure $ decodeUtf8 bs + +putReference :: MonadPut m => Reference -> m () +putReference r = case r of + Builtin name -> do + putWord8 0 + putText name + Derived hash i n -> do + putWord8 1 + putHash hash + putLength i + putLength n + +getReference :: MonadGet m => m Reference +getReference = do + tag <- getWord8 + case tag of + 0 -> Builtin <$> getText + 1 -> DerivedId <$> (Id <$> getHash <*> getLength <*> getLength) + _ -> unknownTag "Reference" tag + diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index c5fd25405c..a226ec2a04 100644 
--- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -46,6 +46,7 @@ import Data.Default (def) import Data.ByteString (hGet, hPut) import Data.Text as Text (pack, unpack) import qualified Data.Text as Text +import qualified Data.Text.IO as Text import Data.Text.Encoding ( decodeUtf8', decodeUtf8' ) import qualified Data.ByteArray as BA import qualified Data.ByteString.Lazy as L @@ -102,7 +103,6 @@ import System.Directory as SYS ( getCurrentDirectory , setCurrentDirectory , getTemporaryDirectory - -- , getDirectoryContents , doesPathExist , doesDirectoryExist , renameDirectory @@ -110,9 +110,13 @@ import System.Directory as SYS , renameFile , createDirectoryIfMissing , removeDirectoryRecursive + , getDirectoryContents , getModificationTime , getFileSize ) +import System.Environment as SYS + ( getEnv + ) import System.IO.Temp (createTempDirectory) import qualified Control.Concurrent.STM as STM @@ -455,6 +459,11 @@ appends = binop0 0 $ \[x,y] -> TPrm CATS [x,y] conss = binop0 0 $ \[x,y] -> TPrm CONS [x,y] snocs = binop0 0 $ \[x,y] -> TPrm SNOC [x,y] +coerceType :: Var v => Reference -> Reference -> SuperNormal v +coerceType fromType toType = unop0 1 $ \[x,r] + -> unbox x fromType r + $ TCon toType 0 [r] + takes, drops, sizes, ats, emptys :: Var v => SuperNormal v takes = binop0 1 $ \[x0,y,x] -> unbox x0 Ty.natRef x @@ -677,6 +686,20 @@ watch -> TLets Direct [] [] (TPrm PRNT [t]) $ TVar v +raise :: Var v => SuperNormal v +raise + = unop0 4 $ \[r,f,n,j,k] + -> TMatch r . flip (MatchData Ty.exceptionRef) Nothing $ mapFromList + [ (0, ([BX], TAbs f $ TVar f)) + , (i, ([UN,BX] + , TAbss [j,f] + . TShift Ty.exceptionRef k + . TLetD n BX (TLit $ T "builtin.raise") + $ TPrm EROR [n, f])) + ] + where + i = fromIntegral $ builtinTypeNumbering Map.! 
Ty.exceptionRef + code'missing :: Var v => SuperNormal v code'missing = unop0 1 $ \[link,b] @@ -899,6 +922,18 @@ outMaybe maybe result = , (1, ([BX], TAbs maybe $ TCon Ty.optionalRef 1 [maybe])) ] +outMaybeTup :: forall v. Var v => v -> v -> v -> v -> v -> v -> v -> ANormal v +outMaybeTup a b n u bp p result = + TMatch result . MatchSum $ mapFromList + [ (0, ([], TCon Ty.optionalRef 0 [])) + , (1, ([UN,BX], TAbss [a,b] + . TLetD u BX (TCon Ty.unitRef 0 []) + . TLetD bp BX (TCon Ty.pairRef 0 [b,u]) + . TLetD n BX (TCon Ty.natRef 0 [a]) + . TLetD p BX (TCon Ty.pairRef 0 [n,bp]) + $ TCon Ty.optionalRef 1 [p])) + ] + outIoFail :: forall v. Var v => v -> v -> v -> v -> ANormal v outIoFail stack1 stack2 fail result = TMatch result . MatchSum $ mapFromList @@ -962,6 +997,20 @@ outIoFailBool stack1 stack2 stack3 bool fail result = $ TCon eitherReference 1 [bool]) ] +outIoFailG + :: Var v => v -> v -> v -> v -> v + -> ((ANormal v -> ANormal v) -> ([Mem], ANormal v)) + -> ANormal v +outIoFailG stack1 stack2 fail result output k + = TMatch result . MatchSum $ mapFromList + [ (0, ([BX, BX],) + . TAbss [stack1, stack2] + . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) + $ TCon eitherReference 0 [fail]) + , (1, k $ \t -> TLetD output BX t + $ TCon eitherReference 1 [output]) + ] + -- Input / Output glue -- -- These are pairings of input and output functions to handle a @@ -1046,6 +1095,7 @@ wordDirect wordType instr where (b1,ub1) = fresh2 + -- Nat -> a -> c -- Works for an type that's packed into a word, just -- pass `wordBoxDirect Ty.natRef`, `wordBoxDirect Ty.floatRef` @@ -1085,6 +1135,18 @@ boxToEFBox = where (arg, result, stack1, stack2, fail) = fresh5 +-- a -> Either Failure (Maybe b) +boxToEFMBox :: ForeignOp +boxToEFMBox + = inBx arg result + . outIoFailG stack1 stack2 fail result output $ \k -> + ([UN], TAbs stack3 . TMatch stack3 . MatchSum $ mapFromList + [ (0, ([], k $ TCon Ty.optionalRef 0 [])) + , (1, ([BX], TAbs stack4 . 
k $ TCon Ty.optionalRef 1 [stack4])) + ]) + where + (arg, result, stack1, stack2, stack3, stack4, fail, output) = fresh8 + -- a -> Maybe b boxToMaybeBox :: ForeignOp boxToMaybeBox = @@ -1092,6 +1154,15 @@ boxToMaybeBox = where (arg, maybe, result) = fresh3 +-- a -> Maybe b +boxToMaybeTup :: ForeignOp +boxToMaybeTup = + inBx arg result $ outMaybeTup a b c u bp p result + where + (arg, a, b, c, u, bp, p, result) = fresh8 + + + -- a -> Either Failure Bool boxToEFBool :: ForeignOp boxToEFBool = inBx arg result @@ -1176,6 +1247,8 @@ builtinLookup , ("Int.<=", lei) , ("Int.>", gti) , ("Int.>=", gei) + , ("Int.fromRepresentation", coerceType Ty.natRef Ty.intRef) + , ("Int.toRepresentation", coerceType Ty.intRef Ty.natRef) , ("Int.increment", inci) , ("Int.signum", sgni) , ("Int.negate", negi) @@ -1225,7 +1298,6 @@ builtinLookup , ("Nat.toText", n2t) , ("Nat.fromText", t2n) , ("Nat.popCount", popn) - , ("Float.+", addf) , ("Float.-", subf) , ("Float.*", mulf) @@ -1234,6 +1306,8 @@ builtinLookup , ("Float.log", logf) , ("Float.logBase", logbf) , ("Float.sqrt", sqrtf) + , ("Float.fromRepresentation", coerceType Ty.natRef Ty.floatRef) + , ("Float.toRepresentation", coerceType Ty.floatRef Ty.natRef) , ("Float.min", minf) , ("Float.max", maxf) @@ -1328,7 +1402,9 @@ builtinLookup , ("Universal.>=", geu) , ("Universal.<=", leu) + -- internal stuff , ("jumpCont", jumpk) + , ("raise", raise) , ("IO.forkComp.v2", fork'comp) @@ -1408,7 +1484,10 @@ declareForeigns = do declareForeign "IO.setBuffering.impl.v3" set'buffering . mkForeignIOF $ uncurry hSetBuffering - declareForeign "IO.getBytes.impl.v3" boxNatToEFBox . mkForeignIOF $ \(h,n) -> Bytes.fromArray <$> hGet h n + declareForeign "IO.getLine.impl.v1" boxToEFBox $ mkForeignIOF Text.hGetLine + + declareForeign "IO.getBytes.impl.v3" boxNatToEFBox . mkForeignIOF + $ \(h,n) -> Bytes.fromArray <$> hGet h n declareForeign "IO.putBytes.impl.v3" boxBoxToEF0 . 
mkForeignIOF $ \(h,bs) -> hPut h (Bytes.toArray bs) declareForeign "IO.systemTime.impl.v3" unitToEFNat @@ -1431,6 +1510,9 @@ declareForeigns = do declareForeign "IO.fileExists.impl.v3" boxToEFBool $ mkForeignIOF doesPathExist + declareForeign "IO.getEnv.impl.v1" boxToEFBox + $ mkForeignIOF getEnv + declareForeign "IO.isDirectory.impl.v3" boxToEFBool $ mkForeignIOF doesDirectoryExist @@ -1443,6 +1525,9 @@ declareForeigns = do declareForeign "IO.renameDirectory.impl.v3" boxBoxToEF0 $ mkForeignIOF $ uncurry renameDirectory + declareForeign "IO.directoryContents.impl.v3" boxToEFBox + $ mkForeignIOF $ (fmap pack <$>) . getDirectoryContents + declareForeign "IO.removeFile.impl.v3" boxToEF0 $ mkForeignIOF removeFile @@ -1523,7 +1608,7 @@ declareForeigns = do declareForeign "MVar.read.impl.v3" boxBoxToEFBox . mkForeignIOF $ \(mv :: MVar Closure) -> readMVar mv - declareForeign "MVar.tryRead.impl.v3" boxToEFBox + declareForeign "MVar.tryRead.impl.v3" boxToEFMBox . mkForeignIOF $ \(mv :: MVar Closure) -> tryReadMVar mv declareForeign "Char.toText" (wordDirect Ty.charRef) . mkForeign $ @@ -1720,6 +1805,20 @@ declareForeigns = do declareForeign "Bytes.fromBase64" boxToEBoxBox . mkForeign $ pure . Bytes.fromBase64 declareForeign "Bytes.fromBase64UrlUnpadded" boxDirect . mkForeign $ pure . Bytes.fromBase64UrlUnpadded + declareForeign "Bytes.decodeNat64be" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat64be + declareForeign "Bytes.decodeNat64le" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat64le + declareForeign "Bytes.decodeNat32be" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat32be + declareForeign "Bytes.decodeNat32le" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat32le + declareForeign "Bytes.decodeNat16be" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat16be + declareForeign "Bytes.decodeNat16le" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat16le + + declareForeign "Bytes.encodeNat64be" (wordDirect Ty.natRef) . mkForeign $ pure . 
Bytes.encodeNat64be + declareForeign "Bytes.encodeNat64le" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat64le + declareForeign "Bytes.encodeNat32be" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat32be + declareForeign "Bytes.encodeNat32le" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat32le + declareForeign "Bytes.encodeNat16be" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat16be + declareForeign "Bytes.encodeNat16le" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat16le + hostPreference :: Maybe Text -> SYS.HostPreference hostPreference Nothing = SYS.HostAny hostPreference (Just host) = SYS.Host $ Text.unpack host diff --git a/parser-typechecker/src/Unison/Runtime/Decompile.hs b/parser-typechecker/src/Unison/Runtime/Decompile.hs index 73f3374cc3..98304b3673 100644 --- a/parser-typechecker/src/Unison/Runtime/Decompile.hs +++ b/parser-typechecker/src/Unison/Runtime/Decompile.hs @@ -110,7 +110,8 @@ decompileForeign topTerms f = Right $ typeLink () l | Just s <- unwrapSeq f = list' () <$> traverse (decompile topTerms) s -decompileForeign _ f = err $ "cannot decompile Foreign: " ++ show f +decompileForeign _ f + = err $ "cannot decompile Foreign: " ++ show f decompileBytes :: Var v => By.Bytes -> Term v () decompileBytes diff --git a/parser-typechecker/src/Unison/Runtime/Foreign.hs b/parser-typechecker/src/Unison/Runtime/Foreign.hs index c4932165f2..dc7720495d 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign.hs @@ -16,7 +16,7 @@ module Unison.Runtime.Foreign , Failure(..) 
) where -import Control.Concurrent (ThreadId) +import Control.Concurrent (ThreadId, MVar) import Data.Text (Text, unpack) import Data.Tagged (Tagged(..)) import Network.Socket (Socket) @@ -33,7 +33,7 @@ import qualified Crypto.Hash as Hash import Unsafe.Coerce data Foreign where - Wrap :: Reference -> e -> Foreign + Wrap :: Reference -> !e -> Foreign promote :: (a -> a -> r) -> b -> c -> r promote (~~) x y = unsafeCoerce x ~~ unsafeCoerce y @@ -43,6 +43,10 @@ ref2eq r | r == Ty.textRef = Just $ promote ((==) @Text) | r == Ty.termLinkRef = Just $ promote ((==) @Referent) | r == Ty.typeLinkRef = Just $ promote ((==) @Reference) + | r == Ty.bytesRef = Just $ promote ((==) @Bytes) + -- Note: MVar equality is just reference equality, so it shouldn't + -- matter what type the MVar holds. + | r == Ty.mvarRef = Just $ promote ((==) @(MVar ())) | otherwise = Nothing ref2cmp :: Reference -> Maybe (a -> b -> Ordering) diff --git a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs index c79072f541..09b86eeda0 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs @@ -16,6 +16,7 @@ import GHC.IO.Exception (IOException(..), IOErrorType(..)) import Control.Concurrent (ThreadId) import Control.Concurrent.MVar (MVar) import Control.Concurrent.STM (TVar) +import Control.Exception (evaluate) import qualified Data.Char as Char import Data.Foldable (toList) import Data.Text (Text, pack, unpack) @@ -95,7 +96,7 @@ instance ForeignConvention Closure where readForeign _ [ ] _ _ = foreignCCError "Closure" writeForeign ustk bstk c = do bstk <- bump bstk - (ustk, bstk) <$ poke bstk c + (ustk, bstk) <$ (poke bstk =<< evaluate c) instance ForeignConvention Text where readForeign = readForeignBuiltin diff --git a/parser-typechecker/src/Unison/Runtime/IOSource.hs b/parser-typechecker/src/Unison/Runtime/IOSource.hs index 12b30f79c7..44f97a3b36 100644 
--- a/parser-typechecker/src/Unison/Runtime/IOSource.hs +++ b/parser-typechecker/src/Unison/Runtime/IOSource.hs @@ -103,6 +103,64 @@ prettyAnnotatedRef = typeNamed "Pretty.Annotated" ansiColorRef = typeNamed "ANSI.Color" consoleTextRef = typeNamed "ConsoleText" +pattern Doc2Ref <- ((== doc2Ref) -> True) +doc2WordId = constructorNamed doc2Ref "Doc2.Word" +doc2CodeId = constructorNamed doc2Ref "Doc2.Code" +doc2CodeBlockId = constructorNamed doc2Ref "Doc2.CodeBlock" +doc2BoldId = constructorNamed doc2Ref "Doc2.Bold" +doc2ItalicId = constructorNamed doc2Ref "Doc2.Italic" +doc2StrikethroughId = constructorNamed doc2Ref "Doc2.Strikethrough" +doc2StyleId = constructorNamed doc2Ref "Doc2.Style" +doc2AnchorId = constructorNamed doc2Ref "Doc2.Anchor" +doc2BlockquoteId = constructorNamed doc2Ref "Doc2.Blockquote" +doc2BlanklineId = constructorNamed doc2Ref "Doc2.Blankline" +doc2LinebreakId = constructorNamed doc2Ref "Doc2.Linebreak" +doc2SectionBreakId = constructorNamed doc2Ref "Doc2.SectionBreak" +doc2TooltipId = constructorNamed doc2Ref "Doc2.Tooltip" +doc2AsideId = constructorNamed doc2Ref "Doc2.Aside" +doc2CalloutId = constructorNamed doc2Ref "Doc2.Callout" +doc2TableId = constructorNamed doc2Ref "Doc2.Table" +doc2FoldedId = constructorNamed doc2Ref "Doc2.Folded" +doc2ParagraphId = constructorNamed doc2Ref "Doc2.Paragraph" +doc2BulletedListId = constructorNamed doc2Ref "Doc2.BulletedList" +doc2NumberedListId = constructorNamed doc2Ref "Doc2.NumberedList" +doc2SectionId = constructorNamed doc2Ref "Doc2.Section" +doc2NamedLinkId = constructorNamed doc2Ref "Doc2.NamedLink" +doc2ImageId = constructorNamed doc2Ref "Doc2.Image" +doc2SpecialId = constructorNamed doc2Ref "Doc2.Special" +doc2JoinId = constructorNamed doc2Ref "Doc2.Join" +doc2UntitledSectionId = constructorNamed doc2Ref "Doc2.UntitledSection" +doc2ColumnId = constructorNamed doc2Ref "Doc2.Column" +doc2GroupId = constructorNamed doc2Ref "Doc2.Group" + +pattern Doc2Word txt <- Term.App' (Term.Constructor' Doc2Ref 
((==) doc2WordId -> True)) (Term.Text' txt) +pattern Doc2Code d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2CodeId -> True)) d +pattern Doc2CodeBlock lang d <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2CodeBlockId -> True)) [Term.Text' lang, d] +pattern Doc2Bold d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2BoldId -> True)) d +pattern Doc2Italic d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2ItalicId -> True)) d +pattern Doc2Strikethrough d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2StrikethroughId -> True)) d +pattern Doc2Style s d <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2StyleId -> True)) [Term.Text' s, d] +pattern Doc2Anchor id d <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2AnchorId -> True)) [Term.Text' id, d] +pattern Doc2Blockquote d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2BlockquoteId -> True)) d +pattern Doc2Blankline <- Term.Constructor' Doc2Ref ((==) doc2BlanklineId -> True) +pattern Doc2Linebreak <- Term.Constructor' Doc2Ref ((==) doc2LinebreakId -> True) +pattern Doc2SectionBreak <- Term.Constructor' Doc2Ref ((==) doc2SectionBreakId -> True) +pattern Doc2Tooltip d tip <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2TooltipId -> True)) [d, tip] +pattern Doc2Aside d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2AsideId -> True)) d +pattern Doc2Callout icon d <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2CalloutId -> True)) [icon, d] +pattern Doc2Table ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2TableId -> True)) (Term.List' (toList -> ds)) +pattern Doc2Folded isFolded d d2 <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2FoldedId -> True)) [Term.Boolean' isFolded, d, d2] +pattern Doc2Paragraph ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2ParagraphId -> True)) (Term.List' (toList -> ds)) +pattern Doc2BulletedList ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2BulletedListId -> True)) (Term.List' (toList -> ds)) +pattern Doc2Section title ds <- Term.Apps' 
(Term.Constructor' Doc2Ref ((==) doc2SectionId -> True)) [title, Term.List' (toList -> ds)] +pattern Doc2NamedLink name dest <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2NamedLinkId -> True)) [name, dest] +pattern Doc2Image alt link caption <- Term.Apps' (Term.Constructor' Doc2Ref ((==) doc2ImageId -> True)) [alt, link, caption] +pattern Doc2Special sf <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2SpecialId -> True)) sf +pattern Doc2Join ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2JoinId -> True)) (Term.List' (toList -> ds)) +pattern Doc2UntitledSection ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2UntitledSectionId -> True)) (Term.List' (toList -> ds)) +pattern Doc2Column ds <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2ColumnId -> True)) (Term.List' (toList -> ds)) +pattern Doc2Group d <- Term.App' (Term.Constructor' Doc2Ref ((==) doc2GroupId -> True)) d + pattern Doc2SpecialFormRef <- ((== doc2SpecialFormRef) -> True) doc2SpecialFormSourceId = constructorNamed doc2SpecialFormRef "Doc2.SpecialForm.Source" doc2SpecialFormFoldedSourceId = constructorNamed doc2SpecialFormRef "Doc2.SpecialForm.FoldedSource" @@ -135,8 +193,6 @@ pattern Doc2Example vs body <- Term.App' _term (Term.App' _any (Term.LamNamed' _ -- pulls out `body` in `Doc2.Term (Any 'body)` pattern Doc2Term body <- Term.App' _term (Term.App' _any (Term.LamNamed' _ body)) -pattern Doc2Ref <- ((== doc2Ref) -> True) - pattern Doc2TermRef <- ((== doc2TermRef) -> True) pattern PrettyAnnotatedRef <- ((== prettyAnnotatedRef) -> True) @@ -247,7 +303,7 @@ unique[b28d929d0a73d2c18eac86341a3bb9399f8550c11b5f35eabb2751e6803ccc20] type IsPropagated = IsPropagated d1 Doc.++ d2 = - use Doc + use Doc2 match (d1,d2) with (Join ds, Join ds2) -> Join (ds List.++ ds2) (Join ds, _) -> Join (ds `List.snoc` d2) @@ -535,7 +591,9 @@ syntax.docEmbedSignatureLink tm = syntax.docCode c = Code c syntax.docCodeBlock typ c = CodeBlock typ (docWord c) syntax.docVerbatim c = CodeBlock "raw" c +syntax.docEval 
: '{} a -> Doc2 syntax.docEval d = Special (Eval (Doc2.term d)) +syntax.docEvalInline : '{} a -> Doc2 syntax.docEvalInline a = Special (EvalInline (Doc2.term a)) syntax.docExample n a = Special (Example n (Doc2.term a)) syntax.docExampleBlock n a = Special (ExampleBlock n (Doc2.term a)) diff --git a/parser-typechecker/src/Unison/Runtime/Interface.hs b/parser-typechecker/src/Unison/Runtime/Interface.hs index 4e3b371819..5f151cae05 100644 --- a/parser-typechecker/src/Unison/Runtime/Interface.hs +++ b/parser-typechecker/src/Unison/Runtime/Interface.hs @@ -1,6 +1,7 @@ {-# language DataKinds #-} {-# language PatternGuards #-} {-# language NamedFieldPuns #-} +{-# language PatternSynonyms #-} {-# language ParallelListComp #-} {-# language OverloadedStrings #-} {-# language ScopedTypeVariables #-} @@ -24,7 +25,7 @@ import Data.Foldable import Data.Set as Set (Set, (\\), singleton, map, notMember, filter, fromList) import Data.Traversable (for) -import Data.Text (Text) +import Data.Text (Text, isPrefixOf) import Data.Word (Word64) import qualified Data.Map.Strict as Map @@ -33,8 +34,10 @@ import qualified Unison.ABT as Tm (substs) import qualified Unison.Term as Tm import Unison.DataDeclaration (declFields, declDependencies, Decl) +import qualified Unison.HashQualified as HQ import qualified Unison.LabeledDependency as RF import Unison.Reference (Reference) +import qualified Unison.Referent as RF (pattern Ref) import qualified Unison.Reference as RF import Unison.Util.EnumContainers as EC @@ -168,10 +171,11 @@ backrefAdd m ctx@ECtx{ decompTm } loadDeps :: CodeLookup Symbol IO () + -> PrettyPrintEnv -> EvalCtx -> Term Symbol -> IO EvalCtx -loadDeps cl ctx tm = do +loadDeps cl ppe ctx tm = do (tyrs, tmrs) <- collectDeps cl tm p <- refNumsTy (ccache ctx) <&> \m (r,_) -> case r of RF.DerivedId{} -> r `Map.notMember` dspec ctx @@ -183,7 +187,7 @@ loadDeps cl ctx tm = do ctx <- foldM (uncurry . 
allocType) ctx $ Prelude.filter p tyrs rtms <- traverse (\r -> (,) r <$> resolveTermRef cl r) $ Prelude.filter q tmrs - let (rgrp, rbkr) = intermediateTerms ctx rtms + let (rgrp, rbkr) = intermediateTerms ppe ctx rtms tyAdd = Set.fromList $ fst <$> tyrs backrefAdd rbkr ctx <$ cacheAdd0 tyAdd rgrp (ccache ctx) @@ -204,33 +208,44 @@ backrefLifted tm _ = Map.singleton 0 tm intermediateTerms :: HasCallStack - => EvalCtx + => PrettyPrintEnv + -> EvalCtx -> [(Reference, Term Symbol)] -> ( [(Reference, SuperGroup Symbol)] , Map.Map Reference (Map.Map Word64 (Term Symbol)) ) -intermediateTerms ctx rtms +intermediateTerms ppe ctx rtms = ((fmap.second) fst rint, Map.fromList $ (fmap.second) snd rint) - where rint = second (intermediateTerm ctx) <$> rtms + where + rint = rtms <&> \(ref, tm) -> + (ref, intermediateTerm ppe ref ctx tm) intermediateTerm :: HasCallStack - => EvalCtx + => PrettyPrintEnv + -> Reference + -> EvalCtx -> Term Symbol -> (SuperGroup Symbol, Map.Map Word64 (Term Symbol)) -intermediateTerm ctx tm +intermediateTerm ppe ref ctx tm = final . lamLift . splitPatterns (dspec ctx) + . addDefaultCases tmName . saturate (uncurryDspec $ dspec ctx) . inlineAlias $ tm where final (ll, dcmp) = (superNormalize ll, backrefLifted ll dcmp) + tmName = HQ.toString . termName ppe $ RF.Ref ref prepareEvaluation - :: HasCallStack => Term Symbol -> EvalCtx -> IO (EvalCtx, Word64) -prepareEvaluation tm ctx = do + :: HasCallStack + => PrettyPrintEnv + -> Term Symbol + -> EvalCtx + -> IO (EvalCtx, Word64) +prepareEvaluation ppe tm ctx = do missing <- cacheAdd rgrp (ccache ctx) when (not . null $ missing) . 
fail $ reportBug "E029347" $ "Error in prepareEvaluation, cache is missing: " <> show missing @@ -247,7 +262,7 @@ prepareEvaluation tm ctx = do | rmn <- RF.DerivedId $ Tm.hashClosedTerm tm = (rmn, [(rmn, tm)]) - (rgrp, rbkr) = intermediateTerms ctx rtms + (rgrp, rbkr) = intermediateTerms ppe ctx rtms watchHook :: IORef Closure -> Stack 'UN -> Stack 'BX -> IO () watchHook r _ bstk = peek bstk >>= writeIORef r @@ -289,7 +304,8 @@ bugMsg ppe name tm , "" , sorryMsg ] - | name == "pattern match failure" = P.callout icon . P.lines $ + | "pattern match failure" `isPrefixOf` name + = P.callout icon . P.lines $ [ P.wrap ("I've encountered a" <> P.red (P.text name) <> "while scrutinizing:") , "" @@ -299,6 +315,11 @@ bugMsg ppe name tm \possible inputs" , sorryMsg ] + | name == "builtin.raise" = P.callout icon . P.lines $ + [ P.wrap ("The program halted with an unhandled exception:") + , "" + , P.indentN 2 $ pretty ppe tm + ] bugMsg ppe name tm = P.callout icon . P.lines $ [ P.wrap ("I've encountered a call to" <> P.red (P.text name) <> "with the following value:") @@ -328,8 +349,8 @@ startRuntime = do { terminate = pure () , evaluate = \cl ppe tm -> catchInternalErrors $ do ctx <- readIORef ctxVar - ctx <- loadDeps cl ctx tm - (ctx, init) <- prepareEvaluation tm ctx + ctx <- loadDeps cl ppe ctx tm + (ctx, init) <- prepareEvaluation ppe tm ctx writeIORef ctxVar ctx evalInContext ppe ctx init , mainType = builtinMain External diff --git a/parser-typechecker/src/Unison/Runtime/MCode.hs b/parser-typechecker/src/Unison/Runtime/MCode.hs index a5a0745f58..27152f4c0b 100644 --- a/parser-typechecker/src/Unison/Runtime/MCode.hs +++ b/parser-typechecker/src/Unison/Runtime/MCode.hs @@ -308,6 +308,7 @@ data UPrim1 data UPrim2 -- integral = ADDI | SUBI | MULI | DIVI | MODI -- +,-,*,/,mod + | DIVN | MODN | SHLI | SHRI | SHRN | POWI -- shiftl,shiftr,shiftr,pow | EQLI | LEQI | LEQN -- ==,<=,<= | ANDN | IORN | XORN -- and,or,xor @@ -407,7 +408,8 @@ data Instr !Args -- arguments to pack -- 
Unpack the contents of a data type onto the stack - | Unpack !Int -- stack index of data to unpack + | Unpack !(Maybe Reference) -- debug reference + !Int -- stack index of data to unpack -- Push a particular value onto the appropriate stack | Lit !MLit -- value to push onto the stack @@ -737,13 +739,13 @@ emitSection _ _ _ ctx (TLit l) | otherwise = addCount 1 0 emitSection rns grpn rec ctx (TMatch v bs) | Just (i,BX) <- ctxResolve ctx v - , MatchData _ cs df <- bs - = Ins (Unpack i) + , MatchData r cs df <- bs + = Ins (Unpack (Just r) i) <$> emitDataMatching rns grpn rec ctx cs df | Just (i,BX) <- ctxResolve ctx v , MatchRequest hs0 df <- bs , hs <- mapFromList $ first (dnum rns) <$> M.toList hs0 - = Ins (Unpack i) + = Ins (Unpack Nothing i) <$> emitRequestMatching rns grpn rec ctx hs df | Just (i,UN) <- ctxResolve ctx v , MatchIntegral cs df <- bs @@ -914,7 +916,7 @@ emitLet _ grp _ _ _ ctx (TApp (FPrim p) args) = fmap (Ins . either emitPOp emitFOp p $ emitArgs grp ctx args) emitLet rns grp rec d vcs ctx bnd | Direct <- d - = internalBug $ "unsupported compound direct let" ++ show bnd + = internalBug $ "unsupported compound direct let: " ++ show bnd | Indirect w <- d = \esect -> f <$> emitSection rns grp rec (Block ctx) bnd @@ -937,9 +939,9 @@ emitPOp ANF.SUBN = emitP2 SUBI emitPOp ANF.MULI = emitP2 MULI emitPOp ANF.MULN = emitP2 MULI emitPOp ANF.DIVI = emitP2 DIVI -emitPOp ANF.DIVN = emitP2 DIVI +emitPOp ANF.DIVN = emitP2 DIVN emitPOp ANF.MODI = emitP2 MODI -- TODO: think about how these behave -emitPOp ANF.MODN = emitP2 MODI -- TODO: think about how these behave +emitPOp ANF.MODN = emitP2 MODN -- TODO: think about how these behave emitPOp ANF.POWI = emitP2 POWI emitPOp ANF.POWN = emitP2 POWI emitPOp ANF.SHLI = emitP2 SHLI diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 700b37b4f8..0c5764e54f 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ 
b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -33,6 +33,7 @@ import qualified Data.Primitive.PrimArray as PA import Text.Read (readMaybe) +import Unison.Builtin.Decls (exceptionRef) import Unison.Reference (Reference(Builtin)) import Unison.Referent (pattern Ref) import Unison.Symbol (Symbol) @@ -105,9 +106,11 @@ baseCCache ftm = 1 + maximum builtinTermNumbering fty = 1 + maximum builtinTypeNumbering + rns = emptyRNs { dnum = refLookup "ty" builtinTypeNumbering } + combs = mapWithKey - (\k v -> emitComb @Symbol emptyRNs k mempty (0,v)) + (\k v -> emitComb @Symbol rns k mempty (0,v)) numberedTermLookup info :: Show a => String -> a -> IO () @@ -122,6 +125,19 @@ eval0 !env !co = do bstk <- alloc eval env mempty ustk bstk KE co +topDEnv + :: M.Map Reference Word64 + -> M.Map Reference Word64 + -> (DEnv, K -> K) +topDEnv rfTy rfTm + | Just n <- M.lookup exceptionRef rfTy + , rcrf <- Builtin (Tx.pack "raise") + , Just j <- M.lookup rcrf rfTm + = ( EC.mapSingleton n (PAp (CIx rcrf j 0) unull bnull) + , Mark (EC.setSingleton n) mempty + ) +topDEnv _ _ = (mempty, id) + -- Entry point for evaluating a numbered combinator. -- An optional callback for the base of the stack may be supplied. -- @@ -134,10 +150,12 @@ apply0 !callback !env !i = do ustk <- alloc bstk <- alloc cmbrs <- readTVarIO $ combRefs env + (denv, kf) <- + topDEnv <$> readTVarIO (refTy env) <*> readTVarIO (refTm env) r <- case EC.lookup i cmbrs of Just r -> pure r Nothing -> die "apply0: missing reference to entry point" - apply env mempty ustk bstk k0 True ZArgs + apply env denv ustk bstk (kf k0) True ZArgs $ PAp (CIx r i 0) unull bnull where k0 = maybe KE (CB . Hook) callback @@ -219,7 +237,8 @@ exec !env !denv !ustk !bstk !k (BPrim1 LOAD i) = do reifyValue env v >>= \case Left miss -> do poke ustk 0 - pokeS bstk $ Sq.fromList $ Foreign . Wrap Rf.termLinkRef <$> miss + pokeS bstk + $ Sq.fromList $ Foreign . Wrap Rf.termLinkRef . 
Ref <$> miss Right x -> do poke ustk 1 poke bstk x @@ -237,10 +256,7 @@ exec !_ !denv !ustk !bstk !k (BPrim2 EQLU i j) = do x <- peekOff bstk i y <- peekOff bstk j ustk <- bump ustk - poke ustk - $ case universalCompare compare x y of - EQ -> 1 - _ -> 0 + poke ustk $ if universalEq (==) x y then 1 else 0 pure (denv, ustk, bstk, k) exec !_ !denv !ustk !bstk !k (BPrim2 CMPU i j) = do x <- peekOff bstk i @@ -256,8 +272,8 @@ exec !_ !denv !ustk !bstk !k (Pack r t args) = do bstk <- bump bstk poke bstk clo pure (denv, ustk, bstk, k) -exec !_ !denv !ustk !bstk !k (Unpack i) = do - (ustk, bstk) <- dumpData ustk bstk =<< peekOff bstk i +exec !_ !denv !ustk !bstk !k (Unpack r i) = do + (ustk, bstk) <- dumpData r ustk bstk =<< peekOff bstk i pure (denv, ustk, bstk, k) exec !_ !denv !ustk !bstk !k (Print i) = do t <- peekOffBi bstk i @@ -588,49 +604,55 @@ buildData !ustk !bstk !r !t (DArgV ui bi) = do {-# inline buildData #-} dumpData - :: Stack 'UN -> Stack 'BX -> Closure -> IO (Stack 'UN, Stack 'BX) -dumpData !ustk !bstk (Enum _ t) = do + :: Maybe Reference + -> Stack 'UN + -> Stack 'BX + -> Closure + -> IO (Stack 'UN, Stack 'BX) +dumpData !_ !ustk !bstk (Enum _ t) = do ustk <- bump ustk pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataU1 _ t x) = do +dumpData !_ !ustk !bstk (DataU1 _ t x) = do ustk <- bumpn ustk 2 pokeOff ustk 1 x pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataU2 _ t x y) = do +dumpData !_ !ustk !bstk (DataU2 _ t x y) = do ustk <- bumpn ustk 3 pokeOff ustk 2 y pokeOff ustk 1 x pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataB1 _ t x) = do +dumpData !_ !ustk !bstk (DataB1 _ t x) = do ustk <- bump ustk bstk <- bump bstk poke bstk x pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataB2 _ t x y) = do +dumpData !_ !ustk !bstk (DataB2 _ t x y) = do ustk <- bump ustk bstk <- bumpn bstk 2 pokeOff bstk 1 y poke bstk x pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataUB _ t x y) = do +dumpData !_ !ustk !bstk (DataUB _ t 
x y) = do ustk <- bumpn ustk 2 bstk <- bump bstk pokeOff ustk 1 x poke bstk y pokeN ustk t pure (ustk, bstk) -dumpData !ustk !bstk (DataG _ t us bs) = do +dumpData !_ !ustk !bstk (DataG _ t us bs) = do ustk <- dumpSeg ustk us S bstk <- dumpSeg bstk bs S ustk <- bump ustk pokeN ustk t pure (ustk, bstk) -dumpData !_ !_ clo = die $ "dumpData: bad closure: " ++ show clo +dumpData !mr !_ !_ clo + = die $ "dumpData: bad closure: " ++ show clo + ++ maybe "" (\r -> "\nexpected type: " ++ show r) mr {-# inline dumpData #-} -- Note: although the representation allows it, it is impossible @@ -901,6 +923,18 @@ uprim2 !ustk LEQN !i !j = do ustk <- bump ustk poke ustk $ if m <= n then 1 else 0 pure ustk +uprim2 !ustk DIVN !i !j = do + m <- peekOffN ustk i + n <- peekOffN ustk j + ustk <- bump ustk + pokeN ustk (m`div`n) + pure ustk +uprim2 !ustk MODN !i !j = do + m <- peekOffN ustk i + n <- peekOffN ustk j + ustk <- bump ustk + pokeN ustk (m`mod`n) + pure ustk uprim2 !ustk ADDF !i !j = do x <- peekOffD ustk i y <- peekOffD ustk j @@ -953,13 +987,13 @@ uprim2 !ustk EQLF !i !j = do x <- peekOffD ustk i y <- peekOffD ustk j ustk <- bump ustk - pokeD ustk (if x == y then 1 else 0) + poke ustk (if x == y then 1 else 0) pure ustk uprim2 !ustk LEQF !i !j = do x <- peekOffD ustk i y <- peekOffD ustk j ustk <- bump ustk - pokeD ustk (if x <= y then 1 else 0) + poke ustk (if x <= y then 1 else 0) pure ustk uprim2 !ustk ATN2 !i !j = do x <- peekOffD ustk i diff --git a/parser-typechecker/src/Unison/Runtime/Pattern.hs b/parser-typechecker/src/Unison/Runtime/Pattern.hs index 5a271ea608..5da3afbea3 100644 --- a/parser-typechecker/src/Unison/Runtime/Pattern.hs +++ b/parser-typechecker/src/Unison/Runtime/Pattern.hs @@ -410,9 +410,21 @@ splitMatrixBuiltin v (PM rs) = fmap (\(a,(b,c)) -> (a,b,c)) . toList . fmap buildMatrix - . fromListWith (++) + . fromListWith (flip (++)) + . 
expandIrrefutable $ splitRowBuiltin v =<< rs +expandIrrefutable + :: Var v + => [(P.Pattern (), [([P.Pattern v], PatternRow v)])] + -> [(P.Pattern (), [([P.Pattern v], PatternRow v)])] +expandIrrefutable rss = concatMap expand rss + where + specific = filter refutable $ fst <$> rss + expand tup@(p, rs) + | not (refutable p) = fmap (,rs) specific ++ [tup] + expand tup = [tup] + matchPattern :: [(v,PType)] -> SeqMatch -> P.Pattern () matchPattern vrs = \case E -> sz 0 @@ -671,8 +683,6 @@ mkRow sv (MatchCase (normalizeSeqP -> p0) g0 (AbsN' vs b)) | otherwise -> internalBug "mkRow: guard variables do not match body" Nothing -> Nothing - _ -> internalBug "mkRow: impossible" -mkRow _ _ = internalBug "mkRow: impossible" initialize :: Var v diff --git a/parser-typechecker/src/Unison/Runtime/Stack.hs b/parser-typechecker/src/Unison/Runtime/Stack.hs index 18603820ef..679b4d1545 100644 --- a/parser-typechecker/src/Unison/Runtime/Stack.hs +++ b/parser-typechecker/src/Unison/Runtime/Stack.hs @@ -17,6 +17,7 @@ module Unison.Runtime.Stack , Off , SZ , FP + , universalEq , universalCompare , marshalToForeign , unull @@ -164,6 +165,35 @@ closureNum Captured{} = 2 closureNum Foreign{} = 3 closureNum BlackHole{} = error "BlackHole" +universalEq + :: (Foreign -> Foreign -> Bool) + -> Closure + -> Closure + -> Bool +universalEq frn = eqc False + where + eql cm l r = length l == length r && and (zipWith cm l r) + eqc tyEq (DataC rf1 ct1 us1 bs1) (DataC rf2 ct2 us2 bs2) + = (if tyEq then rf1 == rf2 else True) + && ct1 == ct2 + && eql (==) us1 us2 + && eql (eqc $ tyEq || rf1 == Ty.anyRef) bs1 bs2 + eqc tyEq (PApV i1 us1 bs1) (PApV i2 us2 bs2) + = i1 == i2 + && eql (==) us1 us2 + && eql (eqc tyEq) bs1 bs2 + eqc _ (CapV k1 us1 bs1) (CapV k2 us2 bs2) + = k1 == k2 + && eql (==) us1 us2 + && eql (eqc True) bs1 bs2 + eqc tyEq (Foreign fl) (Foreign fr) + | Just sl <- maybeUnwrapForeign Ty.listRef fl + , Just sr <- maybeUnwrapForeign Ty.listRef fr + = length sl == length sr && and (Sq.zipWith 
(eqc tyEq) sl sr) + | otherwise = frn fl fr + eqc _ c d = closureNum c == closureNum d + + universalCompare :: (Foreign -> Foreign -> Ordering) -> Closure diff --git a/parser-typechecker/src/Unison/Server/Backend.hs b/parser-typechecker/src/Unison/Server/Backend.hs index aa73356f53..529244330e 100644 --- a/parser-typechecker/src/Unison/Server/Backend.hs +++ b/parser-typechecker/src/Unison/Server/Backend.hs @@ -1,20 +1,22 @@ {-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} {-# LANGUAGE ViewPatterns #-} module Unison.Server.Backend where import Control.Lens (_2, over) -import Control.Error.Util ((??)) +import Control.Error.Util ((??),hush) import Control.Monad.Except ( ExceptT (..), throwError, ) -import Data.Bifunctor (first) +import Data.Bifunctor (first,bimap) +import Data.List.Extra (nubOrd) +import Data.Containers.ListUtils (nubOrdOn) import qualified Data.List as List import qualified Data.Map as Map import qualified Data.Set as Set @@ -24,6 +26,8 @@ import qualified Text.FuzzyFind as FZF import qualified Unison.ABT as ABT import qualified Unison.Builtin as B import qualified Unison.Builtin.Decls as Decls +import qualified Unison.Codebase.Runtime as Rt +import qualified Unison.Runtime.IOSource as DD import Unison.Codebase (Codebase) import qualified Unison.Codebase as Codebase import Unison.Codebase.Branch (Branch, Branch0) @@ -45,6 +49,7 @@ import Unison.Name as Name ( unsafeFromText, ) import qualified Unison.Name as Name +import qualified Unison.NamePrinter as NP import Unison.NameSegment (NameSegment(..)) import qualified Unison.NameSegment as NameSegment import qualified Unison.Names2 as Names @@ -78,9 +83,11 @@ import Unison.Util.Pretty (Width) import qualified Unison.Util.Pretty as Pretty import 
qualified Unison.Util.Relation as R import qualified Unison.Util.Star3 as Star3 -import Unison.Util.SyntaxText (SyntaxText) -import qualified Unison.Util.SyntaxText as SyntaxText +import qualified Unison.Util.SyntaxText as UST import Unison.Var (Var) +import qualified Unison.Server.Doc as Doc +import qualified Unison.UnisonFile as UF +import qualified Unison.Codebase.Editor.DisplayObject as DisplayObject data ShallowListEntry v a = ShallowTermEntry (TermEntry v a) @@ -103,6 +110,7 @@ data BackendError | CouldntExpandBranchHash ShortBranchHash | AmbiguousBranchHash ShortBranchHash (Set ShortBranchHash) | NoBranchForHash Branch.Hash + | CouldntLoadBranch Branch.Hash | MissingSignatureForTerm Reference type Backend m a = ExceptT BackendError m a @@ -188,6 +196,15 @@ data FoundRef = FoundTermRef Referent | FoundTypeRef Reference deriving (Eq, Ord, Show, Generic) +-- After finding a search results with fuzzy find we do some post processing to +-- refine the result: +-- * Sort: +-- we sort both on the FZF score and the number of segments in the FQN +-- preferring shorter FQNs over longer. This helps with things like forks +-- of base. +-- * Dedupe: +-- we dedupe on the found refs to avoid having several rows of a +-- definition with different names in the result set. fuzzyFind :: Monad m => Path @@ -195,12 +212,30 @@ fuzzyFind -> String -> [(FZF.Alignment, UnisonName, [FoundRef])] fuzzyFind path branch query = - fmap (fmap (either FoundTermRef FoundTypeRef) . toList) - . (over _2 Name.toText) - <$> fzfNames - where - fzfNames = Names.fuzzyFind (words query) printNames - printNames = basicPrettyPrintNames0 branch path + let + printNames = + basicPrettyPrintNames0 branch path + + fzfNames = + Names.fuzzyFind (words query) printNames + + toFoundRef = + fmap (fmap (either FoundTermRef FoundTypeRef) . 
toList) + + -- Remove dupes based on refs + dedupe = + nubOrdOn (\(_, _, refs) -> refs) + + -- Prefer shorter FQNs + rank (alignment, name, _) = + (Name.countSegments (Name.unsafeFromText name) + , negate (FZF.score alignment) + ) + + refine = + dedupe . sortOn rank + in + refine $ toFoundRef . over _2 Name.toText <$> fzfNames -- List the immediate children of a namespace findShallow @@ -228,7 +263,8 @@ termListEntry codebase b0 r n = do ot <- lift $ loadReferentType codebase r -- A term is a doc if its type conforms to the `Doc` type. let isDoc = case ot of - Just t -> Typechecker.isSubtype t $ Type.ref mempty Decls.docRef + Just t -> Typechecker.isSubtype t (Type.ref mempty Decls.docRef) || + Typechecker.isSubtype t (Type.ref mempty DD.doc2Ref) Nothing -> False -- A term is a test if it has a link of type `IsTest`. isTest = @@ -254,6 +290,37 @@ typeListEntry codebase r n = do _ -> pure Data pure $ TypeEntry r n tag +typeDeclHeader + :: forall v m + . Monad m + => Var v + => Codebase m v Ann + -> PPE.PrettyPrintEnv + -> Reference + -> Backend m (DisplayObject Syntax.SyntaxText Syntax.SyntaxText) +typeDeclHeader code ppe r = case Reference.toId r of + Just rid -> + (lift $ Codebase.getTypeDeclaration code rid) <&> \case + Nothing -> DisplayObject.MissingObject (Reference.toShortHash r) + Just decl -> + DisplayObject.UserObject $ + Syntax.convertElement <$> + Pretty.render defaultWidth (DeclPrinter.prettyDeclHeader name decl) + Nothing -> + pure (DisplayObject.BuiltinObject (formatTypeName ppe r)) + where + name = PPE.typeName ppe r + +formatTypeName :: PPE.PrettyPrintEnv -> Reference -> Syntax.SyntaxText +formatTypeName ppe = + fmap Syntax.convertElement . formatTypeName' ppe + +formatTypeName' :: PPE.PrettyPrintEnv -> Reference -> UST.SyntaxText +formatTypeName' ppe r = + Pretty.renderUnbroken . 
+ NP.styleHashQualified id $ + PPE.typeName ppe r + termEntryToNamedTerm :: Var v => PPE.PrettyPrintEnv -> Maybe Width -> TermEntry v a -> NamedTerm termEntryToNamedTerm ppe typeWidth (TermEntry r name mayType tag) = NamedTerm @@ -370,7 +437,7 @@ searchBranchExact len names queries = searchTypes :: HQ.HashQualified Name -> [SR.SearchResult] searchTypes query = -- a bunch of references will match a HQ ref. - let refs = toList $ Names3.lookupHQType query names + let refs = toList $ Names3.lookupRelativeHQType query names mayName r Nothing = HQ'.fromNamedReference "" r mayName _ (Just n) = n in refs <&> \r -> @@ -388,7 +455,7 @@ searchBranchExact len names queries = searchTerms :: HQ.HashQualified Name -> [SR.SearchResult] searchTerms query = -- a bunch of references will match a HQ ref. - let refs = toList $ Names3.lookupHQTerm query names + let refs = toList $ Names3.lookupRelativeHQTerm query names mayName r Nothing = HQ'.fromNamedReferent "" r mayName _ (Just n) = n in refs <&> \r -> @@ -406,15 +473,14 @@ searchBranchExact len names queries = in [ searchTypes q <> searchTerms q | q <- queries ] -hqNameQuery' +hqNameQuery :: Monad m - => Bool - -> Maybe Path + => Maybe Path -> Branch m -> Codebase m v Ann -> [HQ.HashQualified Name] -> m QueryResult -hqNameQuery' doSuffixify relativeTo root codebase hqs = do +hqNameQuery relativeTo root codebase hqs = do -- Split the query into hash-only and hash-qualified-name queries. let (hqnames, hashes) = List.partition (isJust . HQ.toName) hqs -- Find the terms with those hashes. 
@@ -438,8 +504,7 @@ hqNameQuery' doSuffixify relativeTo root codebase hqs = do (\(n, tms) -> (n, toList $ mkTermResult n <$> toList tms)) <$> termRefs typeResults = (\(n, tps) -> (n, toList $ mkTypeResult n <$> toList tps)) <$> typeRefs - -- Suffixify the names - parseNames = (if doSuffixify then Names3.suffixify else id) parseNames0 + parseNames = parseNames0 -- Now do the actual name query resultss = searchBranchExact hqLength parseNames hqnames -- Handle query misses correctly @@ -458,29 +523,11 @@ hqNameQuery' doSuffixify relativeTo root codebase hqs = do >>= snd pure $ QueryResult (missingRefs ++ (fst <$> misses)) results -hqNameQuery - :: Monad m - => Maybe Path - -> Branch m - -> Codebase m v Ann - -> [HQ.HashQualified Name] - -> m QueryResult -hqNameQuery = hqNameQuery' False - -hqNameQuerySuffixify - :: Monad m - => Maybe Path - -> Branch m - -> Codebase m v Ann - -> [HQ.HashQualified Name] - -> m QueryResult -hqNameQuerySuffixify = hqNameQuery' True - -- TODO: Move this to its own module data DefinitionResults v = DefinitionResults - { termResults :: Map Reference (DisplayObject (Term v Ann)) - , typeResults :: Map Reference (DisplayObject (DD.Decl v Ann)) + { termResults :: Map Reference (DisplayObject (Type v Ann) (Term v Ann)) + , typeResults :: Map Reference (DisplayObject () (DD.Decl v Ann)) , noResults :: [HQ.HashQualified Name] } @@ -509,34 +556,37 @@ expandShortBranchHash codebase hash = do _ -> throwError . AmbiguousBranchHash hash $ Set.map (SBH.fromHash len) hashSet -prettyType +formatType' :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> UST.SyntaxText +formatType' ppe w = + Pretty.render w . TypePrinter.pretty0 ppe mempty (-1) + +formatType :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> Syntax.SyntaxText +formatType ppe w = mungeSyntaxText . 
formatType' ppe w + +formatSuffixedType :: Var v - => Width - -> PPE.PrettyPrintEnvDecl + => PPE.PrettyPrintEnvDecl + -> Width -> Type v Ann -> Syntax.SyntaxText -prettyType width ppe = - mungeSyntaxText . Pretty.render width . TypePrinter.pretty0 - (PPE.suffixifiedPPE ppe) - mempty - (-1) +formatSuffixedType ppe = formatType (PPE.suffixifiedPPE ppe) mungeSyntaxText - :: Functor g => g (SyntaxText.Element Reference) -> g Syntax.Element + :: Functor g => g (UST.Element Reference) -> g Syntax.Element mungeSyntaxText = fmap Syntax.convertElement prettyDefinitionsBySuffixes - :: forall v m - . Monad m - => Var v + :: forall v + . Var v => Maybe Path -> Maybe Branch.Hash -> Maybe Width -> Suffixify - -> Codebase m v Ann + -> Rt.Runtime v + -> Codebase IO v Ann -> [HQ.HashQualified Name] - -> Backend m DefinitionDisplayResults -prettyDefinitionsBySuffixes relativeTo root renderWidth suffixifyBindings codebase query + -> Backend IO DefinitionDisplayResults +prettyDefinitionsBySuffixes relativeTo root renderWidth suffixifyBindings rt codebase query = do branch <- resolveBranchHash root codebase DefinitionResults terms types misses <- definitionsBySuffixes relativeTo @@ -554,52 +604,103 @@ prettyDefinitionsBySuffixes relativeTo root renderWidth suffixifyBindings codeba getCurrentParseNames (fromMaybe Path.empty relativeTo) branch ppe = PPE.fromNamesDecl hqLength printNames width = mayDefault renderWidth + isAbsolute (Name.toText -> n) = "." `Text.isPrefixOf` n && n /= "." termFqns :: Map Reference (Set Text) termFqns = Map.mapWithKey f terms where - f k _ = - R.lookupRan (Referent.Ref' k) - . R.filterDom (\n -> "." `Text.isPrefixOf` n && n /= ".") - . R.mapDom Name.toText - . Names.terms - $ currentNames parseNames + rel = Names.terms $ currentNames parseNames + f k _ = Set.fromList . fmap Name.toText . filter isAbsolute . toList + $ R.lookupRan (Referent.Ref' k) rel typeFqns :: Map Reference (Set Text) typeFqns = Map.mapWithKey f types where - f k _ = - R.lookupRan k - . 
R.filterDom (\n -> "." `Text.isPrefixOf` n && n /= ".") - . R.mapDom Name.toText - . Names.types - $ currentNames parseNames + rel = Names.types $ currentNames parseNames + f k _ = Set.fromList . fmap Name.toText . filter isAbsolute . toList + $ R.lookupRan k rel flatten = Set.toList . fromMaybe Set.empty + + docNames :: Set (HQ'.HashQualified Name) -> [Name] + docNames hqs = fmap docify . nubOrd . join . map toList . Set.toList $ hqs + where docify n = Name.joinDot n "doc" + + selectDocs :: [Referent] -> Backend IO [Reference] + selectDocs rs = do + rts <- fmap join . for rs $ \case + Referent.Ref r -> + maybe [] (pure . (r,)) <$> lift (Codebase.getTypeOfTerm codebase r) + _ -> pure [] + pure [ r | (r, t) <- rts, Typechecker.isSubtype t (Type.ref mempty DD.doc2Ref) ] + + renderDoc :: Reference -> Backend IO [(HashQualifiedName, UnisonHash, Doc.Doc)] + renderDoc r = do + let name = bestNameForTerm @v (PPE.suffixifiedPPE ppe) width (Referent.Ref r) + let hash = Reference.toText r + map (name,hash,) . pure <$> + let tm = Term.ref () r + in Doc.renderDoc @v ppe terms typeOf eval decls tm + where + terms r@(Reference.Builtin _) = pure (Just (Term.ref () r)) + terms (Reference.DerivedId r) = + fmap Term.unannotate <$> lift (Codebase.getTerm codebase r) + + typeOf r = fmap void <$> lift (Codebase.getTypeOfReferent codebase r) + eval (Term.amap (const mempty) -> tm) = do + let ppes = PPE.suffixifiedPPE ppe + let codeLookup = Codebase.toCodeLookup codebase + let cache r = fmap Term.unannotate <$> Codebase.lookupWatchCache codebase r + r <- fmap hush . 
liftIO $ Rt.evaluateTerm' codeLookup cache ppes rt tm + lift $ case r of + Just tmr -> Codebase.putWatch codebase UF.RegularWatch + (Term.hashClosedTerm tm) + (Term.amap (const mempty) tmr) + Nothing -> pure () + pure $ r <&> Term.amap (const mempty) + + decls (Reference.DerivedId r) = fmap (DD.amap (const ())) <$> lift (Codebase.getTypeDeclaration codebase r) + decls _ = pure Nothing + + -- rs0 can be empty or the term fetched, so when viewing a doc term + -- you get both its source and its rendered form + docResults :: [Reference] -> [Name] -> Backend IO [(HashQualifiedName, UnisonHash, Doc.Doc)] + docResults rs0 docs = do + let refsFor n = Names3.lookupHQTerm (HQ.NameOnly n) parseNames + let rs = Set.unions (refsFor <$> docs) <> Set.fromList (Referent.Ref <$> rs0) + -- lookup the type of each, make sure it's a doc + docs <- selectDocs (toList rs) + -- render all the docs + join <$> traverse renderDoc docs + mkTermDefinition r tm = do ts <- lift (Codebase.getTypeOfTerm codebase r) - let bn = - bestNameForTerm @v (PPE.suffixifiedPPE ppe) width (Referent.Ref r) + let bn = bestNameForTerm @v (PPE.suffixifiedPPE ppe) width (Referent.Ref r) tag <- termEntryTag <$> termListEntry codebase (Branch.head branch) (Referent.Ref r) (HQ'.NameOnly (NameSegment bn)) - mk ts bn tag + docs <- docResults [r] $ docNames (Names3.termName hqLength (Referent.Ref r) printNames) + mk docs ts bn tag where - mk Nothing _ _ = throwError $ MissingSignatureForTerm r - mk (Just typeSig) bn tag = - pure - . TermDefinition (flatten $ Map.lookup r termFqns) + mk _ Nothing _ _ = throwError $ MissingSignatureForTerm r + mk docs (Just typeSig) bn tag = + pure $ + TermDefinition (flatten $ Map.lookup r termFqns) bn tag - (fmap mungeSyntaxText tm) - $ prettyType width ppe typeSig + (bimap mungeSyntaxText mungeSyntaxText tm) + (formatSuffixedType ppe width typeSig) + docs mkTypeDefinition r tp = do let bn = bestNameForType @v (PPE.suffixifiedPPE ppe) width r tag <- Just . 
typeEntryTag <$> typeListEntry codebase r (HQ'.NameOnly (NameSegment bn)) - pure . TypeDefinition (flatten $ Map.lookup r typeFqns) bn tag $ fmap - mungeSyntaxText - tp + docs <- docResults [] $ docNames (Names3.typeName hqLength r printNames) + pure $ TypeDefinition (flatten $ Map.lookup r typeFqns) + bn + tag + (bimap mungeSyntaxText mungeSyntaxText tp) + docs typeDefinitions <- Map.traverseWithKey mkTypeDefinition $ typesToSyntax suffixifyBindings width ppe types termDefinitions <- Map.traverseWithKey mkTermDefinition @@ -616,7 +717,7 @@ bestNameForTerm bestNameForTerm ppe width = Text.pack . Pretty.render width - . fmap SyntaxText.toPlain + . fmap UST.toPlain . TermPrinter.pretty0 @v ppe TermPrinter.emptyAc . Term.fromReferent mempty @@ -625,7 +726,7 @@ bestNameForType bestNameForType ppe width = Text.pack . Pretty.render width - . fmap SyntaxText.toPlain + . fmap UST.toPlain . TypePrinter.pretty0 @v ppe mempty (-1) . Type.ref () @@ -639,7 +740,7 @@ resolveBranchHash h codebase = case h of definitionsBySuffixes :: forall m v - . Monad m + . (MonadIO m) => Var v => Maybe Path -> Branch m @@ -649,7 +750,7 @@ definitionsBySuffixes definitionsBySuffixes relativeTo branch codebase query = do -- First find the hashes by name and note any query misses. QueryResult misses results <- lift - $ hqNameQuerySuffixify relativeTo branch codebase query + $ hqNameQuery relativeTo branch codebase query -- Now load the terms/types for those hashes. 
results' <- lift $ loadSearchResults codebase results let termTypes :: Map.Map Reference (Type v Ann) @@ -681,13 +782,15 @@ definitionsBySuffixes relativeTo branch codebase query = do Just (tm, typ) -> case tm of Term.Ann' _ _ -> UserObject tm _ -> UserObject (Term.ann (ABT.annotation tm) tm typ) - r@(Reference.Builtin _) -> pure (r, BuiltinObject) + r@(Reference.Builtin _) -> pure $ (r,) $ case Map.lookup r B.termRefTypes of + Nothing -> MissingObject $ Reference.toShortHash r + Just typ -> BuiltinObject (mempty <$ typ) let loadedDisplayTypes = Map.fromList . (`fmap` toList collatedTypes) $ \case r@(Reference.DerivedId i) -> (r, ) . maybe (MissingObject $ Reference.idToShortHash i) UserObject $ Map.lookup i loadedDerivedTypes - r@(Reference.Builtin _) -> (r, BuiltinObject) + r@(Reference.Builtin _) -> (r, BuiltinObject ()) pure $ DefinitionResults loadedDisplayTerms loadedDisplayTypes misses termsToSyntax @@ -696,8 +799,8 @@ termsToSyntax => Suffixify -> Width -> PPE.PrettyPrintEnvDecl - -> Map Reference.Reference (DisplayObject (Term v a)) - -> Map Reference.Reference (DisplayObject SyntaxText) + -> Map Reference.Reference (DisplayObject (Type v a) (Term v a)) + -> Map Reference.Reference (DisplayObject UST.SyntaxText UST.SyntaxText) termsToSyntax suff width ppe0 terms = Map.fromList . map go . Map.toList $ Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) @@ -708,8 +811,12 @@ termsToSyntax suff width ppe0 terms = else PPE.declarationPPE ppe0 r ppeDecl = (if suffixified suff then PPE.suffixifiedPPE else PPE.unsuffixifiedPPE) ppe0 - go ((n, r), dt) = - (r, Pretty.render width . TermPrinter.prettyBinding (ppeBody r) n <$> dt) + go ((n, r), dt) = (r,) $ case dt of + DisplayObject.BuiltinObject typ -> DisplayObject.BuiltinObject $ + formatType' (ppeBody r) width typ + DisplayObject.MissingObject sh -> DisplayObject.MissingObject sh + DisplayObject.UserObject tm -> DisplayObject.UserObject . + Pretty.render width . 
TermPrinter.prettyBinding (ppeBody r) n $ tm typesToSyntax :: Var v @@ -717,8 +824,8 @@ typesToSyntax => Suffixify -> Width -> PPE.PrettyPrintEnvDecl - -> Map Reference.Reference (DisplayObject (DD.Decl v a)) - -> Map Reference.Reference (DisplayObject SyntaxText) + -> Map Reference.Reference (DisplayObject () (DD.Decl v a)) + -> Map Reference.Reference (DisplayObject UST.SyntaxText UST.SyntaxText) typesToSyntax suff width ppe0 types = Map.fromList $ map go . Map.toList $ Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) @@ -730,16 +837,11 @@ typesToSyntax suff width ppe0 types = ppeDecl = if suffixified suff then PPE.suffixifiedPPE ppe0 else PPE.unsuffixifiedPPE ppe0 - go ((n, r), dt) = - ( r - , (\case - Left d -> - Pretty.render width $ DeclPrinter.prettyEffectDecl (ppeBody r) r n d - Right d -> - Pretty.render width $ DeclPrinter.prettyDataDecl (ppeBody r) r n d - ) - <$> dt - ) + go ((n, r), dt) = (r,) $ case dt of + BuiltinObject _ -> BuiltinObject (formatTypeName' ppeDecl r) + MissingObject sh -> MissingObject sh + UserObject d -> UserObject . 
Pretty.render width $ + DeclPrinter.prettyDecl (ppeBody r) r n d loadSearchResults :: (Var v, Applicative m) @@ -760,9 +862,9 @@ loadTypeDisplayObject :: Applicative m => Codebase m v Ann -> Reference - -> m (DisplayObject (DD.Decl v Ann)) + -> m (DisplayObject () (DD.Decl v Ann)) loadTypeDisplayObject c = \case - Reference.Builtin _ -> pure BuiltinObject + Reference.Builtin _ -> pure (BuiltinObject ()) Reference.DerivedId id -> maybe (MissingObject $ Reference.idToShortHash id) UserObject <$> Codebase.getTypeDeclaration c id diff --git a/parser-typechecker/src/Unison/Server/CodebaseServer.hs b/parser-typechecker/src/Unison/Server/CodebaseServer.hs index 4c7929d465..b7644e2baa 100644 --- a/parser-typechecker/src/Unison/Server/CodebaseServer.hs +++ b/parser-typechecker/src/Unison/Server/CodebaseServer.hs @@ -10,7 +10,9 @@ module Unison.Server.CodebaseServer where import Control.Concurrent (newEmptyMVar, putMVar, readMVar) import Control.Concurrent.Async (race) +import Data.ByteString.Char8 (unpack) import Control.Exception (ErrorCall (..), throwIO) +import qualified Network.URI.Encode as URI import Control.Lens ((&), (.~)) import Data.Aeson () import qualified Data.ByteString as Strict @@ -18,7 +20,6 @@ import qualified Data.ByteString.Base64 as Base64 import qualified Data.ByteString.Char8 as C8 import qualified Data.ByteString.Lazy as Lazy import qualified Data.ByteString.Lazy.UTF8 as BLU -import Data.Monoid (Endo (..), appEndo) import Data.OpenApi (Info (..), License (..), OpenApi, URL (..)) import qualified Data.OpenApi.Lens as OpenApi import Data.Proxy (Proxy (..)) @@ -37,22 +38,6 @@ import Network.Wai.Handler.Warp setPort, withApplicationSettings, ) -import Options.Applicative - ( argument, - auto, - defaultPrefs, - execParserPure, - forwardOptions, - getParseResult, - help, - info, - internal, - long, - metavar, - option, - str, - strOption, - ) import Servant ( MimeRender (..), serve, @@ -87,11 +72,12 @@ import Servant.Server ) import 
Servant.Server.StaticFiles (serveDirectoryWebApp) import System.Directory (canonicalizePath, doesFileExist) -import System.Environment (getArgs, getExecutablePath, lookupEnv) +import System.Environment (getExecutablePath) import System.FilePath (()) import qualified System.FilePath as FilePath import System.Random.Stateful (getStdGen, newAtomicGenM, uniformByteStringM) import Unison.Codebase (Codebase) +import qualified Unison.Codebase.Runtime as Rt import Unison.Parser (Ann) import Unison.Prelude import Unison.Server.Endpoints.FuzzyFind (FuzzyFindAPI, serveFuzzyFind) @@ -129,6 +115,26 @@ type AuthedServerAPI = ("static" :> Raw) :<|> (Capture "token" Text :> ServerAPI instance ToSample Char where toSamples _ = singleSample 'x' +-- BaseUrl and helpers + +data BaseUrl = BaseUrl + { urlHost :: String, + urlToken :: Strict.ByteString, + urlPort :: Port + } + +data BaseUrlPath = UI | Api + +instance Show BaseUrl where + show url = urlHost url <> ":" <> show (urlPort url) <> "/" <> (URI.encode . unpack . 
urlToken $ url) + +urlFor :: BaseUrlPath -> BaseUrl -> String +urlFor path baseUrl = + case path of + UI -> show baseUrl <> "/ui" + Api -> show baseUrl <> "/api" + + handleAuth :: Strict.ByteString -> Text -> Handler () handleAuth expectedToken gotToken = if Text.decodeUtf8 expectedToken == gotToken @@ -166,12 +172,13 @@ serverAPI = Proxy app :: Var v - => Codebase IO v Ann + => Rt.Runtime v + -> Codebase IO v Ann -> FilePath -> Strict.ByteString -> Application -app codebase uiPath expectedToken = - serve serverAPI $ server codebase uiPath expectedToken +app rt codebase uiPath expectedToken = + serve serverAPI $ server rt codebase uiPath expectedToken genToken :: IO Strict.ByteString genToken = do @@ -205,86 +212,40 @@ ucmHostVar = "UCM_HOST" ucmTokenVar :: String ucmTokenVar = "UCM_TOKEN" --- The auth token required for accessing the server is passed to the function k -start - :: Var v - => Codebase IO v Ann - -> (Strict.ByteString -> Port -> IO ()) - -> IO () -start codebase k = do - envToken <- lookupEnv ucmTokenVar - envHost <- lookupEnv ucmHostVar - envPort <- (readMaybe =<<) <$> lookupEnv ucmPortVar - envUI <- lookupEnv ucmUIVar - args <- getArgs - let - p = - (,,,,) - <$> ( many $ argument str internal ) - <*> ( (<|> envToken) - <$> ( optional - . strOption - $ long "token" - <> metavar "STRING" - <> help "API auth token" - ) - ) - <*> ( (<|> envHost) - <$> ( optional - . strOption - $ long "host" - <> metavar "STRING" - <> help "UCM server host" - ) - ) - <*> ( (<|> envPort) - <$> ( optional - . option auto - $ long "port" - <> metavar "NUMBER" - <> help "UCM server port" - ) - ) - <*> ( (<|> envUI) - <$> (optional . 
strOption $ long "ui" <> metavar "DIR" <> help - "Path to codebase ui root" - ) - ) - mayOpts = - getParseResult $ execParserPure defaultPrefs (info p forwardOptions) args - case mayOpts of - Just (_, token, host, port, ui) -> startServer codebase k token host port ui - Nothing -> startServer codebase k Nothing Nothing Nothing Nothing +data CodebaseServerOpts = CodebaseServerOpts + { token :: Maybe String + , host :: Maybe String + , port :: Maybe Int + , codebaseUIPath :: Maybe FilePath + } deriving (Show, Eq) +-- The auth token required for accessing the server is passed to the function k startServer :: Var v - => Codebase IO v Ann - -> (Strict.ByteString -> Port -> IO ()) - -> Maybe String - -> Maybe String - -> Maybe Port - -> Maybe String + => CodebaseServerOpts + -> Rt.Runtime v + -> Codebase IO v Ann + -> (BaseUrl -> IO ()) -> IO () -startServer codebase k envToken envHost envPort envUI0 = do +startServer opts rt codebase onStart = do -- the `canonicalizePath` resolves symlinks exePath <- canonicalizePath =<< getExecutablePath - envUI <- canonicalizePath $ fromMaybe (FilePath.takeDirectory exePath "ui") envUI0 - token <- case envToken of + envUI <- canonicalizePath $ fromMaybe (FilePath.takeDirectory exePath "ui") (codebaseUIPath opts) + token <- case token opts of Just t -> return $ C8.pack t _ -> genToken - let settings = appEndo - ( foldMap (Endo . setPort) envPort - <> foldMap (Endo . setHost . fromString) envHost - ) - defaultSettings - a = app codebase envUI token - case envPort of - Nothing -> withApplicationSettings settings (pure a) (k token) + let baseUrl = BaseUrl "http://127.0.0.1" token + let settings = defaultSettings + & maybe id setPort (port opts) + & maybe id (setHost . fromString) (host opts) + let a = app rt codebase envUI token + case port opts of + Nothing -> withApplicationSettings settings (pure a) (onStart . 
baseUrl) Just p -> do started <- mkWaiter let settings' = setBeforeMainLoop (notify started ()) settings result <- race (runSettings settings' a) - (waitFor started *> k token p) + (waitFor started *> onStart (baseUrl p)) case result of Left () -> throwIO $ ErrorCall "Server exited unexpectedly!" Right x -> pure x @@ -311,16 +272,17 @@ serveUI tryAuth path _ = tryAuth *> serveIndex path server :: Var v - => Codebase IO v Ann + => Rt.Runtime v + -> Codebase IO v Ann -> FilePath -> Strict.ByteString -> Server AuthedServerAPI -server codebase uiPath token = +server rt codebase uiPath token = serveDirectoryWebApp (uiPath "static") :<|> ((\t -> serveUI (tryAuth t) uiPath :<|> ( ( (serveNamespace (tryAuth t) codebase) - :<|> (serveDefinitions (tryAuth t) codebase) + :<|> (serveDefinitions (tryAuth t) rt codebase) :<|> (serveFuzzyFind (tryAuth t) codebase) ) :<|> serveOpenAPI diff --git a/parser-typechecker/src/Unison/Server/Doc.hs b/parser-typechecker/src/Unison/Server/Doc.hs new file mode 100644 index 0000000000..3dc21f3716 --- /dev/null +++ b/parser-typechecker/src/Unison/Server/Doc.hs @@ -0,0 +1,278 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} + +module Unison.Server.Doc where + +import Control.Monad +import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT) +import Data.Foldable +import Data.Functor +import Data.Maybe (fromMaybe) +import Data.Text (Text) +import Data.Word +import GHC.Generics (Generic) +import Unison.Codebase.Editor.DisplayObject (DisplayObject) +import Unison.Reference (Reference) +import Unison.Referent (Referent) +import Unison.Server.Syntax (SyntaxText) +import Unison.Term (Term) +import Unison.Type (Type) +import Unison.Var (Var) +import qualified Data.Set as Set +import qualified Data.Text as Text +import qualified 
Unison.ABT as ABT +import qualified Unison.Builtin.Decls as DD +import qualified Unison.Builtin.Decls as Decls +import qualified Unison.Codebase.Editor.DisplayObject as DO +import qualified Unison.DataDeclaration as DD +import qualified Unison.DeclPrinter as DeclPrinter +import qualified Unison.NamePrinter as NP +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.Reference as Reference +import qualified Unison.Referent as Referent +import qualified Unison.Runtime.IOSource as DD +import qualified Unison.Server.Syntax as Syntax +import qualified Unison.ShortHash as SH +import qualified Unison.Term as Term +import qualified Unison.TermPrinter as TermPrinter +import qualified Unison.Type as Type +import qualified Unison.TypePrinter as TypePrinter +import qualified Unison.Util.Pretty as P +import qualified Unison.Util.SyntaxText as S + +type Nat = Word64 + +data Doc + = Word Text + | Code Doc + | CodeBlock Text Doc + | Bold Doc + | Italic Doc + | Strikethrough Doc + | Style Text Doc + | Anchor Text Doc + | Blockquote Doc + | Blankline + | Linebreak + | SectionBreak + | Tooltip Doc Doc + | Aside Doc + | Callout (Maybe Doc) Doc + | Table [[Doc]] + | Folded Bool Doc Doc + | Paragraph [Doc] + | BulletedList [Doc] + | NumberedList Nat [Doc] + | Section Doc [Doc] + | NamedLink Doc Doc + | Image Doc Doc (Maybe Doc) + | Special SpecialForm + | Join [Doc] + | UntitledSection [Doc] + | Column [Doc] + | Group Doc + deriving (Eq,Show,Generic) + +type UnisonHash = Text + +data Ref a = Term a | Type a deriving (Eq,Show,Generic,Functor,Foldable,Traversable) + +data SpecialForm + = Source [Ref (UnisonHash, DisplayObject SyntaxText Src)] + | FoldedSource [Ref (UnisonHash, DisplayObject SyntaxText Src)] + | Example SyntaxText + | ExampleBlock SyntaxText + | Link SyntaxText + | Signature [SyntaxText] + | SignatureInline SyntaxText + | Eval SyntaxText SyntaxText + | EvalInline SyntaxText SyntaxText + | Embed SyntaxText + | EmbedInline SyntaxText + deriving 
(Eq,Show,Generic) + +-- `Src folded unfolded` +data Src = Src SyntaxText SyntaxText deriving (Eq,Show,Generic) + +renderDoc :: forall v m . (Var v, Monad m) + => PPE.PrettyPrintEnvDecl + -> (Reference -> m (Maybe (Term v ()))) + -> (Referent -> m (Maybe (Type v ()))) + -> (Term v () -> m (Maybe (Term v ()))) + -> (Reference -> m (Maybe (DD.Decl v ()))) + -> Term v () + -> m Doc +renderDoc pped terms typeOf eval types tm = eval tm >>= \case + Nothing -> pure $ Word "🆘 doc rendering failed during evaluation" + Just tm -> go tm + where + go = \case + DD.Doc2Word txt -> pure $ Word txt + DD.Doc2Code d -> Code <$> go d + DD.Doc2CodeBlock lang d -> CodeBlock lang <$> go d + DD.Doc2Bold d -> Bold <$> go d + DD.Doc2Italic d -> Italic <$> go d + DD.Doc2Strikethrough d -> Strikethrough <$> go d + DD.Doc2Style s d -> Style s <$> go d + DD.Doc2Anchor id d -> Anchor id <$> go d + DD.Doc2Blockquote d -> Blockquote <$> go d + DD.Doc2Blankline -> pure Blankline + DD.Doc2Linebreak -> pure Linebreak + DD.Doc2SectionBreak -> pure SectionBreak + DD.Doc2Tooltip d1 d2 -> Tooltip <$> go d1 <*> go d2 + DD.Doc2Aside d -> Aside <$> go d + DD.Doc2Callout Decls.OptionalNone' d -> Callout Nothing <$> go d + DD.Doc2Callout (Decls.OptionalSome' icon) d -> Callout <$> (Just <$> go icon) <*> go d + DD.Doc2Table rows -> Table <$> traverse r rows + where r (Term.List' ds) = traverse go (toList ds) + r _ = pure [Word "🆘 invalid table"] + DD.Doc2Folded isFolded d d2 -> Folded isFolded <$> go d <*> go d2 + DD.Doc2Paragraph ds -> Paragraph <$> traverse go ds + DD.Doc2BulletedList ds -> BulletedList <$> traverse go ds + DD.Doc2Section title ds -> Section <$> go title <*> traverse go ds + DD.Doc2NamedLink d1 d2 -> NamedLink <$> go d1 <*> go d2 + DD.Doc2Image d1 d2 Decls.OptionalNone' -> Image <$> go d1 <*> go d2 <*> pure Nothing + DD.Doc2Image d1 d2 (Decls.OptionalSome' d) -> Image <$> go d1 <*> go d2 <*> (Just <$> go d) + DD.Doc2Special sf -> Special <$> goSpecial sf + DD.Doc2Join ds -> Join <$> traverse 
go ds + DD.Doc2UntitledSection ds -> UntitledSection <$> traverse go ds + DD.Doc2Column ds -> Column <$> traverse go ds + DD.Doc2Group d -> Group <$> go d + wat -> pure . Word . Text.pack . P.toPlain (P.Width 80) . P.indent "🆘 " + . TermPrinter.pretty (PPE.suffixifiedPPE pped) $ wat + + formatPretty = fmap Syntax.convertElement . P.render (P.Width 70) + formatPrettyType ppe typ = formatPretty (TypePrinter.prettySyntax ppe typ) + + source :: Term v () -> m SyntaxText + source tm = (pure . formatPretty . TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped)) tm + + goSignatures :: [Referent] -> m [P.Pretty S.SyntaxText] + goSignatures rs = runMaybeT (traverse (MaybeT . typeOf) rs) >>= \case + Nothing -> pure ["🆘 codebase is missing type signature for these definitions"] + Just types -> pure . fmap P.group $ + TypePrinter.prettySignatures'' + (PPE.suffixifiedPPE pped) + [ (PPE.termName (PPE.suffixifiedPPE pped) r, ty) | (r,ty) <- zip rs types] + + goSpecial :: Term v () -> m SpecialForm + goSpecial = \case + + DD.Doc2SpecialFormFoldedSource (Term.List' es) -> FoldedSource <$> goSrc (toList es) + + -- Source [Either Link.Type Doc2.Term] + DD.Doc2SpecialFormSource (Term.List' es) -> Source <$> goSrc (toList es) + + -- Example Nat Doc2.Term + -- Examples like `foo x y` are encoded as `Example 2 (_ x y -> foo)`, where + -- 2 is the number of variables that should be dropped from the rendering. + -- So this will render as `foo x y`. + DD.Doc2SpecialFormExample n (DD.Doc2Example vs body) -> + Example <$> source ex + where ex = Term.lam' (ABT.annotation body) (drop (fromIntegral n) vs) body + + DD.Doc2SpecialFormExampleBlock n (DD.Doc2Example vs body) -> + ExampleBlock <$> source ex + where ex = Term.lam' (ABT.annotation body) (drop (fromIntegral n) vs) body + + -- Link (Either Link.Type Doc2.Term) + DD.Doc2SpecialFormLink e -> let + ppe = PPE.suffixifiedPPE pped + tm :: Referent -> P.Pretty S.SyntaxText + tm r = (NP.styleHashQualified'' (NP.fmt (S.Referent r)) . 
PPE.termName ppe) r + ty :: Reference -> P.Pretty S.SyntaxText + ty r = (NP.styleHashQualified'' (NP.fmt (S.Reference r)) . PPE.typeName ppe) r + in Link <$> case e of + DD.EitherLeft' (Term.TypeLink' r) -> (pure . formatPretty . ty) r + DD.EitherRight' (DD.Doc2Term (Term.Referent' r)) -> (pure . formatPretty . tm) r + _ -> source e + + DD.Doc2SpecialFormSignature (Term.List' tms) -> + let rs = [ r | DD.Doc2Term (Term.Referent' r) <- toList tms ] + in goSignatures rs <&> \s -> Signature (map formatPretty s) + + -- SignatureInline Doc2.Term + DD.Doc2SpecialFormSignatureInline (DD.Doc2Term (Term.Referent' r)) -> + goSignatures [r] <&> \s -> SignatureInline (formatPretty (P.lines s)) + + -- Eval Doc2.Term + DD.Doc2SpecialFormEval (DD.Doc2Term tm) -> eval tm >>= \case + Nothing -> Eval <$> source tm <*> pure evalErrMsg + Just result -> Eval <$> source tm <*> source result + + -- EvalInline Doc2.Term + DD.Doc2SpecialFormEvalInline (DD.Doc2Term tm) -> eval tm >>= \case + Nothing -> EvalInline <$> source tm <*> pure evalErrMsg + Just result -> EvalInline <$> source tm <*> source result + + -- Embed Any + DD.Doc2SpecialFormEmbed (Term.App' _ any) -> + source any <&> \p -> Embed ("{{ embed {{" <> p <> "}} }}") + + -- EmbedInline Any + DD.Doc2SpecialFormEmbedInline any -> + source any <&> \p -> EmbedInline ("{{ embed {{" <> p <> "}} }}") + + tm -> source tm <&> \p -> Embed ("🆘 unable to render " <> p) + + evalErrMsg = "🆘 An error occured during evaluation" + + goSrc :: [Term v ()] -> m [Ref (UnisonHash, DisplayObject SyntaxText Src)] + goSrc es = do + let toRef (Term.Ref' r) = Set.singleton r + toRef (Term.RequestOrCtor' r _) = Set.singleton r + toRef _ = mempty + ppe = PPE.suffixifiedPPE pped + goType :: Reference -> m (Ref (UnisonHash, DisplayObject SyntaxText Src)) + goType r@(Reference.Builtin _) = + pure (Type (Reference.toText r, DO.BuiltinObject name)) + where name = formatPretty . NP.styleHashQualified (NP.fmt (S.Reference r)) + . 
PPE.typeName ppe $ r + goType r = Type . (Reference.toText r,) <$> do + d <- types r + case d of + Nothing -> pure (DO.MissingObject (SH.unsafeFromText $ Reference.toText r)) + Just decl -> + pure $ DO.UserObject (Src folded full) + where + full = formatPretty (DeclPrinter.prettyDecl ppe r (PPE.typeName ppe r) decl) + folded = formatPretty (DeclPrinter.prettyDeclHeader (PPE.typeName ppe r) decl) + + go :: (Set.Set Reference, [Ref (UnisonHash, DisplayObject SyntaxText Src)]) + -> Term v () + -> m (Set.Set Reference, [Ref (UnisonHash, DisplayObject SyntaxText Src)]) + go s1@(!seen,!acc) = \case + -- we ignore the annotations; but this could be extended later + DD.TupleTerm' [DD.EitherRight' (DD.Doc2Term tm), _anns] -> + (seen <> toRef tm,) <$> acc' + where + acc' = case tm of + Term.Ref' r | Set.notMember r seen -> (:acc) . Term . (Reference.toText r,) <$> case r of + Reference.Builtin _ -> typeOf (Referent.Ref r) <&> \case + Nothing -> DO.BuiltinObject ("🆘 missing type signature") + Just ty -> DO.BuiltinObject (formatPrettyType ppe ty) + ref -> terms ref >>= \case + Nothing -> pure $ DO.MissingObject (SH.unsafeFromText $ Reference.toText ref) + Just tm -> do + typ <- fromMaybe (Type.builtin() "unknown") <$> typeOf (Referent.Ref ref) + let name = PPE.termName ppe (Referent.Ref ref) + let folded = formatPretty . P.lines $ TypePrinter.prettySignatures'' ppe [(name, typ)] + let full tm@(Term.Ann' _ _) _ = + formatPretty (TermPrinter.prettyBinding ppe name tm) + full tm typ = + formatPretty (TermPrinter.prettyBinding ppe name (Term.ann() tm typ)) + pure (DO.UserObject (Src folded (full tm typ))) + Term.RequestOrCtor' r _ | Set.notMember r seen -> (:acc) <$> goType r + _ -> pure acc + DD.TupleTerm' [DD.EitherLeft' (Term.TypeLink' ref), _anns] + | Set.notMember ref seen + -> (Set.insert ref seen,) . (:acc) <$> goType ref + _ -> pure s1 + reverse . 
snd <$> foldM go mempty es + diff --git a/parser-typechecker/src/Unison/Server/Endpoints/FuzzyFind.hs b/parser-typechecker/src/Unison/Server/Endpoints/FuzzyFind.hs index f76365827b..f8dd84cac7 100644 --- a/parser-typechecker/src/Unison/Server/Endpoints/FuzzyFind.hs +++ b/parser-typechecker/src/Unison/Server/Endpoints/FuzzyFind.hs @@ -11,13 +11,8 @@ module Unison.Server.Endpoints.FuzzyFind where import Control.Error (runExceptT) -import Control.Lens (view, _1) -import Data.Aeson -import Data.Function (on) -import Data.List (sortBy) -import qualified Data.Map as Map +import Data.Aeson ( defaultOptions, genericToEncoding, ToJSON(toEncoding) ) import Data.OpenApi (ToSchema) -import Data.Ord (Down (..)) import qualified Data.Text as Text import Servant ( QueryParam, @@ -40,12 +35,10 @@ import qualified Unison.Codebase.Branch as Branch import Unison.Codebase.Editor.DisplayObject import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.ShortBranchHash as SBH -import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' import Unison.NameSegment import Unison.Parser (Ann) import Unison.Prelude -import qualified Unison.Reference as Reference import qualified Unison.Server.Backend as Backend import Unison.Server.Errors ( backendError, @@ -55,12 +48,9 @@ import Unison.Server.Syntax (SyntaxText) import Unison.Server.Types ( APIGet, APIHeaders, - DefinitionDisplayResults (..), HashQualifiedName, NamedTerm, NamedType, - Suffixify (..), - TypeDefinition (..), addHeaders, mayDefault, ) @@ -112,7 +102,7 @@ data FoundTerm = FoundTerm data FoundType = FoundType { bestFoundTypeName :: HashQualifiedName - , typeDef :: DisplayObject SyntaxText + , typeDef :: DisplayObject SyntaxText SyntaxText , namedType :: NamedType } deriving (Generic, Show) @@ -158,16 +148,14 @@ serveFuzzyFind h codebase mayRoot relativePath limit typeWidth query = branch <- Backend.resolveBranchHash root codebase let b0 = Branch.head branch alignments = - take 
(fromMaybe 10 limit) - . sortBy (compare `on` (Down . FZF.score . (view _1))) - $ Backend.fuzzyFind rel branch (fromMaybe "" query) + take (fromMaybe 10 limit) $ Backend.fuzzyFind rel branch (fromMaybe "" query) ppe = Backend.basicSuffixifiedNames hashLength branch rel join <$> traverse (loadEntry root (Just rel) ppe b0) alignments errFromEither backendError ea where - loadEntry root rel ppe b0 (a, (HQ'.NameOnly . NameSegment) -> n, refs) = - traverse - (\case + loadEntry _root _rel ppe b0 (a, (HQ'.NameOnly . NameSegment) -> n, refs) = + for refs $ + \case Backend.FoundTermRef r -> (\te -> ( a @@ -179,29 +167,12 @@ serveFuzzyFind h codebase mayRoot relativePath limit typeWidth query = ) <$> Backend.termListEntry codebase b0 r n Backend.FoundTypeRef r -> do - te <- Backend.typeListEntry codebase r n - DefinitionDisplayResults _ ts _ <- Backend.prettyDefinitionsBySuffixes - rel - root - typeWidth - (Suffixify True) - codebase - [HQ.HashOnly $ Reference.toShortHash r] - let - t = Map.lookup (Reference.toText r) ts - td = case t of - Just t -> t - Nothing -> - TypeDefinition mempty mempty Nothing - . MissingObject - $ Reference.toShortHash r - namedType = Backend.typeEntryToNamedType te - pure - ( a - , FoundTypeResult - $ FoundType (bestTypeName td) (typeDefinition td) namedType - ) - ) - refs + te <- Backend.typeListEntry codebase r n + let namedType = Backend.typeEntryToNamedType te + let typeName = Backend.bestNameForType @v ppe (mayDefault typeWidth) r + typeHeader <- Backend.typeDeclHeader codebase ppe r + let ft = FoundType typeName typeHeader namedType + pure (a, FoundTypeResult ft) + parsePath p = errFromEither (`badNamespace` p) $ Path.parsePath' p errFromEither f = either (throwError . 
f) pure diff --git a/parser-typechecker/src/Unison/Server/Endpoints/GetDefinitions.hs b/parser-typechecker/src/Unison/Server/Endpoints/GetDefinitions.hs index e795dd9bd5..f7cb3b18c5 100644 --- a/parser-typechecker/src/Unison/Server/Endpoints/GetDefinitions.hs +++ b/parser-typechecker/src/Unison/Server/Endpoints/GetDefinitions.hs @@ -24,6 +24,7 @@ import Servant.Docs import Servant.Server (Handler) import Unison.Codebase (Codebase) import qualified Unison.Codebase.Path as Path +import qualified Unison.Codebase.Runtime as Rt import Unison.Codebase.ShortBranchHash ( ShortBranchHash, ) @@ -40,6 +41,7 @@ import Unison.Server.Types APIHeaders, DefinitionDisplayResults, HashQualifiedName, + NamespaceFQN, Suffixify (..), addHeaders, defaultWidth, @@ -49,7 +51,7 @@ import Unison.Var (Var) type DefinitionsAPI = "getDefinition" :> QueryParam "rootBranch" ShortBranchHash - :> QueryParam "relativeTo" HashQualifiedName + :> QueryParam "relativeTo" NamespaceFQN :> QueryParams "names" HashQualifiedName :> QueryParam "renderWidth" Width :> QueryParam "suffixifyBindings" Suffixify @@ -77,7 +79,7 @@ instance ToParam (QueryParam "suffixifyBindings" Suffixify) where Normal -instance ToParam (QueryParam "relativeTo" HashQualifiedName) where +instance ToParam (QueryParam "relativeTo" NamespaceFQN) where toParam _ = DocQueryParam "relativeTo" [".", ".base", "foo.bar"] @@ -108,14 +110,15 @@ instance ToSample DefinitionDisplayResults where serveDefinitions :: Var v => Handler () + -> Rt.Runtime v -> Codebase IO v Ann -> Maybe ShortBranchHash - -> Maybe HashQualifiedName + -> Maybe NamespaceFQN -> [HashQualifiedName] -> Maybe Width -> Maybe Suffixify -> Handler (APIHeaders DefinitionDisplayResults) -serveDefinitions h codebase mayRoot relativePath hqns width suff = +serveDefinitions h rt codebase mayRoot relativePath hqns width suff = addHeaders <$> do h rel <- @@ -126,6 +129,7 @@ serveDefinitions h codebase mayRoot relativePath hqns width suff = root width (fromMaybe (Suffixify True) suff) 
+ rt codebase $ HQ.unsafeFromText <$> hqns diff --git a/parser-typechecker/src/Unison/Server/Endpoints/ListNamespace.hs b/parser-typechecker/src/Unison/Server/Endpoints/ListNamespace.hs index 60a3095336..9f06c93e2b 100644 --- a/parser-typechecker/src/Unison/Server/Endpoints/ListNamespace.hs +++ b/parser-typechecker/src/Unison/Server/Endpoints/ListNamespace.hs @@ -15,8 +15,6 @@ import Data.OpenApi (ToSchema) import qualified Data.Text as Text import Servant ( QueryParam, - ServerError (errBody), - err400, throwError, (:>), ) @@ -34,9 +32,8 @@ import qualified Unison.Codebase.Branch as Branch import qualified Unison.Codebase.Causal as Causal import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.ShortBranchHash as SBH +import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.Hash as Hash -import qualified Unison.HashQualified as HQ -import qualified Unison.Name as Name import qualified Unison.NameSegment as NameSegment import Unison.Parser (Ann) import Unison.Prelude @@ -44,7 +41,6 @@ import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.Server.Backend as Backend import Unison.Server.Errors ( backendError, - badHQN, badNamespace, rootBranchError, ) @@ -52,6 +48,7 @@ import Unison.Server.Types ( APIGet, APIHeaders, HashQualifiedName, + NamespaceFQN, NamedTerm (..), NamedType (..), Size, @@ -59,13 +56,16 @@ import Unison.Server.Types UnisonName, addHeaders, ) -import qualified Unison.ShortHash as ShortHash import Unison.Util.Pretty (Width) import Unison.Var (Var) +import Control.Error.Util ((??)) + type NamespaceAPI = - "list" :> QueryParam "namespace" HashQualifiedName - :> APIGet NamespaceListing + "list" :> QueryParam "rootBranch" ShortBranchHash + :> QueryParam "relativeTo" NamespaceFQN + :> QueryParam "namespace" NamespaceFQN + :> APIGet NamespaceListing instance ToParam (QueryParam "namespace" Text) where toParam _ = @@ -80,14 +80,14 @@ instance ToSample NamespaceListing where [ ( "When no value is 
provided for `namespace`, the root namespace `.` is " <> "listed by default" , NamespaceListing - (Just ".") + "." "#gjlk0dna8dongct6lsd19d1o9hi5n642t8jttga5e81e91fviqjdffem0tlddj7ahodjo5" [Subnamespace $ NamedNamespace "base" "#19d1o9hi5n642t8jttg" 1244] ) ] data NamespaceListing = NamespaceListing - { namespaceListingName :: Maybe UnisonName, + { namespaceListingFQN :: UnisonName, namespaceListingHash :: UnisonHash, namespaceListingChildren :: [NamespaceObject] } @@ -160,60 +160,70 @@ serveNamespace :: Var v => Handler () -> Codebase IO v Ann - -> Maybe HashQualifiedName + -> Maybe ShortBranchHash + -> Maybe NamespaceFQN + -> Maybe NamespaceFQN -> Handler (APIHeaders NamespaceListing) -serveNamespace tryAuth codebase mayHQN = - addHeaders <$> (tryAuth *> (go tryAuth codebase mayHQN)) - where - go tryAuth codebase mayHQN = case mayHQN of - Nothing -> go tryAuth codebase $ Just "." - Just hqn -> do - parsedName <- parseHQN hqn - hashLength <- liftIO $ Codebase.hashLength codebase - case parsedName of - HQ.NameOnly n -> do - path' <- parsePath $ Name.toString n +serveNamespace tryAuth codebase mayRoot mayRelativeTo mayNamespaceName = + let + -- Various helpers + errFromEither f = either (throwError . f) pure + + parsePath p = errFromEither (`badNamespace` p) $ Path.parsePath' p + + doBackend a = do + ea <- liftIO $ runExceptT a + errFromEither backendError ea + + findShallow branch = doBackend $ Backend.findShallowInBranch codebase branch + + makeNamespaceListing ppe fqn hash entries = + pure . NamespaceListing fqn hash $ fmap + (backendListEntryToNamespaceObject ppe Nothing) + entries + + -- Lookup paths, root and listing and construct response + namespaceListing = do + root <- case mayRoot of + Nothing -> do gotRoot <- liftIO $ Codebase.getRootBranch codebase - root <- errFromEither rootBranchError gotRoot - let - p = - either id (Path.Absolute . 
Path.unrelative) $ Path.unPath' path' - ppe = Backend.basicSuffixifiedNames hashLength root - $ Path.fromPath' path' - entries <- findShallow p - processEntries - ppe - (Just $ Name.toText n) - (("#" <>) . Hash.base32Hex . Causal.unRawHash $ Branch.headHash root - ) - entries - HQ.HashOnly sh -> case SBH.fromText $ ShortHash.toText sh of - Nothing -> - throwError - . badNamespace "Malformed branch hash." - $ ShortHash.toString sh - Just h -> doBackend $ do - hash <- Backend.expandShortBranchHash codebase h - branch <- Backend.resolveBranchHash (Just hash) codebase - entries <- Backend.findShallowInBranch codebase branch - let ppe = Backend.basicSuffixifiedNames hashLength branch mempty - sbh = Text.pack . show $ SBH.fullFromHash hash - processEntries ppe Nothing sbh entries - HQ.HashQualified _ _ -> hashQualifiedNotSupported - errFromMaybe e = maybe (throwError e) pure - errFromEither f = either (throwError . f) pure - parseHQN hqn = errFromMaybe (badHQN hqn) $ HQ.fromText hqn - parsePath p = errFromEither (`badNamespace` p) $ Path.parsePath' p - doBackend a = do - ea <- liftIO $ runExceptT a - errFromEither backendError ea - findShallow p = doBackend $ Backend.findShallow codebase p - processEntries ppe name hash entries = - pure . NamespaceListing name hash $ fmap - (backendListEntryToNamespaceObject ppe Nothing) - entries - hashQualifiedNotSupported = throwError $ err400 - { errBody = "This server does not yet support searching namespaces by " - <> "hash-qualified name." - } + errFromEither rootBranchError gotRoot + Just sbh -> do + ea <- liftIO . runExceptT $ do + h <- Backend.expandShortBranchHash codebase sbh + mayBranch <- lift $ Codebase.getBranchForHash codebase h + mayBranch ?? 
Backend.CouldntLoadBranch h + errFromEither backendError ea + + -- Relative and Listing Path resolution + -- + -- The full listing path is a combination of the relativeToPath (prefix) and the namespace path + -- + -- For example: + -- "base.List" <> "Nonempty" + -- ↑ ↑ + -- relativeToPath namespacePath + -- + -- resulting in "base.List.map" which we can use via the root branch (usually the codebase hash) + -- to look up the namespace listing and present shallow name, so that the + -- definition "base.List.Nonempty.map", simple has the name "map" + -- + relativeToPath' <- (parsePath . Text.unpack) $ fromMaybe "." mayRelativeTo + namespacePath' <- (parsePath . Text.unpack) $ fromMaybe "." mayNamespaceName + + let path = Path.fromPath' relativeToPath' <> Path.fromPath' namespacePath' + let path' = Path.toPath' path + + -- Actually construct the NamespaceListing + + let listingBranch = Branch.getAt' path root + hashLength <- liftIO $ Codebase.hashLength codebase + + let shallowPPE = Backend.basicSuffixifiedNames hashLength root $ Path.fromPath' path' + let listingFQN = Path.toText . Path.unabsolute . either id (Path.Absolute . Path.unrelative) $ Path.unPath' path' + let listingHash = ("#" <>) . Hash.base32Hex . 
Causal.unRawHash $ Branch.headHash listingBranch + listingEntries <- findShallow listingBranch + makeNamespaceListing shallowPPE listingFQN listingHash listingEntries + in + addHeaders <$> (tryAuth *> namespaceListing) \ No newline at end of file diff --git a/parser-typechecker/src/Unison/Server/Errors.hs b/parser-typechecker/src/Unison/Server/Errors.hs index f574b20f7c..8e919b50c5 100644 --- a/parser-typechecker/src/Unison/Server/Errors.hs +++ b/parser-typechecker/src/Unison/Server/Errors.hs @@ -9,7 +9,7 @@ import Data.Set (Set) import qualified Data.Set as Set import qualified Data.Text.Lazy as Text import qualified Data.Text.Lazy.Encoding as Text -import Servant (ServerError (..), err400, err404, err500) +import Servant (ServerError (..), err400, err404, err500, err409) import qualified Unison.Codebase as Codebase import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.ShortBranchHash as SBH @@ -21,6 +21,7 @@ import Unison.Server.Types mungeShow, mungeString, ) +import qualified Unison.Codebase.Branch as Branch badHQN :: HashQualifiedName -> ServerError badHQN hqn = err400 @@ -36,6 +37,8 @@ backendError = \case Backend.BadRootBranch e -> rootBranchError e Backend.NoBranchForHash h -> noSuchNamespace . Text.toStrict . Text.pack $ show h + Backend.CouldntLoadBranch h -> + couldntLoadBranch h Backend.CouldntExpandBranchHash h -> noSuchNamespace . Text.toStrict . Text.pack $ show h Backend.AmbiguousBranchHash sbh hashes -> @@ -64,8 +67,14 @@ noSuchNamespace :: HashQualifiedName -> ServerError noSuchNamespace namespace = err404 { errBody = "The namespace " <> munge namespace <> " does not exist." } +couldntLoadBranch :: Branch.Hash -> ServerError +couldntLoadBranch h = + err404 { errBody = "The namespace " + <> munge (Text.toStrict . Text.pack $ show h) + <> " exists but couldn't be loaded." 
} + ambiguousNamespace :: HashQualifiedName -> Set HashQualifiedName -> ServerError -ambiguousNamespace name namespaces = err400 +ambiguousNamespace name namespaces = err409 { errBody = "Ambiguous namespace reference: " <> munge name <> ". It could refer to any of " diff --git a/parser-typechecker/src/Unison/Server/SearchResult'.hs b/parser-typechecker/src/Unison/Server/SearchResult'.hs index 19261b3606..080c97095e 100644 --- a/parser-typechecker/src/Unison/Server/SearchResult'.hs +++ b/parser-typechecker/src/Unison/Server/SearchResult'.hs @@ -32,7 +32,7 @@ data TermResult' v a = data TypeResult' v a = TypeResult' (HQ'.HashQualified Name) - (DisplayObject (Decl v a)) + (DisplayObject () (Decl v a)) Reference (Set (HQ'.HashQualified Name)) deriving (Eq, Show) diff --git a/parser-typechecker/src/Unison/Server/Syntax.hs b/parser-typechecker/src/Unison/Server/Syntax.hs index 550c446c7a..834e53d7b9 100644 --- a/parser-typechecker/src/Unison/Server/Syntax.hs +++ b/parser-typechecker/src/Unison/Server/Syntax.hs @@ -59,8 +59,6 @@ convertElement = \case SyntaxText.Referent r -> TermReference $ Referent.toText r SyntaxText.Reference r -> TypeReference $ Reference.toText r SyntaxText.Op s -> Op s - SyntaxText.Constructor -> Constructor - SyntaxText.Request -> Request SyntaxText.AbilityBraces -> AbilityBraces SyntaxText.ControlKeyword -> ControlKeyword SyntaxText.TypeOperator -> TypeOperator diff --git a/parser-typechecker/src/Unison/Server/Types.hs b/parser-typechecker/src/Unison/Server/Types.hs index 729e12a8d5..e8708e72f5 100644 --- a/parser-typechecker/src/Unison/Server/Types.hs +++ b/parser-typechecker/src/Unison/Server/Types.hs @@ -1,6 +1,7 @@ {-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingVia #-} {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE StandaloneDeriving #-} {-# OPTIONS_GHC -Wno-orphans #-} @@ -18,7 +19,7 @@ import Data.OpenApi import qualified Data.Text.Lazy as Text import qualified 
Data.Text.Lazy.Encoding as Text import Servant.API - ( FromHttpApiData, + ( FromHttpApiData (..), Get, Header, Headers, @@ -28,6 +29,7 @@ import Servant.API import Unison.Codebase.Editor.DisplayObject ( DisplayObject, ) +import qualified Unison.Codebase.ShortBranchHash as SBH import Unison.Codebase.ShortBranchHash ( ShortBranchHash (..), ) @@ -35,17 +37,11 @@ import Unison.ConstructorType (ConstructorType) import qualified Unison.HashQualified as HQ import Unison.Name (Name) import Unison.Prelude -import qualified Unison.PrettyPrintEnv as PPE +import Unison.Server.Doc (Doc) +import qualified Unison.Server.Doc as Doc import Unison.Server.Syntax (SyntaxText) -import qualified Unison.Server.Syntax as Syntax import Unison.ShortHash (ShortHash) -import Unison.Type (Type) -import qualified Unison.TypePrinter as TypePrinter -import Unison.Util.Pretty - ( Width (..), - render, - ) -import Unison.Var (Var) +import Unison.Util.Pretty ( Width (..) ) type APIHeaders x = Headers @@ -58,6 +54,8 @@ type APIGet c = Get '[JSON] (APIHeaders c) type HashQualifiedName = Text +type NamespaceFQN = Text + type Size = Int type UnisonName = Text @@ -71,15 +69,17 @@ deriving instance ToSchema Name deriving via Bool instance FromHttpApiData Suffixify deriving instance ToParamSchema Suffixify -deriving via Text instance FromHttpApiData ShortBranchHash +instance FromHttpApiData ShortBranchHash where + parseUrlPiece = maybe (Left "Invalid ShortBranchHash") Right . 
SBH.fromText + deriving instance ToParamSchema ShortBranchHash deriving via Int instance FromHttpApiData Width deriving instance ToParamSchema Width -instance ToJSON a => ToJSON (DisplayObject a) where +instance (ToJSON b, ToJSON a) => ToJSON (DisplayObject b a) where toEncoding = genericToEncoding defaultOptions -deriving instance ToSchema a => ToSchema (DisplayObject a) +deriving instance (ToSchema b, ToSchema a) => ToSchema (DisplayObject b a) instance ToJSON ShortHash where toEncoding = genericToEncoding defaultOptions @@ -117,15 +117,17 @@ data TermDefinition = TermDefinition { termNames :: [HashQualifiedName] , bestTermName :: HashQualifiedName , defnTermTag :: Maybe TermTag - , termDefinition :: DisplayObject SyntaxText + , termDefinition :: DisplayObject SyntaxText SyntaxText , signature :: SyntaxText + , termDocs :: [(HashQualifiedName, UnisonHash, Doc)] } deriving (Eq, Show, Generic) data TypeDefinition = TypeDefinition { typeNames :: [HashQualifiedName] , bestTypeName :: HashQualifiedName , defnTypeTag :: Maybe TypeTag - , typeDefinition :: DisplayObject SyntaxText + , typeDefinition :: DisplayObject SyntaxText SyntaxText + , typeDocs :: [(HashQualifiedName, UnisonHash, Doc)] } deriving (Eq, Show, Generic) data DefinitionDisplayResults = @@ -196,9 +198,14 @@ instance ToJSON TypeTag where deriving instance ToSchema TypeTag -formatType :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> SyntaxText -formatType ppe w = - fmap Syntax.convertElement . render w . TypePrinter.pretty0 ppe mempty (-1) +instance ToJSON Doc where +instance ToJSON Doc.SpecialForm where +instance ToJSON Doc.Src where +instance ToJSON a => ToJSON (Doc.Ref a) where +instance ToSchema Doc where +instance ToSchema Doc.SpecialForm where +instance ToSchema Doc.Src where +instance ToSchema a => ToSchema (Doc.Ref a) where munge :: Text -> LZ.ByteString munge = Text.encodeUtf8 . 
Text.fromStrict diff --git a/parser-typechecker/src/Unison/TermParser.hs b/parser-typechecker/src/Unison/TermParser.hs index e25c88f5a9..3103b38675 100644 --- a/parser-typechecker/src/Unison/TermParser.hs +++ b/parser-typechecker/src/Unison/TermParser.hs @@ -129,13 +129,17 @@ match = do _ <- P.try (openBlockWith "with") <|> do t <- anyToken P.customFailure (ExpectedBlockOpen "with" t) - (_arities, cases) <- unzip <$> sepBy1 semi matchCase + (_arities, cases) <- unzip <$> matchCases when (null cases) $ P.customFailure EmptyMatch _ <- closeBlock pure $ Term.match (ann start <> maybe (ann start) ann (lastMay cases)) scrutinee cases +matchCases :: Var v => P v [(Int, Term.MatchCase Ann (Term v Ann))] +matchCases = sepBy1 semi matchCase + <&> \cases -> [ (n,c) | (n,cs) <- cases, c <- cs ] + -- Returns the arity of the pattern and the `MatchCase`. Examples: -- -- (a, b) -> a - b -- arity 1 @@ -146,7 +150,7 @@ match = do -- -- 42, x -> ... -- (42, x) -> ... -matchCase :: Var v => P v (Int, Term.MatchCase Ann (Term v Ann)) +matchCase :: Var v => P v (Int, [Term.MatchCase Ann (Term v Ann)]) matchCase = do pats <- sepBy1 (reserved ",") parsePattern let boundVars' = [ v | (_,vs) <- pats, (_ann,v) <- vs ] @@ -155,10 +159,14 @@ matchCase = do pats -> foldr pair (unit (ann . 
last $ pats)) pats unit ann = Pattern.Constructor ann DD.unitRef 0 [] pair p1 p2 = Pattern.Constructor (ann p1 <> ann p2) DD.pairRef 0 [p1, p2] - guard <- optional $ reserved "|" *> infixAppOrBooleanOp - t <- block "->" + guardsAndBlocks <- many $ do + guard <- asum [ Nothing <$ P.try (reserved "|" *> quasikeyword "otherwise") + , optional $ reserved "|" *> infixAppOrBooleanOp ] + t <- block "->" + pure (guard, t) let absChain vs t = foldr (\v t -> ABT.abs' (ann t) v t) t vs - pure $ (length pats, Term.MatchCase pat (fmap (absChain boundVars') guard) (absChain boundVars' t)) + let mk (guard,t) = Term.MatchCase pat (fmap (absChain boundVars') guard) (absChain boundVars' t) + pure $ (length pats, mk <$> guardsAndBlocks) parsePattern :: forall v. Var v => P v (Pattern Ann, [(Ann, v)]) parsePattern = root @@ -182,7 +190,11 @@ parsePattern = root literal = (,[]) <$> asum [true, false, number, text, char] true = (\t -> Pattern.Boolean (ann t) True) <$> reserved "true" false = (\t -> Pattern.Boolean (ann t) False) <$> reserved "false" - number = number' (tok Pattern.Int) (tok Pattern.Nat) (tok Pattern.Float) + number = join $ + number' + (pure . tok Pattern.Int) + (pure . tok Pattern.Nat) + (tok (const . failCommitted . 
FloatPattern)) text = (\t -> Pattern.Text (ann t) (L.payload t)) <$> string char = (\c -> Pattern.Char (ann c) (L.payload c)) <$> character parenthesizedOrTuplePattern :: P v (Pattern Ann, [(Ann, v)]) @@ -290,7 +302,7 @@ checkCasesArities cases = go Nothing cases where lamCase = do start <- openBlockWith "cases" - cases <- sepBy1 semi matchCase + cases <- matchCases (arity, cases) <- checkCasesArities cases when (null cases) (P.customFailure EmptyMatch) _ <- closeBlock @@ -332,6 +344,11 @@ hashQualifiedPrefixTerm = resolveHashQualified =<< hqPrefixId hashQualifiedInfixTerm :: Var v => TermP v hashQualifiedInfixTerm = resolveHashQualified =<< hqInfixId +quasikeyword :: Ord v => String -> P v (L.Token ()) +quasikeyword kw = queryToken $ \case + L.WordyId s Nothing | s == kw -> Just () + _ -> Nothing + -- If the hash qualified is name only, it is treated as a var, if it -- has a short hash, we resolve that short hash immediately and fail -- committed if that short hash can't be found in the current environment diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index 5539925b7e..12c3c5a2c1 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -27,6 +27,7 @@ import Unison.NamePrinter ( styleHashQualified'' ) import qualified Unison.Pattern as Pattern import Unison.Pattern ( Pattern ) import Unison.Reference ( Reference ) +import qualified Unison.Reference as Reference import qualified Unison.Referent as Referent import Unison.Referent ( Referent ) import qualified Unison.Util.SyntaxText as S @@ -51,9 +52,11 @@ pretty :: Var v => PrettyPrintEnv -> Term v a -> Pretty ColorText pretty env = PP.syntaxToColor . pretty0 env emptyAc . printAnnotate env prettyBlock :: Var v => Bool -> PrettyPrintEnv -> Term v a -> Pretty ColorText -prettyBlock elideUnit env = - PP.syntaxToColor . pretty0 env (emptyBlockAc { elideUnit = elideUnit }) - . 
printAnnotate env +prettyBlock elideUnit env = PP.syntaxToColor . prettyBlock' elideUnit env + +prettyBlock' :: Var v => Bool -> PrettyPrintEnv -> Term v a -> Pretty SyntaxText +prettyBlock' elideUnit env = + pretty0 env (emptyBlockAc { elideUnit = elideUnit }) . printAnnotate env pretty' :: Var v => Maybe Width -> PrettyPrintEnv -> Term v a -> ColorText pretty' (Just width) n t = @@ -207,10 +210,16 @@ pretty0 Just c -> "?\\" ++ [c] Nothing -> '?': [c] Blank' id -> fmt S.Blank $ l "_" <> l (fromMaybe "" (Blank.nameb id)) - Constructor' ref i -> styleHashQualified'' (fmt S.Constructor) $ - elideFQN im $ PrettyPrintEnv.termName n (Referent.Con ref i CT.Data) - Request' ref i -> styleHashQualified'' (fmt S.Request) $ - elideFQN im $ PrettyPrintEnv.termName n (Referent.Con ref i CT.Effect) + Constructor' ref cid -> + styleHashQualified'' (fmt $ S.Referent conRef) name + where + name = elideFQN im $ PrettyPrintEnv.termName n conRef + conRef = Referent.Con ref cid CT.Data + Request' ref cid -> + styleHashQualified'' (fmt $ S.Referent conRef) name + where + name = elideFQN im $ PrettyPrintEnv.termName n conRef + conRef = Referent.Con ref cid CT.Effect Handle' h body -> paren (p >= 2) $ if PP.isMultiLine pb || PP.isMultiLine ph then PP.lines [ (fmt S.ControlKeyword "handle") `PP.hang` pb, @@ -313,12 +322,19 @@ pretty0 if isDocLiteral term then prettyDoc n im term else pretty0 n (a {docContext = NoDoc}) term - (TupleTerm' [x], _) -> let - pair = parenIfInfix name ic $ styleHashQualified'' (fmt S.Constructor) name - where name = elideFQN im $ PrettyPrintEnv.termName n (DD.pairCtorRef) in + (TupleTerm' [x], _) -> + let + conRef = DD.pairCtorRef + name = elideFQN im $ PrettyPrintEnv.termName n conRef + pair = parenIfInfix name ic $ styleHashQualified'' (fmt (S.Referent conRef)) name + in paren (p >= 10) $ pair `PP.hang` - PP.spaced [pretty0 n (ac 10 Normal im doc) x, fmt S.Constructor "()" ] - (TupleTerm' xs, _) -> paren True $ commaList xs + PP.spaced [pretty0 n (ac 10 Normal 
im doc) x, fmt (S.Referent DD.unitCtorRef) "()" ] + + (TupleTerm' xs, _) -> + let tupleLink p = fmt (S.Reference DD.unitRef) p + in PP.group (tupleLink "(" <> commaList xs <> tupleLink ")") + (Bytes' bs, _) -> fmt S.BytesLiteral "0xs" <> (PP.shown $ Bytes.fromWord8s (map fromIntegral bs)) BinaryAppsPred' apps lastArg -> paren (p >= 3) $ @@ -429,12 +445,17 @@ prettyPattern n c@(AmbientContext { imports = im }) p vs patt = case patt of TuplePattern pats | length pats /= 1 -> let (pats_printed, tail_vs) = patterns (-1) vs pats in (PP.parenthesizeCommas pats_printed, tail_vs) - Pattern.Constructor _ ref i [] -> - (styleHashQualified'' (fmt S.Constructor) $ elideFQN im (PrettyPrintEnv.patternName n ref i), vs) - Pattern.Constructor _ ref i pats -> + Pattern.Constructor _ ref cid [] -> + (styleHashQualified'' (fmt $ S.Referent conRef) name, vs) + where + name = elideFQN im $ PrettyPrintEnv.termName n conRef + conRef = Referent.Con ref cid CT.Data + Pattern.Constructor _ ref cid pats -> let (pats_printed, tail_vs) = patternsSep 10 PP.softbreak vs pats + name = elideFQN im $ PrettyPrintEnv.termName n conRef + conRef = Referent.Con ref cid CT.Data in ( paren (p >= 10) - $ styleHashQualified'' (fmt S.Constructor) (elideFQN im (PrettyPrintEnv.patternName n ref i)) + $ styleHashQualified'' (fmt $ S.Referent conRef) name `PP.hang` pats_printed , tail_vs) Pattern.As _ pat -> @@ -444,17 +465,24 @@ prettyPattern n c@(AmbientContext { imports = im }) p vs patt = case patt of Pattern.EffectPure _ pat -> let (printed, eventual_tail) = prettyPattern n c (-1) vs pat in (PP.sep " " [fmt S.DelimiterChar "{", printed, fmt S.DelimiterChar "}"], eventual_tail) - Pattern.EffectBind _ ref i pats k_pat -> + Pattern.EffectBind _ ref cid pats k_pat -> let (pats_printed , tail_vs ) = patternsSep 10 PP.softbreak vs pats (k_pat_printed, eventual_tail) = prettyPattern n c 0 tail_vs k_pat - in ((fmt S.DelimiterChar "{" ) <> - (PP.sep " " . 
PP.nonEmpty $ [ - styleHashQualified'' (fmt S.Request) $ elideFQN im (PrettyPrintEnv.patternName n ref i), - pats_printed, - fmt S.ControlKeyword "->", - k_pat_printed]) <> - (fmt S.DelimiterChar "}") - , eventual_tail) + name = elideFQN im $ PrettyPrintEnv.termName n conRef + conRef = Referent.Con ref cid CT.Effect + in ( PP.group ( + fmt S.DelimiterChar "{" <> + (PP.sep " " . PP.nonEmpty $ + [ styleHashQualified'' (fmt (S.Referent conRef)) $ name + , pats_printed + , fmt S.ControlKeyword "->" + , k_pat_printed + ] + ) <> + fmt S.DelimiterChar "}" + ) + , eventual_tail + ) Pattern.SequenceLiteral _ pats -> let (pats_printed, tail_vs) = patternsSep (-1) (fmt S.DelimiterChar ", ") vs pats in ((fmt S.DelimiterChar "[") <> pats_printed <> (fmt S.DelimiterChar "]"), tail_vs) @@ -483,6 +511,25 @@ type MatchCase' ann tm = ([Pattern ann], Maybe tm, tm) arity1Branches :: [MatchCase ann tm] -> [MatchCase' ann tm] arity1Branches bs = [ ([pat], guard, body) | MatchCase pat guard body <- bs ] +-- Groups adjacent cases with the same pattern together, +-- for easier pretty-printing, for instance: +-- +-- Foo x y | blah1 x -> body1 +-- Foo x y | blah2 y -> body2 +-- +-- becomes +-- +-- Foo x y, [x,y], [(blah1 x, body1), (blah2 y, body2)] +groupCases :: Ord v => [MatchCase' () (Term3 v ann)] + -> [([Pattern ()], [v], [(Maybe (Term3 v ann), Term3 v ann)])] +groupCases ms = go0 ms where + go0 [] = [] + go0 ms@((p1, _, AbsN' vs1 _) : _) = go2 (p1,vs1) [] ms + go2 (p0,vs0) acc [] = [(p0,vs0,reverse acc)] + go2 (p0, vs0) acc ms@((p1, g1, AbsN' vs body) : tl) + | p0 == p1 && vs == vs0 = go2 (p0, vs0) ((g1,body):acc) tl + | otherwise = (p0,vs0,reverse acc) : go0 ms + printCase :: Var v => PrettyPrintEnv @@ -490,31 +537,51 @@ printCase -> DocLiteralContext -> [MatchCase' () (Term3 v PrintAnnotation)] -> Pretty SyntaxText -printCase env im doc ms = PP.lines $ map each gridArrowsAligned where +printCase env im doc ms0 = PP.lines $ map each gridArrowsAligned where + ms = groupCases ms0 each 
(lhs, arrow, body) = PP.group $ (lhs <> arrow) `PP.hang` body - grid = go <$> ms + grid = go =<< ms gridArrowsAligned = tidy <$> zip (PP.align' (f <$> grid)) grid where f (a, b, _) = (a, Just b) tidy ((a', b'), (_, _, c)) = (a', b', c) - go (pats, guard, (AbsN' vs body)) = - (lhs, arrow, (uses [pretty0 env (ac 0 Block im' doc) body])) + + patLhs vs pats = case pats of + [pat] -> PP.group (fst (prettyPattern env (ac 0 Block im doc) (-1) vs pat)) + pats -> PP.group . PP.sep ("," <> PP.softbreak) . (`evalState` vs) . for pats $ \pat -> do + vs <- State.get + let (p, rem) = prettyPattern env (ac 0 Block im doc) (-1) vs pat + State.put rem + pure p + + arrow = fmt S.ControlKeyword "->" + goBody im' uses body = uses [pretty0 env (ac 0 Block im' doc) body] + printGuard (Just (ABT.AbsN' _ g)) = + -- strip off any Abs-chain around the guard, guard variables are rendered + -- like any other variable, ex: case Foo x y | x < y -> ... + PP.group $ PP.spaced [(fmt S.DelimiterChar " |"), pretty0 env (ac 2 Normal im doc) g] + printGuard Nothing = mempty + + go (pats, vs, [(guard, body)]) = + [(lhs, arrow, goBody im' uses body)] where - lhs = (case pats of - [pat] -> PP.group (fst (prettyPattern env (ac 0 Block im doc) (-1) vs pat)) - pats -> PP.group . PP.sep ("," <> PP.softbreak) . (`evalState` vs) . for pats $ \pat -> do - vs <- State.get - let (p, rem) = prettyPattern env (ac 0 Block im doc) (-1) vs pat - State.put rem - pure p) - <> printGuard guard - arrow = fmt S.ControlKeyword "->" - printGuard (Just g') = let (_, g) = ABT.unabs g' in - -- strip off any Abs-chain around the guard, guard variables are rendered - -- like any other variable, ex: case Foo x y | x < y -> ... 
- PP.group $ PP.spaced [(fmt S.DelimiterChar " |"), pretty0 env (ac 2 Normal im doc) g] - printGuard Nothing = mempty + lhs = patLhs vs pats <> printGuard guard (im', uses) = calcImports im body - go _ = (l "error", mempty, mempty) + -- If there's multiple guarded cases for this pattern, prints as: + -- MyPattern x y + -- | guard 1 -> 1 + -- | otherguard x y -> 2 + -- | otherwise -> 3 + go (pats, vs, unzip -> (guards, bodies)) = + (patLhs vs pats, mempty, mempty) + : zip3 (PP.indentN 2 . printGuard <$> guards) + (repeat arrow) + (printBody <$> bodies) + where + printGuard Nothing = (fmt S.DelimiterChar "|") <> fmt S.ControlKeyword " otherwise" + printGuard (Just (ABT.AbsN' _ g)) = + PP.group $ PP.spaced [(fmt S.DelimiterChar "|"), pretty0 env (ac 2 Normal im doc) g] + printBody b = let (im', uses) = calcImports im b + in goBody im' uses b {- Render a binding, producing output of the form @@ -1066,7 +1133,6 @@ immediateChildBlockTerms = \case _ -> [] where doCase (MatchCase _ _ (AbsN' _ body)) = [body] - doCase _ = error "bad match" [] doLet (v, Ann' tm _) = doLet (v, tm) doLet (v, LamsNamedOpt' _ body) = if isBlank $ Var.nameStr v then [] @@ -1378,11 +1444,22 @@ toDocVerbatim _ _ = Nothing toDocEval :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocEval ppe (App' (Ref' r) (Delay' tm)) | nameEndsWith ppe ".docEval" r = Just tm + | r == _oldDocEval = Just tm toDocEval _ _ = Nothing +-- Old hashes for docEval, docEvalInline w/ incorrect type signatures. +-- They are still used by some existing docs so the pretty-printer +-- recognizes it. 
+-- +-- See https://github.com/unisonweb/unison/issues/2238 +_oldDocEval, _oldDocEvalInline :: Reference +_oldDocEval = Reference.unsafeFromText "#0ua7gqa7kqnj80ulhmtcqsfgalmh4g9kg198dt2uen0s0jeebbo4ljnj4133cn1kbm38i2q3joivodtfei3jfln5scof0r0381k8dm0" +_oldDocEvalInline = Reference.unsafeFromText "#maleg6fmu3j0k0vgm99lgrsnhio3ba750hcainuv5jdi9scdsg43hpicmf6lovsa0mnaija7bjebnr5nas3qsj4r087hur1jh0rsfso" + toDocEvalInline :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocEvalInline ppe (App' (Ref' r) (Delay' tm)) | nameEndsWith ppe ".docEvalInline" r = Just tm + | r == _oldDocEvalInline = Just tm toDocEvalInline _ _ = Nothing toDocExample, toDocExampleBlock :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) diff --git a/parser-typechecker/src/Unison/TypePrinter.hs b/parser-typechecker/src/Unison/TypePrinter.hs index 6acb8922f4..12677efa4c 100644 --- a/parser-typechecker/src/Unison/TypePrinter.hs +++ b/parser-typechecker/src/Unison/TypePrinter.hs @@ -23,7 +23,10 @@ import qualified Unison.Var as Var import qualified Unison.Builtin.Decls as DD pretty :: forall v a . (Var v) => PrettyPrintEnv -> Type v a -> Pretty ColorText -pretty ppe = PP.syntaxToColor . pretty0 ppe mempty (-1) +pretty ppe = PP.syntaxToColor . prettySyntax ppe + +prettySyntax :: forall v a . 
(Var v) => PrettyPrintEnv -> Type v a -> Pretty SyntaxText +prettySyntax ppe = pretty0 ppe mempty (-1) pretty' :: Var v => Maybe Width -> PrettyPrintEnv -> Type v a -> String pretty' (Just width) n t = @@ -143,7 +146,13 @@ prettySignatures' :: Var v => PrettyPrintEnv -> [(HashQualified Name, Type v a)] -> [Pretty ColorText] -prettySignatures' env ts = map PP.syntaxToColor $ PP.align +prettySignatures' env ts = map PP.syntaxToColor $ prettySignatures'' env ts + +prettySignatures'' + :: Var v => PrettyPrintEnv + -> [(HashQualified Name, Type v a)] + -> [Pretty SyntaxText] +prettySignatures'' env ts = PP.align [ ( styleHashQualified'' (fmt $ S.HashQualifier name) name , (fmt S.TypeAscriptionColon ": " <> pretty0 env Map.empty (-1) typ) `PP.orElse` ( fmt S.TypeAscriptionColon ": " diff --git a/parser-typechecker/src/Unison/Typechecker.hs b/parser-typechecker/src/Unison/Typechecker.hs index a0f8b6a536..42c5deb704 100644 --- a/parser-typechecker/src/Unison/Typechecker.hs +++ b/parser-typechecker/src/Unison/Typechecker.hs @@ -176,7 +176,7 @@ data Resolution v loc = -- | Infer the type of a 'Unison.Term', using type-directed name resolution -- to attempt to resolve unknown symbols. synthesizeAndResolve - :: (Monad f, Var v, Ord loc) => Env v loc -> TDNR f v loc (Type v loc) + :: (Monad f, Var v, Monoid loc, Ord loc) => Env v loc -> TDNR f v loc (Type v loc) synthesizeAndResolve env = do tm <- get (tp, notes) <- listen . lift $ synthesize env tm @@ -210,7 +210,7 @@ liftResult = lift . MaybeT . WriterT . pure . runIdentity . runResultT -- 3. No match at all. Throw an unresolved symbol at the user. typeDirectedNameResolution :: forall v loc f - . (Monad f, Var v, Ord loc) + . (Monad f, Var v, Ord loc, Monoid loc) => Notes v loc -> Type v loc -> Env v loc @@ -294,7 +294,7 @@ typeDirectedNameResolution oldNotes oldType env = do -> Result (Notes v loc) [Context.Suggestion v loc] resolve inferredType (NamedReference fqn foundType replace) = -- We found a name that matches. 
See if the type matches too. - case Context.isSubtype (TypeVar.liftType foundType) inferredType of + case Context.isSubtype (TypeVar.liftType foundType) (Context.relax inferredType) of Left bug -> const [] <$> compilerBug bug -- Suggest the import if the type matches. Right b -> pure diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index 206d7a0d4b..2313c5b8b6 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -34,6 +34,7 @@ module Unison.Typechecker.Context , typeErrors , infoNotes , Unknown(..) + , relax ) where @@ -56,6 +57,7 @@ import Data.Functor.Compose ( Compose(..) ) import Data.List import Data.List.NonEmpty ( NonEmpty ) import qualified Data.Map as Map +import Data.Ord ( comparing ) import qualified Data.Sequence as Seq import Data.Sequence.NonEmpty ( NESeq ) import qualified Data.Sequence.NonEmpty as NESeq @@ -86,11 +88,13 @@ type Term v loc = Term.Term' (TypeVar v loc) v loc type Monotype v loc = Type.Monotype (TypeVar v loc) loc type RedundantTypeAnnotation = Bool +type Wanted v loc = [(Maybe (Term v loc), Type v loc)] + pattern Universal v = Var (TypeVar.Universal v) -pattern Existential b v = Var (TypeVar.Existential b v) +pattern Existential b v <- Var (TypeVar.Existential b v) existential :: v -> Element v loc -existential = Existential B.Blank +existential v = Var (TypeVar.Existential B.Blank v) existential' :: Ord v => a -> B.Blank loc -> v -> Type.Type (TypeVar v loc) a existential' a blank v = ABT.annotatedVar a (TypeVar.Existential blank v) @@ -103,10 +107,15 @@ universal' a v = ABT.annotatedVar a (TypeVar.Universal v) -- | Elements of an ordered algorithmic context data Element v loc - = Var (TypeVar v loc) -- A variable declaration - | Solved (B.Blank loc) v (Monotype v loc) -- `v` is solved to some monotype - | Ann v (Type v loc) -- `v` has type `a`, maybe quantified - | Marker v -- used for 
scoping + -- | A variable declaration + = Var (TypeVar v loc) + -- | `v` is solved to some monotype + | Solved (B.Blank loc) v (Monotype v loc) + -- | `v` has type `a`, maybe quantified + | Ann v (Type v loc) + -- | used for scoping + | Marker v + instance (Ord loc, Var v) => Eq (Element v loc) where Var v == Var v2 = v == v2 @@ -354,12 +363,8 @@ scope p (MT m) = MT (mapErrors (scope' p) . m) -- | The typechecking environment data MEnv v loc = MEnv { env :: Env v loc, -- The typechecking state - abilities :: [Type v loc], -- Allowed ambient abilities dataDecls :: DataDeclarations v loc, -- Data declarations in scope - effectDecls :: EffectDeclarations v loc, -- Effect declarations in scope - -- Types for which ability check should be skipped. - -- See abilityCheck function for how this is used. - skipAbilityCheck :: [Type v loc] + effectDecls :: EffectDeclarations v loc -- Effect declarations in scope } newtype Context v loc = Context [(Element v loc, Info v loc)] @@ -377,6 +382,13 @@ data Info v loc = context0 :: Context v loc context0 = Context [] +occursAnn :: Var v => Ord loc => TypeVar v loc -> Context v loc -> Bool +occursAnn v (Context eis) = any p es + where + es = fst <$> eis + p (Ann _ ty) = v `Set.member` ABT.freeVars (applyCtx es ty) + p _ = False + -- | Focuses on the first element in the list that satisfies the predicate. -- Returns `(prefix, focusedElem, suffix)`, where `prefix` is in reverse order. 
focusAt :: (a -> Bool) -> [a] -> Maybe ([a], a, [a]) @@ -470,6 +482,10 @@ ordered ctx v v2 = Set.member v (existentials (retract' (existential v2) ctx)) debugEnabled :: Bool debugEnabled = False +debugShow :: Show a => a -> Bool +debugShow e | debugEnabled = traceShow e False +debugShow _ = False + debugPatternsEnabled :: Bool debugPatternsEnabled = False @@ -660,16 +676,6 @@ getDataDeclarations = fromMEnv dataDecls getEffectDeclarations :: M v loc (EffectDeclarations v loc) getEffectDeclarations = fromMEnv effectDecls -getAbilities :: M v loc [Type v loc] -getAbilities = fromMEnv abilities - -shouldPerformAbilityCheck :: (Ord loc, Var v) => Type v loc -> M v loc Bool -shouldPerformAbilityCheck t = do - skip <- fromMEnv skipAbilityCheck - skip <- traverse applyM skip - t <- applyM t - pure $ all (/= t) skip - compilerCrash :: CompilerBug v loc -> M v loc a compilerCrash bug = liftResult $ compilerBug bug @@ -733,7 +739,7 @@ extendUniversal v = do extendExistential :: (Var v) => v -> M v loc v extendExistential v = do v' <- freshenVar v - extendContext (Existential B.Blank v') + extendContext (Var (TypeVar.Existential B.Blank v')) pure v' extendExistentialTV :: Var v => v -> M v loc (TypeVar v loc) @@ -774,44 +780,59 @@ apply' solvedExistentials t = go t where loc :: ABT.Term f v loc -> loc loc = ABT.annotation --- Prepends the provided abilities onto the existing ambient for duration of `m` -withEffects :: [Type v loc] -> M v loc a -> M v loc a -withEffects abilities' m = - MT (\menv -> runM m (menv { abilities = abilities' ++ abilities menv })) - --- Replaces the ambient abilities with the provided for duration of `m` -withEffects0 :: [Type v loc] -> M v loc a -> M v loc a -withEffects0 abilities' m = - MT (\menv -> runM m (menv { abilities = abilities' })) - - -synthesizeApps :: (Foldable f, Var v, Ord loc) => Type v loc -> f (Term v loc) -> M v loc (Type v loc) -synthesizeApps ft args = - foldM go ft $ Foldable.toList args `zip` [1..] 
- where go ft arg = do +-- | Post-processes an action that wants abilities by filtering out +-- some handled abilities. +withEffects + :: Var v + => Ord loc + => [Type v loc] + -> M v loc (Wanted v loc) + -> M v loc (Wanted v loc) +withEffects handled act = do + want <- expandWanted =<< act + handled <- expandAbilities handled + pruneWanted [] want handled + +synthesizeApps + :: (Foldable f, Var v, Ord loc) + => Term v loc + -> Type v loc + -> f (Term v loc) -> M v loc (Type v loc, Wanted v loc) +synthesizeApps fun ft args = + foldM go (ft, []) $ Foldable.toList args `zip` [1..] + where go (ft, want) arg = do ctx <- getContext - synthesizeApp (apply ctx ft) arg + (t, rwant) <- synthesizeApp fun (apply ctx ft) arg + (t,) <$> coalesceWanted rwant want -- | Synthesize the type of the given term, `arg` given that a function of -- the given type `ft` is being applied to `arg`. Update the context in -- the process. -- e.g. in `(f:t) x` -- finds the type of (f x) given t and x. -synthesizeApp :: (Var v, Ord loc) => Type v loc -> (Term v loc, Int) -> M v loc (Type v loc) -synthesizeApp ft arg | debugEnabled && traceShow ("synthesizeApp"::String, ft, arg) False = undefined -synthesizeApp (Type.stripIntroOuters -> Type.Effect'' es ft) argp@(arg, argNum) = - scope (InSynthesizeApp ft arg argNum) $ abilityCheck es >> go ft +synthesizeApp + :: (Var v, Ord loc) + => Term v loc + -> Type v loc + -> (Term v loc, Int) + -> M v loc (Type v loc, Wanted v loc) +synthesizeApp _ ft arg + | debugEnabled && traceShow ("synthesizeApp"::String, ft, arg) False + = undefined +synthesizeApp fun (Type.stripIntroOuters -> Type.Effect'' es ft) argp@(arg, argNum) = + scope (InSynthesizeApp ft arg argNum) $ do + (t, w) <- go ft + (t,) <$> coalesceWanted ((Just fun,) <$> es) w where go (Type.Forall' body) = do -- Forall1App v <- ABT.freshen body freshenTypeVar appendContext [existential v] let ft2 = ABT.bindInheritAnnotation body (existential' () B.Blank v) - synthesizeApp ft2 argp - go (Type.Arrow' 
i o) = do -- ->App - let (es, _) = Type.stripEffect o - abilityCheck es - o <$ check arg i + synthesizeApp fun ft2 argp + go (Type.Arrow' i o0) = do -- ->App + let (es, o) = Type.stripEffect o0 + (o,) <$> checkWantedScoped ((Just fun,) <$> es) arg i go (Type.Var' (TypeVar.Existential b a)) = do -- a^App - [i,e,o] <- traverse freshenVar [Var.named "i", Var.named "synthsizeApp-refined-effect", Var.named "o"] + [i,e,o] <- traverse freshenVar [Var.named "i", Var.inferAbility, Var.named "o"] let it = existential' (loc ft) B.Blank i ot = existential' (loc ft) B.Blank o et = existential' (loc ft) B.Blank e @@ -821,9 +842,10 @@ synthesizeApp (Type.stripIntroOuters -> Type.Effect'' es ft) argp@(arg, argNum) ctxMid = [existential o, existential e, existential i, Solved b a soln] replaceContext (existential a) ctxMid - synthesizeApp (Type.getPolytype soln) argp + synthesizeApp fun (Type.getPolytype soln) argp go _ = getContext >>= \ctx -> failWith $ TypeMismatch ctx -synthesizeApp _ _ = error "unpossible - Type.Effect'' pattern always succeeds" +synthesizeApp _ _ _ + = error "unpossible - Type.Effect'' pattern always succeeds" -- For arity 3, creates the type `∀ a . a -> a -> a -> Sequence a` -- For arity 2, creates the type `∀ a . a -> a -> Sequence a` @@ -844,7 +866,7 @@ generalizeExistentials' generalizeExistentials' t = Type.generalize (filter isExistential . 
Set.toList $ ABT.freeVars t) t where - isExistential (TypeVar.Existential _ _) = True + isExistential TypeVar.Existential{} = True isExistential _ = False noteTopLevelType @@ -855,7 +877,7 @@ noteTopLevelType -> M v loc () noteTopLevelType e binding typ = case binding of Term.Ann' strippedBinding _ -> do - inferred <- (Just <$> synthesize strippedBinding) `orElse` pure Nothing + inferred <- (Just <$> synthesizeTop strippedBinding) `orElse` pure Nothing case inferred of Nothing -> btw $ topLevelComponent [(Var.reset (ABT.variable e), generalizeAndUnTypeVar typ, False)] @@ -867,75 +889,214 @@ noteTopLevelType e binding typ = case binding of _ -> btw $ topLevelComponent [(Var.reset (ABT.variable e), generalizeAndUnTypeVar typ, True)] --- | Synthesize the type of the given term, updating the context in the process. +synthesizeTop + :: Var v + => Ord loc + => Term v loc + -> M v loc (Type v loc) +synthesizeTop tm = do + (ty, want) <- synthesize tm + ctx <- getContext + want <- substAndDefaultWanted want (out ctx) + when (not $ null want) . failWith $ do + AbilityCheckFailure + [] + (Type.flattenEffects . snd =<< want) + ctx + applyM ty + where + out (Context es) = fmap fst es + +-- | Synthesize the type of the given term, updating the context in +-- the process. Also collect wanted abilities. -- | Figure 11 from the paper -synthesize :: forall v loc . 
(Var v, Ord loc) => Term v loc -> M v loc (Type v loc) -synthesize e | debugEnabled && traceShow ("synthesize"::String, e) False = undefined +synthesize + :: Var v + => Ord loc + => Term v loc + -> M v loc (Type v loc, Wanted v loc) +synthesize e | debugShow ("synthesize"::String, e) = undefined synthesize e = scope (InSynthesize e) $ case minimize' e of Left es -> failWith (DuplicateDefinitions es) Right e -> do - Type.Effect'' es t <- go e - abilityCheck es - pure t - where - l = loc e - go :: (Var v, Ord loc) => Term v loc -> M v loc (Type v loc) - go (Term.Var' v) = getContext >>= \ctx -> case lookupAnn ctx v of -- Var + (Type.Effect'' es t, want) <- synthesizeWanted e + want <- coalesceWanted (fmap (Just e,) es) want + pure (t, want) + +-- | Helper function for turning an ability request's type into the +-- results used by type checking. +wantRequest + :: Var v + => Ord loc + => Term v loc + -> Type v loc + -> (Type v loc, Wanted v loc) +wantRequest loc ~(Type.Effect'' es t) = (t, fmap (Just loc,) es) + +-- | This is the main worker for type synthesis. It was factored out +-- of the `synthesize` function. It handles the various actual +-- synthesis cases for terms, while the `synthesize` function wraps +-- this in some common pre/postprocessing. +-- +-- The return value is the synthesized type together with a list of +-- wanted abilities. +synthesizeWanted + :: Var v + => Ord loc + => Term v loc + -> M v loc (Type v loc, Wanted v loc) +synthesizeWanted (Term.Var' v) = getContext >>= \ctx -> + case lookupAnn ctx v of -- Var Nothing -> compilerCrash $ UndeclaredTermVariable v ctx - Just t -> pure t - go (Term.Blank' blank) = do + -- variables accesses are pure + Just t -> do + -- Note: we ungeneralize the type for ease of discarding. The + -- current algorithm isn't sensitive to keeping things + -- quantified, so it should be valid to not worry about + -- re-generalizing. 
+ -- + -- Polymorphic ability variables in covariant positions in an + -- occurrence's type only add useless degrees of freedom to the + -- solver. They allow an occurrence to 'want' any row, but the + -- occurrence might as well be chosen to 'want' the empty row, + -- since that can be satisfied the most easily. The solver + -- generally has no way of deciding that these arbitrary degrees + -- of freedom are unnecessary later, and will get confused about + -- which variable ot instantiate, so we ought to discard them + -- early. + (vs, t) <- ungeneralize' t + pure (discardCovariant (Set.fromList vs) t, []) +synthesizeWanted (Term.Ref' h) + = compilerCrash $ UnannotatedReference h +synthesizeWanted (Term.Ann' (Term.Ref' _) t) + -- innermost Ref annotation assumed to be correctly provided by + -- `synthesizeClosed` + -- + -- Top level references don't have their own effects. + | Set.null s = do + t <- existentializeArrows t + -- See note about ungeneralizing above in the Var case. + t <- ungeneralize t + pure (discard t, []) + | otherwise = compilerCrash $ FreeVarsInTypeAnnotation s + where + s = ABT.freeVars t + discard ty = discardCovariant fvs ty + where + fvs = foldMap p $ ABT.freeVars ty + p (TypeVar.Existential _ v) = Set.singleton v + p _ = mempty + +synthesizeWanted (Term.Constructor' r cid) + -- Constructors do not have effects + = (,[]) . Type.purifyArrows <$> getDataConstructorType r cid +synthesizeWanted tm@(Term.Request' r cid) = + fmap (wantRequest tm) . ungeneralize . Type.purifyArrows + =<< getEffectConstructorType r cid +synthesizeWanted (Term.Let1Top' top binding e) = do + isClosed <- isClosed binding + -- note: no need to freshen binding, it can't refer to v + ((tb, wb), ctx2) <- markThenRetract Var.inferOther $ do + _ <- extendExistential Var.inferOther + synthesize binding + -- regardless of whether we generalize existentials, we'll need to + -- process the wanted abilities with respect to things falling out + -- of scope. 
+ wb <- substAndDefaultWanted wb ctx2 + -- If the binding has no free variables, we generalize over its + -- existentials + tbinding <- + if isClosed then pure $ generalizeExistentials ctx2 tb + else applyM . applyCtx ctx2 $ tb + v' <- ABT.freshen e freshenVar + appendContext [Ann v' tbinding] + (t, w) <- synthesize (ABT.bindInheritAnnotation e (Term.var () v')) + t <- applyM t + when top $ noteTopLevelType e binding tbinding + want <- coalesceWanted w wb + -- doRetract $ Ann v' tbinding + pure (t, want) +synthesizeWanted (Term.LetRecNamed' [] body) = synthesizeWanted body +synthesizeWanted (Term.LetRecTop' isTop letrec) = do + ((t, want), ctx2) <- markThenRetract (Var.named "let-rec-marker") $ do + e <- annotateLetRecBindings isTop letrec + synthesize e + want <- substAndDefaultWanted want ctx2 + pure (generalizeExistentials ctx2 t, want) +synthesizeWanted (Term.Handle' h body) = do + -- To synthesize a handle block, we first synthesize the handler h, + -- then push its allowed abilities onto the current ambient set when + -- checking the body. Assuming that works, we also verify that the + -- handler only uses abilities in the current ambient set. + (ht, hwant) <- synthesize h + ht <- ungeneralize =<< applyM ht + ctx <- getContext + case ht of + -- common case, like `h : Request {Remote} a -> b`, brings + -- `Remote` into ambient when checking `body` + Type.Arrow' (Type.Apps' (Type.Ref' ref) [et,i]) o | ref == Type.effectRef -> do + let es = Type.flattenEffects et + bwant <- withEffects es $ checkWanted [] body i + o <- applyM o + let (oes, o') = Type.stripEffect o + want <- coalesceWanted (fmap (Just h,) oes ++ bwant) hwant + pure (o', want) + -- degenerate case, like `handle x -> 10 in ...` + -- todo: reviewme - I think just generate a type error in this case + -- Currently assuming no effects are handled. 
+ Type.Arrow' (i@(Type.Var' (TypeVar.Existential _ v@(lookupSolved ctx -> Nothing)))) o -> do + r <- extendExistential v + let rt = existentialp (loc i) r + e0 = Type.apps (Type.ref (loc i) Type.effectRef) + [(loc i, Type.effects (loc i) []), (loc i, rt)] + subtype i e0 + o <- applyM o + let (oes, o') = Type.stripEffect o + want <- checkWanted (fmap (Just h,) oes) body rt + pure (o', want) + _ -> failWith $ HandlerOfUnexpectedType (loc h) ht + +synthesizeWanted (Term.Ann' e t) = checkScoped e t +synthesizeWanted tm@(Term.Apps' f args) = do -- ->EEEEE + (ft, fwant) <- synthesize f + ctx <- getContext + (vs, ft) <- ungeneralize' ft + (at, awant) <- scope (InFunctionCall vs f ft args) + $ synthesizeApps tm (apply ctx ft) args + (at,) <$> coalesceWanted awant fwant + + +-- From here down, the term location is used in the result, so it is +-- more convenient to use pattern guards. +synthesizeWanted e + -- literals + | Term.Float' _ <- e = pure (Type.float l, []) -- 1I=> + | Term.Int' _ <- e = pure (Type.int l, []) -- 1I=> + | Term.Nat' _ <- e = pure (Type.nat l, []) -- 1I=> + | Term.Boolean' _ <- e = pure (Type.boolean l, []) + | Term.Text' _ <- e = pure (Type.text l, []) + | Term.Char' _ <- e = pure (Type.char l, []) + | Term.TermLink' _ <- e = pure (Type.termLink l, []) + | Term.TypeLink' _ <- e = pure (Type.typeLink l, []) + + | Term.Blank' blank <- e = do v <- freshenVar Var.blank - appendContext [Existential blank v] - pure $ existential' l blank v -- forall (TypeVar.Universal v) (Type.universal v) - go (Term.Ann' (Term.Ref' _) t) = case ABT.freeVars t of - s | Set.null s -> - -- innermost Ref annotation assumed to be correctly provided by `synthesizeClosed` - existentializeArrows t - s -> compilerCrash $ FreeVarsInTypeAnnotation s - go (Term.Ref' h) = compilerCrash $ UnannotatedReference h - go (Term.Constructor' r cid) = - Type.purifyArrows <$> getDataConstructorType r cid - go (Term.Request' r cid) = - ungeneralize . 
Type.purifyArrows =<< getEffectConstructorType r cid - go (Term.Ann' e t) = checkScoped e t - go (Term.Float' _) = pure $ Type.float l -- 1I=> - go (Term.Int' _) = pure $ Type.int l -- 1I=> - go (Term.Nat' _) = pure $ Type.nat l -- 1I=> - go (Term.Boolean' _) = pure $ Type.boolean l - go (Term.Text' _) = pure $ Type.text l - go (Term.Char' _) = pure $ Type.char l - go (Term.TermLink' _) = pure $ Type.termLink l - go (Term.TypeLink' _) = pure $ Type.typeLink l - go (Term.Apps' f args) = do -- ->EEEEE - ft <- synthesize f - ctx <- getContext - (vs, ft) <- ungeneralize' ft - scope (InFunctionCall vs f ft args) $ synthesizeApps (apply ctx ft) args - go (Term.List' v) = do - ft <- vectorConstructorOfArity (loc e) (Foldable.length v) + appendContext [Var (TypeVar.Existential blank v)] + pure (existential' l blank v, []) + + | Term.List' v <- e = do + ft <- vectorConstructorOfArity l (Foldable.length v) case Foldable.toList v of - [] -> pure ft + [] -> pure (ft, []) v1 : _ -> - scope (InVectorApp (ABT.annotation v1)) $ synthesizeApps ft v - go (Term.Let1Top' top binding e) = do - isClosed <- isClosed binding - -- note: no need to freshen binding, it can't refer to v - (t, ctx2) <- markThenRetract Var.inferOther $ do - _ <- extendExistential Var.inferOther - synthesize binding - -- If the binding has no free variables, we generalize over its existentials - tbinding <- - if isClosed then pure $ generalizeExistentials ctx2 t - else applyM . applyCtx ctx2 $ t - v' <- ABT.freshen e freshenVar - appendContext [Ann v' tbinding] - t <- applyM =<< synthesize (ABT.bindInheritAnnotation e (Term.var() v')) - when top $ noteTopLevelType e binding tbinding - -- doRetract $ Ann v' tbinding - pure t - go (Term.Lam' body) = do -- ->I=> (Full Damas Milner rule) - -- arya: are there more meaningful locations we could put into and pull out of the abschain?) 
+ scope (InVectorApp (ABT.annotation v1)) + $ synthesizeApps e ft v + + -- ->I=> (Full Damas Milner rule) + | Term.Lam' body <- e = do + -- arya: are there more meaningful locations we could put into and + -- pull out of the abschain?) [arg, i, e, o] <- sequence [ ABT.freshen body freshenVar , freshenVar (ABT.variable body) , freshenVar Var.inferAbility @@ -946,59 +1107,38 @@ synthesize e = scope (InSynthesize e) $ appendContext $ [existential i, existential e, existential o, Ann arg it] body' <- pure $ ABT.bindInheritAnnotation body (Term.var() arg) - if Term.isLam body' then withEffects0 [] $ check body' ot - else withEffects0 [et] $ check body' ot + if Term.isLam body' + then checkWithAbilities [] body' ot + else checkWithAbilities [et] body' ot ctx <- getContext - let t = Type.arrow l it (Type.effect l (apply ctx <$> [et]) ot) - pure t - go (Term.LetRecNamed' [] body) = synthesize body - go (Term.LetRecTop' isTop letrec) = do - (t, ctx2) <- markThenRetract (Var.named "let-rec-marker") $ do - e <- annotateLetRecBindings isTop letrec - synthesize e - pure $ generalizeExistentials ctx2 t - go (Term.If' cond t f) = do - scope InIfCond $ check cond (Type.boolean l) - scope (InIfBody $ ABT.annotation t) $ synthesizeApps (Type.iff2 l) [t, f] - go (Term.And' a b) = - scope InAndApp $ synthesizeApps (Type.andor' l) [a, b] - go (Term.Or' a b) = - scope InOrApp $ synthesizeApps (Type.andor' l) [a, b] - go (Term.Match' scrutinee cases) = do - scrutineeType <- synthesize scrutinee + let t = apply ctx $ Type.arrow l it (Type.effect l [et] ot) + pure (t, []) + + | Term.If' cond t f <- e = do + cwant <- scope InIfCond $ check cond (Type.boolean l) + (ty, bwant) <- + scope (InIfBody $ ABT.annotation t) + $ synthesizeApps e (Type.iff2 l) [t, f] + (ty,) <$> coalesceWanted bwant cwant + + | Term.And' a b <- e + = scope InAndApp $ synthesizeApps e (Type.andor' l) [a, b] + + | Term.Or' a b <- e + = scope InOrApp $ synthesizeApps e (Type.andor' l) [a, b] + + | Term.Match' scrutinee 
cases <- e = do + (scrutineeType, swant) <- synthesize scrutinee outputTypev <- freshenVar (Var.named "match-output") let outputType = existential' l B.Blank outputTypev appendContext [existential outputTypev] - checkCases scrutineeType outputType cases - ctx <- getContext - pure $ apply ctx outputType - go (Term.Handle' h body) = do - -- To synthesize a handle block, we first synthesize the handler h, - -- then push its allowed abilities onto the current ambient set when - -- checking the body. Assuming that works, we also verify that the - -- handler only uses abilities in the current ambient set. - ht <- synthesize h >>= applyM >>= ungeneralize + cwant <- checkCases scrutineeType outputType cases + want <- coalesceWanted cwant swant ctx <- getContext - case ht of - -- common case, like `h : Request {Remote} a -> b`, brings - -- `Remote` into ambient when checking `body` - Type.Arrow' (Type.Apps' (Type.Ref' ref) [et,i]) o | ref == Type.effectRef -> do - let es = Type.flattenEffects et - withEffects es $ check body i - o <- applyM o - let (oes, o') = Type.stripEffect o - abilityCheck oes - pure o' - -- degenerate case, like `handle x -> 10 in ...` - Type.Arrow' (i@(Type.Var' (TypeVar.Existential _ v@(lookupSolved ctx -> Nothing)))) o -> do - e <- extendExistential v - withEffects [existentialp (loc i) e] $ check body i - o <- applyM o - let (oes, o') = Type.stripEffect o - abilityCheck oes - pure o' - _ -> failWith $ HandlerOfUnexpectedType (loc h) ht - go _e = compilerCrash PatternMatchFailure + pure $ (apply ctx outputType, want) + where l = loc e + +synthesizeWanted _e = compilerCrash PatternMatchFailure checkCases :: Var v @@ -1006,8 +1146,8 @@ checkCases => Type v loc -> Type v loc -> [Term.MatchCase loc (Term v loc)] - -> M v loc () -checkCases _ _ [] = pure () + -> M v loc (Wanted v loc) +checkCases _ _ [] = pure [] checkCases scrutType outType cases@(Term.MatchCase _ _ t : _) = scope (InMatch (ABT.annotation t)) $ do mes <- requestType (cases <&> 
\(Term.MatchCase p _ _) -> p) @@ -1017,7 +1157,7 @@ checkCases scrutType outType cases@(Term.MatchCase _ _ t : _) vt = existentialp lo v appendContext [existential v] subtype (Type.effectV lo (lo, Type.effects lo es) (lo, vt)) sty - traverse_ (checkCase scrutType outType) cases + coalesceWanteds =<< traverse (checkCase scrutType outType) cases getEffect :: Var v => Ord loc => Reference -> Int -> M v loc (Type v loc) @@ -1046,14 +1186,13 @@ checkCase :: forall v loc . (Var v, Ord loc) => Type v loc -> Type v loc -> Term.MatchCase loc (Term v loc) - -> M v loc () + -> M v loc (Wanted v loc) checkCase scrutineeType outputType (Term.MatchCase pat guard rhs) = do scrutineeType <- applyM scrutineeType outputType <- applyM outputType - markThenRetract0 Var.inferOther $ do + markThenRetractWanted Var.inferOther $ do let peel t = case t of ABT.AbsN' vars bod -> (vars, bod) - _ -> ([], t) (rhsvs, rhsbod) = peel rhs mayGuard = snd . peel <$> guard (substs, remains) <- runStateT (checkPattern scrutineeType pat) rhsvs @@ -1061,9 +1200,11 @@ checkCase scrutineeType outputType (Term.MatchCase pat guard rhs) = do let subst = ABT.substsInheritAnnotation (second (Term.var ()) <$> substs) rhs' = subst rhsbod guard' = subst <$> mayGuard - for_ guard' $ \g -> scope InMatchGuard $ check g (Type.boolean (loc g)) + gwant <- for guard' $ \g -> scope InMatchGuard $ + checkWantedScoped [] g (Type.boolean (loc g)) outputType <- applyM outputType - scope InMatchBody $ check rhs' outputType + scope InMatchBody $ + checkWantedScoped (fromMaybe [] gwant) rhs' outputType -- For example: -- match scrute with @@ -1320,7 +1461,7 @@ annotateLetRecBindings isTop letrec = Foldable.for_ (zip bindings bindingTypes) $ \(b, t) -> -- note: elements of a cycle have to be pure, otherwise order of effects -- is unclear and chaos ensues - withEffects0 [] (checkScoped b t) + checkScopedWith b t [] ensureGuardedCycle (vs `zip` bindings) pure (bindings, bindingTypes) -- compute generalized types `gt1, gt2 ...` for 
each binding `b1, b2...`; @@ -1358,7 +1499,8 @@ existentialFunctionTypeFor e = do existentializeArrows :: Var v => Type v loc -> M v loc (Type v loc) existentializeArrows t = do - t <- Type.existentializeArrows (extendExistentialTV Var.inferAbility) t + let newVar = extendExistentialTV Var.inferAbility + t <- Type.existentializeArrows newVar t pure t ungeneralize :: (Var v, Ord loc) => Type v loc -> M v loc (Type v loc) @@ -1404,7 +1546,18 @@ forcedData ty = Type.freeVars ty -- | Apply the context to the input type, then convert any unsolved existentials -- to universals. generalizeExistentials :: (Var v, Ord loc) => [Element v loc] -> Type v loc -> Type v loc -generalizeExistentials = generalizeP existentialP +generalizeExistentials ctx ty0 = generalizeP pred ctx ty + where + gens = Set.fromList $ mapMaybe (fmap snd . existentialP) ctx + + ty = discardCovariant gens $ applyCtx ctx ty0 + fvs = Type.freeVars ty + + pred e + | pe@(Just (tv, _)) <- existentialP e + , tv `Set.member` fvs = pe + | otherwise = Nothing + generalizeP :: Var v @@ -1445,64 +1598,366 @@ variableP _ = Nothing checkScoped :: forall v loc . 
(Var v, Ord loc) - => Term v loc -> Type v loc -> M v loc (Type v loc) + => Term v loc -> Type v loc -> M v loc (Type v loc, Wanted v loc) checkScoped e (Type.Forall' body) = do v <- ABT.freshen body freshenTypeVar - (ty, pop) <- markThenRetract v $ do + ((ty, want), pop) <- markThenRetract v $ do x <- extendUniversal v let e' = Term.substTypeVar (ABT.variable body) (universal' () x) e checkScoped e' (ABT.bindInheritAnnotation body (universal' () x)) - pure $ generalizeP variableP pop ty + want <- substAndDefaultWanted want pop + pure (generalizeP variableP pop ty, want) checkScoped e t = do t <- existentializeArrows t - t <$ check e t + (t,) <$> check e t + +checkScopedWith + :: Var v + => Ord loc + => Term v loc + -> Type v loc + -> [Type v loc] + -> M v loc () +checkScopedWith tm ty ab = do + (_, want) <- checkScoped tm ty + subAbilities want ab + +markThenRetractWanted + :: Var v + => Ord loc + => v + -> M v loc (Wanted v loc) + -> M v loc (Wanted v loc) +markThenRetractWanted v m + = markThenRetract v m >>= uncurry substAndDefaultWanted + +-- This function handles merging two sets of wanted abilities, along +-- with some pruning of the set. This means that coalescing a list +-- with the empty list may result in a distinct list, but coalescing +-- again afterwards should not further change things. +-- +-- With respect to the above, it is presumed that the second argument +-- (`old`) has already been coalesced in this manner. So only the +-- contents of `new` may be reduced, and coalescing with the empty +-- list as `new` will just yield the `old` list. +-- +-- There are two main operations performed while merging. First, an +-- ability (currently) may only occur once in a row, so if it occurs +-- twice in a list, those two occurrences are unified. Second, some +-- references to ability polymorphic functions can lead to arbitrary +-- variables being 'wanted'. 
However, if these variables do not occur +-- in the context, and we are thus not trying to infer them, it is +-- pointless to add them to the wanted abilities, just making types +-- more complicated, and inference harder. So in that scenario, we +-- default the variable to {} and omit it. +coalesceWanted' + :: Var v + => Ord loc + => Wanted v loc + -> Wanted v loc + -> M v loc (Wanted v loc) +coalesceWanted' [] old = pure old +coalesceWanted' ((loc,n):new) old + | Just (_, o) <- find (headMatch n . snd) old = do + subtype n o + coalesceWanted new old + | Type.Var' u <- n = do + ctx <- getContext + (new, old) <- + -- Only add existential variables to the wanted list if they + -- occur in a type we're trying to infer in the context. If + -- they don't, they were added as instantiations of polymorphic + -- types that might as well just be instantiated to {}. + if keep ctx u + then pure (new, (loc, n):old) + else do + defaultAbility n + pure (new, old) + coalesceWanted new old + | otherwise = coalesceWanted' new ((loc, n):old) + where + keep ctx u@TypeVar.Existential{} = occursAnn u ctx + keep _ _ = True + +-- Wrapper for coalesceWanted' that ensures both lists are fully +-- expanded. +coalesceWanted + :: Var v + => Ord loc + => Wanted v loc + -> Wanted v loc + -> M v loc (Wanted v loc) +coalesceWanted new old = do + new <- expandWanted new + old <- expandWanted old + coalesceWanted' new old + +coalesceWanteds + :: Var v => Ord loc => [Wanted v loc] -> M v loc (Wanted v loc) +coalesceWanteds = foldM (flip coalesceWanted) [] + +-- | This implements the subtraction of handled effects from the +-- wanted effects of its body. Similar to merging, it presumes that +-- only one occurrence of each concrete ability is in play in the +-- scope, and will impose subtyping relations between the wanted and +-- handled abilities. 
+pruneWanted + :: Var v + => Ord loc + => Wanted v loc + -> Wanted v loc + -> [Type v loc] + -> M v loc (Wanted v loc) +pruneWanted acc [] _ = pure acc +pruneWanted acc ((loc, w):want) handled + | Just h <- find (headMatch w) handled = do + subtype w h + want <- expandWanted want + handled <- expandAbilities handled + pruneWanted acc want handled + | otherwise = pruneWanted ((loc, w):acc) want handled + +-- | Processes wanted effects with respect to a portion of context +-- that is being discarded. This has the following consequences: +-- - All solved variables are substituted into the wanted abilities +-- - Effects that would involve escaping universal variables cause +-- an error, because they cannot possibly be satisfied. +-- - Unsolved existential abilities are defaulted to the empty row +-- - Abilities containing unsolved existentials that are going out +-- of scope cause an error, because it is unclear what they ought +-- to be solved to. +substAndDefaultWanted + :: Var v + => Ord loc + => Wanted v loc + -> [Element v loc] + -> M v loc (Wanted v loc) +substAndDefaultWanted want ctx + | want <- (fmap.fmap) (applyCtx ctx) want + , want <- filter q want + , repush <- filter keep ctx + = appendContext repush *> coalesceWanted want [] + where + isExistential TypeVar.Existential{} = True + isExistential _ = False + + -- get the free variables of things that aren't just variables + necessary (Type.Var' _) = mempty + necessary t = Set.filter isExistential $ Type.freeVars t + + keeps = foldMap (necessary.snd) want + keep (Var v) = v `Set.member` keeps + keep _ = False + + p (Var v) | isExistential v = Just v + p _ = Nothing + + outScope = Set.fromList $ mapMaybe p ctx + + q (_, Type.Var' u) = u `Set.notMember` outScope + q _ = True + +-- Defaults unsolved ability variables to the empty row +defaultAbility :: Var v => Ord loc => Type v loc -> M v loc Bool +defaultAbility e@(Type.Var' (TypeVar.Existential b v)) + = (True <$ instantiateL b v eff0) `orElse` pure False + 
where + eff0 = Type.effects (loc e) [] +defaultAbility _ = pure False --- | Check that under the given context, `e` has type `t`, +-- Discards existential ability variables that only occur in covariant +-- position in the type. This is a useful step before generalization, +-- because it eliminates unnecessary variable parameterization. +-- +-- Expects a fully substituted type, so that it is unnecessary to +-- check if an existential in the type has been solved. +discardCovariant :: Var v => Set v -> Type v loc -> Type v loc +discardCovariant _ ty | debugShow ("discardCovariant", ty) = undefined +discardCovariant gens ty + = ABT.rewriteDown (strip $ keepVarsT True ty) ty + where + keepVarsT pos (Type.Arrow' i o) + = keepVarsT (not pos) i <> keepVarsT pos o + keepVarsT pos (Type.Effect1' e o) + = keepVarsT pos e <> keepVarsT pos o + keepVarsT pos (Type.Effects' es) = foldMap (keepVarsE pos) es + keepVarsT pos (Type.ForallNamed' _ t) = keepVarsT pos t + keepVarsT pos (Type.IntroOuterNamed' _ t) = keepVarsT pos t + keepVarsT _ t = foldMap exi $ Type.freeVars t + + exi (TypeVar.Existential _ v) = Set.singleton v + exi _ = mempty + + keepVarsE pos (Type.Var' (TypeVar.Existential _ v)) + | pos, v `Set.member` gens = mempty + | otherwise = Set.singleton v + keepVarsE pos e = keepVarsT pos e + + strip keep t@(Type.Effect1' es0 o) + = Type.effect (loc t) (discard keep $ Type.flattenEffects es0) o + strip _ t = t + + discard keep es = filter p es + where + p (Type.Var' (TypeVar.Existential _ v)) = v `Set.member` keep + p _ = True + +-- Ability inference prefers minimal sets of abilities when +-- possible. However, such inference may disqualify certain TDNR +-- candicates due to a subtyping check with an overly minimal type. +-- It may be that the candidate's type would work fine, because the +-- inference was overly conservative about guessing which abilities +-- are in play. 
+-- +-- `relax` adds an existential variable to the final inferred +-- abilities for such a function type if there isn't already one, +-- changing: +-- +-- T ->{..} U ->{..} V +-- +-- into: +-- +-- T ->{..} U ->{e, ..} V +-- +-- (where the `..` are presumed to be concrete) so that it can +-- behave better in the check. +-- +-- It's possible this would allow an ability set that doesn't work, +-- but this is only used for type directed name resolution. A +-- separate type check must pass if the candidate is allowed, which +-- will ensure that the location has the right abilities. +relax :: Var v => Ord loc => Type v loc -> Type v loc +relax t = relax' True v t + where + fvs = foldMap f $ Type.freeVars t + f (TypeVar.Existential _ v) = Set.singleton v + f _ = mempty + v = ABT.freshIn fvs $ Var.inferAbility + +-- The worker for `relax`. +-- +-- The boolean argument controls whether a non-arrow type is relaxed. +-- For example, the type: +-- +-- Nat +-- +-- is relaxed to: +-- +-- {e} Nat +-- +-- if True. This is desirable when doing TDNR, because a potential +-- effect reference may have type `{A} T` while the inferred necessary +-- type is just `T`. However, it is undesirable to add these variables +-- when relax' is used during variable instantiation, because it just +-- adds ability inference ambiguity. +relax' :: Var v => Ord loc => Bool -> v -> Type v loc -> Type v loc +relax' nonArrow v t + | Type.Arrow' i o <- t + = Type.arrow (ABT.annotation t) i $ relax' nonArrow v o + | Type.ForallsNamed' vs b <- t + = Type.foralls loc vs $ relax' nonArrow v b + | Type.Effect' es r <- t + , Type.Arrow' i o <- r + = Type.effect loc es . 
Type.arrow (ABT.annotation t) i $ relax' nonArrow v o + | Type.Effect' es r <- t + = if any open es then t else Type.effect loc (tv : es) r + | nonArrow = Type.effect loc [tv] t + | otherwise = t + where + open (Type.Var' (TypeVar.Existential{})) = True + open _ = False + loc = ABT.annotation t + tv = Type.var loc (TypeVar.Existential B.Blank v) + +checkWantedScoped + :: Var v + => Ord loc + => Wanted v loc + -> Term v loc + -> Type v loc + -> M v loc (Wanted v loc) +checkWantedScoped want m ty + = scope (InCheck m ty) $ checkWanted want m ty + +checkWanted + :: Var v + => Ord loc + => Wanted v loc + -> Term v loc + -> Type v loc + -> M v loc (Wanted v loc) +-- ForallI +checkWanted want m (Type.Forall' body) = do + v <- ABT.freshen body freshenTypeVar + markThenRetractWanted v $ do + x <- extendUniversal v + checkWanted want m + $ ABT.bindInheritAnnotation body (universal' () x) +-- =>I +-- Lambdas are pure, so they add nothing to the wanted set +checkWanted want (Term.Lam' body) (Type.Arrow'' i es o) = do + x <- ABT.freshen body freshenVar + markThenRetract0 x $ do + extendContext (Ann x i) + body <- pure $ ABT.bindInheritAnnotation body (Term.var () x) + checkWithAbilities es body o + pure want +checkWanted want (Term.Let1' binding m) t = do + v <- ABT.freshen m freshenVar + (tbinding, wbinding) <- synthesize binding + want <- coalesceWanted wbinding want + markThenRetractWanted v $ do + extendContext (Ann v tbinding) + checkWanted want (ABT.bindInheritAnnotation m (Term.var () v)) t +checkWanted want (Term.LetRecNamed' [] m) t + = checkWanted want m t +-- letrec can't have effects, so it doesn't extend the wanted set +checkWanted want (Term.LetRecTop' isTop lr) t + = markThenRetractWanted (Var.named "let-rec-marker") $ do + e <- annotateLetRecBindings isTop lr + checkWanted want e t +checkWanted want e t = do + (u, wnew) <- synthesize e + ctx <- getContext + subtype (apply ctx u) (apply ctx t) + coalesceWanted wnew want + +-- | Check that under the current 
context: +-- `m` has type `t` with abilities `es`, -- updating the context in the process. -check :: forall v loc . (Var v, Ord loc) => Term v loc -> Type v loc -> M v loc () -check e t | debugEnabled && traceShow ("check" :: String, e, t) False = undefined -check e0 t0 = scope (InCheck e0 t0) $ do +checkWithAbilities + :: Var v + => Ord loc + => [Type v loc] + -> Term v loc + -> Type v loc + -> M v loc () +checkWithAbilities es m t = do + want <- check m t + subAbilities want es + -- traverse_ defaultAbility es + +-- | Check that under the given context: +-- `m` has type `t` +-- updating the context in the process. +check + :: Var v + => Ord loc + => Term v loc + -> Type v loc + -> M v loc (Wanted v loc) +check m t | debugShow ("check" :: String, m, t) = undefined +check m0 t0 = scope (InCheck m0 t0) $ do ctx <- getContext - let Type.Effect'' es t = t0 - let e = minimize' e0 - case e of - Left e -> failWith $ DuplicateDefinitions e - Right e -> - if wellformedType ctx t0 - then case t of - -- expand existentials before checking - t@(Type.Var' (TypeVar.Existential _ _)) -> abilityCheck es >> go e (apply ctx t) - t -> go e (Type.stripIntroOuters t) - else failWith $ IllFormedType ctx - where - go :: Term v loc -> Type v loc -> M v loc () - go e (Type.Forall' body) = do -- ForallI - v <- ABT.freshen body freshenTypeVar - markThenRetract0 v $ do - x <- extendUniversal v - check e (ABT.bindInheritAnnotation body (universal' () x)) - go (Term.Lam' body) (Type.Arrow' i o) = do -- =>I - x <- ABT.freshen body freshenVar - markThenRetract0 x $ do - extendContext (Ann x i) - let Type.Effect'' es ot = o - body' <- pure $ ABT.bindInheritAnnotation body (Term.var() x) - withEffects0 es $ check body' ot - go (Term.Let1' binding e) t = do - v <- ABT.freshen e freshenVar - tbinding <- synthesize binding - markThenRetract0 v $ do - extendContext (Ann v tbinding) - check (ABT.bindInheritAnnotation e (Term.var () v)) t - go (Term.LetRecNamed' [] e) t = check e t - go (Term.LetRecTop' 
isTop letrec) t = - markThenRetract0 (Var.named "let-rec-marker") $ do - e <- annotateLetRecBindings isTop letrec - check e t - go e t = do -- Sub - a <- synthesize e - ctx <- getContext - subtype (apply ctx a) (apply ctx t) + case minimize' m0 of + Left m -> failWith $ DuplicateDefinitions m + Right m + | not (wellformedType ctx t0) + -> failWith $ IllFormedType ctx + | Type.Var' TypeVar.Existential{} <- t0 + -> applyM t0 >>= checkWanted [] m + | otherwise + -> checkWanted [] m (Type.stripIntroOuters t0) -- | `subtype ctx t1 t2` returns successfully if `t1` is a subtype of `t2`. -- This may have the effect of altering the context. @@ -1529,10 +1984,7 @@ subtype tx ty = scope (InSubtype tx ty) $ do -- (conservatively) that it's invariant, see -- discussion https://github.com/unisonweb/unison/issues/512 y1 <- applyM y1; y2 <- applyM y2 - subtype y1 y2 - y1 <- applyM y1; y2 <- applyM y2 - -- performing the subtype check in both directions means the types must be equal - subtype y2 y1 + equate y1 y2 go _ t (Type.Forall' t2) = do v <- ABT.freshen t2 freshenTypeVar markThenRetract0 v $ do @@ -1555,27 +2007,36 @@ subtype tx ty = scope (InSubtype tx ty) $ do subtype es (Type.effects (loc es) []) subtype a a2 go ctx (Type.Var' (TypeVar.Existential b v)) t -- `InstantiateL` - | Set.member v (existentials ctx) && notMember v (Type.freeVars t) = - instantiateL b v t + | Set.member v (existentials ctx) + && notMember v (Type.freeVars t) = do + e <- extendExistential Var.inferAbility + instantiateL b v (relax' False e t) go ctx t (Type.Var' (TypeVar.Existential b v)) -- `InstantiateR` - | Set.member v (existentials ctx) && notMember v (Type.freeVars t) = - instantiateR t b v - go _ (Type.Effects' es1) (Type.Effects' es2) = do - ctx <- getContext - let es1' = map (apply ctx) es1 - es2' = map (apply ctx) es2 - if all (`elem` es2') es1' then pure () else abilityCheck' es2' es1' + | Set.member v (existentials ctx) + && notMember v (Type.freeVars t) = do + e <- extendExistential 
Var.inferAbility + instantiateR (relax' False e t) b v + go _ (Type.Effects' es1) (Type.Effects' es2) + = subAbilities ((,) Nothing <$> es1) es2 go _ t t2@(Type.Effects' _) | expand t = subtype (Type.effects (loc t) [t]) t2 go _ t@(Type.Effects' _) t2 | expand t2 = subtype t (Type.effects (loc t2) [t2]) go ctx _ _ = failWith $ TypeMismatch ctx expand :: Type v loc -> Bool expand t = case t of - Type.Var' (TypeVar.Existential _ _) -> True + Type.Var' _ -> True Type.App' _ _ -> True Type.Ref' _ -> True _ -> False +equate :: Var v => Ord loc => Type v loc -> Type v loc -> M v loc () +equate y1 y2 = do + subtype y1 y2 + y1 <- applyM y1; y2 <- applyM y2 + -- performing the subtype check in both directions means + -- the types must be equal + subtype y2 y1 + -- | Instantiate the given existential such that it is -- a subtype of the given type, updating the context @@ -1641,6 +2102,29 @@ nameFrom :: Var v => v -> Type v loc -> v nameFrom _ (Type.Var' v) = TypeVar.underlying (Var.reset v) nameFrom ifNotVar _ = ifNotVar +refineEffectVar + :: Var v + => Ord loc + => loc + -> [Type v loc] + -> B.Blank loc + -> v + -> M v loc () +refineEffectVar _ es _ v + | debugShow ("refineEffectVar", es, v) = undefined +refineEffectVar _ [] _ _ = pure () +refineEffectVar l es blank v = do + slack <- freshenVar Var.inferAbility + evs <- traverse (\e -> freshenVar (nameFrom Var.inferAbility e)) es + let locs = loc <$> es + es' = zipWith existentialp locs evs + t' = Type.effects l (existentialp l slack : es') + s = Solved blank v (Type.Monotype t') + vs = existential slack : fmap existential evs + replaceContext (existential v) (vs ++ [s]) + Foldable.for_ (es `zip` evs) $ \(e,ev) -> + getContext >>= \ctx -> instantiateR (apply ctx e) B.Blank ev + -- | Instantiate the given existential such that it is -- a supertype of the given type, updating the context -- in the process. 
@@ -1724,79 +2208,169 @@ solve ctx v t = case lookupSolved ctx v of else pure Nothing _ -> compilerCrash $ UnknownExistentialVariable v ctx -abilityCheck' :: forall v loc . (Var v, Ord loc) => [Type v loc] -> [Type v loc] -> M v loc () -abilityCheck' [] [] = pure () -abilityCheck' ambient0 requested0 = go ambient0 requested0 where - go _ambient [] = pure () - go ambient0 (r:rs) = do - -- Note: if applyM returns an existential, it's unsolved - ambient <- traverse applyM ambient0 - r <- applyM r - -- 1. Look in ambient for exact match of head of `r` - case find (headMatch r) ambient of - -- 2a. If yes for `a` in ambient, do `subtype amb r` and done. - Just amb -> do - subtype amb r `orElse` die r +expandAbilities + :: Var v => Ord loc => [Type v loc] -> M v loc [Type v loc] +expandAbilities + = fmap (concatMap Type.flattenEffects) . traverse applyM + +expandWanted + :: Var v => Ord loc => Wanted v loc -> M v loc (Wanted v loc) +expandWanted + = (fmap.concatMap) (\(l, es) -> (,) l <$> Type.flattenEffects es) + . (traverse.traverse) applyM + + +pruneAbilities + :: Var v + => Ord loc + => Wanted v loc + -> [Type v loc] + -> M v loc (Wanted v loc) +pruneAbilities want0 have0 + | debugShow ("pruneAbilities", want0, have0) = undefined +pruneAbilities want0 have0 + = go [] (sortBy (comparing (isVar.snd)) want0) have0 + where + isVar (Type.Var' _) = True + isVar _ = False + + isExistential (Type.Var' TypeVar.Existential{}) = True + isExistential _ = False + + missing loc w = maybe id (scope . InSynthesize) loc $ do + ctx <- getContext + failWith $ AbilityCheckFailure + (Type.flattenEffects . 
apply ctx =<< have0) + [w] + ctx + + dflt = not $ any isExistential have0 + + go acc [] _ = pure acc + go acc ((loc, w):want) have + | Just v <- find (headMatch w) have = do + subtype v w `orElse` missing loc w + want <- expandWanted want + have <- expandAbilities have + go acc want have + | dflt = do + discard <- defaultAbility w + want <- expandWanted want + have <- expandAbilities have + if discard + then go acc want have + else go ((loc, w):acc) want have + | otherwise = go ((loc, w):acc) want have + +subAbilities + :: Var v + => Ord loc + => Wanted v loc + -> [Type v loc] + -> M v loc () +subAbilities want have + | debugShow ("subAbilities", want, have) = undefined +subAbilities want have = do + want <- expandWanted want + have <- expandAbilities have + want <- expandWanted =<< pruneAbilities want have + have <- expandAbilities have + case (want , mapMaybe ex have) of + ([], _) -> pure () + (want@((_, w):_), [(b, ve)]) -> + refineEffectVar (loc w) (snd <$> want) b ve -- `orElse` die src w + ((src, w):_, _) -> die src w + where + ex (Type.Var' (TypeVar.Existential b v)) = Just (b, v) + ex _ = Nothing + die src w = maybe id (scope . InSynthesize) src do + ctx <- getContext + failWith $ AbilityCheckFailure + (Type.flattenEffects . apply ctx =<< have) + [w] + ctx + +-- This function deals with collecting up a list of used abilities +-- during inference. Example: when inferring `x -> Stream.emit 42`, an +-- ambient existential `e` ability is created for the lambda. In the +-- body of the lambda, requests are made for various abilities and +-- this branch finds the first unsolved ambient ability, `e`, and +-- solves that to `{r, e'}` where `e'` is another fresh existential. +-- In this way, a lambda whose body uses multiple effects can be +-- inferred properly. 
+subAmbient + :: Var v + => Ord loc + => M v loc () + -> [Type v loc] + -> Type v loc + -> M v loc () +subAmbient die ambient r + -- find unsolved existential, 'e, that appears in ambient + | (b, e'):_ <- unsolveds = do + -- introduce fresh existential 'e2 to context + e2' <- extendExistential e' + let et2 = Type.effects (loc r) [r, existentialp (loc r) e2'] + instantiateR et2 b e' `orElse` die + | otherwise = die + where + unsolveds = (ambient >>= Type.flattenEffects >>= vars) + vars (Type.Var' (TypeVar.Existential b v)) = [(b,v)] + vars _ = [] + +abilityCheckSingle + :: Var v + => Ord loc + => M v loc () + -> [Type v loc] + -> Type v loc + -> M v loc () +abilityCheckSingle die ambient r + -- Look in ambient for exact match of head of `r` + -- Ex: given `State Nat`, `State` is the head + -- Ex: given `IO`, `IO` is the head + -- Ex: given `a`, where there's an exact variable + -- If yes for `a` in ambient, do `subtype a r` and done. + | Just a <- find (headMatch r) ambient + = subtype a r `orElse` die + -- It's an unsolved existential, instantiate it to all of ambient + | Type.Var' tv@(TypeVar.Existential b v) <- r + = let et2 = Type.effects (loc r) ambient + acyclic + | Set.member tv (Type.freeVars et2) + -- just need to trigger `orElse` in this case + = getContext >>= failWith . 
TypeMismatch + | otherwise = instantiateR et2 b v + in -- instantiate it to `{}` if can't cover all of ambient + acyclic + `orElse` instantiateR (Type.effects (loc r) []) b v + `orElse` die + | otherwise = subAmbient die ambient r + +headMatch :: Var v => Ord loc => Type v loc -> Type v loc -> Bool +headMatch (Type.App' f _) (Type.App' f2 _) = headMatch f f2 +headMatch r r2 = r == r2 + +abilityCheck' + :: Var v => Ord loc => [Type v loc] -> [Type v loc] -> M v loc () +abilityCheck' ambient0 requested0 = go ambient0 requested0 + where + go _ [] = pure () + go ambient0 (r0:rs) = applyM r0 >>= \case + Type.Effects' es -> go ambient0 (es ++ rs) + r -> do + ambient <- + concatMap Type.flattenEffects + <$> traverse applyM ambient0 + abilityCheckSingle die ambient r go ambient rs - -- Corner case where a unification caused `r` to expand to a - -- list of effects. This whole function should be restructured - -- such that this can go in a better spot. - Nothing | Type.Effects' es <- r -> go ambient (es ++ rs) - -- 2b. If no: - Nothing -> case r of - -- It's an unsolved existential, instantiate it to all of ambient - Type.Var' tv@(TypeVar.Existential b v) -> do - let et2 = Type.effects (loc r) ambient - acyclic - | Set.member tv (Type.freeVars et2) - -- just need to trigger `orElse` in this case - = getContext >>= failWith . 
TypeMismatch - | otherwise = instantiateR et2 b v - -- instantiate it to `{}` if can't cover all of ambient - acyclic - `orElse` instantiateR (Type.effects (loc r) []) b v - `orElse` die1 - go ambient rs - _ -> -- find unsolved existential, 'e, that appears in ambient - let unsolveds = (ambient >>= Type.flattenEffects >>= vars) - vars (Type.Var' (TypeVar.Existential b v)) = [(b,v)] - vars _ = [] - in case listToMaybe unsolveds of - Just (b, e') -> do - -- introduce fresh existential 'e2 to context - e2' <- extendExistential e' - let et2 = Type.effects (loc r) [r, existentialp (loc r) e2'] - instantiateR et2 b e' `orElse` die r - go ambient rs - _ -> die r - - headMatch :: Type v loc -> Type v loc -> Bool - headMatch (Type.App' f _) (Type.App' f2 _) = headMatch f f2 - headMatch r r2 = r == r2 - - -- as a last ditch effort, if the request is an existential and there are - -- no remaining unbound existentials left in ambient, we try to instantiate - -- the request to the ambient effect list - die r = case r of - Type.Var' (TypeVar.Existential b v) -> - instantiateL b v (Type.effects (loc r) ambient0) `orElse` die1 - -- instantiateL b v (Type.effects (loc r) []) `orElse` die1 - _ -> die1 -- and if that doesn't work, then we're really toast - - die1 = do + + die = do ctx <- getContext failWith $ AbilityCheckFailure (apply ctx <$> ambient0) (apply ctx <$> requested0) ctx -abilityCheck :: (Var v, Ord loc) => [Type v loc] -> M v loc () -abilityCheck requested = do - ambient <- getAbilities - requested' <- filterM shouldPerformAbilityCheck requested - ctx <- getContext - abilityCheck' (apply ctx <$> ambient >>= Type.flattenEffects) - (apply ctx <$> requested' >>= Type.flattenEffects) - verifyDataDeclarations :: (Var v, Ord loc) => DataDeclarations v loc -> Result v loc () verifyDataDeclarations decls = forM_ (Map.toList decls) $ \(_ref, decl) -> do let ctors = DD.constructors decl @@ -1816,7 +2390,7 @@ synthesizeClosed abilities lookupType term0 = let in case term of Left 
missingRef -> compilerCrashResult (UnknownTermReference missingRef) - Right term -> run [] datas effects $ do + Right term -> run datas effects $ do liftResult $ verifyDataDeclarations datas *> verifyDataDeclarations (DD.toDataDecl <$> effects) *> verifyClosedTerm term @@ -1851,15 +2425,14 @@ annotateRefs synth = ABT.visit f where run :: (Var v, Ord loc, Functor f) - => [Type v loc] - -> DataDeclarations v loc + => DataDeclarations v loc -> EffectDeclarations v loc -> MT v loc f a -> f a -run ambient datas effects m = +run datas effects m = fmap fst . runM m - $ MEnv (Env 1 context0) ambient datas effects [] + $ MEnv (Env 1 context0) datas effects synthesizeClosed' :: (Var v, Ord loc) => [Type v loc] @@ -1872,7 +2445,9 @@ synthesizeClosed' abilities term = do (t, ctx) <- markThenRetract (Var.named "start") $ do -- retract will cause notes to be written out for -- any `Blank`-tagged existentials passing out of scope - withEffects0 abilities (synthesize term) + (t, want) <- synthesize term + scope (InSynthesize term) $ + t <$ subAbilities want abilities setContext ctx0 -- restore the initial context pure $ generalizeExistentials ctx t @@ -1926,7 +2501,7 @@ isRedundant userType0 inferredType0 = do isSubtype :: (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool isSubtype t1 t2 = - run [] Map.empty Map.empty (isSubtype' t1 t2) + run Map.empty Map.empty (isSubtype' t1 t2) isEqual :: (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool @@ -1936,7 +2511,7 @@ isEqual t1 t2 = instance (Var v) => Show (Element v loc) where show (Var v) = case v of TypeVar.Universal x -> "@" <> show x - TypeVar.Existential _ x -> "'" ++ show x + e -> show e show (Solved _ v t) = "'"++Text.unpack (Var.name v)++" = "++TP.pretty' Nothing mempty (Type.getPolytype t) show (Ann v t) = Text.unpack (Var.name v) ++ " : " ++ TP.pretty' Nothing mempty t @@ -1947,7 +2522,7 @@ instance (Ord loc, Var v) => Show (Context v loc) where where showElem _ctx 
(Var v) = case v of TypeVar.Universal x -> "@" <> show x - TypeVar.Existential _ x -> "'" ++ show x + e -> show e showElem ctx (Solved _ v (Type.Monotype t)) = "'"++Text.unpack (Var.name v)++" = "++ TP.pretty' Nothing mempty (apply ctx t) showElem ctx (Ann v t) = Text.unpack (Var.name v) ++ " : " ++ TP.pretty' Nothing mempty (apply ctx t) showElem _ (Marker v) = "|"++Text.unpack (Var.name v)++"|" diff --git a/parser-typechecker/src/Unison/Util/Bytes.hs b/parser-typechecker/src/Unison/Util/Bytes.hs index 500b8d2848..580c5586ed 100644 --- a/parser-typechecker/src/Unison/Util/Bytes.hs +++ b/parser-typechecker/src/Unison/Util/Bytes.hs @@ -3,10 +3,12 @@ module Unison.Util.Bytes where +import Data.Bits (shiftR, shiftL, (.|.)) import Data.Char import Data.Memory.PtrMethods (memCompare, memEqual) import Data.Monoid (Sum(..)) -import Foreign.Ptr (plusPtr) +import Foreign.Ptr (Ptr, plusPtr) +import Foreign.Storable (poke) import System.IO.Unsafe (unsafeDupablePerformIO) import Unison.Prelude hiding (ByteString, empty) import Basement.Block (Block) @@ -86,6 +88,156 @@ at i bs = case Unison.Util.Bytes.drop i bs of Bytes (T.viewl -> hd T.:< _) -> Just (B.index hd 0) _ -> Nothing +dropBlock :: Int -> Bytes -> Maybe (View ByteString, Bytes) +dropBlock nBytes chunks = + go mempty chunks where + go acc (Bytes chunks) = + if B.length acc == nBytes then + Just (view acc, (Bytes chunks)) + else if B.length acc >= nBytes then + let v = view acc in + Just ((takeView nBytes v), Bytes ((dropView nBytes v) T.<| chunks)) + else + case chunks of + (T.viewl -> (head T.:< tail)) -> go (acc <> (B.convert head)) (Bytes tail) + _ -> Nothing + + +decodeNat64be :: Bytes -> Maybe (Word64, Bytes) +decodeNat64be bs = case dropBlock 8 bs of + Just (head, rest) -> + let + b8 = B.index head 0 + b7 = B.index head 1 + b6 = B.index head 2 + b5 = B.index head 3 + b4 = B.index head 4 + b3 = B.index head 5 + b2 = B.index head 6 + b1 = B.index head 7 + b = shiftL (fromIntegral b8) 56 + .|.shiftL (fromIntegral 
b7) 48 + .|.shiftL (fromIntegral b6) 40 + .|.shiftL (fromIntegral b5) 32 + .|.shiftL (fromIntegral b4) 24 + .|.shiftL (fromIntegral b3) 16 + .|.shiftL (fromIntegral b2) 8 + .|.fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + +decodeNat64le :: Bytes -> Maybe (Word64, Bytes) +decodeNat64le bs = case dropBlock 8 bs of + Just (head, rest) -> + let + b1 = B.index head 0 + b2 = B.index head 1 + b3 = B.index head 2 + b4 = B.index head 3 + b5 = B.index head 4 + b6 = B.index head 5 + b7 = B.index head 6 + b8 = B.index head 7 + b = shiftL (fromIntegral b8) 56 + .|. shiftL (fromIntegral b7) 48 + .|. shiftL (fromIntegral b6) 40 + .|. shiftL (fromIntegral b5) 32 + .|. shiftL (fromIntegral b4) 24 + .|. shiftL (fromIntegral b3) 16 + .|. shiftL (fromIntegral b2) 8 + .|. fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + +decodeNat32be :: Bytes -> Maybe (Word64, Bytes) +decodeNat32be bs = case dropBlock 4 bs of + Just (head, rest) -> + let + b4 = B.index head 0 + b3 = B.index head 1 + b2 = B.index head 2 + b1 = B.index head 3 + b = shiftL (fromIntegral b4) 24 + .|. shiftL (fromIntegral b3) 16 + .|. shiftL (fromIntegral b2) 8 + .|. fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + +decodeNat32le :: Bytes -> Maybe (Word64, Bytes) +decodeNat32le bs = case dropBlock 4 bs of + Just (head, rest) -> + let + b1 = B.index head 0 + b2 = B.index head 1 + b3 = B.index head 2 + b4 = B.index head 3 + b = shiftL (fromIntegral b4) 24 + .|. shiftL (fromIntegral b3) 16 + .|. shiftL (fromIntegral b2) 8 + .|. fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + +decodeNat16be :: Bytes -> Maybe (Word64, Bytes) +decodeNat16be bs = case dropBlock 2 bs of + Just (head, rest) -> + let + b2 = B.index head 0 + b1 = B.index head 1 + b = shiftL (fromIntegral b2) 8 + .|. 
fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + +decodeNat16le :: Bytes -> Maybe (Word64, Bytes) +decodeNat16le bs = case dropBlock 2 bs of + Just (head, rest) -> + let + b1 = B.index head 0 + b2 = B.index head 1 + b = shiftL (fromIntegral b2) 8 + .|. fromIntegral b1 + in + Just(b, rest) + Nothing -> Nothing + + +fillBE :: Word64 -> Int -> Ptr Word8 -> IO () +fillBE n 0 p = poke p (fromIntegral n) >> return () +fillBE n i p = poke p (fromIntegral (shiftR n (i * 8))) + >> fillBE n (i - 1) (p `plusPtr` 1) + +encodeNat64be :: Word64 -> Bytes +encodeNat64be n = Bytes (T.singleton (view (B.unsafeCreate 8 (fillBE n 7)))) + +encodeNat32be :: Word64 -> Bytes +encodeNat32be n = Bytes (T.singleton (view (B.unsafeCreate 4 (fillBE n 3)))) + +encodeNat16be :: Word64 -> Bytes +encodeNat16be n = Bytes (T.singleton (view (B.unsafeCreate 2 (fillBE n 1)))) + +fillLE :: Word64 -> Int -> Int -> Ptr Word8 -> IO () +fillLE n i j p = + if i == j then + return () + else + poke p (fromIntegral (shiftR n (i * 8))) >> fillLE n (i + 1) j (p `plusPtr` 1) + +encodeNat64le :: Word64 -> Bytes +encodeNat64le n = Bytes (T.singleton (view (B.unsafeCreate 8 (fillLE n 0 8)))) + +encodeNat32le :: Word64 -> Bytes +encodeNat32le n = Bytes (T.singleton (view (B.unsafeCreate 4 (fillLE n 0 4)))) + +encodeNat16le :: Word64 -> Bytes +encodeNat16le n = Bytes (T.singleton (view (B.unsafeCreate 2 (fillLE n 0 2)))) + toBase16 :: Bytes -> Bytes toBase16 bs = foldl' step empty (chunks bs) where step bs b = snoc bs (BE.convertToBase BE.Base16 b :: ByteString) @@ -209,4 +361,3 @@ instance B.ByteArrayAccess bytes => B.ByteArrayAccess (View bytes) where length = viewSize withByteArray v f = B.withByteArray (unView v) $ \ptr -> f (ptr `plusPtr` (viewOffset v)) - diff --git a/parser-typechecker/src/Unison/Util/ColorText.hs b/parser-typechecker/src/Unison/Util/ColorText.hs index 0d4c1c3640..ef9acec2d4 100644 --- a/parser-typechecker/src/Unison/Util/ColorText.hs +++ 
b/parser-typechecker/src/Unison/Util/ColorText.hs @@ -134,8 +134,6 @@ defaultColors = \case ST.Referent _ -> Nothing ST.Op _ -> Nothing ST.Unit -> Nothing - ST.Constructor -> Nothing - ST.Request -> Nothing ST.AbilityBraces -> Just HiBlack ST.ControlKeyword -> Just Bold ST.LinkKeyword -> Just HiBlack diff --git a/parser-typechecker/src/Unison/Util/Star3.hs b/parser-typechecker/src/Unison/Util/Star3.hs index 491b4bfb59..8b5bbde33b 100644 --- a/parser-typechecker/src/Unison/Util/Star3.hs +++ b/parser-typechecker/src/Unison/Util/Star3.hs @@ -8,6 +8,7 @@ import Unison.Util.Relation (Relation) import qualified Data.Set as Set import qualified Unison.Hashable as H import qualified Unison.Util.Relation as R +import qualified Data.Map as Map -- Represents a set of (fact, d1, d2, d3), but indexed using a star schema so -- it can be efficiently queried from any of the dimensions. @@ -182,17 +183,36 @@ deleteFact facts Star3{..} = (facts R.<|| d2) (facts R.<|| d3) +-- Efficiently replace facts with those in the provided `Map`. +-- The `apply` function can be used to add other dimensions +-- in the same traversal. It is given `apply old new s` where +-- s is the current `Star` being accumulated. +-- +-- Currently used by update propagation but likely useful for +-- other bulk rewriting of namespaces. 
+replaceFacts :: (Ord fact, Ord d1, Ord d2, Ord d3) + => (fact -> fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3) + -> Map fact fact + -> Star3 fact d1 d2 d3 + -> Star3 fact d1 d2 d3 +replaceFacts apply m s = let + -- the intersection of `fact` and the replacement keys is often small, + -- so we compute that first (which can happen in O(size of intersection)) + -- rather than iterating over one or the other + replaceable = Map.keysSet m `Set.intersection` fact s + go s old = apply old new $ replaceFact old new s + where new = Map.findWithDefault old old m + in foldl' go s replaceable + replaceFact :: (Ord fact, Ord d1, Ord d2, Ord d3) => fact -> fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 -replaceFact f f' Star3{..} = - let updateFact fact = - if Set.member f fact - then (Set.insert f' . Set.delete f) fact - else fact - in Star3 (updateFact fact) - (R.replaceDom f f' d1) - (R.replaceDom f f' d2) - (R.replaceDom f f' d3) +replaceFact f f' s@Star3{..} = + if Set.notMember f fact + then s + else Star3 (Set.insert f' . 
Set.delete f $ fact) + (R.replaceDom f f' d1) + (R.replaceDom f f' d2) + (R.replaceDom f f' d3) instance (Ord fact, Ord d1, Ord d2, Ord d3) => Semigroup (Star3 fact d1 d2 d3) where (<>) = mappend diff --git a/parser-typechecker/src/Unison/Util/SyntaxText.hs b/parser-typechecker/src/Unison/Util/SyntaxText.hs index 10100f69a9..2e1acc0b1b 100644 --- a/parser-typechecker/src/Unison/Util/SyntaxText.hs +++ b/parser-typechecker/src/Unison/Util/SyntaxText.hs @@ -25,8 +25,6 @@ data Element r = NumericLiteral | Reference r | Referent (Referent' r) | Op SeqOp - | Constructor - | Request | AbilityBraces -- let|handle|in|where|match|with|cases|->|if|then|else|and|or | ControlKeyword @@ -57,7 +55,7 @@ data Element r = NumericLiteral | DocDelimiter -- the 'include' in @[include], etc | DocKeyword - deriving (Eq, Ord, Show, Generic, Functor) + deriving (Eq, Ord, Show, Functor) syntax :: Element r -> SyntaxText' r -> SyntaxText' r syntax = annotate diff --git a/parser-typechecker/tests/Suite.hs b/parser-typechecker/tests/Suite.hs index 7a32d22650..512e8760cb 100644 --- a/parser-typechecker/tests/Suite.hs +++ b/parser-typechecker/tests/Suite.hs @@ -33,6 +33,7 @@ import qualified Unison.Test.UriParser as UriParser import qualified Unison.Test.Util.Bytes as Bytes import qualified Unison.Test.Util.PinBoard as PinBoard import qualified Unison.Test.Util.Pretty as Pretty +import qualified Unison.Test.Util.Relation as Relation import qualified Unison.Test.Var as Var import qualified Unison.Test.ANF as ANF import qualified Unison.Test.MCode as MCode @@ -57,6 +58,7 @@ test = tests , Range.test , ColorText.test , Bytes.test + , Relation.test , Path.test , Causal.test , Referent.test diff --git a/parser-typechecker/tests/Unison/Core/Test/Name.hs b/parser-typechecker/tests/Unison/Core/Test/Name.hs index a0769aa9bd..41fc2dcf36 100644 --- a/parser-typechecker/tests/Unison/Core/Test/Name.hs +++ b/parser-typechecker/tests/Unison/Core/Test/Name.hs @@ -5,9 +5,12 @@ module Unison.Core.Test.Name where 
import EasyTest import Unison.Name as Name import Unison.NameSegment as NameSegment +import qualified Unison.Util.Relation as R import Data.List ( intercalate ) import Data.Text ( pack ) +import qualified Data.Set as Set + test :: Test () test = scope "name" $ tests [ scope "suffixes" $ tests @@ -22,6 +25,39 @@ test = scope "name" $ tests , scope "segments" $ do n <- int' 0 10 segs <- listOf n $ pick [".", "foo"] - expectEqual (segments $ Name . pack $ intercalate "." segs) - (NameSegment . pack <$> segs) + expectEqual' (segments $ Name . pack $ intercalate "." segs) + (NameSegment . pack <$> segs) + ok + , scope "suffixSearch" $ do + let rel = R.fromList [ + (n "base.List.map", 1), + (n "base.Set.map", 2), + (n "foo.bar.baz", 3), + (n "a.b.c", 4), + (n "a1.b.c", 5), + (n "..", 6) + ] + n = Name.unsafeFromText + expectEqual' ([n "."]) (Name.convert <$> Name.segments (n "..")) + expectEqual' ([n "."]) (Name.convert <$> Name.reverseSegments (n "..")) + + expectEqual' (Set.fromList [1,2]) + (Name.searchBySuffix (n "map") rel) + expectEqual' (n "List.map") + (Name.shortestUniqueSuffix (n "base.List.map") 1 rel) + expectEqual' (n "Set.map") + (Name.shortestUniqueSuffix (n "base.Set.map") 2 rel) + expectEqual' (n "baz") + (Name.shortestUniqueSuffix (n "foo.bar.baz") 3 rel) + expectEqual' (n "a.b.c") + (Name.shortestUniqueSuffix (n "a.b.c") 3 rel) + expectEqual' (n "a1.b.c") + (Name.shortestUniqueSuffix (n "a1.b.c") 3 rel) + note . show $ Name.reverseSegments (n ".") + note . show $ Name.reverseSegments (n "..") + tests [ scope "(.) shortest unique suffix" $ + expectEqual' (n ".") (Name.shortestUniqueSuffix (n "..") 6 rel) + , scope "(.) 
search by suffix" $ + expectEqual' (Set.fromList [6]) (Name.searchBySuffix (n ".") rel) ] + ok ] diff --git a/parser-typechecker/tests/Unison/Test/GitSync.hs b/parser-typechecker/tests/Unison/Test/GitSync.hs index 36700ffcc4..a5b4fe88d6 100644 --- a/parser-typechecker/tests/Unison/Test/GitSync.hs +++ b/parser-typechecker/tests/Unison/Test/GitSync.hs @@ -315,6 +315,7 @@ test = scope "gitsync22" . tests $ ``` |]) , + -- TODO: remove the alias.type .defns.A A line once patch syncing is fixed pushPullTest "lightweightPatch" fmt (\repo -> [i| ```ucm @@ -328,8 +329,9 @@ test = scope "gitsync22" . tests $ ``` ```ucm .defns> add - .patches> replace.type .defns.A .defns.B - .patches> replace.term .defns.x .defns.y + .patches> replace .defns.A .defns.B + .patches> alias.type .defns.A A + .patches> replace .defns.x .defns.y .patches> push ${repo} ``` |]) diff --git a/parser-typechecker/tests/Unison/Test/MCode.hs b/parser-typechecker/tests/Unison/Test/MCode.hs index a3a416d6ad..18c61cd364 100644 --- a/parser-typechecker/tests/Unison/Test/MCode.hs +++ b/parser-typechecker/tests/Unison/Test/MCode.hs @@ -70,7 +70,7 @@ env m = mapInsert (bit 24) m $ cenv asrt :: Section -asrt = Ins (Unpack 0) +asrt = Ins (Unpack Nothing 0) $ Match 0 $ Test1 1 (Yield ZArgs) (Die "assertion failed") diff --git a/parser-typechecker/tests/Unison/Test/TermPrinter.hs b/parser-typechecker/tests/Unison/Test/TermPrinter.hs index fbf419dbc0..313f79c57b 100755 --- a/parser-typechecker/tests/Unison/Test/TermPrinter.hs +++ b/parser-typechecker/tests/Unison/Test/TermPrinter.hs @@ -189,7 +189,6 @@ test = scope "termprinter" $ tests , tc "match x with 1 -> foo" , tc "match x with +1 -> foo" , tc "match x with -1 -> foo" - , tc "match x with 3.14159 -> foo" , tcDiffRtt False "match x with\n\ \ true -> foo\n\ \ false -> bar" diff --git a/parser-typechecker/tests/Unison/Test/UriParser.hs b/parser-typechecker/tests/Unison/Test/UriParser.hs index fbea77318a..80e9725787 100644 --- 
a/parser-typechecker/tests/Unison/Test/UriParser.hs +++ b/parser-typechecker/tests/Unison/Test/UriParser.hs @@ -3,7 +3,7 @@ module Unison.Test.UriParser where import EasyTest -import Unison.Codebase.Editor.RemoteRepo (RemoteRepo(..)) +import Unison.Codebase.Editor.RemoteRepo (ReadRepo(..)) import Unison.Codebase.Path (Path(..)) import qualified Unison.Codebase.Path as Path import qualified Text.Megaparsec as P @@ -24,54 +24,54 @@ testAugmented = scope "augmented" . tests $ -- $ git clone /srv/git/project.git[:treeish][:[#hash][.path]] [ scope "local-protocol" . tests . map parseAugmented $ [ ("/srv/git/project.git", - (GitRepo "/srv/git/project.git" Nothing, Nothing, Path.empty)) - , ("/srv/git/project.git:abc:#def.hij.klm", - (GitRepo "/srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "/srv/git/project.git", Nothing, Path.empty)) + -- , ("/srv/git/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "/srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) , ("srv/git/project.git", - (GitRepo "srv/git/project.git" Nothing, Nothing, Path.empty)) - , ("srv/git/project.git:abc:#def.hij.klm", - (GitRepo "srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "srv/git/project.git", Nothing, Path.empty)) + -- , ("srv/git/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) ], -- File Protocol -- $ git clone file:///srv/git/project.git[:treeish][:[#hash][.path]] <- imagined scope "file-protocol" . tests . 
map parseAugmented $ [ ("file:///srv/git/project.git", - (GitRepo "file:///srv/git/project.git" Nothing, Nothing, Path.empty)) - , ("file:///srv/git/project.git:abc:#def.hij.klm", - (GitRepo "file:///srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "file:///srv/git/project.git", Nothing, Path.empty)) + -- , ("file:///srv/git/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "file:///srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) , ("file://srv/git/project.git", - (GitRepo "file://srv/git/project.git" Nothing, Nothing, Path.empty)) - , ("file://srv/git/project.git:abc:#def.hij.klm", - (GitRepo "file://srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "file://srv/git/project.git", Nothing, Path.empty)) + -- , ("file://srv/git/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "file://srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) ], -- Smart / Dumb HTTP protocol -- $ git clone https://example.com/gitproject.git[:treeish][:[#hash][.path]] <- imagined scope "http-protocol" . tests . map parseAugmented $ [ ("https://example.com/git/project.git", - (GitRepo "https://example.com/git/project.git" Nothing, Nothing, Path.empty)) - , ("https://user@example.com/git/project.git:abc:#def.hij.klm]", - (GitRepo "https://user@example.com/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "https://example.com/git/project.git", Nothing, Path.empty)) + -- , ("https://user@example.com/git/project.git:abc:#def.hij.klm]", + -- (ReadGitRepo "https://user@example.com/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) ], -- SSH Protocol -- $ git clone ssh://[user@]server/project.git[:treeish][:[#hash][.path]] scope "ssh-protocol" . tests . 
map parseAugmented $ [ ("ssh://git@8.8.8.8:222/user/project.git", - (GitRepo "ssh://git@8.8.8.8:222/user/project.git" Nothing, Nothing, Path.empty)) - , ("ssh://git@github.com/user/project.git:abc:#def.hij.klm", - (GitRepo "ssh://git@github.com/user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "ssh://git@8.8.8.8:222/user/project.git", Nothing, Path.empty)) + -- , ("ssh://git@github.com/user/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "ssh://git@github.com/user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) ], -- $ git clone [user@]server:project.git[:treeish][:[#hash][.path]] scope "scp-protocol" . tests . map parseAugmented $ [ ("git@github.com:user/project.git", - (GitRepo "git@github.com:user/project.git" Nothing, Nothing, Path.empty)) + (ReadGitRepo "git@github.com:user/project.git", Nothing, Path.empty)) , ("github.com:user/project.git", - (GitRepo "github.com:user/project.git" Nothing, Nothing, Path.empty)) - , ("git@github.com:user/project.git:abc:#def.hij.klm", - (GitRepo "git@github.com:user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) + (ReadGitRepo "github.com:user/project.git", Nothing, Path.empty)) + -- , ("git@github.com:user/project.git:abc:#def.hij.klm", + -- (ReadGitRepo "git@github.com:user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"])) ] ] -parseAugmented :: (Text, (RemoteRepo, Maybe ShortBranchHash, Path)) -> Test () +parseAugmented :: (Text, (ReadRepo, Maybe ShortBranchHash, Path)) -> Test () parseAugmented (s, r) = scope (Text.unpack s) $ case P.parse UriParser.repoPath "test case" s of Left x -> crash $ show x diff --git a/parser-typechecker/tests/Unison/Test/Util/Relation.hs b/parser-typechecker/tests/Unison/Test/Util/Relation.hs new file mode 100644 index 0000000000..d6c59e1a00 --- /dev/null +++ b/parser-typechecker/tests/Unison/Test/Util/Relation.hs @@ -0,0 +1,24 @@ +module Unison.Test.Util.Relation where + +import EasyTest +import Control.Monad +import qualified 
Unison.Util.Relation as R +import qualified Data.Set as Set + +test :: Test () +test = scope "util.relation" . tests $ [ + scope "searchDom" $ do + replicateM_ 100 $ do + -- check that `searchDom` gives equivalent results to linear search + -- through all the pairs in the relation + n <- int' 0 100 + q <- int' (-5) 20 + triples <- listOf n (liftM3 (,,) (int' 0 10) (int' 0 20) (int' 0 1000)) + let pairs = [((x,y),z) | (x,y,z) <- triples ] + let r = R.fromList pairs + expect' $ + R.searchDom (\(x,_) -> compare x q) r + == + Set.fromList [ z | ((x,_),z) <- pairs, x == q ] + ok + ] diff --git a/parser-typechecker/tests/Unison/Test/VersionParser.hs b/parser-typechecker/tests/Unison/Test/VersionParser.hs index 64b5741a75..b5e62bdfea 100644 --- a/parser-typechecker/tests/Unison/Test/VersionParser.hs +++ b/parser-typechecker/tests/Unison/Test/VersionParser.hs @@ -21,6 +21,6 @@ makeTest (version, path) = scope (unpack version) $ expectEqual (rightMay $ runParser defaultBaseLib "versionparser" version) (Just - ( GitRepo "https://github.com/unisonweb/base" Nothing + ( ReadGitRepo "https://github.com/unisonweb/base" , Nothing , Path.fromText path )) diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 6dcf07184e..99266cb3bf 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -79,7 +79,6 @@ library Unison.Codebase.TranscriptParser Unison.Codebase.TypeEdit Unison.Codebase.Watch - Unison.Codecs Unison.CommandLine Unison.CommandLine.DisplayValues Unison.CommandLine.InputPattern @@ -116,6 +115,7 @@ library Unison.Runtime.Vector Unison.Server.Backend Unison.Server.CodebaseServer + Unison.Server.Doc Unison.Server.Endpoints.FuzzyFind Unison.Server.Endpoints.GetDefinitions Unison.Server.Endpoints.ListNamespace @@ -185,52 +185,43 @@ library , base16 >=0.2.1.0 , base64-bytestring , basement - , bifunctors , bytes , bytestring , cereal - , comonad - 
, concurrent-supply , configurator , containers >=0.6.3 , cryptonite , data-default , data-memocombinators , directory - , edit-distance , either , errors , exceptions , extra , filepath - , filepattern , fingertree - , free , fsnotify , fuzzyfind , generic-monoid - , guid , hashable , hashtables , haskeline , http-media , http-types - , io-streams , lens , megaparsec >=5.0.0 && <7.0.0 , memory , mmorph - , monad-loops , monad-validate , mtl - , murmur-hash , mutable-containers , natural-transformation , network , network-simple , nonempty-containers + , open-browser , openapi3 - , optparse-applicative + , optparse-applicative >=0.16.1.0 , pem , primitive , process @@ -241,12 +232,10 @@ library , safe , safe-exceptions , servant - , servant-auth-server , servant-docs , servant-openapi3 , servant-server , shellmet - , split , sqlite-simple , stm , strings @@ -265,10 +254,8 @@ library , unison-core1 , unison-util , unliftio - , unliftio-core + , uri-encode , utf8-string - , util - , validation , vector , wai , warp @@ -346,6 +333,7 @@ executable tests Unison.Test.Util.Bytes Unison.Test.Util.PinBoard Unison.Test.Util.Pretty + Unison.Test.Util.Relation Unison.Test.Var Unison.Test.VersionParser Paths_unison_parser_typechecker @@ -433,6 +421,8 @@ executable transcripts executable unison main-is: Main.hs other-modules: + ArgParse + Compat System.Path Version Paths_unison_parser_typechecker @@ -463,6 +453,7 @@ executable unison , lens , megaparsec , mtl + , optparse-applicative >=0.16.1.0 , safe , shellmet , template-haskell diff --git a/parser-typechecker/unison/ArgParse.hs b/parser-typechecker/unison/ArgParse.hs new file mode 100644 index 0000000000..f6671d7067 --- /dev/null +++ b/parser-typechecker/unison/ArgParse.hs @@ -0,0 +1,326 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ApplicativeDo #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +-- | This module handles parsing CLI arguments into 'Command's. 
+-- See the excellent documentation at https://hackage.haskell.org/package/optparse-applicative +module ArgParse where + +import Control.Applicative (Alternative((<|>), many), (<**>), optional, Applicative (liftA2)) +import Data.Foldable ( Foldable(fold) ) +import Data.Functor ((<&>)) +import Data.List.NonEmpty (NonEmpty) +import Options.Applicative + ( CommandFields + , Mod + , ParseError(ShowHelpText) + , Parser + , ParserInfo + , ParserPrefs + , action + , auto + , columns + , command + , customExecParser + , flag + , flag' + , footerDoc + , fullDesc + , headerDoc + , help + , helpShowGlobals + , helper + , hsubparser + , info + , long + , metavar + , option + , parserFailure + , prefs + , progDesc + , renderFailure + , showHelpOnError + , strArgument + , strOption + ) +import Options.Applicative.Help ( (<+>), bold ) +import System.Environment (lookupEnv) +import Text.Read (readMaybe) +import Unison.Server.CodebaseServer (CodebaseServerOpts(..)) +import Unison.Util.Pretty (Width(..)) +import qualified Data.List as List +import qualified Data.List.NonEmpty as NE +import qualified Options.Applicative.Help.Pretty as P +import qualified Unison.PrettyTerminal as PT +import qualified Unison.Server.CodebaseServer as Server + +-- The name of a symbol to execute. +type SymbolName = String + +-- | Valid ways to provide source code to the run command +data RunSource = + RunFromPipe SymbolName + | RunFromSymbol SymbolName + | RunFromFile FilePath SymbolName + deriving (Show, Eq) + +data ShouldForkCodebase + = UseFork + | DontFork + deriving (Show, Eq) + +data ShouldSaveCodebase + = SaveCodebase + | DontSaveCodebase + deriving (Show, Eq) + +data IsHeadless = Headless | WithCLI + deriving (Show, Eq) + +-- | Represents commands the cli can run. +-- +-- Note that this is not one-to-one with command-parsers since some are simple variants. +-- E.g. 
run, run.file, run.pipe +data Command + = Launch IsHeadless CodebaseServerOpts + | PrintVersion + | Init + | Run RunSource + | Transcript ShouldForkCodebase ShouldSaveCodebase (NonEmpty FilePath ) + | UpgradeCodebase + deriving (Show, Eq) + +data CodebaseFormat + = V1 + | V2 + deriving (Show, Eq) + +-- | Options shared by sufficiently many subcommands. +data GlobalOptions = GlobalOptions + { codebasePath :: Maybe FilePath + , codebaseFormat :: CodebaseFormat + } deriving (Show, Eq) + +-- | The root-level 'ParserInfo'. +rootParserInfo :: String -> String -> CodebaseServerOpts -> ParserInfo (GlobalOptions, Command) +rootParserInfo progName version envOpts = + info ((,) <$> globalOptionsParser <*> commandParser envOpts <**> helper) + ( fullDesc + <> headerDoc (Just $ unisonHelp progName version)) + +type UsageRenderer = + Maybe String -- ^ Optional sub-command to render help for + -> String + +-- | Parse the command description, options, and usage information from provided cli arguments. +parseCLIArgs :: String -> String -> IO (UsageRenderer, GlobalOptions, Command) +parseCLIArgs progName version = do + (Width cols) <- PT.getAvailableWidth + envOpts <- codebaseServerOptsFromEnv + let parserInfo = rootParserInfo progName version envOpts + let preferences = prefs $ showHelpOnError <> helpShowGlobals <> columns cols + let usage = renderUsage progName parserInfo preferences + (globalOptions, command) <- customExecParser preferences parserInfo + pure $ (usage, globalOptions, command) + +-- | Load default options from environment variables. +codebaseServerOptsFromEnv :: IO CodebaseServerOpts +codebaseServerOptsFromEnv = do + token <- lookupEnv Server.ucmTokenVar + host <- lookupEnv Server.ucmHostVar + port <- lookupEnv Server.ucmPortVar <&> (>>= readMaybe) + codebaseUIPath <- lookupEnv Server.ucmUIVar + pure $ CodebaseServerOpts {..} + +-- | Purely renders the full help summary for the CLI, or an optional subcommand. 
+renderUsage :: String -> ParserInfo a -> ParserPrefs -> Maybe String -> String +renderUsage programName pInfo preferences subCommand = + let showHelpFailure = parserFailure preferences pInfo (ShowHelpText subCommand) mempty + (helpText, _exitCode) = renderFailure showHelpFailure programName + in helpText + +versionCommand :: Mod CommandFields Command +versionCommand = command "version" (info versionParser (fullDesc <> progDesc "Print the version of unison you're running")) + +initCommand :: Mod CommandFields Command +initCommand = command "init" (info initParser (progDesc initHelp)) + where + initHelp = "Initialise a unison codebase" + +runSymbolCommand :: Mod CommandFields Command +runSymbolCommand = + command "run" (info runSymbolParser (fullDesc <> progDesc "Execute a definition from the codebase")) + +runFileCommand :: Mod CommandFields Command +runFileCommand = + command "run.file" (info runFileParser (fullDesc <> progDesc "Execute a definition from a file")) + +runPipeCommand :: Mod CommandFields Command +runPipeCommand = + command "run.pipe" (info runPipeParser (fullDesc <> progDesc "Execute code from stdin")) + +transcriptCommand :: Mod CommandFields Command +transcriptCommand = + command "transcript" (info transcriptParser (fullDesc <> progDesc transcriptHelp <> footerDoc transcriptFooter)) + where + transcriptHelp = "Execute transcript markdown files" + transcriptFooter = + Just . fold . List.intersperse P.line $ + [ "For each .md file provided this executes the transcript and creates" <+> bold ".output.md" <+> "if successful." + , "Exits after completion, and deletes the temporary directory created, unless --save-codebase is provided" + , "Multiple transcript files may be provided; they are processed in sequence" <+> "starting from the same codebase." 
+ ] + +transcriptForkCommand :: Mod CommandFields Command +transcriptForkCommand = + command "transcript.fork" (info transcriptForkParser (fullDesc <> progDesc transcriptHelp <> footerDoc transcriptFooter)) + where + transcriptHelp = "Execute transcript markdown files in a sandboxed codebase" + transcriptFooter = + Just . fold . List.intersperse P.line $ + [ "For each .md file provided this executes the transcript in a sandbox codebase and creates" <+> bold ".output.md" <+> "if successful." + , "Exits after completion, and deletes the temporary directory created, unless --save-codebase is provided" + , "Multiple transcript files may be provided; they are processed in sequence" <+> "starting from the same codebase." + ] + +upgradeCodebaseCommand :: Mod CommandFields Command +upgradeCodebaseCommand = + command "upgrade-codebase" (info (pure UpgradeCodebase) (fullDesc <> progDesc "Upgrades a v1 codebase to a v2 codebase")) + +commandParser :: CodebaseServerOpts -> Parser Command +commandParser envOpts = + hsubparser commands <|> launchParser envOpts WithCLI + where + commands = + fold [ versionCommand + , initCommand + , runSymbolCommand + , runFileCommand + , runPipeCommand + , transcriptCommand + , transcriptForkCommand + , upgradeCodebaseCommand + , launchHeadlessCommand envOpts + ] + +globalOptionsParser :: Parser GlobalOptions +globalOptionsParser = do -- ApplicativeDo + codebasePath <- codebasePathParser + codebaseFormat <- codebaseFormatParser + pure GlobalOptions{..} + +codebasePathParser :: Parser (Maybe FilePath) +codebasePathParser = + optional . 
strOption $ + long "codebase" + <> metavar "path/to/codebase" + <> help "The path to the codebase, defaults to the home directory" + +codebaseFormatParser :: Parser CodebaseFormat +codebaseFormatParser = + flag' V1 (long "old-codebase" <> help "Use a v1 codebase on startup.") + <|> flag' V2 (long "new-codebase" <> help "Use a v2 codebase on startup.") + <|> pure V2 + +launchHeadlessCommand :: CodebaseServerOpts -> Mod CommandFields Command +launchHeadlessCommand envOpts = + command "headless" (info (launchParser envOpts Headless) (progDesc headlessHelp)) + where + headlessHelp = "Runs the codebase server without the command-line interface." + +codebaseServerOptsParser :: CodebaseServerOpts -> Parser CodebaseServerOpts +codebaseServerOptsParser envOpts = do -- ApplicativeDo + cliToken <- tokenFlag <|> pure (token envOpts) + cliHost <- hostFlag <|> pure (host envOpts) + cliPort <- portFlag <|> pure (port envOpts) + cliCodebaseUIPath <- codebaseUIPathFlag <|> pure (codebaseUIPath envOpts) + pure CodebaseServerOpts + { token = cliToken <|> token envOpts + , host = cliHost <|> host envOpts + , port = cliPort <|> port envOpts + , codebaseUIPath = cliCodebaseUIPath <|> codebaseUIPath envOpts + } + where + tokenFlag = + optional . strOption + $ long "token" + <> metavar "STRING" + <> help "API auth token" + hostFlag = + optional . strOption + $ long "host" + <> metavar "STRING" + <> help "Codebase server host" + portFlag = + optional . option auto + $ long "port" + <> metavar "NUMBER" + <> help "Codebase server port" + codebaseUIPathFlag = + optional . 
strOption + $ long "ui" + <> metavar "DIR" + <> help "Path to codebase ui root" + +launchParser :: CodebaseServerOpts -> IsHeadless -> Parser Command +launchParser envOpts isHeadless = do -- ApplicativeDo + codebaseServerOpts <- codebaseServerOptsParser envOpts + pure (Launch isHeadless codebaseServerOpts) + +initParser :: Parser Command +initParser = pure Init + +versionParser :: Parser Command +versionParser = pure PrintVersion + +runSymbolParser :: Parser Command +runSymbolParser = + Run . RunFromSymbol <$> strArgument (metavar "SYMBOL") + +runFileParser :: Parser Command +runFileParser = do -- ApplicativeDo + pathTofile <- fileArgument "path/to/file" + symbolName <- strArgument (metavar "SYMBOL") + pure $ Run (RunFromFile pathTofile symbolName) + +runPipeParser :: Parser Command +runPipeParser = + Run . RunFromPipe <$> strArgument (metavar "SYMBOL") + +saveCodebaseFlag :: Parser ShouldSaveCodebase +saveCodebaseFlag = flag DontSaveCodebase SaveCodebase (long "save-codebase" <> help saveHelp) + where + saveHelp = "if set the resulting codebase will be saved to a new directory, otherwise it will be deleted" + +fileArgument :: String -> Parser FilePath +fileArgument varName = + strArgument ( metavar varName + <> action "file" -- Autocomplete file names + ) + +transcriptParser :: Parser Command +transcriptParser = do -- ApplicativeDo + shouldSaveCodebase <- saveCodebaseFlag + files <- liftA2 (NE.:|) (fileArgument "FILE") (many (fileArgument "FILES...")) + pure (Transcript DontFork shouldSaveCodebase files) + +transcriptForkParser :: Parser Command +transcriptForkParser = do -- ApplicativeDo + shouldSaveCodebase <- saveCodebaseFlag + files <- liftA2 (NE.:|) (fileArgument "FILE") (many (fileArgument "FILES...")) + pure (Transcript UseFork shouldSaveCodebase files) + +unisonHelp :: String -> String -> P.Doc +unisonHelp (P.text -> executable) (P.text -> version) = + fold . 
List.intersperse P.line $ + [ P.empty + , "🌻" + , P.empty + , P.bold "Usage instructions for the Unison Codebase Manager" + , "You are running version:" <+> version + , P.empty + , "To get started just run" <+> P.bold executable + , P.empty + , "Use" <+> P.bold (executable <+> "[command] --help") <+> "to show help for a command." + ] diff --git a/parser-typechecker/unison/Compat.hs b/parser-typechecker/unison/Compat.hs new file mode 100644 index 0000000000..393b533d25 --- /dev/null +++ b/parser-typechecker/unison/Compat.hs @@ -0,0 +1,33 @@ +{-# LANGUAGE CPP #-} +module Compat where + +import Control.Concurrent (mkWeakThreadId, myThreadId) +import Control.Exception (AsyncException (UserInterrupt), throwTo) +import System.Mem.Weak (deRefWeak) + +#if defined(mingw32_HOST_OS) +import qualified GHC.ConsoleHandler as WinSig +#else +import qualified System.Posix.Signals as Sig +#endif + +installSignalHandlers :: IO () +installSignalHandlers = do + main_thread <- myThreadId + wtid <- mkWeakThreadId main_thread + let interrupt = do + r <- deRefWeak wtid + case r of + Nothing -> return () + Just t -> throwTo t UserInterrupt + +#if defined(mingw32_HOST_OS) + let sig_handler WinSig.ControlC = interrupt + sig_handler WinSig.Break = interrupt + sig_handler _ = return () + _ <- WinSig.installHandler (WinSig.Catch sig_handler) +#else + _ <- Sig.installHandler Sig.sigQUIT (Sig.Catch interrupt) Nothing + _ <- Sig.installHandler Sig.sigINT (Sig.Catch interrupt) Nothing +#endif + return () diff --git a/parser-typechecker/unison/Main.hs b/parser-typechecker/unison/Main.hs index 6ced6b8a0b..9026d9ef0f 100644 --- a/parser-typechecker/unison/Main.hs +++ b/parser-typechecker/unison/Main.hs @@ -1,40 +1,30 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ApplicativeDo #-} {-# OPTIONS_GHC -Wno-partial-type-signatures #-} +{-# LANGUAGE NamedFieldPuns #-} module Main where 
-#if defined(mingw32_HOST_OS) -import qualified GHC.ConsoleHandler as WinSig -#else -import qualified System.Posix.Signals as Sig -#endif - -import Control.Concurrent (mkWeakThreadId, myThreadId, newEmptyMVar, takeMVar) +import Control.Concurrent (newEmptyMVar, takeMVar) import Control.Error.Safe (rightMay) -import Control.Exception (AsyncException (UserInterrupt), throwTo) -import Data.ByteString.Char8 (unpack) import Data.Configurator.Types (Config) import qualified Data.Text as Text import qualified GHC.Conc -import qualified Network.URI.Encode as URI import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryRecursive) -import System.Environment (getArgs, getProgName) +import System.Environment (getProgName) import qualified System.Exit as Exit import qualified System.FilePath as FP import System.IO.Error (catchIOError) import qualified System.IO.Temp as Temp -import System.Mem.Weak (deRefWeak) import qualified System.Path as Path import Text.Megaparsec (runParser) import qualified Unison.Codebase as Codebase import qualified Unison.Codebase.Init as Codebase import qualified Unison.Codebase.Editor.Input as Input -import Unison.Codebase.Editor.RemoteRepo (RemoteNamespace) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace) import qualified Unison.Codebase.Editor.VersionParser as VP import Unison.Codebase.Execute (execute) import qualified Unison.Codebase.FileCodebase as FC @@ -45,6 +35,7 @@ import Unison.CommandLine (plural', watchConfig) import qualified Unison.CommandLine.Main as CommandLine import Unison.Parser (Ann) import Unison.Prelude +import qualified Unison.Codebase.Runtime as Rt import qualified Unison.PrettyTerminal as PT import qualified Unison.Runtime.Interface as RTI import qualified Unison.Server.CodebaseServer as Server @@ -52,185 +43,97 @@ import Unison.Symbol (Symbol) import qualified Unison.Util.Pretty as P import qualified Version import qualified Unison.Codebase.Conversion.Upgrade12 as Upgrade12 - -usage :: 
String -> P.Pretty P.ColorText -usage executableStr = P.callout "🌻" $ P.lines [ - P.bold "Usage instructions for the Unison Codebase Manager", - "You are running version: " <> P.string Version.gitDescribe, - "", - P.bold executable, - P.wrap "Starts Unison interactively, using the codebase in the home directory.", - "", - P.bold $ executable <> " -codebase path/to/codebase", - P.wrap "Starts Unison interactively, using the specified codebase. This flag can also be set before any of the below commands.", - "", - P.bold $ executable <> " --old-codebase", - P.wrap $ "Starts Unison using a v1 codebase. This flag can also be set before any of the below commands.", - "", - P.bold $ executable <> " run .mylib.mymain", - P.wrap "Executes the definition `.mylib.mymain` from the codebase, then exits.", - "", - P.bold $ executable <> " run.file foo.u mymain", - P.wrap "Executes the definition called `mymain` in `foo.u`, then exits.", - "", - P.bold $ executable <> " run.pipe mymain", - P.wrap "Executes the definition called `mymain` from a `.u` file read from the standard input, then exits.", - "", - P.bold $ executable <> " transcript mytranscript.md", - P.wrap $ "Executes the `mytranscript.md` transcript and creates" - <> "`mytranscript.output.md` if successful. Exits after completion, and deletes" - <> "the temporary directory created." - <> "Multiple transcript files may be provided; they are processed in sequence" - <> "starting from the same codebase.", - "", - P.bold $ executable <> " transcript -save-codebase mytranscript.md", - P.wrap $ "Executes the `mytranscript.md` transcript and creates" - <> "`mytranscript.output.md` if successful. Exits after completion, and saves" - <> "the resulting codebase to a new directory on disk." 
- <> "Multiple transcript files may be provided; they are processed in sequence" - <> "starting from the same codebase.", - "", - P.bold $ executable <> " transcript.fork mytranscript.md", - P.wrap $ "Executes the `mytranscript.md` transcript in a copy of the current codebase" - <> "and creates `mytranscript.output.md` if successful. Exits after completion." - <> "Multiple transcript files may be provided; they are processed in sequence" - <> "starting from the same codebase.", - "", - P.bold $ executable <> " transcript.fork -save-codebase mytranscript.md", - P.wrap $ "Executes the `mytranscript.md` transcript in a copy of the current codebase" - <> "and creates `mytranscript.output.md` if successful. Exits after completion," - <> "and saves the resulting codebase to a new directory on disk." - <> "Multiple transcript files may be provided; they are processed in sequence" - <> "starting from the same codebase.", - "", - P.bold $ executable <> " upgrade-codebase", - "Upgrades a v1 codebase to a v2 codebase.", - "", - P.bold $ executable <> " headless", - "Runs the codebase server without the command-line interface.", - "", - P.bold $ executable <> " version", - "Prints version of Unison then quits.", - "", - P.bold $ executable <> " help", - "Prints this help."] - where executable = (P.text . 
Text.pack) executableStr - -installSignalHandlers :: IO () -installSignalHandlers = do - main_thread <- myThreadId - wtid <- mkWeakThreadId main_thread - - let interrupt = do - r <- deRefWeak wtid - case r of - Nothing -> return () - Just t -> throwTo t UserInterrupt - -#if defined(mingw32_HOST_OS) - let sig_handler WinSig.ControlC = interrupt - sig_handler WinSig.Break = interrupt - sig_handler _ = return () - _ <- WinSig.installHandler (WinSig.Catch sig_handler) -#else - _ <- Sig.installHandler Sig.sigQUIT (Sig.Catch interrupt) Nothing - _ <- Sig.installHandler Sig.sigINT (Sig.Catch interrupt) Nothing -#endif - - return () - - -data CodebaseFormat = V1 | V2 deriving (Eq) +import Compat ( installSignalHandlers ) +import ArgParse + ( UsageRenderer, + GlobalOptions(GlobalOptions, codebasePath, codebaseFormat), + CodebaseFormat(..), + Command(Launch, PrintVersion, Init, Run, Transcript, + UpgradeCodebase), + IsHeadless(WithCLI, Headless), + ShouldSaveCodebase(..), + ShouldForkCodebase(..), + RunSource(RunFromPipe, RunFromSymbol, RunFromFile), + parseCLIArgs ) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NonEmpty cbInitFor :: CodebaseFormat -> Codebase.Init IO Symbol Ann cbInitFor = \case V1 -> FC.init; V2 -> SC.init main :: IO () main = do - args <- getArgs progName <- getProgName -- hSetBuffering stdout NoBuffering -- cool - _ <- installSignalHandlers - -- We need to know whether the program was invoked with -codebase for - -- certain messages. 
Therefore we keep a Maybe FilePath - mcodepath - -- rather than just deciding on whether to use the supplied path or - -- the home directory here and throwing away that bit of information - let (mcodepath, restargs0) = case args of - "-codebase" : codepath : restargs -> (Just codepath, restargs) - _ -> (Nothing, args) - (fromMaybe V2 -> cbFormat, restargs) = case restargs0 of - "--new-codebase" : rest -> (Just V2, rest) - "--old-codebase" : rest -> (Just V1, rest) - _ -> (Nothing, restargs0) - cbInit = case cbFormat of V1 -> FC.init; V2 -> SC.init + void installSignalHandlers + (renderUsageInfo, globalOptions, command) <- parseCLIArgs progName Version.gitDescribe + let GlobalOptions{codebasePath=mcodepath, codebaseFormat=cbFormat} = globalOptions + let cbInit = cbInitFor cbFormat currentDir <- getCurrentDirectory configFilePath <- getConfigFilePath mcodepath config <- catchIOError (watchConfig configFilePath) $ \_ -> Exit.die "Your .unisonConfig could not be loaded. Check that it's correct!" - case restargs of - [version] | isFlag "version" version -> - putStrLn $ progName ++ " version: " ++ Version.gitDescribe - [help] | isFlag "help" help -> PT.putPrettyLn (usage progName) - ["init"] -> Codebase.initCodebaseAndExit cbInit "main.init" mcodepath - "run" : [mainName] -> do + case command of + PrintVersion -> + putStrLn $ progName ++ " version: " ++ Version.gitDescribe + Init -> + Codebase.initCodebaseAndExit cbInit "main.init" mcodepath + Run (RunFromSymbol mainName) -> do (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath runtime <- RTI.startRuntime execute theCodebase runtime mainName closeCodebase - "run.file" : file : [mainName] | isDotU file -> do - e <- safeReadUtf8 file - case e of - Left _ -> PT.putPrettyLn $ P.callout "⚠️" "I couldn't find that file or it is for some reason unreadable." 
- Right contents -> do - (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath - let fileEvent = Input.UnisonFileChanged (Text.pack file) contents - launch currentDir config theCodebase [Left fileEvent, Right $ Input.ExecuteI mainName, Right Input.QuitI] - closeCodebase - "run.pipe" : [mainName] -> do + Run (RunFromFile file mainName) + | not (isDotU file) -> PT.putPrettyLn $ P.callout "⚠️" "Files must have a .u extension." + | otherwise -> do + e <- safeReadUtf8 file + case e of + Left _ -> PT.putPrettyLn $ P.callout "⚠️" "I couldn't find that file or it is for some reason unreadable." + Right contents -> do + (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath + rt <- RTI.startRuntime + let fileEvent = Input.UnisonFileChanged (Text.pack file) contents + launch currentDir config rt theCodebase [Left fileEvent, Right $ Input.ExecuteI mainName, Right Input.QuitI] Nothing + closeCodebase + Run (RunFromPipe mainName) -> do e <- safeReadUtf8StdIn case e of Left _ -> PT.putPrettyLn $ P.callout "⚠️" "I had trouble reading this input." 
Right contents -> do (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath + rt <- RTI.startRuntime let fileEvent = Input.UnisonFileChanged (Text.pack "") contents launch - currentDir config theCodebase + currentDir config rt theCodebase [Left fileEvent, Right $ Input.ExecuteI mainName, Right Input.QuitI] + Nothing closeCodebase - "transcript" : args' -> - case args' of - "-save-codebase" : transcripts -> runTranscripts cbFormat False True mcodepath transcripts - _ -> runTranscripts cbFormat False False mcodepath args' - "transcript.fork" : args' -> - case args' of - "-save-codebase" : transcripts -> runTranscripts cbFormat True True mcodepath transcripts - _ -> runTranscripts cbFormat True False mcodepath args' - ["upgrade-codebase"] -> upgradeCodebase mcodepath - args -> do - let headless = listToMaybe args == Just "headless" - (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath - Server.start theCodebase $ \token port -> do - let url = - "http://127.0.0.1:" <> show port <> "/" <> URI.encode (unpack token) - when headless $ - PT.putPrettyLn $ P.lines - ["I've started the codebase API server at" , P.string $ url <> "/api"] - PT.putPrettyLn $ P.lines - ["The Unison Codebase UI is running at", P.string $ url <> "/ui"] - if headless then do - PT.putPrettyLn $ P.string "Running the codebase manager headless with " - <> P.shown GHC.Conc.numCapabilities - <> " " - <> plural' GHC.Conc.numCapabilities "cpu" "cpus" - <> "." - mvar <- newEmptyMVar - takeMVar mvar - else do - PT.putPrettyLn $ P.string "Now starting the Unison Codebase Manager..." 
- launch currentDir config theCodebase [] - closeCodebase + Transcript shouldFork shouldSaveCodebase transcriptFiles -> + runTranscripts renderUsageInfo cbFormat shouldFork shouldSaveCodebase mcodepath transcriptFiles + UpgradeCodebase -> upgradeCodebase mcodepath + Launch isHeadless codebaseServerOpts -> do + (closeCodebase, theCodebase) <- getCodebaseOrExit cbFormat mcodepath + runtime <- RTI.startRuntime + Server.startServer codebaseServerOpts runtime theCodebase $ \baseUrl -> do + PT.putPrettyLn $ P.lines + ["The Unison Codebase UI is running at", P.string $ Server.urlFor Server.UI baseUrl] + case isHeadless of + Headless -> do + PT.putPrettyLn $ P.lines + ["I've started the codebase API server at" , P.string $ Server.urlFor Server.Api baseUrl] + PT.putPrettyLn $ P.string "Running the codebase manager headless with " + <> P.shown GHC.Conc.numCapabilities + <> " " + <> plural' GHC.Conc.numCapabilities "cpu" "cpus" + <> "." + mvar <- newEmptyMVar + takeMVar mvar + WithCLI -> do + PT.putPrettyLn $ P.string "Now starting the Unison Codebase Manager..." + launch currentDir config runtime theCodebase [] (Just baseUrl) + closeCodebase upgradeCodebase :: Maybe Codebase.CodebasePath -> IO () upgradeCodebase mcodepath = @@ -249,88 +152,91 @@ upgradeCodebase mcodepath = <> "but there's no rush. You can access the old codebase again by passing the" <> P.backticked "--old-codebase" <> "flag at startup." 
-prepareTranscriptDir :: CodebaseFormat -> Bool -> Maybe FilePath -> IO FilePath -prepareTranscriptDir cbFormat inFork mcodepath = do +prepareTranscriptDir :: CodebaseFormat -> ShouldForkCodebase -> Maybe FilePath -> IO FilePath +prepareTranscriptDir cbFormat shouldFork mcodepath = do tmp <- Temp.getCanonicalTemporaryDirectory >>= (`Temp.createTempDirectory` "transcript") let cbInit = cbInitFor cbFormat - if inFork then - getCodebaseOrExit cbFormat mcodepath >> do - path <- Codebase.getCodebaseDir mcodepath - PT.putPrettyLn $ P.lines [ - P.wrap "Transcript will be run on a copy of the codebase at: ", "", - P.indentN 2 (P.string path) - ] - Path.copyDir (Codebase.codebasePath cbInit path) (Codebase.codebasePath cbInit tmp) - else do - PT.putPrettyLn . P.wrap $ "Transcript will be run on a new, empty codebase." - void $ Codebase.openNewUcmCodebaseOrExit cbInit "main.transcript" tmp + case shouldFork of + UseFork -> do + getCodebaseOrExit cbFormat mcodepath + path <- Codebase.getCodebaseDir mcodepath + PT.putPrettyLn $ P.lines [ + P.wrap "Transcript will be run on a copy of the codebase at: ", "", + P.indentN 2 (P.string path) + ] + Path.copyDir (Codebase.codebasePath cbInit path) (Codebase.codebasePath cbInit tmp) + DontFork -> do + PT.putPrettyLn . P.wrap $ "Transcript will be run on a new, empty codebase." 
+ void $ Codebase.openNewUcmCodebaseOrExit cbInit "main.transcript" tmp pure tmp runTranscripts' :: CodebaseFormat -> Maybe FilePath -> FilePath - -> [String] + -> NonEmpty String -> IO Bool runTranscripts' codebaseFormat mcodepath transcriptDir args = do currentDir <- getCurrentDirectory - case args of - args@(_:_) -> do - for_ args $ \arg -> case arg of - md | isMarkdown md -> do - parsed <- TR.parseFile arg - case parsed of - Left err -> - PT.putPrettyLn $ P.callout "❓" ( - P.lines [ - P.indentN 2 "A parsing error occurred while reading a file:", "", - P.indentN 2 $ P.string err]) - Right stanzas -> do - configFilePath <- getConfigFilePath mcodepath - (closeCodebase, theCodebase) <- getCodebaseOrExit codebaseFormat $ Just transcriptDir - mdOut <- TR.run transcriptDir configFilePath stanzas theCodebase - closeCodebase - let out = currentDir FP. - FP.addExtension (FP.dropExtension arg ++ ".output") - (FP.takeExtension md) - writeUtf8 out mdOut - putStrLn $ "💾 Wrote " <> out - wat -> - PT.putPrettyLn $ P.callout "❓" ( - P.lines [ - P.indentN 2 "Unrecognized command, skipping:", "", - P.indentN 2 $ P.string wat]) - pure True - [] -> - pure False + let (markdownFiles, invalidArgs) = NonEmpty.partition isMarkdown args + for_ markdownFiles $ \fileName -> do + parsed <- TR.parseFile fileName + case parsed of + Left err -> + PT.putPrettyLn $ P.callout "❓" ( + P.lines [ + P.indentN 2 "A parsing error occurred while reading a file:", "", + P.indentN 2 $ P.string err]) + Right stanzas -> do + configFilePath <- getConfigFilePath mcodepath + (closeCodebase, theCodebase) <- getCodebaseOrExit codebaseFormat $ Just transcriptDir + mdOut <- TR.run transcriptDir configFilePath stanzas theCodebase + closeCodebase + let out = currentDir FP. + FP.addExtension (FP.dropExtension fileName ++ ".output") + (FP.takeExtension fileName) + writeUtf8 out mdOut + putStrLn $ "💾 Wrote " <> out + + when (not . 
null $ invalidArgs) $ do + PT.putPrettyLn $ P.callout "❓" ( + P.lines + [ P.indentN 2 "Transcripts must have an .md or .markdown extension." + , P.indentN 2 "Skipping the following invalid files:" + , "" + , P.bulleted $ fmap (P.bold . P.string . (<> "\n")) invalidArgs + ]) + pure True runTranscripts - :: CodebaseFormat - -> Bool - -> Bool + :: UsageRenderer + -> CodebaseFormat + -> ShouldForkCodebase + -> ShouldSaveCodebase -> Maybe FilePath - -> [String] + -> NonEmpty String -> IO () -runTranscripts cbFormat inFork keepTemp mcodepath args = do +runTranscripts renderUsageInfo cbFormat shouldFork shouldSaveTempCodebase mcodepath args = do progName <- getProgName - transcriptDir <- prepareTranscriptDir cbFormat inFork mcodepath + transcriptDir <- prepareTranscriptDir cbFormat shouldFork mcodepath completed <- runTranscripts' cbFormat (Just transcriptDir) transcriptDir args - when completed $ do - unless keepTemp $ removeDirectoryRecursive transcriptDir - when keepTemp $ PT.putPrettyLn $ - P.callout "🌸" ( - P.lines [ - "I've finished running the transcript(s) in this codebase:", "", - P.indentN 2 (P.string transcriptDir), "", - P.wrap $ "You can run" - <> P.backticked (P.string progName <> " -codebase " <> P.string transcriptDir) - <> "to do more work with it."]) - - unless completed $ do - unless keepTemp $ removeDirectoryRecursive transcriptDir - PT.putPrettyLn (usage progName) - Exit.exitWith (Exit.ExitFailure 1) + case shouldSaveTempCodebase of + DontSaveCodebase -> removeDirectoryRecursive transcriptDir + SaveCodebase -> + if completed + then + PT.putPrettyLn $ + P.callout "🌸" ( + P.lines [ + "I've finished running the transcript(s) in this codebase:", "", + P.indentN 2 (P.string transcriptDir), "", + P.wrap $ "You can run" + <> P.backticked (P.string progName <> " -codebase " <> P.string transcriptDir) + <> "to do more work with it."]) + else do + putStrLn (renderUsageInfo $ Just "transcript") + Exit.exitWith (Exit.ExitFailure 1) initialPath :: Path.Absolute 
initialPath = Path.absoluteEmpty @@ -338,11 +244,22 @@ initialPath = Path.absoluteEmpty launch :: FilePath -> (Config, IO ()) - -> _ + -> Rt.Runtime Symbol + -> Codebase.Codebase IO Symbol Ann -> [Either Input.Event Input.Input] + -> Maybe Server.BaseUrl -> IO () -launch dir config code inputs = - CommandLine.main dir defaultBaseLib initialPath config inputs code Version.gitDescribe +launch dir config runtime codebase inputs serverBaseUrl = + CommandLine.main + dir + defaultBaseLib + initialPath + config + inputs + runtime + codebase + Version.gitDescribe + serverBaseUrl isMarkdown :: String -> Bool isMarkdown md = case FP.takeExtension md of @@ -361,7 +278,7 @@ isFlag f arg = arg == f || arg == "-" ++ f || arg == "--" ++ f getConfigFilePath :: Maybe FilePath -> IO FilePath getConfigFilePath mcodepath = (FP. ".unisonConfig") <$> Codebase.getCodebaseDir mcodepath -defaultBaseLib :: Maybe RemoteNamespace +defaultBaseLib :: Maybe ReadRemoteNamespace defaultBaseLib = rightMay $ runParser VP.defaultBaseLib "version" (Text.pack Version.gitDescribe) diff --git a/parser-typechecker/unison/Version.hs b/parser-typechecker/unison/Version.hs index da45288409..98b0d4115a 100644 --- a/parser-typechecker/unison/Version.hs +++ b/parser-typechecker/unison/Version.hs @@ -8,9 +8,18 @@ import Language.Haskell.TH.Syntax (Exp(LitE), Lit(StringL)) import Shellmet import Data.Text +-- | Uses Template Haskell to embed a git descriptor of the commit +-- which was used to build the executable. gitDescribe :: String -gitDescribe = $( fmap (LitE . StringL . unpack) . runIO $ - "git" $| ["describe", "--tags", "--always", "--dirty='"] - $? pure "unknown" +gitDescribe = $( fmap (LitE . StringL . unpack) . runIO $ do + let formatDate d = " (built on " <> d <> ")" + -- Outputs date of current commit; E.g. 2021-08-06 + let getDate = "git" $| ["show", "-s", "--format=%cs"] + date <- (formatDate <$> getDate) $? pure "" + -- Fetches a unique tag-name to represent the current commit. 
+ -- Uses human-readable names whenever possible. + -- Marks version with a `'` suffix if building on a dirty worktree. + let getTag = "git" $| ["describe", "--tags", "--always", "--dirty='"] + tag <- getTag $? pure "unknown" + pure (tag <> date) ) - diff --git a/stack.yaml b/stack.yaml index 488e913269..08749cd1e2 100644 --- a/stack.yaml +++ b/stack.yaml @@ -1,5 +1,5 @@ flags: - haskeline: + haskeline: terminfo: false allow-newer: true # async package has needlessly strict upper bound @@ -22,7 +22,7 @@ packages: - codebase2/util-term #compiler-check: match-exact -resolver: lts-17.5 +resolver: lts-17.15 extra-deps: - github: unisonweb/configurator @@ -31,27 +31,14 @@ extra-deps: commit: 2944b11d19ee034c48276edc991736105c9d6143 - github: unisonweb/megaparsec commit: c4463124c578e8d1074c04518779b5ce5957af6b -- github: biocad/openapi3 - commit: bd9df532f2381c4b22fe86ef722715088f5cfa68 -- github: biocad/servant-openapi3 - commit: deb32b7ce166aa86092f7e46ed2cd3cf43d540a4 -- base16-0.3.0.1@sha256:22e62f1283adb1fbc81de95f404b0c4039e69e90d92dac8c1bfca0d04941a749,2303 -- concurrent-supply-0.1.8@sha256:9373f4868ad28936a7b93781b214ef4afdeacf377ef4ac729583073491c9f9fb,1627 - guid-0.1.0@sha256:a7c975be473f6f142d5cc1b39bc807a99043d20b1bb0873fdfe7a3ce84d2faf1,1078 - prelude-extras-0.4.0.3@sha256:1c10b0123ea13a6423d74a8fcbaeb2d5249b472588abde418a36b47b7c4f48c8,1163 - sandi-0.5@sha256:b278d072ca717706ea38f9bd646e023f7f2576a778fb43565b434f93638849aa,3010 - strings-1.1@sha256:0285dec4c8ab262359342b3e5ef1eb567074669461b9b38404f1cb870c881c5c,1617 -- aeson-deriving-0.1.1.1@sha256:0b2b6dfdfdf57bb6b3db5978a9e60ba6345b7d48fa254cddb2c76da7d96f8c26,2714 -- servant-0.18@sha256:2b5c81089540c208b1945b5ca0c3551c862138d71b224a39fa275a62852a5c75,5068 -- servant-server-0.18 -- servant-docs-0.11.6 -- servant-auth-server-0.4.6.0@sha256:b411b44f4252e91e5da2455d71a7113c8b5b8ff2d943d19b2ddedcfcf0392351,5111 -- 
servant-auth-0.4.0.0@sha256:01d02dfb7df4747fc96442517146d7d4ab0e575e597a124e238e8763036ea4ff,2125 -- ListLike-4.7.4 -- sqlite-simple-0.4.18.0@sha256:3ceea56375c0a3590c814e411a4eb86943f8d31b93b110ca159c90689b6b39e5,3002 -- direct-sqlite-2.3.26@sha256:04e835402f1508abca383182023e4e2b9b86297b8533afbd4e57d1a5652e0c23,3718 - random-1.2.0 # remove these when stackage upgrades containers +# (these = containers 0.6.4, text-1.2.4, binary-0.8.8, parsec-3.1.14, Cabal-3.2.1.0) +# see https://github.com/unisonweb/unison/pull/1807#issuecomment-777069869 - containers-0.6.4.1 - text-1.2.4.1 - binary-0.8.8.0 @@ -59,6 +46,7 @@ extra-deps: - Cabal-3.2.1.0 - fuzzyfind-3.0.0 - monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505 +- optparse-applicative-0.16.1.0 # We need some features from the most recent revision ghc-options: # All packages diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index eff7d13af4..efeacb373e 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -175,6 +175,7 @@ pattern Cycle' vs t <- Term _ _ (Cycle (AbsN' vs t)) -- pattern Abs' v body <- Term _ _ (Abs v body) pattern Abs' subst <- (unabs1 -> Just subst) pattern AbsN' vs body <- (unabs -> (vs, body)) +{-# COMPLETE AbsN' #-} pattern Tm' f <- Term _ _ (Tm f) pattern CycleA' a avs t <- Term _ a (Cycle (AbsNA' avs t)) pattern AbsNA' avs body <- (unabsA -> (avs, body)) @@ -719,7 +720,7 @@ hash = hash' [] where die = error $ "unknown var in environment: " ++ show v ++ " environment = " ++ show env Cycle (AbsN' vs t) -> hash' (Left vs : env) t - Cycle t -> hash' env t + -- Cycle t -> hash' env t Abs v t -> hash' (Right v : env) t Tm t -> Hashable.hash1 (hashCycle env) (hash' env) t diff --git a/unison-core/src/Unison/HashQualified'.hs b/unison-core/src/Unison/HashQualified'.hs index d54f5cf1fe..fa8380f605 100644 --- a/unison-core/src/Unison/HashQualified'.hs +++ b/unison-core/src/Unison/HashQualified'.hs @@ -1,4 +1,5 
@@ {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE OverloadedStrings #-} module Unison.HashQualified' where @@ -19,7 +20,7 @@ import qualified Unison.ShortHash as SH import qualified Unison.HashQualified as HQ data HashQualified n = NameOnly n | HashQualified n ShortHash - deriving (Eq, Functor, Generic) + deriving (Eq, Functor, Generic, Foldable) type HQSegment = HashQualified NameSegment @@ -112,10 +113,14 @@ requalify hq r = case hq of NameOnly n -> fromNamedReferent n r HashQualified n _ -> fromNamedReferent n r -instance Ord n => Ord (HashQualified n) where - compare a b = case compare (toName a) (toName b) of - EQ -> compare (toHash a) (toHash b) - o -> o +-- `HashQualified` is usually used for display, so we sort it alphabetically +instance Name.Alphabetical n => Ord (HashQualified n) where + compare (NameOnly n) (NameOnly n2) = Name.compareAlphabetical n n2 + -- NameOnly comes first + compare NameOnly{} HashQualified{} = LT + compare HashQualified{} NameOnly{} = GT + compare (HashQualified n sh) (HashQualified n2 sh2) = + Name.compareAlphabetical n n2 <> compare sh sh2 instance IsString (HashQualified Name) where fromString = unsafeFromText . 
Text.pack diff --git a/unison-core/src/Unison/HashQualified.hs b/unison-core/src/Unison/HashQualified.hs index ff8ecef7f7..8718fedfdf 100644 --- a/unison-core/src/Unison/HashQualified.hs +++ b/unison-core/src/Unison/HashQualified.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} @@ -20,7 +22,7 @@ import qualified Unison.Var as Var data HashQualified n = NameOnly n | HashOnly ShortHash | HashQualified n ShortHash - deriving (Eq, Functor, Show, Generic) + deriving (Eq, Foldable, Traversable, Functor, Show, Generic) stripNamespace :: Text -> HashQualified Name -> HashQualified Name stripNamespace namespace hq = case hq of @@ -157,15 +159,24 @@ requalify hq r = case hq of HashQualified n _ -> fromNamedReferent n r HashOnly _ -> fromReferent r --- this implementation shows HashOnly before the others, because None < Some. --- Flip it around carefully if HashOnly should come last. -instance Ord n => Ord (HashQualified n) where - compare a b = case compare (toName a) (toName b) of - EQ -> compare (toHash a) (toHash b) - o -> o +-- Ordered alphabetically, based on the name. Hashes come last. 
+instance (Eq n, Name.Alphabetical n) => Ord (HashQualified n) where + compare a b = case (toName a, toName b) of + (Just n , Just n2) -> Name.compareAlphabetical n n2 + (Nothing, Just _) -> GT + (Just _ , Nothing) -> LT + (Nothing, Nothing) -> EQ + <> + case (toHash a, toHash b) of + (Nothing, Nothing) -> EQ + (Nothing, Just _) -> LT -- prefer NameOnly to HashQualified + (Just _, Nothing) -> GT + (Just sh, Just sh2) -> compare sh sh2 instance Convert n n2 => Convert (HashQualified n) (HashQualified n2) where convert = fmap Name.convert +instance Convert n (HashQualified n) where + convert = NameOnly instance Parse Text (HashQualified Name) where parse = fromText diff --git a/unison-core/src/Unison/Name.hs b/unison-core/src/Unison/Name.hs index 93e7acb2e4..b2e5a0de1d 100644 --- a/unison-core/src/Unison/Name.hs +++ b/unison-core/src/Unison/Name.hs @@ -13,17 +13,25 @@ module Unison.Name , isPrefixOf , joinDot , makeAbsolute + , isAbsolute , parent + , module Unison.Util.Alphabetical , sortNames , sortNamed + , sortNameds , sortByText , sortNamed' , stripNamePrefix , stripPrefixes , segments + , reverseSegments , countSegments + , compareSuffix , segments' , suffixes + , searchBySuffix + , suffixFrom + , shortestUniqueSuffix , toString , toText , toVar @@ -45,14 +53,17 @@ import Unison.NameSegment ( NameSegment(NameSegment) import Control.Lens ( unsnoc ) import qualified Control.Lens as Lens import qualified Data.Text as Text +import qualified Data.Set as Set import qualified Unison.Hashable as H +import Unison.Util.Alphabetical (Alphabetical,compareAlphabetical) +import qualified Unison.Util.Relation as R import Unison.Var ( Var ) import qualified Unison.Var as Var import qualified Data.RFC5051 as RFC5051 -import Data.List ( sortBy, tails ) +import Data.List ( sortBy, tails, inits, find ) newtype Name = Name { toText :: Text } - deriving (Eq, Ord, Monoid, Semigroup, Generic) + deriving (Eq, Monoid, Semigroup, Generic) sortNames :: [Name] -> [Name] sortNames = 
sortNamed id @@ -60,6 +71,9 @@ sortNames = sortNamed id sortNamed :: (a -> Name) -> [a] -> [a] sortNamed by = sortByText (toText . by) +sortNameds :: (a -> [Name]) -> [a] -> [a] +sortNameds by = sortByText (Text.intercalate "." . map toText . by) + sortByText :: (a -> Text) -> [a] -> [a] sortByText by as = let as' = [ (a, by a) | a <- as ] @@ -120,6 +134,18 @@ stripNamePrefix prefix name = where mid = if toText prefix == "." then "" else "." +-- suffixFrom Int builtin.Int.+ ==> Int.+ +-- suffixFrom Int Int.negate ==> Int.negate +-- +-- Currently used as an implementation detail of expanding wildcard +-- imports, (like `use Int` should catch `builtin.Int.+`) +-- but it may be generally useful elsewhere. See `expandWildcardImports` +-- for details. +suffixFrom :: Name -> Name -> Maybe Name +suffixFrom mid overall = case Text.breakOnAll (toText mid) (toText overall) of + [] -> Nothing + (_, rem):_ -> Just (Name rem) + -- a.b.c.d -> d stripPrefixes :: Name -> Name stripPrefixes = maybe "" fromSegment . lastMay . segments @@ -178,9 +204,73 @@ fromSegment = unsafeFromText . NameSegment.toText segments :: Name -> [NameSegment] segments (Name n) = NameSegment <$> segments' n +reverseSegments :: Name -> [NameSegment] +reverseSegments (Name n) = NameSegment <$> NameSegment.reverseSegments' n + countSegments :: Name -> Int countSegments n = length (segments n) +-- The `Ord` instance for `Name` considers the segments of the name +-- starting from the last, enabling efficient search by name suffix. +-- +-- To order names alphabetically for purposes of display to a human, +-- `sortNamed` or one of its variants should be used, which provides a +-- Unicode and capitalization aware sorting (based on RFC5051). 
+instance Ord Name where + compare n1 n2 = + (reverseSegments n1 `compare` reverseSegments n2) + <> (isAbsolute n1 `compare` isAbsolute n2) + +instance Alphabetical Name where + compareAlphabetical (Name n1) (Name n2) = compareAlphabetical n1 n2 + +isAbsolute :: Name -> Bool +isAbsolute (Name n) = Text.isPrefixOf "." n + +-- If there are no exact matches for `suffix` in `rel`, find all +-- `r` in `rel` whose corresponding name has `suffix` as a suffix. +-- For example, `searchBySuffix List.map {(base.List.map, r1)}` +-- will return `{r1}`. +-- +-- NB: Implementation uses logarithmic time lookups, not a linear scan. +searchBySuffix :: (Ord r) => Name -> R.Relation Name r -> Set r +searchBySuffix suffix rel = + R.lookupDom suffix rel `orElse` R.searchDom (compareSuffix suffix) rel + where + orElse s1 s2 = if Set.null s1 then s2 else s1 + +-- `compareSuffix suffix n` is equal to `compare n' suffix`, where +-- n' is `n` with only the last `countSegments suffix` segments. +-- +-- Used for suffix-based lookup of a name. For instance, given a `r : Relation Name x`, +-- `Relation.searchDom (compareSuffix "foo.bar") r` will find all `r` whose name +-- has `foo.bar` as a suffix. +compareSuffix :: Name -> Name -> Ordering +compareSuffix suffix = + let + suffixSegs = reverseSegments suffix + len = length suffixSegs + in + \n -> take len (reverseSegments n) `compare` suffixSegs + +-- Tries to shorten `fqn` to the smallest suffix that still refers +-- to `r`. Uses an efficient logarithmic lookup in the provided relation. +-- The returned `Name` may refer to multiple hashes if the original FQN +-- did as well. +-- +-- NB: Only works if the `Ord` instance for `Name` orders based on +-- `Name.reverseSegments`. +shortestUniqueSuffix :: Ord r => Name -> r -> R.Relation Name r -> Name +shortestUniqueSuffix fqn r rel = + maybe fqn (convert . 
reverse) (find isOk suffixes) + where + allowed = R.lookupDom fqn rel + suffixes = drop 1 (inits (reverseSegments fqn)) + isOk suffix = (Set.size rs == 1 && Set.findMin rs == r) || rs == allowed + where rs = R.searchDom compareEnd rel + compareEnd n = compare (take len (reverseSegments n)) suffix + len = length suffix + class Convert a b where convert :: a -> b @@ -190,6 +280,8 @@ class Parse a b where instance Convert Name Text where convert = toText instance Convert Name [NameSegment] where convert = segments instance Convert NameSegment Name where convert = fromSegment +instance Convert [NameSegment] Name where + convert sgs = unsafeFromText (Text.intercalate "." (map NameSegment.toText sgs)) instance Parse Text NameSegment where parse txt = case NameSegment.segments' txt of diff --git a/unison-core/src/Unison/NameSegment.hs b/unison-core/src/Unison/NameSegment.hs index d220ebfabc..8f903d8995 100644 --- a/unison-core/src/Unison/NameSegment.hs +++ b/unison-core/src/Unison/NameSegment.hs @@ -7,9 +7,14 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Unison.Hashable as H +import Unison.Util.Alphabetical (Alphabetical, compareAlphabetical) + -- Represents the parts of a name between the `.`s newtype NameSegment = NameSegment { toText :: Text } deriving (Eq, Ord) +instance Alphabetical NameSegment where + compareAlphabetical n1 n2 = compareAlphabetical (toText n1) (toText n2) + -- Split text into segments. A smarter version of `Text.splitOn` that handles -- the name `.` properly. segments' :: Text -> [Text] @@ -21,6 +26,25 @@ segments' n = go split go ("" : z) = go z go (x : y) = x : go y +-- Same as reverse . segments', but produces the output as a +-- lazy list, suitable for suffix-based ordering purposes or +-- building suffix tries. Examples: +-- +-- reverseSegments' "foo.bar.baz" => ["baz","bar","foo"] +-- reverseSegments' ".foo.bar.baz" => ["baz","bar","foo"] +-- reverseSegments' ".." 
=> ["."] +-- reverseSegments' "Nat.++" => ["++","Nat"] +-- reverseSegments' "Nat.++.zoo" => ["zoo","++","Nat"] +reverseSegments' :: Text -> [Text] +reverseSegments' = go + where + go "" = [] + go t = let + seg0 = Text.takeWhileEnd (/= '.') t + seg = if Text.null seg0 then Text.takeEnd 1 t else seg0 + rem = Text.dropEnd (Text.length seg + 1) t + in seg : go rem + instance H.Hashable NameSegment where tokens s = [H.Text (toText s)] @@ -38,4 +62,3 @@ instance Show NameSegment where instance IsString NameSegment where fromString = NameSegment . Text.pack - diff --git a/unison-core/src/Unison/Names2.hs b/unison-core/src/Unison/Names2.hs index c3356a4936..835627ab2e 100644 --- a/unison-core/src/Unison/Names2.hs +++ b/unison-core/src/Unison/Names2.hs @@ -2,6 +2,7 @@ {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} module Unison.Names2 ( Names0 @@ -49,10 +50,12 @@ import Unison.Prelude import qualified Data.Map as Map import qualified Data.Set as Set +import qualified Data.Text as Text import Prelude hiding (filter) +import qualified Prelude import Unison.HashQualified' (HashQualified) import qualified Unison.HashQualified' as HQ -import Unison.Name (Name) +import Unison.Name (Name,Alphabetical) import qualified Unison.Name as Name import Unison.Reference (Reference) import qualified Unison.Reference as Reference @@ -85,10 +88,25 @@ fuzzyFind fuzzyFind query names = fmap flatten . fuzzyFinds (Name.toString . fst) query + . Prelude.filter prefilter . Map.toList - $ R.toMultimap (R.mapRan Left $ terms names) - <> R.toMultimap (R.mapRan Right $ types names) + -- `mapMonotonic` is safe here and saves a log n factor + $ (Set.mapMonotonic Left <$> R.toMultimap (terms names)) + <> (Set.mapMonotonic Right <$> R.toMultimap (types names)) where + lowerqueryt = Text.toLower . 
Text.pack <$> query + -- For performance, case-insensitive substring matching as a pre-filter + -- This finds fewer matches than subsequence matching, but is + -- (currently) way faster even on large name sets. + prefilter (Name.toText -> name, _) = case lowerqueryt of + -- Special cases here just to help optimizer, since + -- not sure if `all` will get sufficiently unrolled for + -- Text fusion to work out. + [q] -> q `Text.isInfixOf` lowername + [q1,q2] -> q1 `Text.isInfixOf` lowername && q2 `Text.isInfixOf` lowername + query -> all (`Text.isInfixOf` lowername) query + where + lowername = Text.toLower name flatten (a, (b, c)) = (a, b, c) fuzzyFinds :: (a -> String) -> [String] -> [a] -> [(FZF.Alignment, a)] fuzzyFinds f query d = @@ -247,7 +265,7 @@ addTerm n r = (<> fromTerms [(n, r)]) -- -- We want to append the hash regardless of whether or not one is a term and the -- other is a type. -hqName :: Ord n => Names' n -> n -> Either Reference Referent -> HQ.HashQualified n +hqName :: (Ord n, Alphabetical n) => Names' n -> n -> Either Reference Referent -> HQ.HashQualified n hqName b n = \case Left r -> if ambiguous then _hqTypeName' b n r else HQ.fromName n Right r -> if ambiguous then _hqTermName' b n r else HQ.fromName n @@ -256,31 +274,31 @@ hqName b n = \case -- Conditionally apply hash qualifier to term name. -- Should be the same as the input name if the Names0 is unconflicted. 
-hqTermName :: Ord n => Int -> Names' n -> n -> Referent -> HQ.HashQualified n +hqTermName :: (Ord n, Alphabetical n) => Int -> Names' n -> n -> Referent -> HQ.HashQualified n hqTermName hqLen b n r = if Set.size (termsNamed b n) > 1 then hqTermName' hqLen n r else HQ.fromName n -hqTypeName :: Ord n => Int -> Names' n -> n -> Reference -> HQ.HashQualified n +hqTypeName :: (Ord n, Alphabetical n) => Int -> Names' n -> n -> Reference -> HQ.HashQualified n hqTypeName hqLen b n r = if Set.size (typesNamed b n) > 1 then hqTypeName' hqLen n r else HQ.fromName n -_hqTermName :: Ord n => Names' n -> n -> Referent -> HQ.HashQualified n +_hqTermName :: (Ord n, Alphabetical n) => Names' n -> n -> Referent -> HQ.HashQualified n _hqTermName b n r = if Set.size (termsNamed b n) > 1 then _hqTermName' b n r else HQ.fromName n -_hqTypeName :: Ord n => Names' n -> n -> Reference -> HQ.HashQualified n +_hqTypeName :: (Ord n, Alphabetical n) => Names' n -> n -> Reference -> HQ.HashQualified n _hqTypeName b n r = if Set.size (typesNamed b n) > 1 then _hqTypeName' b n r else HQ.fromName n _hqTypeAliases :: - Ord n => Names' n -> n -> Reference -> Set (HQ.HashQualified n) + (Ord n, Alphabetical n) => Names' n -> n -> Reference -> Set (HQ.HashQualified n) _hqTypeAliases b n r = Set.map (flip (_hqTypeName b) r) (typeAliases b n r) -_hqTermAliases :: Ord n => Names' n -> n -> Referent -> Set (HQ.HashQualified n) +_hqTermAliases :: (Ord n, Alphabetical n) => Names' n -> n -> Referent -> Set (HQ.HashQualified n) _hqTermAliases b n r = Set.map (flip (_hqTermName b) r) (termAliases b n r) -- Unconditionally apply hash qualifier long enough to distinguish all the diff --git a/unison-core/src/Unison/Names3.hs b/unison-core/src/Unison/Names3.hs index 987496a4a9..d91a4776a6 100644 --- a/unison-core/src/Unison/Names3.hs +++ b/unison-core/src/Unison/Names3.hs @@ -1,11 +1,14 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE 
OverloadedStrings #-} module Unison.Names3 where import Unison.Prelude +import Control.Lens (view, _4) +import Data.List (sort) import Data.List.Extra (nubOrd) import Unison.HashQualified (HashQualified) import qualified Unison.HashQualified as HQ @@ -35,22 +38,6 @@ data ResolutionFailure v a type ResolutionResult v a r = Either (Seq (ResolutionFailure v a)) r --- For all names in `ns`, (ex: foo.bar.baz), generate the list of suffixes --- of that name [[foo.bar.baz], [bar.baz], [baz]]. Insert these suffixes --- into a multimap map along with their corresponding refs. Any suffix --- which is unique is added as an entry to `ns`. -suffixify0 :: Names0 -> Names0 -suffixify0 ns = ns <> suffixNs - where - suffixNs = names0 (R.fromList uniqueTerms) (R.fromList uniqueTypes) - terms = List.multimap [ (n,ref) | (n0,ref) <- R.toList (terms0 ns), n <- Name.suffixes n0 ] - types = List.multimap [ (n,ref) | (n0,ref) <- R.toList (types0 ns), n <- Name.suffixes n0 ] - uniqueTerms = [ (n,ref) | (n, nubOrd -> [ref]) <- Map.toList terms ] - uniqueTypes = [ (n,ref) | (n, nubOrd -> [ref]) <- Map.toList types ] - -suffixify :: Names -> Names -suffixify ns = Names (suffixify0 (currentNames ns)) (oldNames ns) - filterTypes :: (Name -> Bool) -> Names0 -> Names0 filterTypes = Unison.Names2.filterTypes @@ -82,7 +69,8 @@ isEmpty0 n = R.null (terms0 n) && R.null (types0 n) -- moving shadowed definitions into `oldNames` so they can can still be -- referenced hash qualified. 
push :: Names0 -> Names -> Names -push n1 ns = Names (unionLeft0 n1 cur) (oldNames ns <> shadowed) where +push n0 ns = Names (unionLeft0 n1 cur) (oldNames ns <> shadowed) where + n1 = suffixify0 n0 cur = currentNames ns shadowed = names0 terms' types' where terms' = R.dom (terms0 n1) R.<| (terms0 cur `R.difference` terms0 n1) @@ -91,6 +79,25 @@ push n1 ns = Names (unionLeft0 n1 cur) (oldNames ns <> shadowed) where unionLeft0 n1 n2 = names0 terms' types' where terms' = terms0 n1 <> R.subtractDom (R.dom $ terms0 n1) (terms0 n2) types' = types0 n1 <> R.subtractDom (R.dom $ types0 n1) (types0 n2) + -- For all names in `ns`, (ex: foo.bar.baz), generate the list of suffixes + -- of that name [[foo.bar.baz], [bar.baz], [baz]]. Any suffix which uniquely + -- refers to a single definition is added as an alias + -- + -- If `Names` were more like a `[Names0]`, then `push` could just cons + -- onto the list and we could get rid of all this complex logic. The + -- complexity here is that we have to "bake the shadowing" into a single + -- Names0, taking into account suffix-based name resolution. + -- + -- We currently have `oldNames`, but that controls an unrelated axis, which + -- is whether names are hash qualified or not. + suffixify0 :: Names0 -> Names0 + suffixify0 ns = ns <> suffixNs + where + suffixNs = names0 (R.fromList uniqueTerms) (R.fromList uniqueTypes) + terms = List.multimap [ (n,ref) | (n0,ref) <- R.toList (terms0 ns), n <- Name.suffixes n0 ] + types = List.multimap [ (n,ref) | (n0,ref) <- R.toList (types0 ns), n <- Name.suffixes n0 ] + uniqueTerms = [ (n,ref) | (n, nubOrd -> [ref]) <- Map.toList terms ] + uniqueTypes = [ (n,ref) | (n, nubOrd -> [ref]) <- Map.toList types ] unionLeft0 :: Names0 -> Names0 -> Names0 unionLeft0 = Unison.Names2.unionLeft @@ -122,15 +129,27 @@ shadowing prio (Names current old) = makeAbsolute0:: Names0 -> Names0 makeAbsolute0 = map0 Name.makeAbsolute --- do a prefix match on currentNames and, if no match, then check oldNames. 
+-- Find all types whose name has a suffix matching the provided `HashQualified`, +-- returning types with relative names if they exist, and otherwise +-- returning types with absolute names. +lookupRelativeHQType :: HashQualified Name -> Names -> Set Reference +lookupRelativeHQType hq ns@Names{..} = let + rs = lookupHQType hq ns + keep r = any (not . Name.isAbsolute) (R.lookupRan r (Names.types currentNames)) + in case Set.filter keep rs of + rs' | Set.null rs' -> rs + | otherwise -> rs' + +-- Find all types whose name has a suffix matching the provided `HashQualified`. lookupHQType :: HashQualified Name -> Names -> Set Reference lookupHQType hq Names{..} = case hq of - HQ.NameOnly n -> R.lookupDom n (Names.types currentNames) - HQ.HashQualified n sh -> case matches sh currentNames of + HQ.NameOnly n -> Name.searchBySuffix n (Names.types currentNames) + HQ.HashQualified n sh -> case matches sh (Names.types currentNames) of s | (not . null) s -> s - | otherwise -> matches sh oldNames + | otherwise -> matches sh (Names.types oldNames) where - matches sh ns = Set.filter (Reference.isPrefixOf sh) (R.lookupDom n $ Names.types ns) + matches sh ns = + Set.filter (Reference.isPrefixOf sh) (Name.searchBySuffix n ns) HQ.HashOnly sh -> case matches sh currentNames of s | (not . null) s -> s | otherwise -> matches sh oldNames @@ -143,14 +162,27 @@ hasTermNamed n ns = not (Set.null $ lookupHQTerm (HQ.NameOnly n) ns) hasTypeNamed :: Name -> Names -> Bool hasTypeNamed n ns = not (Set.null $ lookupHQType (HQ.NameOnly n) ns) +-- Find all terms whose name has a suffix matching the provided `HashQualified`, +-- returning terms with relative names if they exist, and otherwise +-- returning terms with absolute names. +lookupRelativeHQTerm :: HashQualified Name -> Names -> Set Referent +lookupRelativeHQTerm hq ns@Names{..} = let + rs = lookupHQTerm hq ns + keep r = any (not . 
Name.isAbsolute) (R.lookupRan r (Names.terms currentNames)) + in case Set.filter keep rs of + rs' | Set.null rs' -> rs + | otherwise -> rs' + +-- Find all terms whose name has a suffix matching the provided `HashQualified`. lookupHQTerm :: HashQualified Name -> Names -> Set Referent lookupHQTerm hq Names{..} = case hq of - HQ.NameOnly n -> R.lookupDom n (Names.terms currentNames) - HQ.HashQualified n sh -> case matches sh currentNames of + HQ.NameOnly n -> Name.searchBySuffix n (Names.terms currentNames) + HQ.HashQualified n sh -> case matches sh (Names.terms currentNames) of s | (not . null) s -> s - | otherwise -> matches sh oldNames + | otherwise -> matches sh (Names.terms oldNames) where - matches sh ns = Set.filter (Referent.isPrefixOf sh) (R.lookupDom n $ Names.terms ns) + matches sh ns = + Set.filter (Referent.isPrefixOf sh) (Name.searchBySuffix n ns) HQ.HashOnly sh -> case matches sh currentNames of s | (not . null) s -> s | otherwise -> matches sh oldNames @@ -192,6 +224,30 @@ termName length r Names{..} = where hq n = HQ'.take length (HQ'.fromNamedReferent n r) isConflicted n = R.manyDom n (Names.terms currentNames) +suffixedTypeName :: Int -> Reference -> Names -> [HQ.HashQualified Name] +suffixedTermName :: Int -> Referent -> Names -> [HQ.HashQualified Name] +(suffixedTermName,suffixedTypeName) = + ( suffixedName termName (Names.terms . currentNames) HQ'.fromNamedReferent + , suffixedName typeName (Names.types . currentNames) HQ'.fromNamedReference ) + where + suffixedName fallback getRel hq' length r ns@(getRel -> rel) = + if R.memberRan r rel + then go $ toList (R.lookupRan r rel) + else sort $ map Name.convert $ Set.toList (fallback length r ns) + where + -- Orders names, using these criteria, in this order: + -- 1. NameOnly comes before HashQualified, + -- 2. Shorter names (in terms of segment count) come before longer ones + -- 3. 
If same on attributes 1 and 2, compare alphabetically + go :: [Name] -> [HashQualified Name] + go fqns = map (view _4) . sort $ map f fqns where + f fqn = let + n' = Name.shortestUniqueSuffix fqn r rel + isHQ'd = R.manyDom fqn rel -- it is conflicted + hq n = HQ'.take length (hq' n r) + hqn = Name.convert $ if isHQ'd then hq n' else HQ'.fromName n' + in (isHQ'd, Name.countSegments fqn, Name.isAbsolute n', hqn) + -- Set HashQualified -> Branch m -> Action' m v Names -- Set HashQualified -> Branch m -> Free (Command m i v) Names -- Set HashQualified -> Branch m -> Command m i v Names @@ -237,8 +293,8 @@ importing0 shortToLongName ns = (foldl' go (terms0 ns) shortToLongName) (foldl' go (types0 ns) shortToLongName) where - go :: (Show a, Ord a, Ord b) => Relation a b -> (a, a) -> Relation a b - go m (shortname, qname) = case R.lookupDom qname m of + go :: (Ord r) => Relation Name r -> (Name, Name) -> Relation Name r + go m (shortname, qname) = case Name.searchBySuffix qname m of s | Set.null s -> m | otherwise -> R.insertManyRan shortname s (R.deleteDom shortname m) @@ -251,11 +307,24 @@ expandWildcardImport prefix ns = [ (suffix, full) | Just (suffix,full) <- go <$> R.toList (terms0 ns) ] <> [ (suffix, full) | Just (suffix,full) <- go <$> R.toList (types0 ns) ] where - go (full, _) = case Name.stripNamePrefix prefix full of - Nothing -> Nothing - Just suffix -> Just (suffix, full) - -deleteTerms0 :: [Name] -> Names0 -> Names0 -deleteTerms0 ns n0 = names0 terms' (types0 n0) + go (full, _) = do + -- running example: + -- prefix = Int + -- full = builtin.Int.negate + rem <- Name.suffixFrom prefix full + -- rem = Int.negate + suffix <- Name.stripNamePrefix prefix rem + -- suffix = negate + pure (suffix, full) + +-- Deletes from the `n0 : Names0` any definitions whose names +-- share a suffix with a name in `ns`. Does so using logarithmic +-- time lookups, traversing only `ns`. 
+-- +-- See usage in `FileParser` for handling precedence of symbol +-- resolution where local names are preferred to codebase names. +shadowSuffixedTerms0 :: [Name] -> Names0 -> Names0 +shadowSuffixedTerms0 ns n0 = names0 terms' (types0 n0) where - terms' = R.subtractDom (Set.fromList ns) (terms0 n0) + shadowedBy name = Name.searchBySuffix name (terms0 n0) + terms' = R.subtractRan (foldMap shadowedBy ns) (terms0 n0) diff --git a/unison-core/src/Unison/Pattern.hs b/unison-core/src/Unison/Pattern.hs index 7506a791c1..ee133c1645 100644 --- a/unison-core/src/Unison/Pattern.hs +++ b/unison-core/src/Unison/Pattern.hs @@ -7,6 +7,10 @@ import Unison.Prelude import Data.List (intercalate) import Data.Foldable as Foldable hiding (foldMap') import Unison.Reference (Reference) +import Unison.Referent (Referent) +import qualified Data.Map as Map +import qualified Unison.Referent as Referent +import qualified Unison.ConstructorType as CT +import qualified Unison.Hashable as H +import qualified Unison.Type as Type +import qualified Data.Set as Set @@ -37,6 +41,30 @@ data SeqOp = Cons | Concat deriving (Eq, Show, Ord, Generic) +updateDependencies :: Map Referent Referent -> Pattern loc -> Pattern loc +updateDependencies tms p = case p of + Unbound{} -> p + Var{} -> p + Boolean{} -> p + Int{} -> p + Nat{} -> p + Float{} -> p + Text{} -> p + Char{} -> p + Constructor loc r cid ps -> case Map.lookup (Referent.Con r cid CT.Data) tms of + Just (Referent.Con r cid CT.Data) -> Constructor loc r cid (updateDependencies tms <$> ps) + _ -> Constructor loc r cid (updateDependencies tms <$> ps) + As loc p -> As loc (updateDependencies tms p) + EffectPure loc p -> EffectPure loc (updateDependencies tms p) + EffectBind loc r cid pats k -> case Map.lookup (Referent.Con r cid CT.Effect) tms of + Just (Referent.Con r cid CT.Effect) -> + EffectBind loc r cid (updateDependencies tms <$> pats) (updateDependencies tms k) + _ -> + EffectBind loc r cid (updateDependencies tms <$> pats) 
(updateDependencies tms k) + SequenceLiteral loc ps -> SequenceLiteral loc (updateDependencies tms <$> ps) + SequenceOp loc lhs op rhs -> + SequenceOp loc (updateDependencies tms lhs) op (updateDependencies tms rhs) + instance H.Hashable SeqOp where tokens Cons = [H.Tag 0] tokens Snoc = [H.Tag 1] diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 8bf809b5a1..fb07f91ee4 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -57,8 +57,7 @@ data Reference pattern Derived :: H.Hash -> Pos -> Size -> Reference pattern Derived h i n = DerivedId (Id h i n) --- A good idea, but causes a weird problem with view patterns in PatternP.hs in ghc 8.4.3 ---{-# COMPLETE Builtin, Derived #-} +{-# COMPLETE Builtin, Derived #-} -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. data Id = Id H.Hash Pos Size deriving (Generic) @@ -80,7 +79,6 @@ toShortHash (Derived h i n) = SH.ShortHash (H.base32Hex h) index Nothing where -- todo: remove `n` parameter; must also update readSuffix index = Just $ showSuffix i n -toShortHash (DerivedId _) = error "this should be covered above" -- toShortHash . fromJust . fromShortHash == id and -- fromJust . fromShortHash . toShortHash == id @@ -127,11 +125,9 @@ newtype Component = Component { members :: Set Reference } -- Gives the component (dependency cycle) that the reference is a part of componentFor :: Reference -> Component -componentFor b@(Builtin _ ) = Component (Set.singleton b) -componentFor ( DerivedId (Id h _ n)) = Component - (Set.fromList - [ DerivedId (Id h i n) | i <- take (fromIntegral n) [0 ..] 
] - ) +componentFor b@Builtin {} = Component (Set.singleton b) +componentFor (Derived h _ n) = + Component $ Set.fromList [Derived h i n | i <- take (fromIntegral n) [0 ..]] derivedBase32Hex :: Text -> Pos -> Size -> Reference derivedBase32Hex b32Hex i n = DerivedId (Id (fromMaybe msg h) i n) diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 71526acc53..325ac44132 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -6,6 +6,7 @@ {-# LANGUAGE Rank2Types #-} {-# LANGUAGE UnicodeSyntax #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE BangPatterns #-} module Unison.Term where @@ -37,7 +38,6 @@ import Unison.Referent (Referent) import qualified Unison.Referent as Referent import Unison.Type (Type) import qualified Unison.Type as Type -import qualified Unison.Util.Relation as Rel import qualified Unison.ConstructorType as CT import Unison.Util.List (multimap, validate) import Unison.Var (Var) @@ -115,19 +115,6 @@ type Term0 v = Term v () -- | Terms with type variables in `vt`, and term variables in `v` type Term0' vt v = Term' vt v () --- bindExternals --- :: forall v a b b2 --- . Var v --- => [(v, Term2 v b a v b2)] --- -> [(v, Reference)] --- -> Term2 v b a v a --- -> Term2 v b a v a --- bindBuiltins termBuiltins typeBuiltins = f . g --- where --- f :: Term2 v b a v a -> Term2 v b a v a --- f = typeMap (Type.bindBuiltins typeBuiltins) --- g :: Term2 v b a v a -> Term2 v b a v a --- g = ABT.substsInheritAnnotation termBuiltins bindNames :: forall v a . 
Var v => Set v @@ -135,27 +122,31 @@ bindNames -> Term v a -> Names.ResolutionResult v a (Term v a) -- bindNames keepFreeTerms _ _ | trace "Keep free terms:" False --- || traceShow keepFreeTerms False = undefined -bindNames keepFreeTerms ns e = do +-- || traceShow keepFreeTerms False = undefined +bindNames keepFreeTerms ns0 e = do let freeTmVars = [ (v,a) | (v,a) <- ABT.freeVarOccurrences keepFreeTerms e ] - -- !_ = trace "free term vars: " () + -- !_ = trace "bindNames.free term vars: " () -- !_ = traceShow $ fst <$> freeTmVars freeTyVars = [ (v, a) | (v,as) <- Map.toList (freeTypeVarAnnotations e) , a <- as ] - -- !_ = trace "free type vars: " () + ns = Names.Names ns0 mempty + -- !_ = trace "bindNames.free type vars: " () -- !_ = traceShow $ fst <$> freeTyVars okTm :: (v,a) -> Names.ResolutionResult v a (v, Term v a) - okTm (v,a) = case Rel.lookupDom (Name.fromVar v) (Names.terms0 ns) of + okTm (v,a) = case Names.lookupHQTerm (Name.convert $ Name.fromVar v) ns of rs | Set.size rs == 1 -> pure (v, fromReferent a $ Set.findMin rs) | otherwise -> Left (pure (Names.TermResolutionFailure v a rs)) - okTy (v,a) = case Rel.lookupDom (Name.fromVar v) (Names.types0 ns) of + okTy (v,a) = case Names.lookupHQType (Name.convert $ Name.fromVar v) ns of rs | Set.size rs == 1 -> pure (v, Type.ref a $ Set.findMin rs) | otherwise -> Left (pure (Names.TypeResolutionFailure v a rs)) termSubsts <- validate okTm freeTmVars typeSubsts <- validate okTy freeTyVars pure . substTypeVars typeSubsts . ABT.substsInheritAnnotation termSubsts $ e +-- This function replaces free term and type variables with +-- hashes found in the provided `Names0`, using suffix-based +-- lookup. Any terms not found in the `Names0` are kept free. bindSomeNames :: forall v a . 
Var v => Names0 @@ -166,11 +157,21 @@ bindSomeNames -- || traceShow ns False -- || trace "Free type vars:" False -- || traceShow (freeTypeVars e) False +-- || trace "Free term vars:" False +-- || traceShow (freeVars e) False -- || traceShow e False -- = undefined -bindSomeNames ns e = bindNames keepFree ns e where - keepFree = Set.difference (freeVars e) - (Set.map Name.toVar $ Rel.dom (Names.terms0 ns)) +bindSomeNames ns e = bindNames varsToTDNR ns e where + -- `Term.bindNames` takes a set of variables that are not substituted. + -- These should be the variables that will be subject to TDNR, which + -- we compute as the set of variables whose names cannot be found in `ns`. + -- + -- This allows TDNR to disambiguate those names (if multiple definitions + -- share the same suffix) or to report the type expected for that name + -- (if a free variable is being used as a typed hole). + varsToTDNR = Set.filter notFound (freeVars e) + notFound var = + Set.size (Name.searchBySuffix (Name.fromVar var) (Names.terms0 ns)) /= 1 -- Prepare a term for type-directed name resolution by replacing -- any remaining free variables with blanks to be resolved by TDNR @@ -929,18 +930,29 @@ labeledDependencies = generalizedDependencies LD.termRef updateDependencies :: Ord v - => Map Reference Reference + => Map Referent Referent -> Map Reference Reference -> Term v a -> Term v a updateDependencies termUpdates typeUpdates = ABT.rebuildUp go where - -- todo: this function might need tweaking if we ever allow type replacements - -- would need to look inside pattern matching and constructor calls - go (Ref r ) = Ref (Map.findWithDefault r r termUpdates) - go (TermLink (Referent.Ref r)) = TermLink (Referent.Ref $ Map.findWithDefault r r termUpdates) + referent (Referent.Ref r) = Ref r + referent (Referent.Con r cid CT.Data) = Constructor r cid + referent (Referent.Con r cid CT.Effect) = Request r cid + go (Ref r ) = case Map.lookup (Referent.Ref r) termUpdates of + Nothing -> Ref r + Just r 
-> referent r + go ct@(Constructor r cid) = case Map.lookup (Referent.Con r cid CT.Data) termUpdates of + Nothing -> ct + Just r -> referent r + go req@(Request r cid) = case Map.lookup (Referent.Con r cid CT.Effect) termUpdates of + Nothing -> req + Just r -> referent r + go (TermLink r) = TermLink (Map.findWithDefault r r termUpdates) go (TypeLink r) = TypeLink (Map.findWithDefault r r typeUpdates) go (Ann tm tp) = Ann tm $ Type.updateDependencies typeUpdates tp + go (Match tm cases) = Match tm (u <$> cases) where + u (MatchCase pat g b) = MatchCase (Pattern.updateDependencies termUpdates pat) g b go f = f -- | If the outermost term is a function application, @@ -1093,8 +1105,6 @@ instance Var v => Hashable1 (F v a p) where Or x y -> [tag 17, hashed $ hash x, hashed $ hash y] TermLink r -> [tag 18, accumulateToken r] TypeLink r -> [tag 19, accumulateToken r] - _ -> - error $ "unhandled case in hash: " <> show (void e) -- mostly boring serialization code below ... diff --git a/unison-core/src/Unison/Type.hs b/unison-core/src/Unison/Type.hs index 7559acc8b6..6e8862f553 100644 --- a/unison-core/src/Unison/Type.hs +++ b/unison-core/src/Unison/Type.hs @@ -26,7 +26,6 @@ import qualified Unison.Reference.Util as ReferenceUtil import Unison.Var (Var) import qualified Unison.Var as Var import qualified Unison.Settings as Settings -import qualified Unison.Util.Relation as R import qualified Unison.Names3 as Names import qualified Unison.Name as Name import qualified Unison.Util.List as List @@ -68,9 +67,10 @@ bindNames -> Names.Names0 -> Type v a -> Names.ResolutionResult v a (Type v a) -bindNames keepFree ns t = let +bindNames keepFree ns0 t = let + ns = Names.Names ns0 mempty fvs = ABT.freeVarOccurrences keepFree t - rs = [(v, a, R.lookupDom (Name.fromVar v) (Names.types0 ns)) | (v,a) <- fvs ] + rs = [(v, a, Names.lookupHQType (Name.convert $ Name.fromVar v) ns) | (v,a) <- fvs ] ok (v, a, rs) = if Set.size rs == 1 then pure (v, Set.findMin rs) else Left (pure 
(Names.TypeResolutionFailure v a rs)) in List.validate ok rs <&> \es -> bindExternal es t @@ -95,6 +95,7 @@ arity _ = 0 -- some smart patterns pattern Ref' r <- ABT.Tm' (Ref r) pattern Arrow' i o <- ABT.Tm' (Arrow i o) +pattern Arrow'' i es o <- Arrow' i (Effect'' es o) pattern Arrows' spine <- (unArrows -> Just spine) pattern EffectfulArrows' fst rest <- (unEffectfulArrows -> Just (fst, rest)) pattern Ann' t k <- ABT.Tm' (Ann t k) @@ -481,15 +482,31 @@ freeEffectVars t = in pure . Set.toList $ frees `Set.difference` ABT.annotation t go _ = pure [] +-- Converts all unadorned arrows in a type to have fresh +-- existential ability requirements. For example: +-- +-- (a -> b) -> [a] -> [b] +-- +-- Becomes +-- +-- (a ->{e1} b) ->{e2} [a] ->{e3} [b] existentializeArrows :: (Ord v, Monad m) => m v -> Type v a -> m (Type v a) -existentializeArrows freshVar = ABT.visit go +existentializeArrows newVar t = ABT.visit go t where go t@(Arrow' a b) = case b of - Effect1' _ _ -> Nothing + -- If an arrow already has attached abilities, + -- leave it alone. Ex: `a ->{e} b` is kept as is. + Effect1' _ _ -> Just $ do + a <- existentializeArrows newVar a + b <- existentializeArrows newVar b + pure $ arrow (ABT.annotation t) a b + -- For unadorned arrows, make up a fresh variable. + -- So `a -> b` becomes `a ->{e} b`, using the + -- `newVar` variable generator. 
_ -> Just $ do - e <- freshVar - a <- existentializeArrows freshVar a - b <- existentializeArrows freshVar b + e <- newVar + a <- existentializeArrows newVar a + b <- existentializeArrows newVar b let ann = ABT.annotation t pure $ arrow ann a (effect ann [var ann e] b) go _ = Nothing @@ -535,14 +552,29 @@ removeAllEffectVars t = let removePureEffects :: ABT.Var v => Type v a -> Type v a removePureEffects t | not Settings.removePureEffects = t | otherwise = - generalize vs $ removeEffectVars (Set.filter isPure fvs) tu + generalize vs $ removeEffectVars fvs tu where (vs, tu) = unforall' t - fvs = freeEffectVars tu `Set.difference` ABT.freeVars t - -- If an effect variable is mentioned only once, it is on - -- an arrow `a ->{e} b`. Generalizing this to - -- `∀ e . a ->{e} b` gives us the pure arrow `a -> b`. - isPure v = ABT.occurrences v tu <= 1 + vss = Set.fromList vs + fvs = freeEffectVars tu `Set.difference` keep + + keep = keepVarsT True tu + + keepVarsT pos (Arrow' i o) + = keepVarsT (not pos) i <> keepVarsT pos o + keepVarsT pos (Effect1' e o) + = keepVarsT pos e <> keepVarsT pos o + keepVarsT pos (Effects' es) = foldMap (keepVarsE pos) es + keepVarsT pos (ForallNamed' _ t) = keepVarsT pos t + keepVarsT pos (IntroOuterNamed' _ t) = keepVarsT pos t + keepVarsT _ t = freeVars t + + -- Note, this only allows removal if the variable was quantified, + -- so variables that were free in `t` will not be removed. 
+ keepVarsE pos (Var' v) + | pos, v `Set.member` vss = mempty + | otherwise = Set.singleton v + keepVarsE pos e = keepVarsT pos e editFunctionResult :: forall v a diff --git a/unison-core/src/Unison/Util/Alphabetical.hs b/unison-core/src/Unison/Util/Alphabetical.hs new file mode 100644 index 0000000000..df0fb19de4 --- /dev/null +++ b/unison-core/src/Unison/Util/Alphabetical.hs @@ -0,0 +1,29 @@ +{-# Language DeriveFunctor, DeriveTraversable, DeriveFoldable #-} +module Unison.Util.Alphabetical where + +import qualified Data.RFC5051 as RFC5051 +import Data.Text (Text) + +-- Alphabetical ordering used for sorting things to display to humans. +-- Should have 'A' and 'a' both come before 'B' and 'b', etc. +-- +-- This need not coincide with the `Ord` instance for a type, which +-- is often an efficient yet arbitrary ordering that's used for +-- stashing the values in maps and sets. +class Eq n => Alphabetical n where + compareAlphabetical :: n -> n -> Ordering + +instance Alphabetical Text where + compareAlphabetical = RFC5051.compareUnicode + +-- newtype whose Ord instance uses alphabetical ordering +newtype OrderAlphabetically a = OrderAlphabetically a deriving (Functor,Traversable,Foldable,Eq) + +instance (Eq a, Alphabetical a) => Ord (OrderAlphabetically a) where + compare (OrderAlphabetically a) (OrderAlphabetically b) = compareAlphabetical a b + +instance Alphabetical a => Alphabetical [a] where + compareAlphabetical a1s a2s = compare (OrderAlphabetically <$> a1s) (OrderAlphabetically <$> a2s) + +instance Alphabetical a => Alphabetical (Maybe a) where + compareAlphabetical a1s a2s = compare (OrderAlphabetically <$> a1s) (OrderAlphabetically <$> a2s) diff --git a/unison-core/src/Unison/Util/Relation.hs b/unison-core/src/Unison/Util/Relation.hs index 0d295b0448..eb6e5f3838 100644 --- a/unison-core/src/Unison/Util/Relation.hs +++ b/unison-core/src/Unison/Util/Relation.hs @@ -9,6 +9,7 @@ import qualified Data.List as List import qualified Data.Map as M import qualified 
Data.Set as S import qualified Data.Map as Map +import qualified Data.Map.Internal as Map import qualified Unison.Hashable as H import qualified Control.Monad as Monad @@ -409,6 +410,43 @@ lookupRan b r = fromMaybe S.empty $ lookupRan' b r lookupDom :: Ord a => a -> Relation a b -> Set b lookupDom a r = fromMaybe S.empty $ lookupDom' a r +-- Efficiently locate the `Set b` for which the corresponding `a` tests +-- as `EQ` according to the provided function `f`, assuming that such +-- elements are contiguous via the `Ord a`. That is, `f <$> toList (dom r)` +-- must look something like [LT,LT,EQ,EQ,EQ,GT], or more generally, 0 or +-- more LT followed by 0 or more EQ, followed by 0 or more GT. +-- +-- For example, given a `Relation (Int,y) z`, +-- `searchDom (\(i,_) -> compare i 10)` will return all the `z` whose +-- associated `(Int,y)` is of the form `(10,y)` for any choice of `y`. +-- +-- Takes logarithmic time to find the smallest `amin` such that `f a == EQ`, +-- and the largest `amax` such that `f amax == EQ`. The rest of the runtime is +-- just assembling the returned `Set b`, so when the returned `Set b` is small +-- or empty, this function takes time logarithmic in the number of unique keys +-- of the domain, `a`. +searchDom :: (Ord a, Ord b) => (a -> Ordering) -> Relation a b -> Set b +searchDom f r = go (domain r) where + go Map.Tip = mempty + go (Map.Bin _ amid bs l r) = case f amid of + EQ -> bs <> goL l <> goR r + LT -> go r + GT -> go l + goL Map.Tip = mempty + goL (Map.Bin _ amid bs l r) = case f amid of + EQ -> bs <> goL l <> S.unions (Map.elems r) + LT -> goL r + GT -> error "predicate not monotone with respect to ordering" + goR Map.Tip = mempty + goR (Map.Bin _ amid bs l r) = case f amid of + EQ -> bs <> goR r <> S.unions (Map.elems l) + GT -> goR l + LT -> error "predicate not monotone with respect to ordering" + +-- Like `searchDom`, but searches the `b` of this `Relation`. 
+searchRan :: (Ord a, Ord b) => (b -> Ordering) -> Relation a b -> Set a +searchRan f r = searchDom f (swap r) + replaceDom :: (Ord a, Ord b) => a -> a -> Relation a b -> Relation a b replaceDom a a' r = foldl' (\r b -> insert a' b $ delete a b r) r (lookupDom a r) diff --git a/unison-core/unison-core1.cabal b/unison-core/unison-core1.cabal index 57d93f9a38..1b4f4d0b08 100644 --- a/unison-core/unison-core1.cabal +++ b/unison-core/unison-core1.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 6eae706c8674f4a7f22bb4bff150798cdaba8aa9186b3d94a6a8467a9cc23d06 +-- hash: cef34d9302306093bb4280d9edb3ef4819cb15687e8542dfd977dd83b72ecf13 name: unison-core1 version: 0.0.0 @@ -52,6 +52,7 @@ library Unison.Symbol Unison.Term Unison.Type + Unison.Util.Alphabetical Unison.Util.Components Unison.Util.List Unison.Util.Monoid diff --git a/unison-src/Cofree.u b/unison-src/Cofree.u deleted file mode 100644 index c697feb1cb..0000000000 --- a/unison-src/Cofree.u +++ /dev/null @@ -1,20 +0,0 @@ -type Cofree f a = Cofree a (f (Cofree f a)) - -type Functor f = Functor (forall a b. 
(a ->{} b) -> f a ->{} f b) - -use Functor Functor -fmap : Functor f -> (a -> b) -> f a -> f b -fmap fn f = match fn with - Functor map -> map f - -use Cofree Cofree - -namespace Cofree where - - extract : Cofree f a -> a - extract = cases - Cofree a _ -> a - - duplicate : Functor f -> Cofree f a -> Cofree f (Cofree f a) - duplicate f c = match c with - Cofree a p -> Cofree c (fmap f (duplicate f) p) diff --git a/unison-src/EasyTest.u b/unison-src/EasyTest.u deleted file mode 100644 index 4ebef149b5..0000000000 --- a/unison-src/EasyTest.u +++ /dev/null @@ -1,263 +0,0 @@ -use Test Success Status Report Test Scope -use Test.Status Failed Expected Unexpected Pending -use Test.Success Passed Proved -use Test.Report Report -use Test.Test Test -use Test passed proved failed expected unexpected pending finished label -use Test.Scope Scope -use List flatMap - -type Test.Success = Passed Nat | Proved - -type Test.Status = Failed - | Expected Test.Success - | Unexpected Test.Success - | Pending - --- Current scope together with accumulated test report. -type Test.Report = Report (Trie Text Test.Status) - -type Test.Test = Test (Test.Scope -> Test.Report) - -unique type Test.Scope = Scope [Text] - -foldSuccess : (Nat -> r) -> r -> Success -> r -foldSuccess passed proved = cases - Passed n -> passed n - Proved -> proved - -foldStatus : r -> (Success -> r) -> (Success -> r) -> r -> Status -> r -foldStatus failed expected unexpected pending = cases - Failed -> failed - Expected s -> expected s - Unexpected s -> unexpected s - Pending -> pending - -foldReport : (Trie Text Test.Status -> r) -> Report -> r -foldReport k r = case r of Report t -> k t - -foldScope : ([Text] -> r) -> Scope -> r -foldScope k = cases Scope ss -> k ss - -Scope.cons : Text -> Scope -> Scope -Scope.cons n = foldScope (Scope . List.cons n) - --- Basic building blocks of tests -Test.finished : Status -> Test -Test.finished st = - Test (Report . 
foldScope (sc -> Trie.singleton sc st)) - -Test.failed : Test -Test.failed = finished Failed - -Test.proved : Test -Test.proved = finished <| Expected Proved - -Test.passed : Test -Test.passed = finished . Expected <| Passed 1 - -Test.passedUnexpectedly : Test -Test.passedUnexpectedly = finished . Unexpected <| Passed 1 - -Test.provedUnexpectedly : Test -Test.provedUnexpectedly = finished <| Unexpected Proved - --- Basic test combinators - -Test.modifyStatus : (Status -> Status) -> Test -> Test -Test.modifyStatus f = - cases Test k -> Test (foldReport (Report . map f) . k) - -Test.label : Text -> Test -> Test -Test.label n = cases - Test.Test.Test k -> Test (scope -> k <| Scope.cons n scope) - -use Test.Report combine - -(Test.&&) : Test -> Test -> Test -(Test.&&) a b = match (a,b) with - (Test k1, Test k2) -> - Test ( - scope -> - let r1 = k1 scope - r2 = k2 scope - combine r1 r2) - -Test.passedWith : Text -> Test -Test.passedWith m = label m passed - -Test.provedWith : Text -> Test -Test.provedWith m = label m proved - -Test.failedWith : Text -> Test -Test.failedWith m = Test.label m Test.failed - --- Report combinators - -Test.Report.combine : Report -> Report -> Report -Test.Report.combine r1 r2 = match (r1, r2) with - (Test.Report.Report t1, Test.Report.Report t2) -> - Report <| Trie.unionWith Status.combine t1 t2 - -Test.Report.empty : Report -Test.Report.empty = Report empty - -Test.Report.toCLIResult : Report -> [Test.Result] -Test.Report.toCLIResult r = - descend scope = cases (k, t) -> - go ((if scope != "" then (scope ++ ".") else "") ++ k) t - convert : Text -> Test.Status -> Test.Result - convert scope = cases - Test.Status.Failed -> Test.Result.Fail scope - Test.Status.Expected (Test.Success.Passed n) -> - Test.Result.Ok (scope ++ " : Passed " ++ Nat.toText n ++ " tests.") - Test.Status.Expected (Test.Success.Proved) -> - Test.Result.Ok (scope ++ " : Proved.") - go : Text -> Trie Text Test.Status -> [Test.Result] - go scope t = - rest = flatMap 
(descend scope) (Map.toList (tail t)) - match head t with - Optional.Some status -> - cons (convert scope status) rest - Optional.None -> rest - match r with Test.Report.Report t -> go "" t - -Test.report : Test -> Report -Test.report = cases Test k -> k (Scope []) - --- Running tests - -Test.run : Test -> [Test.Result] -Test.run = Test.Report.toCLIResult . Test.report - -Test.runAll : [Test] -> [Test.Result] -Test.runAll = flatMap Test.run - --- Status combinators - -Status.combine : Test.Status -> Test.Status -> Test.Status -Status.combine s1 s2 = match (s1, s2) with - (_, Pending) -> Pending - (Pending, _) -> Pending - (Failed, _) -> Failed - (_, Failed) -> Failed - (Unexpected a, Unexpected b) -> Unexpected (Success.combine a b) - (Unexpected a, _) -> Unexpected a - (_, Unexpected b) -> Unexpected b - (Expected a, Expected b) -> Expected (Success.combine a b) - - -Status.pending : Test.Status -> Test.Status -Status.pending = cases - Failed -> Pending - Expected s -> Unexpected s - Unexpected s -> Pending - Pending -> Pending - --- Make a test pending -Test.pending : Test -> Test -Test.pending = modifyStatus Status.pending - -Test.modifyScope : (Scope -> Scope) -> Test -> Test -Test.modifyScope f = cases Test k -> Test (k . f) - -Success.combine s1 s2 = match (s1, s2) with - (Passed n, Passed m) -> Passed (n + m) - (Passed n, Proved) -> Passed n - (Proved, Passed n) -> Passed n - (Proved, Proved) -> Proved - --- Test case generation - --- A domain is either small, in which case we can exhaustively list all the --- values in the domain, or it's large, in which case we can ask for a value --- of a particular size. -type Domain a = Small [a] | Large (Weighted a) - --- The domain of natural numbers is large. 
-Domain.nats : Domain Nat -Domain.nats = Large Weighted.nats - --- The domain of all integers -Domain.ints : Domain Int -Domain.ints = let - go n = yield n <|> weight 1 - '(go (if n > +0 then negate n else increment (negate n))) - Large (List.foldl (a n -> a <|> yield n) - Weighted.Fail - [+0, +1, -1, maxInt, minInt] <|> go +2) - -use Universal == < > - -namespace Domain where - - -- The threshold of "small" domains. - smallSize = 10000 - - -- The Boolean domain is small - boolean : Domain Boolean - boolean = Small [false, true] - - -- The domain of lists of arbitrary data is large - listsOf : Domain a -> Domain [a] - listsOf d = - Large (Weighted.lists match d with - Domain.Small as -> Weighted.fromList as - Domain.Large w -> w) - - lists : Domain [()] - lists = Domain.listsOf (Small [()]) - - sample : Nat -> Domain a -> [a] - sample n = cases - Domain.Large w -> Weighted.sample n w - Domain.Small xs -> take n xs - - map : (a -> b) -> Domain a -> Domain b - map f = cases - Domain.Large w -> Domain.Large (Weighted.map f w) - Domain.Small as -> Domain.Small (List.map f as) - - pairs : Domain a -> Domain (a,a) - pairs d = lift2 (a b -> (a,b)) d d - - tuples : Domain a -> Domain b -> Domain (Pair a b) - tuples = lift2 (a b -> Pair a b) - - lift2 : (a -> b -> c) -> Domain a -> Domain b -> Domain c - lift2 f da db = let - wa = weighted da - wb = weighted db - wc = mergeWith (a1 a2 -> f a1 a2) wa wb - match (da, db) with - (Domain.Small as, Domain.Small bs) | size as + size bs < smallSize -> - Small (Weighted.sample smallSize wc) - _ -> Large wc - - weighted : Domain a -> Weighted a - weighted = cases - Domain.Small as -> Weighted.fromList as - Domain.Large w -> w - --- Test a property for a given domain up to a maximum size -Test.forAll' : Nat -> Domain a -> (a -> Boolean) -> Test -Test.forAll' maxSize domain property = - check xs s = - List.map ( - cases (c, i) -> - if property c then finished (Expected s) - else label ("test case " ++ Nat.toText i) (finished Failed) - 
) (indexed xs) - List.foldb id (Test.&&) proved <| - match domain with - Domain.Small xs -> check (take maxSize xs) Proved - Domain.Large _ -> check (sample maxSize domain) (Passed 1) - -Test.check' : Boolean -> Test -Test.check' b = if b then Test.proved else Test.failed - -Test.forAll : Nat -> Domain a -> (a -> Boolean) -> [Test.Result] -Test.forAll n d p = Test.run (Test.forAll' n d p) - -Test.check : Boolean -> [Test.Result] -Test.check = Test.run . Test.check' diff --git a/unison-src/Trie.u b/unison-src/Trie.u deleted file mode 100644 index 9a54522e18..0000000000 --- a/unison-src/Trie.u +++ /dev/null @@ -1,39 +0,0 @@ -type Trie k v = { head : Optional v, tail : Map k (Trie k v) } - -namespace Trie where - empty : Trie k v - empty = Trie None Map.empty - - lookup : [k] -> Trie k v -> Optional v - lookup path t = match path with - [] -> Trie.head t - p +: ps -> flatMap (lookup ps) (Map.lookup p (Trie.tail t)) - - unionWith : (v -> v -> v) -> Trie k v -> Trie k v -> Trie k v - unionWith f t1 t2 = - h1 = Trie.head t1 - h2 = Trie.head t2 - Trie (map2 f h1 h2 `orElse` h1 `orElse` h2) - (Map.unionWith (unionWith f) (Trie.tail t1) (Trie.tail t2)) - -Trie.union : Trie k v -> Trie k v -> Trie k v -Trie.union = Trie.unionWith const - -Trie.insert : [k] -> v -> Trie k v -> Trie k v -Trie.insert path v t = - Trie.unionWith const (Trie.singleton path v) t - -Trie.singleton : [k] -> v -> Trie k v -Trie.singleton path v = - match path with - [] -> Trie (Some v) empty - k +: ks -> Trie None (Map.fromList [(k, Trie.singleton ks v)]) - -use Trie tail head - -Trie.map : (v1 -> v2) -> Trie k v1 -> Trie k v2 -Trie.map f t = Trie (map f (head t)) (map (Trie.map f) (tail t)) - -Trie.mapKeys : (k1 -> k2) -> Trie k1 v -> Trie k2 v -Trie.mapKeys f t = - Trie (head t) (Map.mapKeys f (Map.map (Trie.mapKeys f) (tail t))) diff --git a/unison-src/WeightedSearch.u b/unison-src/WeightedSearch.u deleted file mode 100644 index 789425191f..0000000000 --- a/unison-src/WeightedSearch.u +++ 
/dev/null @@ -1,69 +0,0 @@ --- A data structure that allows giving computations weight such that the --- lowest-cost computation will be returned first. Allows searching --- infinite spaces productively. --- --- Adapted from http://hackage.haskell.org/package/weighted-search-0.1.0.1 -use Universal == < > - -type Weighted a - = Fail - | Yield a (Weighted a) - | Weight Nat (() -> Weighted a) - -namespace Weighted where - - weight : Nat ->{e} (() ->{e} Weighted a) ->{e} Weighted a - weight w ws = Weight w ws - - map : (a ->{e} b) -> Weighted a ->{e} Weighted b - map f = cases - Weighted.Fail -> Weighted.Fail - Weighted.Yield x w -> Yield (f x) (map f w) - Weighted.Weight a w -> weight a '(map f !w) - - yield : a -> Weighted a - yield a = Yield a Fail - - flatMap : (a -> Weighted b) -> Weighted a -> Weighted b - flatMap f = cases - Weighted.Fail -> Weighted.Fail - Weighted.Yield x m -> f x <|> flatMap f m - Weighted.Weight w m -> Weight w '(flatMap f !m) - - mergeWith : (a -> b -> c) -> Weighted a -> Weighted b -> Weighted c - mergeWith f as bs = - flatMap (a -> map (b -> f a b) bs) as - - (<|>): Weighted a -> Weighted a -> Weighted a - (<|>) m n = match (m, n) with - (Weighted.Fail, n) -> n - (Weighted.Yield x m, n) -> Yield x (m <|> n) - (Weighted.Weight w m, Weighted.Fail) -> Weight w m - (Weighted.Weight w m, Weighted.Yield x n) -> - Yield x (Weight w m <|> n) - (Weighted.Weight w m, Weighted.Weight w' n) -> - if w < w' then Weight w '(!m <|> Weight (w' `drop` w) n) - else if w == w' then Weight w '(!m <|> !n) - else Weight w '(Weight (w `drop` w') m <|> !n) - - sample : Nat -> Weighted a -> [a] - sample n wsa = - if n > 0 then - match wsa with - Weighted.Fail -> [] - Weighted.Yield a ms -> cons a (sample (n `drop` 1) ms) - Weighted.Weight _ w -> sample n !w - else [] - - nats : Weighted Nat - nats = let - go n = yield n <|> weight 1 '(go (n + 1)) - go 0 - - lists : Weighted a -> Weighted [a] - lists w = yield [] <|> weight 1 '(mergeWith cons w (lists w)) - - 
fromList : [a] -> Weighted a - fromList = cases - [] -> Weighted.Fail - a +: as -> yield a <|> weight 1 '(fromList as) diff --git a/unison-src/errors/console.u b/unison-src/errors/console.u deleted file mode 100644 index 761be8aa84..0000000000 --- a/unison-src/errors/console.u +++ /dev/null @@ -1,19 +0,0 @@ -ability State s where - get : Nat -> {State s} s - set : s -> {State s} () - -ability Console where - read : () -> {Console} (Optional Text) - write : Text -> {Console} () - -fst = cases Tuple.Cons a _ -> a - -snd = cases Tuple.Cons _ (Tuple.Cons b _) -> b - -namespace Console where - - simulate : Request Console a -> {State ([Text], [Text])} a - simulate = cases - {Console.read _ -> k} -> k Optional.None - -Console.simulate diff --git a/unison-src/errors/console2.u b/unison-src/errors/console2.u deleted file mode 100644 index c57b382e3a..0000000000 --- a/unison-src/errors/console2.u +++ /dev/null @@ -1,29 +0,0 @@ -ability State s where - get : {State s} s - set : s -> {State s} () - -ability Console where - read : {Console} (Optional Text) - write : Text -> {Console} () - -fst = cases Tuple.Cons a _ -> a - -snd = cases Tuple.Cons _ (Tuple.Cons b _) -> b - -namespace Console where - - simulate : Request Console a -> {State ([Text], [Text])} a - simulate = cases - {Console.read -> k} -> - io = State.get - ins = fst io - outs = snd io - State.set (drop 1 ins, outs) - k (at 0 ins) -- this is missing recursive call to handle - {Console.write t -> k} -> - io = State.get - ins = fst io - outs = snd io - k (State.set (ins, cons t outs)) -- this is missing recursive call - -() diff --git a/unison-src/errors/map-reduce.u b/unison-src/errors/map-reduce.u deleted file mode 100644 index d29cc69089..0000000000 --- a/unison-src/errors/map-reduce.u +++ /dev/null @@ -1,102 +0,0 @@ - --- A simple distributed computation ability -ability Remote n where - - -- Spawn a new node, of type `n` - spawn : {Remote n} n - - -- Sequentially evaluate the given thunk on another node - -- 
then return to the current node when it completes - at : n -> '{Remote n} a -> {Remote n} a - - -- Start a computation running, returning an `r` that can be forced to - -- await the result of the computation - fork : '{Remote n} a -> {Remote n} ('{Remote n} a) - -type Monoid a = Monoid (a -> a -> a) a - -use Nat + - * / == < -use Sequence map take drop size foldLeft halve -use Optional None Some -use Monoid.Monoid -- import the constructor -use Remote fork spawn at - -namespace Monoid where - - zero : Monoid a -> a - zero = cases Monoid _ z -> z - - op : Monoid a -> a -> a -> a - op = cases Monoid op _ -> op - - foldMap : (a -> {e} b) -> Monoid b -> [a] -> {e} b - foldMap f m as = - op = Monoid.op m - -- this line has a type error, `op` is (b -> b -> b) - -- and `zero m` is of type `b`, but `as` is of type `[a]` - -- 👇 - if size as < 2 then Sequence.foldLeft op (zero m) as - else match Sequence.halve as with (l, r) -> foldMap f m l `op` foldMap f m r - - par : Monoid a -> Monoid ('{Remote n} a) - par m = - o = op m - z = zero m - -- note - does not typecheck if flip the order of the constructor! 
- -- the 'z has type 'a, which fails to match the later remote thunk - Monoid (a1 a2 -> parApply o a1 a2) 'z - -force : '{e} a -> {e} a -force a = !a - -mapReduce : (a -> {Remote n} b) -> Monoid b -> [a] -> {Remote n} b -mapReduce f m a = - force <| Monoid.foldMap (a -> fork '(f a)) (Monoid.par m) a - -namespace Sequence where - - foldLeft : (b -> a -> b) -> b -> [a] -> b - foldLeft f z as = _todo2 - - halve : [a] -> ([a], [a]) - halve as = (take (size as / 2) as, drop (size as / 2) as) - -ex : '{Remote n} Nat -ex = 'let - alice = spawn - bob = spawn - f1 = fork '(1 + 1) - f2 = fork '(2 + 2) - !f1 + !f2 - -parApply : (a -> b -> c) -> '{Remote n} a -> '{Remote n} b -> '{Remote n} c -parApply f a b = 'let - x = fork a - y = fork b - f !x !y - --- this currently crashes the compiler -Remote.runLocal : '{Remote Nat} a -> a -Remote.runLocal r = - step : Nat -> Request (Remote Nat) a -> a - step nid = cases - {a} -> a - {Remote.fork t -> k} -> handle k t with step nid - {Remote.spawn -> k} -> handle k nid with step (nid + 1) - {Remote.at _ t -> k} -> handle k !t with step (nid + 1) - - handle !r with step 0 - -uno : '{e} a -> '{e} a -> {e} a -uno a a2 = !a - -dos : (a -> a -> a) -> '{e} a -> '{e} a -> {e} a -dos f a a2 = f !a !a2 - -(<|) : (i -> o) -> i -> o -f <| i = f i -i |> f = f i - -Stream.fromNat 1 - |> Stream.take 15 - |> Stream.toSequence diff --git a/unison-src/errors/poor-error-message/consoleh.u b/unison-src/errors/poor-error-message/consoleh.u deleted file mode 100644 index 12b92f50db..0000000000 --- a/unison-src/errors/poor-error-message/consoleh.u +++ /dev/null @@ -1,57 +0,0 @@ --- Token {payload = Semi, start = Pos 51 1, end = Pos 51 1} :| [] --- bootstrap: unison-src/tests/console.uu:51:1: --- unexpected Semi --- expecting : or the rest of infixApp --- 51 | () - -ability State s where - get : {State s} s - set : s -> {State s} () - -ability Console where - read : {Console} (Optional Text) - write : Text -> {Console} () - -fst = cases Tuple.Cons a _ -> a 
- -snd = cases Tuple.Cons _ (Tuple.Cons b _) -> b - -namespace Console where - - state : s -> Request (State s) a -> a - state s = cases - {State.get -> k} -> handle k s with state s - {State.set s' -> k} -> handle k () with state s' - {a} -> a - - simulate : Request Console d -> {State ([Text], [Text])} d - simulate = cases - {Console.read -> k} -> - io = State.get - ins = fst io - outs = snd io - State.set (drop 1 ins, outs) - -- this really should typecheck but doesn't for some reason - -- error is that `simulate` doesn't check against `Request Console c -> r`, - -- but seems like that `r` should get instantiated as `{State (..)} c`. - handle k (at 0 ins) with simulate - {Console.write t -> k} -> - io = State.get - ins = fst io - outs = snd io - -- same deal here - handle k (State.set (ins, cons t outs)) with simulate - {a} -> a - -(++) = concatenate - -handle - handle - use Console read write - use Optional Some None - write "What's your name?" - match read with - Some name -> write ("Hello" ++ name) - None -> write "Fine, be that way." 
- with Console.simulate -() diff --git a/unison-src/sheepshead.u b/unison-src/sheepshead.u deleted file mode 100644 index d0f0f8d90e..0000000000 --- a/unison-src/sheepshead.u +++ /dev/null @@ -1,39 +0,0 @@ -type Suit = Club | Spade | Heart | Diamond -type Card = Card Rank Suit -type Rank = A | K | Q | J | _10 | _9 | _8 | _7 -type NonEmpty a = NonEmpty a [a] - -use Rank A K Q J _10 _9 _8 _7 -use Suit Club Spade Heart Diamond -use NonEmpty NonEmpty -use Optional Some None - -namespace Suit where - all = [Club, Spade, Heart, Diamond] - -namespace Rank where - all = [A, _10, K, Q, J, _9, _8, _7] - points = cases - A -> 11 - _10 -> 10 - K -> 4 - Q -> 3 - J -> 2 - _ -> 0 - toText = cases - A -> "A" - K -> "K" - Q -> "Q" - J -> "J" - _10 -> "10" - _9 -> "9" - _8 -> "8" - _7 -> "7" - -namespace NonEmpty where - toList = cases - NonEmpty h t -> Sequence.cons h t - fromList : [a] -> Optional (NonEmpty a) - fromList l = match Sequence.at 0 l with - None -> None - Some a -> Some (NonEmpty a (Sequence.drop 1 l)) diff --git a/unison-src/tests/fix1185.u b/unison-src/tests/fix1185.u new file mode 100644 index 0000000000..a897cc17f1 --- /dev/null +++ b/unison-src/tests/fix1185.u @@ -0,0 +1,33 @@ + +-- https://github.com/unisonweb/unison/issues/1185 +-- +-- Definitions below currently get inferred as: +-- List.map : (i ->{𝕖} o) ->{𝕖} [i] ->{𝕖} [o] +-- List.map2 : ignored ->{𝕖} (a ->{𝕖} b) ->{𝕖} [a] ->{𝕖} [b] +-- +-- This file won't typecheck unless the definitions get +-- the correct inferred types. 
+ +ability Zonk where + zonk : Nat + +-- should be inferred as: +-- List.map : (a ->{e} b) -> [a] ->{e} [b] +List.map f = cases + [] -> [] + h +: t -> f h +: List.map f t + +-- This should typecheck since in the correct inferred type, +-- no abilities are required after the first arg +ex = List.map (x -> Zonk.zonk + 42) + +-- should be inferred as: +-- List.map2 : ignored -> (a ->{e} b) -> [a] ->{e} [b] +List.map2 : ignored -> (a -> b) -> [a] -> [b] +List.map2 ignore f = cases + [] -> [] + h +: t -> f h +: List.map2 ignore f t + +-- This should typecheck since in the correct inferred type, +-- no abilities are required after the first two args +ex2 = List.map2 () (x -> Zonk.zonk + 43) diff --git a/unison-src/tests/methodical/float.u b/unison-src/tests/methodical/float.u index 5fde45c0b4..f96bd49326 100644 --- a/unison-src/tests/methodical/float.u +++ b/unison-src/tests/methodical/float.u @@ -1,4 +1,4 @@ -use Float abs max min toText fromText +use Float abs max min toText fromText eq use Optional Some None withDefault : Optional a -> a -> a @@ -12,4 +12,6 @@ withDefault opt d = match opt with min 1.1 1.5, toText 1.1, withDefault (fromText "1.5") -1.0, - withDefault (fromText "Hello world!") -1.0) + withDefault (fromText "Hello world!") -1.0, + eq 0.0 0.0, + eq 0.0 2.0) diff --git a/unison-src/tests/methodical/float.ur b/unison-src/tests/methodical/float.ur index 1bbbd63b9b..45ed9fd736 100644 --- a/unison-src/tests/methodical/float.ur +++ b/unison-src/tests/methodical/float.ur @@ -4,4 +4,6 @@ 1.1, "1.1", 1.5, - -1.0) + -1.0, + true, + false) diff --git a/unison-src/tests/methodical/pattern-matching.u b/unison-src/tests/methodical/pattern-matching.u index 10c07315a3..e1883d1205 100644 --- a/unison-src/tests/methodical/pattern-matching.u +++ b/unison-src/tests/methodical/pattern-matching.u @@ -36,6 +36,11 @@ apat = cases [] -> ([], 0, []) xs@(y +: ys) -> (xs, y, ys) +tpat = cases + (0, _) -> 1 + (_, 0) -> 2 + _ -> 3 + > (pat1 0 1 (2, 3), pat2 0 1 "hi", pat3 0 1 (2, 3), 
@@ -50,4 +55,6 @@ apat = cases ipat +1, ipat -1, ipat -33, - apat [1,2,3]) + apat [1,2,3], + tpat (0, 1), + tpat (1, 0)) diff --git a/unison-src/tests/methodical/pattern-matching.ur b/unison-src/tests/methodical/pattern-matching.ur index 4359f45371..70e4e05986 100644 --- a/unison-src/tests/methodical/pattern-matching.ur +++ b/unison-src/tests/methodical/pattern-matching.ur @@ -12,4 +12,6 @@ -1, +1, +0, - ([1,2,3], 1, [2,3])) + ([1,2,3], 1, [2,3]), + 1, + 2) diff --git a/unison-src/tests/pattern-matching.u b/unison-src/tests/pattern-matching.u index a1403ac474..866fbb887d 100644 --- a/unison-src/tests/pattern-matching.u +++ b/unison-src/tests/pattern-matching.u @@ -33,4 +33,21 @@ len = cases List.Nil -> 0 List.Cons _ t -> len t + 1 -> (w, w2, len) +foo = cases + List.Cons h t + | h > 10 -> + x = 3124 + y = 230 + x + y + | h < 10 -> 10 + | otherwise -> 11 + + List.Nil -> 0 + +foo2 = cases + List.Cons h t | h > 10 -> 1 + | h < 10 -> 2 + | otherwise -> 3 + List.Nil -> 0 + +> (w, w2 (Foo3 1 4 "bye"), len (List.Cons 1 List.Nil), foo (List.Cons 0 List.Nil), foo2 List.Nil) diff --git a/unison-src/tests/pattern-matching.ur b/unison-src/tests/pattern-matching.ur new file mode 100644 index 0000000000..e122b40e84 --- /dev/null +++ b/unison-src/tests/pattern-matching.ur @@ -0,0 +1 @@ +("byebye", "byebye", 1, 10, 0) diff --git a/unison-src/tests/r5.u b/unison-src/tests/r5.u index 249bf9e034..a8455710c7 100644 --- a/unison-src/tests/r5.u +++ b/unison-src/tests/r5.u @@ -1,6 +1,6 @@ r5 : Float r5 = match 2.2 with - 2.2 -> 3.0 + r | abs (r - 2.2) <= 0.01 -> 3.0 _ -> 1.0 diff --git a/unison-src/tests/spurious-ability-fail-underapply.u b/unison-src/tests/spurious-ability-fail-underapply.u index 6d3c1fe79f..64cec3c053 100644 --- a/unison-src/tests/spurious-ability-fail-underapply.u +++ b/unison-src/tests/spurious-ability-fail-underapply.u @@ -4,5 +4,6 @@ ability Woot where wha : ((a ->{Woot} a) -> a ->{Woot} a) -> Nat wha f = blah a = f' a + f' : a ->{Woot} a f' = f blah 42 diff --git 
a/unison-src/tests/stream.u b/unison-src/tests/stream.u index ebff834a3e..f790e97df7 100644 --- a/unison-src/tests/stream.u +++ b/unison-src/tests/stream.u @@ -23,7 +23,7 @@ run' = cases Stream s -> s (++) : Stream {e} a r -> Stream {e} a r -> Stream {e} a r s1 ++ s2 = Stream '(forceBoth (run' s1) (run' s2)) -from : Nat -> Stream e Nat () +from : Nat -> Stream {} Nat () from n = unfold n (n -> Some (n, n + 1)) -- take : Nat -> Stream {} a () -> Stream {} a () @@ -53,7 +53,7 @@ toSeq s = {_} -> acc handle run s with step [] -fromSeq : [a] -> Stream e a () +fromSeq : [a] -> Stream {} a () fromSeq a = step a = match List.at 0 a with None -> None diff --git a/unison-src/transcripts-using-base/base.u b/unison-src/transcripts-using-base/base.u index daeca2e95d..1d49c6dd8c 100644 --- a/unison-src/transcripts-using-base/base.u +++ b/unison-src/transcripts-using-base/base.u @@ -179,6 +179,7 @@ getBuffering = compose reraise getBuffering.impl setBuffering mode = compose reraise (setBuffering.impl mode) seekHandle = compose3 reraise seekHandle.impl putBytes = compose2 reraise putBytes.impl +getLine = compose reraise getLine.impl systemTime = compose reraise systemTime.impl decodeCert = compose reraise decodeCert.impl serverSocket = compose2 reraise serverSocket.impl diff --git a/unison-src/transcripts-using-base/binary-encoding-nats.md b/unison-src/transcripts-using-base/binary-encoding-nats.md new file mode 100644 index 0000000000..c6daaa824d --- /dev/null +++ b/unison-src/transcripts-using-base/binary-encoding-nats.md @@ -0,0 +1,68 @@ +> +```ucm:hide +.> builtins.merge +.> builtins.mergeio +.> cd builtin +.> load unison-src/transcripts-using-base/base.u +.> add +``` + +```unison +unique type EncDec = EncDec Text (Nat -> Bytes) (Bytes -> Optional (Nat, Bytes)) + +BE64 = EncDec "64 bit Big Endian" encodeNat64be decodeNat64be +LE64 = EncDec "64 bit Little Endian" encodeNat64le decodeNat64le +BE32 = EncDec "32 bit Big Endian" encodeNat32be decodeNat32be +LE32 = EncDec "32 bit 
Little Endian" encodeNat32le decodeNat32le +BE16 = EncDec "16 bit Big Endian" encodeNat16be decodeNat16be +LE16 = EncDec "16 bit Little Endian" encodeNat16le decodeNat16le + +testRoundTrip : Nat -> EncDec -> {IO, Stream Result} () +testRoundTrip n = cases + EncDec label enc dec -> + encoded = enc n + match dec encoded with + Some (n', remain) -> + if n == n' then + emit (Ok ("successfully decoded " ++ (toText n) ++ " using " ++ label)) + else + emit (Fail ("decoded " ++ (toText n') ++ " instead of " ++ (toText n) ++ " using " ++ label)) + if (size remain) > 0 then + emit (Fail ("unconsumed input using " ++ label)) + else + emit (Ok ("consumed all input")) + None -> emit (Fail ("failed to decode " ++ (toText n) ++ " using " ++ label)) + +testNat : Nat -> '{IO, Stream Result} () +testNat n _ = + if n >= (shiftLeft 1 32) then + testRoundTrip n BE64 + testRoundTrip n LE64 + else if n >= (shiftLeft 1 16) then + testRoundTrip n BE64 + testRoundTrip n LE64 + testRoundTrip n BE32 + testRoundTrip n LE32 + else + testRoundTrip n BE64 + testRoundTrip n LE64 + testRoundTrip n BE32 + testRoundTrip n LE32 + testRoundTrip n BE16 + testRoundTrip n LE16 + + +testABunchOfNats _ = + (runTest (testNat 0xFFFFFFFF)) ++ + (runTest (testNat 0x41000000)) ++ + (runTest (testNat 0x00410000)) ++ + (runTest (testNat 0x00004100)) ++ + (runTest (testNat 0x86753099)) ++ + (runTest (testNat 0x00000041)) ++ + (runTest (testNat 0)) +``` + +```ucm +.> add +.> io.test testABunchOfNats +``` diff --git a/unison-src/transcripts-using-base/binary-encoding-nats.output.md b/unison-src/transcripts-using-base/binary-encoding-nats.output.md new file mode 100644 index 0000000000..caf809fdf6 --- /dev/null +++ b/unison-src/transcripts-using-base/binary-encoding-nats.output.md @@ -0,0 +1,170 @@ +> +```unison +unique type EncDec = EncDec Text (Nat -> Bytes) (Bytes -> Optional (Nat, Bytes)) + +BE64 = EncDec "64 bit Big Endian" encodeNat64be decodeNat64be +LE64 = EncDec "64 bit Little Endian" encodeNat64le 
decodeNat64le +BE32 = EncDec "32 bit Big Endian" encodeNat32be decodeNat32be +LE32 = EncDec "32 bit Little Endian" encodeNat32le decodeNat32le +BE16 = EncDec "16 bit Big Endian" encodeNat16be decodeNat16be +LE16 = EncDec "16 bit Little Endian" encodeNat16le decodeNat16le + +testRoundTrip : Nat -> EncDec -> {IO, Stream Result} () +testRoundTrip n = cases + EncDec label enc dec -> + encoded = enc n + match dec encoded with + Some (n', remain) -> + if n == n' then + emit (Ok ("successfully decoded " ++ (toText n) ++ " using " ++ label)) + else + emit (Fail ("decoded " ++ (toText n') ++ " instead of " ++ (toText n) ++ " using " ++ label)) + if (size remain) > 0 then + emit (Fail ("unconsumed input using " ++ label)) + else + emit (Ok ("consumed all input")) + None -> emit (Fail ("failed to decode " ++ (toText n) ++ " using " ++ label)) + +testNat : Nat -> '{IO, Stream Result} () +testNat n _ = + if n >= (shiftLeft 1 32) then + testRoundTrip n BE64 + testRoundTrip n LE64 + else if n >= (shiftLeft 1 16) then + testRoundTrip n BE64 + testRoundTrip n LE64 + testRoundTrip n BE32 + testRoundTrip n LE32 + else + testRoundTrip n BE64 + testRoundTrip n LE64 + testRoundTrip n BE32 + testRoundTrip n LE32 + testRoundTrip n BE16 + testRoundTrip n LE16 + + +testABunchOfNats _ = + (runTest (testNat 0xFFFFFFFF)) ++ + (runTest (testNat 0x41000000)) ++ + (runTest (testNat 0x00410000)) ++ + (runTest (testNat 0x00004100)) ++ + (runTest (testNat 0x86753099)) ++ + (runTest (testNat 0x00000041)) ++ + (runTest (testNat 0)) +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type EncDec + BE16 : EncDec + BE32 : EncDec + BE64 : EncDec + LE16 : EncDec + LE32 : EncDec + LE64 : EncDec + testABunchOfNats : ∀ _. 
_ ->{IO} [Result] + testNat : Nat -> '{IO, Stream Result} () + testRoundTrip : Nat -> EncDec ->{IO, Stream Result} () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique type EncDec + BE16 : EncDec + BE32 : EncDec + BE64 : EncDec + LE16 : EncDec + LE32 : EncDec + LE64 : EncDec + testABunchOfNats : ∀ _. _ ->{IO} [Result] + testNat : Nat -> '{IO, Stream Result} () + testRoundTrip : Nat -> EncDec ->{IO, Stream Result} () + +.> io.test testABunchOfNats + + New test results: + + ◉ testABunchOfNats successfully decoded 4294967295 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4294967295 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4294967295 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4294967295 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 1090519040 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 1090519040 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 1090519040 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 1090519040 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4259840 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4259840 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4259840 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 4259840 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 64 bit Big Endian + ◉ testABunchOfNats 
consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 16 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 16640 using 16 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 2255827097 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 2255827097 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 2255827097 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 2255827097 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 32 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 16 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 65 using 16 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 64 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 64 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 32 
bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 32 bit Little Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 16 bit Big Endian + ◉ testABunchOfNats consumed all input + ◉ testABunchOfNats successfully decoded 0 using 16 bit Little Endian + ◉ testABunchOfNats consumed all input + + ✅ 68 test(s) passing + + Tip: Use view testABunchOfNats to view the source of a test. + +``` diff --git a/unison-src/transcripts-using-base/codeops.md b/unison-src/transcripts-using-base/codeops.md index d60546dd13..7933a428cd 100644 --- a/unison-src/transcripts-using-base/codeops.md +++ b/unison-src/transcripts-using-base/codeops.md @@ -137,6 +137,23 @@ tests = , identicality "ident bool" false , identicality "ident bytes" [fSer, Bytes.empty] ] + +badLoad : '{IO} [Result] +badLoad _ = + payload = Bytes.fromList[0,0,0,1,0,1,64,175,174,29,188,217,78,209,175,255,137,165,135,165,1,20,151,182,215,54,21,196,43,159,247,106,175,177,213,20,111,178,134,214,188,207,243,196,240,187,111,44,245,111,219,223,98,88,183,163,97,22,18,153,104,185,125,175,157,36,209,151,166,168,102,0,1,0,0,0,0,0,2,0,0,0,0] + go _ = + match Value.deserialize payload with + Left t -> Fail "deserialize exception" + Right a -> match Value.load a with + Left terms -> + bs = Value.serialize (Value.value terms) + s = size bs + Ok ("serialized" ++ toText s) + Right _ -> + Ok "actually loaded" + match toEither go with + Right v -> [v] + Left _ -> [Fail "Exception"] ``` This simply runs some functions to make sure there isn't a crash. Once @@ -147,4 +164,5 @@ to actual show that the serialization works. 
.> add .> display fDeps .> io.test tests +.> io.test badLoad ``` diff --git a/unison-src/transcripts-using-base/codeops.output.md b/unison-src/transcripts-using-base/codeops.output.md index 28034ab7d8..a2b7856d5e 100644 --- a/unison-src/transcripts-using-base/codeops.output.md +++ b/unison-src/transcripts-using-base/codeops.output.md @@ -88,27 +88,21 @@ identicality t x ⍟ These new definitions are ok to `add`: type Three a b c - concatMap : (a ->{g} [b]) ->{g} [a] ->{g} [b] + concatMap : (a ->{g} [b]) -> [a] ->{g} [b] extensionality : Text - -> (Three Nat Nat Nat - ->{Throw Text} Nat - ->{Throw Text} b) + -> (Three Nat Nat Nat -> Nat -> b) ->{IO} Result - extensionals : (a ->{Throw Text} b ->{Throw Text} Text) - ->{Throw Text} (a - ->{Throw Text} b - ->{Throw Text} c) - ->{Throw Text} (a - ->{Throw Text} b - ->{Throw Text} c) - ->{Throw Text} [(a, b)] + extensionals : (a -> b -> Text) + -> (a -> b -> c) + -> (a -> b -> c) + -> [(a, b)] ->{Throw Text} () fib10 : [Nat] handleTest : Text -> Request {Throw Text} a -> Result identical : Text -> a -> a ->{Throw Text} () identicality : Text -> a ->{IO} Result load : Bytes ->{IO, Throw Text} a - prod : [a] ->{g} [b] ->{g} [(a, b)] + prod : [a] -> [b] -> [(a, b)] roundtrip : a ->{IO, Throw Text} a save : a -> Bytes showThree : Three Nat Nat Nat -> Text @@ -121,27 +115,21 @@ identicality t x ⍟ I've added these definitions: type Three a b c - concatMap : (a ->{g} [b]) ->{g} [a] ->{g} [b] + concatMap : (a ->{g} [b]) -> [a] ->{g} [b] extensionality : Text - -> (Three Nat Nat Nat - ->{Throw Text} Nat - ->{Throw Text} b) + -> (Three Nat Nat Nat -> Nat -> b) ->{IO} Result - extensionals : (a ->{Throw Text} b ->{Throw Text} Text) - ->{Throw Text} (a - ->{Throw Text} b - ->{Throw Text} c) - ->{Throw Text} (a - ->{Throw Text} b - ->{Throw Text} c) - ->{Throw Text} [(a, b)] + extensionals : (a -> b -> Text) + -> (a -> b -> c) + -> (a -> b -> c) + -> [(a, b)] ->{Throw Text} () fib10 : [Nat] handleTest : Text -> Request {Throw Text} a 
-> Result identical : Text -> a -> a ->{Throw Text} () identicality : Text -> a ->{IO} Result load : Bytes ->{IO, Throw Text} a - prod : [a] ->{g} [b] ->{g} [(a, b)] + prod : [a] -> [b] -> [(a, b)] roundtrip : a ->{IO, Throw Text} a save : a -> Bytes showThree : Three Nat Nat Nat -> Text @@ -197,6 +185,23 @@ tests = , identicality "ident bool" false , identicality "ident bytes" [fSer, Bytes.empty] ] + +badLoad : '{IO} [Result] +badLoad _ = + payload = Bytes.fromList[0,0,0,1,0,1,64,175,174,29,188,217,78,209,175,255,137,165,135,165,1,20,151,182,215,54,21,196,43,159,247,106,175,177,213,20,111,178,134,214,188,207,243,196,240,187,111,44,245,111,219,223,98,88,183,163,97,22,18,153,104,185,125,175,157,36,209,151,166,168,102,0,1,0,0,0,0,0,2,0,0,0,0] + go _ = + match Value.deserialize payload with + Left t -> Fail "deserialize exception" + Right a -> match Value.load a with + Left terms -> + bs = Value.serialize (Value.value terms) + s = size bs + Ok ("serialized" ++ toText s) + Right _ -> + Ok "actually loaded" + match toEither go with + Right v -> [v] + Left _ -> [Fail "Exception"] ``` ```ucm @@ -208,14 +213,15 @@ tests = ⍟ These new definitions are ok to `add`: ability Zap - f : Nat ->{Zap} Nat - fDeps : [Link.Term] - fSer : Bytes - fVal : Value - h : Three Nat Nat Nat -> Nat -> Nat - rotate : Three Nat Nat Nat -> Three Nat Nat Nat - tests : '{IO} [Result] - zapper : Three Nat Nat Nat ->{g} Request {Zap} r ->{g} r + badLoad : '{IO} [Result] + f : Nat ->{Zap} Nat + fDeps : [Link.Term] + fSer : Bytes + fVal : Value + h : Three Nat Nat Nat -> Nat -> Nat + rotate : Three Nat Nat Nat -> Three Nat Nat Nat + tests : '{IO} [Result] + zapper : Three Nat Nat Nat -> Request {Zap} r -> r ``` This simply runs some functions to make sure there isn't a crash. Once @@ -228,14 +234,15 @@ to actual show that the serialization works. 
⍟ I've added these definitions: ability Zap - f : Nat ->{Zap} Nat - fDeps : [Link.Term] - fSer : Bytes - fVal : Value - h : Three Nat Nat Nat -> Nat -> Nat - rotate : Three Nat Nat Nat -> Three Nat Nat Nat - tests : '{IO} [Result] - zapper : Three Nat Nat Nat ->{g} Request {Zap} r ->{g} r + badLoad : '{IO} [Result] + f : Nat ->{Zap} Nat + fDeps : [Link.Term] + fSer : Bytes + fVal : Value + h : Three Nat Nat Nat -> Nat -> Nat + rotate : Three Nat Nat Nat -> Three Nat Nat Nat + tests : '{IO} [Result] + zapper : Three Nat Nat Nat -> Request {Zap} r -> r .> display fDeps @@ -263,4 +270,14 @@ to actual show that the serialization works. Tip: Use view tests to view the source of a test. +.> io.test badLoad + + New test results: + + ◉ badLoad serialized78 + + ✅ 1 test(s) passing + + Tip: Use view badLoad to view the source of a test. + ``` diff --git a/unison-src/transcripts-using-base/doc.md b/unison-src/transcripts-using-base/doc.md index a71a3696c3..518a5b1caf 100644 --- a/unison-src/transcripts-using-base/doc.md +++ b/unison-src/transcripts-using-base/doc.md @@ -9,7 +9,7 @@ Unison documentation is written in Unison and has some neat features: * The documentation type provides a rich vocabulary of elements that go beyond markdown, including asides, callouts, tooltips, and more. * Docs may contain Unison code which is parsed and typechecked to ensure validity. No more out of date examples that don't compile or assume a bunch of implicit context! * Embeded examples are live and can show the results of evaluation. This uses the same evaluation cache as Unison's scratch files, allowing Unison docs to function like well-commented spreadsheets or notebooks. -* Links to other definitions are typechecked to ensure they point to valid defintions. The links are resolved to hashes and won't be broken by name changes or moving definitions around. +* Links to other definitions are typechecked to ensure they point to valid definitions. 
The links are resolved to hashes and won't be broken by name changes or moving definitions around. * Docs can be included in other docs and you can assemble documentation programmatically, using Unison code. * There's a powerful textual syntax for all of the above, which we'll introduce next. diff --git a/unison-src/transcripts-using-base/doc.output.md b/unison-src/transcripts-using-base/doc.output.md index 82d4cae694..38321c8895 100644 --- a/unison-src/transcripts-using-base/doc.output.md +++ b/unison-src/transcripts-using-base/doc.output.md @@ -5,7 +5,7 @@ Unison documentation is written in Unison and has some neat features: * The documentation type provides a rich vocabulary of elements that go beyond markdown, including asides, callouts, tooltips, and more. * Docs may contain Unison code which is parsed and typechecked to ensure validity. No more out of date examples that don't compile or assume a bunch of implicit context! * Embeded examples are live and can show the results of evaluation. This uses the same evaluation cache as Unison's scratch files, allowing Unison docs to function like well-commented spreadsheets or notebooks. -* Links to other definitions are typechecked to ensure they point to valid defintions. The links are resolved to hashes and won't be broken by name changes or moving definitions around. +* Links to other definitions are typechecked to ensure they point to valid definitions. The links are resolved to hashes and won't be broken by name changes or moving definitions around. * Docs can be included in other docs and you can assemble documentation programmatically, using Unison code. * There's a powerful textual syntax for all of the above, which we'll introduce next. 
diff --git a/unison-src/transcripts-using-base/fix2027.md b/unison-src/transcripts-using-base/fix2027.md index ae0dd6adfd..fc0fc9f7b8 100644 --- a/unison-src/transcripts-using-base/fix2027.md +++ b/unison-src/transcripts-using-base/fix2027.md @@ -2,10 +2,18 @@ ```ucm:hide .> builtins.mergeio -.> pull https://github.com/unisonweb/base_v2:.trunk .base ``` ```unison +ability Exception where raise : Failure -> x + +reraise = cases + Left e -> raise e + Right a -> a + +type Either a b = Left a | Right b + +putBytes h bs = reraise (putBytes.impl h bs) toException : Either Failure a ->{Exception} a toException = cases @@ -19,25 +27,29 @@ bugFail = cases Failure typ _ _ -> bug (Failure typ "problem" (Any ())) Exception.unsafeRun! : '{Exception, g} a -> '{g} a -Exception.unsafeRun! e _ = - h : Request {Exception} a -> a - h = cases - {Exception.raise fail -> _ } -> +Exception.unsafeRun! e _ = + h : Request {Exception} a -> a + h = cases + {Exception.raise fail -> _ } -> bugFail fail - {a} -> a - handle !e with h + {a} -> a + handle !e with h + +socketSend s bytes = reraise (socketSend.impl s bytes) +closeSocket s = reraise (closeSocket.impl s) +serverSocket host port = reraise (serverSocket.impl host port) hello : Text -> Text -> {IO, Exception} () -hello host port = +hello host port = socket = serverSocket (Some host) port - msg = toUtf8 "Hello there" + msg = toUtf8 "Hello there" socketSend socket msg - closeSocket socket + closeSocket socket myServer = unsafeRun! 
'(hello "127.0.0.1" "0") ``` ```ucm:error -.> run myServer +.> run myServer ``` diff --git a/unison-src/transcripts-using-base/fix2027.output.md b/unison-src/transcripts-using-base/fix2027.output.md index 1e12a80776..59e3783331 100644 --- a/unison-src/transcripts-using-base/fix2027.output.md +++ b/unison-src/transcripts-using-base/fix2027.output.md @@ -1,6 +1,16 @@ ```unison +ability Exception where raise : Failure -> x + +reraise = cases + Left e -> raise e + Right a -> a + +type Either a b = Left a | Right b + +putBytes h bs = reraise (putBytes.impl h bs) + toException : Either Failure a ->{Exception} a toException = cases Left e -> raise e @@ -13,20 +23,24 @@ bugFail = cases Failure typ _ _ -> bug (Failure typ "problem" (Any ())) Exception.unsafeRun! : '{Exception, g} a -> '{g} a -Exception.unsafeRun! e _ = - h : Request {Exception} a -> a - h = cases - {Exception.raise fail -> _ } -> +Exception.unsafeRun! e _ = + h : Request {Exception} a -> a + h = cases + {Exception.raise fail -> _ } -> bugFail fail - {a} -> a - handle !e with h + {a} -> a + handle !e with h + +socketSend s bytes = reraise (socketSend.impl s bytes) +closeSocket s = reraise (closeSocket.impl s) +serverSocket host port = reraise (serverSocket.impl host port) hello : Text -> Text -> {IO, Exception} () -hello host port = +hello host port = socket = serverSocket (Some host) port - msg = toUtf8 "Hello there" + msg = toUtf8 "Hello there" socketSend socket msg - closeSocket socket + closeSocket socket myServer = unsafeRun! '(hello "127.0.0.1" "0") @@ -38,28 +52,45 @@ myServer = unsafeRun! '(hello "127.0.0.1" "0") do an `add` or `update`, here's how your codebase would change: + ⊡ Previously added definitions will be ignored: Exception + Exception.raise + ⍟ These new definitions are ok to `add`: + type Either a b + (also named builtin.Either) Exception.unsafeRun! 
: '{g, Exception} a -> '{g} a bugFail : Failure -> r hello : Text -> Text ->{IO, Exception} () myServer : '{IO} () putText : Handle -> Text ->{IO, Exception} () + reraise : Either Failure b ->{Exception} b + (also named Exception.reraise) + socketSend : Socket + -> Bytes + ->{IO, Exception} () toException : Either Failure a ->{Exception} a - (also named Exception.reraise , base.Either.toException - , and base.Exception.reraise) + (also named Exception.reraise) + + ⍟ These names already exist. You can `update` them to your + new definition: + + closeSocket : Socket ->{IO, Exception} () + putBytes : Handle -> Bytes ->{IO, Exception} () + serverSocket : Optional Text + -> Text + ->{IO, Exception} Socket ``` ```ucm -.> run myServer +.> run myServer 💔💥 I've encountered a call to builtin.bug with the following value: - base.io.Failure.Failure - (typeLink base.io.IOFailure) "problem" !base.Any.Any + Failure (typeLink IOFailure) "problem" !Any I'm sorry this message doesn't have more detail about the location of the failure. My makers plan to fix this in a diff --git a/unison-src/transcripts-using-base/fix2158-1.md b/unison-src/transcripts-using-base/fix2158-1.md new file mode 100644 index 0000000000..c80faa3f1f --- /dev/null +++ b/unison-src/transcripts-using-base/fix2158-1.md @@ -0,0 +1,25 @@ +This transcript tests an ability check failure regression. + +```unison +ability Async t g where + fork : '{Async t g, g} a -> t a + await : t a -> a + +Async.parMap : (a ->{Async t g, g} b) -> [a] ->{Async t g} [b] +Async.parMap f as = + tasks = List.map (a -> fork '(f a)) as + List.map await tasks +``` + +The issue was that certain ability processing was happening in less +optimal order. `g` appears both as an ability used and as a parameter +to `Async`. However, the latter occurrence is more strict. Unifying +the types `Async t g1` and `Async t g2` requires `g1` and `g2` to +be equal, while abilities that occur directly in a row are subject to +some subtyping. 
+ +However, the ability handling was just processing rows in whatever +order they occurred, and during inference it happened that `g` +occurred in the row before `Async t g`. Processing the stricter parts +first is better, because it can solve things more precisely and avoid +ambiguities relating to subtyping. diff --git a/unison-src/transcripts-using-base/fix2158-1.output.md b/unison-src/transcripts-using-base/fix2158-1.output.md new file mode 100644 index 0000000000..f9419e4a42 --- /dev/null +++ b/unison-src/transcripts-using-base/fix2158-1.output.md @@ -0,0 +1,39 @@ +This transcript tests an ability check failure regression. + +```unison +ability Async t g where + fork : '{Async t g, g} a -> t a + await : t a -> a + +Async.parMap : (a ->{Async t g, g} b) -> [a] ->{Async t g} [b] +Async.parMap f as = + tasks = List.map (a -> fork '(f a)) as + List.map await tasks +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + ability Async t g + Async.parMap : (a ->{g, Async t g} b) + -> [a] + ->{Async t g} [b] + +``` +The issue was that certain ability processing was happening in less +optimal order. `g` appears both as an ability used and as a parameter +to `Async`. However, the latter occurrence is more strict. Unifying +the types `Async t g1` and `Async t g2` requires `g1` and `g2` to +be equal, while abilities that occur directly in a row are subject to +some subtyping. + +However, the ability handling was just processing rows in whatever +order they occurred, and during inference it happened that `g` +occurred in the row before `Async t g`. Processing the stricter parts +first is better, because it can solve things more precisely and avoid +ambiguities relating to subtyping. 
diff --git a/unison-src/transcripts-using-base/fix2297.md b/unison-src/transcripts-using-base/fix2297.md new file mode 100644 index 0000000000..2ecd676850 --- /dev/null +++ b/unison-src/transcripts-using-base/fix2297.md @@ -0,0 +1,26 @@ +This tests a case where a function was somehow discarding abilities. + + +```unison:error +ability Trivial where + trivial : () + +-- This handler SHOULD leave any additional effects alone and unhandled +handleTrivial : '{e, Trivial} a -> {e} a +handleTrivial action = + h : Request {Trivial} a -> a + h = cases + {trivial -> resume} -> handle !resume with h + {a} -> a + handle !action with h + +testAction : '{Exception, IO, Trivial} () +testAction _ = + printText "hi!" + trivial + +wat : () +wat = handleTrivial testAction -- Somehow this completely forgets about Exception and IO + +> handleTrivial testAction +``` diff --git a/unison-src/transcripts-using-base/fix2297.output.md b/unison-src/transcripts-using-base/fix2297.output.md new file mode 100644 index 0000000000..a2a8f85a85 --- /dev/null +++ b/unison-src/transcripts-using-base/fix2297.output.md @@ -0,0 +1,35 @@ +This tests a case where a function was somehow discarding abilities. + + +```unison +ability Trivial where + trivial : () + +-- This handler SHOULD leave any additional effects alone and unhandled +handleTrivial : '{e, Trivial} a -> {e} a +handleTrivial action = + h : Request {Trivial} a -> a + h = cases + {trivial -> resume} -> handle !resume with h + {a} -> a + handle !action with h + +testAction : '{Exception, IO, Trivial} () +testAction _ = + printText "hi!" + trivial + +wat : () +wat = handleTrivial testAction -- Somehow this completely forgets about Exception and IO + +> handleTrivial testAction +``` + +```ucm + + The expression in red needs the {Exception} ability, but this location does not have access to any abilities. 
+ + 19 | wat = handleTrivial testAction -- Somehow this completely forgets about Exception and IO + + +``` diff --git a/unison-src/transcripts-using-base/hashing.output.md b/unison-src/transcripts-using-base/hashing.output.md index a9523cb57a..2f8c322478 100644 --- a/unison-src/transcripts-using-base/hashing.output.md +++ b/unison-src/transcripts-using-base/hashing.output.md @@ -7,21 +7,33 @@ Unison has cryptographic builtins for hashing and computing [HMACs](https://en.w 1. ++ (Bytes -> Bytes -> Bytes) 2. at (Nat -> Bytes -> Optional Nat) - 3. drop (Nat -> Bytes -> Bytes) - 4. empty (Bytes) - 5. flatten (Bytes -> Bytes) - 6. fromBase16 (Bytes -> Either Text Bytes) - 7. fromBase32 (Bytes -> Either Text Bytes) - 8. fromBase64 (Bytes -> Either Text Bytes) - 9. fromBase64UrlUnpadded (Bytes -> Either Text Bytes) - 10. fromList ([Nat] -> Bytes) - 11. size (Bytes -> Nat) - 12. take (Nat -> Bytes -> Bytes) - 13. toBase16 (Bytes -> Bytes) - 14. toBase32 (Bytes -> Bytes) - 15. toBase64 (Bytes -> Bytes) - 16. toBase64UrlUnpadded (Bytes -> Bytes) - 17. toList (Bytes -> [Nat]) + 3. decodeNat16be (Bytes -> Optional (Nat, Bytes)) + 4. decodeNat16le (Bytes -> Optional (Nat, Bytes)) + 5. decodeNat32be (Bytes -> Optional (Nat, Bytes)) + 6. decodeNat32le (Bytes -> Optional (Nat, Bytes)) + 7. decodeNat64be (Bytes -> Optional (Nat, Bytes)) + 8. decodeNat64le (Bytes -> Optional (Nat, Bytes)) + 9. drop (Nat -> Bytes -> Bytes) + 10. empty (Bytes) + 11. encodeNat16be (Nat -> Bytes) + 12. encodeNat16le (Nat -> Bytes) + 13. encodeNat32be (Nat -> Bytes) + 14. encodeNat32le (Nat -> Bytes) + 15. encodeNat64be (Nat -> Bytes) + 16. encodeNat64le (Nat -> Bytes) + 17. flatten (Bytes -> Bytes) + 18. fromBase16 (Bytes -> Either Text Bytes) + 19. fromBase32 (Bytes -> Either Text Bytes) + 20. fromBase64 (Bytes -> Either Text Bytes) + 21. fromBase64UrlUnpadded (Bytes -> Either Text Bytes) + 22. fromList ([Nat] -> Bytes) + 23. size (Bytes -> Nat) + 24. take (Nat -> Bytes -> Bytes) + 25. 
toBase16 (Bytes -> Bytes) + 26. toBase32 (Bytes -> Bytes) + 27. toBase64 (Bytes -> Bytes) + 28. toBase64UrlUnpadded (Bytes -> Bytes) + 29. toList (Bytes -> [Nat]) ``` Notice the `fromBase16` and `toBase16` functions. Here's some convenience functions for converting `Bytes` to and from base-16 `Text`. @@ -104,15 +116,15 @@ And here's the full API: ```ucm .builtin.crypto> find - 1. builtin type HashAlgorithm - 2. HashAlgorithm.Blake2b_256 : HashAlgorithm - 3. HashAlgorithm.Blake2b_512 : HashAlgorithm - 4. HashAlgorithm.Blake2s_256 : HashAlgorithm - 5. HashAlgorithm.Sha2_256 : HashAlgorithm - 6. HashAlgorithm.Sha2_512 : HashAlgorithm - 7. HashAlgorithm.Sha3_256 : HashAlgorithm - 8. HashAlgorithm.Sha3_512 : HashAlgorithm - 9. hash : HashAlgorithm -> a -> Bytes + 1. hash : HashAlgorithm -> a -> Bytes + 2. builtin type HashAlgorithm + 3. HashAlgorithm.Blake2b_256 : HashAlgorithm + 4. HashAlgorithm.Blake2b_512 : HashAlgorithm + 5. HashAlgorithm.Blake2s_256 : HashAlgorithm + 6. HashAlgorithm.Sha2_256 : HashAlgorithm + 7. HashAlgorithm.Sha2_512 : HashAlgorithm + 8. HashAlgorithm.Sha3_256 : HashAlgorithm + 9. HashAlgorithm.Sha3_512 : HashAlgorithm 10. hashBytes : HashAlgorithm -> Bytes -> Bytes 11. hmac : HashAlgorithm -> Bytes -> a -> Bytes 12. hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes diff --git a/unison-src/new-runtime-failing-transcripts/mvar.md b/unison-src/transcripts-using-base/mvar.md similarity index 80% rename from unison-src/new-runtime-failing-transcripts/mvar.md rename to unison-src/transcripts-using-base/mvar.md index 8ee495fff8..ccaa8b5ae3 100644 --- a/unison-src/new-runtime-failing-transcripts/mvar.md +++ b/unison-src/transcripts-using-base/mvar.md @@ -11,6 +11,11 @@ blocks, Queues, etc. 
```unison +eitherCk : (a -> Boolean) -> Either e a -> Boolean +eitherCk f = cases + Left _ -> false + Right x -> f x + testMvars: '{io2.IO}[Result] testMvars _ = test = 'let @@ -28,11 +33,15 @@ testMvars _ = expectU "swap returns old contents" test2 test''' ma2 = !MVar.newEmpty + check "tryRead should succeed when not empty" + (eitherCk (x -> not (isNone x)) (tryRead.impl ma)) check "tryTake should succeed when not empty" (not (isNone (tryTake ma))) check "tryTake should not succeed when empty" (isNone (tryTake ma)) check "ma2 should be empty" (isEmpty ma2) check "tryTake should fail when empty" (isNone (tryTake ma2)) + check "tryRead should fail when empty" + (eitherCk isNone (tryRead.impl ma2)) runTest test diff --git a/unison-src/transcripts-using-base/mvar.output.md b/unison-src/transcripts-using-base/mvar.output.md index 4889374f72..112c4e032e 100644 --- a/unison-src/transcripts-using-base/mvar.output.md +++ b/unison-src/transcripts-using-base/mvar.output.md @@ -11,6 +11,11 @@ blocks, Queues, etc. 
```unison +eitherCk : (a -> Boolean) -> Either e a -> Boolean +eitherCk f = cases + Left _ -> false + Right x -> f x + testMvars: '{io2.IO}[Result] testMvars _ = test = 'let @@ -28,11 +33,15 @@ testMvars _ = expectU "swap returns old contents" test2 test''' ma2 = !MVar.newEmpty + check "tryRead should succeed when not empty" + (eitherCk (x -> not (isNone x)) (tryRead.impl ma)) check "tryTake should succeed when not empty" (not (isNone (tryTake ma))) check "tryTake should not succeed when empty" (isNone (tryTake ma)) check "ma2 should be empty" (isEmpty ma2) check "tryTake should fail when empty" (isNone (tryTake ma2)) + check "tryRead should fail when empty" + (eitherCk isNone (tryRead.impl ma2)) runTest test @@ -46,7 +55,8 @@ testMvars _ = ⍟ These new definitions are ok to `add`: - testMvars : '{io2.IO} [Result] + eitherCk : (a ->{g} Boolean) -> Either e a ->{g} Boolean + testMvars : '{IO} [Result] ``` ```ucm @@ -54,7 +64,8 @@ testMvars _ = ⍟ I've added these definitions: - testMvars : '{io2.IO} [Result] + eitherCk : (a ->{g} Boolean) -> Either e a ->{g} Boolean + testMvars : '{IO} [Result] .> io.test testMvars @@ -65,12 +76,14 @@ testMvars _ = ◉ testMvars ma should be empty ◉ testMvars swap returns old contents ◉ testMvars swap returns old contents + ◉ testMvars tryRead should succeed when not empty ◉ testMvars tryTake should succeed when not empty ◉ testMvars tryTake should not succeed when empty ◉ testMvars ma2 should be empty ◉ testMvars tryTake should fail when empty + ◉ testMvars tryRead should fail when empty - ✅ 9 test(s) passing + ✅ 11 test(s) passing Tip: Use view testMvars to view the source of a test. 
diff --git a/unison-src/transcripts-using-base/nat-coersion.md b/unison-src/transcripts-using-base/nat-coersion.md new file mode 100644 index 0000000000..2aabecdb0f --- /dev/null +++ b/unison-src/transcripts-using-base/nat-coersion.md @@ -0,0 +1,46 @@ +> +```ucm:hide +.> builtins.merge +.> cd builtin +.> load unison-src/transcripts-using-base/base.u +.> add +``` + +```unison + +testNat: Nat -> Optional Int -> Optional Float -> {Stream Result}() +testNat n expectInt expectFloat = + float = Float.fromRepresentation n + int = Int.fromRepresentation n + + n2 = Float.toRepresentation float + n3 = Int.toRepresentation int + + match expectFloat with + None -> emit (Ok "skipped") + Some expect -> expectU ("expected " ++ (Float.toText expect) ++ " got " ++ (Float.toText float)) expect float + expectU ("round trip though float, expected " ++ (Nat.toText n) ++ " got " ++ (Nat.toText n2)) n n2 + + match expectInt with + None -> emit (Ok "skipped") + Some expect -> expectU ("expected " ++ (Int.toText expect) ++ " got " ++ (Int.toText int)) expect int + expectU ("round trip though Int, expected " ++ (Nat.toText n) ++ " got " ++ (Nat.toText n3)) n n3 + + + +test: '{io2.IO}[Result] +test = 'let + testABunchOfNats: '{Stream Result}() + testABunchOfNats _ = + testNat 0 (Some +0) (Some 0.0) + testNat 1 (Some +1) None + testNat 18446744073709551615 (Some -1) None -- we don't have a way of expressing Nan + testNat 0x3FF0000000000001 (Some +4607182418800017409) (Some 1.0000000000000002 ) + + runTest testABunchOfNats +``` + +```ucm +.> add +.> io.test test +``` diff --git a/unison-src/transcripts-using-base/nat-coersion.output.md b/unison-src/transcripts-using-base/nat-coersion.output.md new file mode 100644 index 0000000000..3a657635c4 --- /dev/null +++ b/unison-src/transcripts-using-base/nat-coersion.output.md @@ -0,0 +1,84 @@ +> +```unison +testNat: Nat -> Optional Int -> Optional Float -> {Stream Result}() +testNat n expectInt expectFloat = + float = Float.fromRepresentation n + int 
= Int.fromRepresentation n + + n2 = Float.toRepresentation float + n3 = Int.toRepresentation int + + match expectFloat with + None -> emit (Ok "skipped") + Some expect -> expectU ("expected " ++ (Float.toText expect) ++ " got " ++ (Float.toText float)) expect float + expectU ("round trip though float, expected " ++ (Nat.toText n) ++ " got " ++ (Nat.toText n2)) n n2 + + match expectInt with + None -> emit (Ok "skipped") + Some expect -> expectU ("expected " ++ (Int.toText expect) ++ " got " ++ (Int.toText int)) expect int + expectU ("round trip though Int, expected " ++ (Nat.toText n) ++ " got " ++ (Nat.toText n3)) n n3 + + + +test: '{io2.IO}[Result] +test = 'let + testABunchOfNats: '{Stream Result}() + testABunchOfNats _ = + testNat 0 (Some +0) (Some 0.0) + testNat 1 (Some +1) None + testNat 18446744073709551615 (Some -1) None -- we don't have a way of expressing Nan + testNat 0x3FF0000000000001 (Some +4607182418800017409) (Some 1.0000000000000002 ) + + runTest testABunchOfNats +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : '{IO} [Result] + testNat : Nat + -> Optional Int + -> Optional Float + ->{Stream Result} () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + test : '{IO} [Result] + testNat : Nat + -> Optional Int + -> Optional Float + ->{Stream Result} () + +.> io.test test + + New test results: + + ◉ test expected 0.0 got 0.0 + ◉ test round trip though float, expected 0 got 0 + ◉ test expected 0 got 0 + ◉ test round trip though Int, expected 0 got 0 + ◉ test skipped + ◉ test expected 1 got 1 + ◉ test round trip though Int, expected 1 got 1 + ◉ test skipped + ◉ test expected -1 got -1 + ◉ test round trip though Int, expected 18446744073709551615 got 18446744073709551615 + ◉ test expected 1.0000000000000002 got 1.0000000000000002 + ◉ test round trip though float, expected 4607182418800017409 got 4607182418800017409 + ◉ test expected 4607182418800017409 got 4607182418800017409 + ◉ test round trip though Int, expected 4607182418800017409 got 4607182418800017409 + + ✅ 14 test(s) passing + + Tip: Use view test to view the source of a test. + +``` diff --git a/unison-src/transcripts-using-base/stm.output.md b/unison-src/transcripts-using-base/stm.output.md index 965162c74a..b395d5d1a3 100644 --- a/unison-src/transcripts-using-base/stm.output.md +++ b/unison-src/transcripts-using-base/stm.output.md @@ -38,7 +38,7 @@ body k out v = body : Nat -> TVar (Optional Nat) -> TVar Nat ->{IO} () count : Nat -> () inc : TVar Nat ->{IO} Nat - loop : '{IO} Nat ->{IO} Nat ->{IO} Nat ->{IO} Nat + loop : '{IO} Nat -> Nat -> Nat ->{IO} Nat ``` ```ucm @@ -49,7 +49,7 @@ body k out v = body : Nat -> TVar (Optional Nat) -> TVar Nat ->{IO} () count : Nat -> () inc : TVar Nat ->{IO} Nat - loop : '{IO} Nat ->{IO} Nat ->{IO} Nat ->{IO} Nat + loop : '{IO} Nat -> Nat -> Nat ->{IO} Nat ``` Test case. 
diff --git a/unison-src/transcripts-using-base/thread.md b/unison-src/transcripts-using-base/thread.md index 1f77c4c7f8..ee1f320288 100644 --- a/unison-src/transcripts-using-base/thread.md +++ b/unison-src/transcripts-using-base/thread.md @@ -1,4 +1,3 @@ - Lets just make sure we can start a thread ```unison @@ -30,7 +29,7 @@ thread1 x mv = 'let go = 'let put mv (increment x) - match (toEither go) with + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () @@ -58,7 +57,7 @@ sendingThread: Nat -> MVar Nat -> '{io2.IO}() sendingThread toSend mv = 'let go = 'let put mv (increment toSend) - + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () @@ -69,11 +68,11 @@ receivingThread recv send = 'let go = 'let recvd = take recv put send (toText recvd) - + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () - + testTwoThreads: '{io2.IO}[Result] testTwoThreads = 'let test = 'let @@ -89,7 +88,7 @@ testTwoThreads = 'let runTest test -``` +``` ```ucm .> add diff --git a/unison-src/transcripts-using-base/thread.output.md b/unison-src/transcripts-using-base/thread.output.md index ee94b7af95..7c26b7e937 100644 --- a/unison-src/transcripts-using-base/thread.output.md +++ b/unison-src/transcripts-using-base/thread.output.md @@ -1,4 +1,3 @@ - Lets just make sure we can start a thread ```unison @@ -37,7 +36,7 @@ thread1 x mv = 'let go = 'let put mv (increment x) - match (toEither go) with + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () @@ -92,7 +91,7 @@ sendingThread: Nat -> MVar Nat -> '{io2.IO}() sendingThread toSend mv = 'let go = 'let put mv (increment toSend) - + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () @@ -103,11 +102,11 @@ receivingThread recv send = 'let go = 'let recvd = take recv put send (toText recvd) - + match (toEither go) with Left (Failure _ t _) -> watch t () _ -> () - + testTwoThreads: '{io2.IO}[Result] testTwoThreads = 'let test = 'let diff --git 
a/unison-src/transcripts-using-base/utf8.output.md b/unison-src/transcripts-using-base/utf8.output.md index 6dc8b5a94a..372185222e 100644 --- a/unison-src/transcripts-using-base/utf8.output.md +++ b/unison-src/transcripts-using-base/utf8.output.md @@ -5,8 +5,8 @@ Unison has function for converting between `Text` and a UTF-8 `Bytes` encoding o ```ucm .> find Utf8 - 1. Text.fromUtf8 : Bytes ->{Exception} Text - 2. builtin.Text.toUtf8 : Text -> Bytes + 1. builtin.Text.toUtf8 : Text -> Bytes + 2. Text.fromUtf8 : Bytes ->{Exception} Text 3. builtin.Text.fromUtf8.impl : Bytes -> Either Failure Text diff --git a/unison-src/transcripts/alias-many.output.md b/unison-src/transcripts/alias-many.output.md index d23a5e3f4c..a58ee8f14f 100644 --- a/unison-src/transcripts/alias-many.output.md +++ b/unison-src/transcripts/alias-many.output.md @@ -22,409 +22,432 @@ Let's try it! 2. Any.Any : a -> Any 3. builtin type Boolean 4. Boolean.not : Boolean -> Boolean - 5. builtin type Bytes - 6. Bytes.++ : Bytes -> Bytes -> Bytes - 7. Bytes.at : Nat -> Bytes -> Optional Nat - 8. Bytes.drop : Nat -> Bytes -> Bytes - 9. Bytes.empty : Bytes - 10. Bytes.flatten : Bytes -> Bytes - 11. Bytes.fromBase16 : Bytes -> Either Text Bytes - 12. Bytes.fromBase32 : Bytes -> Either Text Bytes - 13. Bytes.fromBase64 : Bytes -> Either Text Bytes - 14. Bytes.fromBase64UrlUnpadded : Bytes -> Either Text Bytes - 15. Bytes.fromList : [Nat] -> Bytes - 16. Bytes.size : Bytes -> Nat - 17. Bytes.take : Nat -> Bytes -> Bytes - 18. Bytes.toBase16 : Bytes -> Bytes - 19. Bytes.toBase32 : Bytes -> Bytes - 20. Bytes.toBase64 : Bytes -> Bytes - 21. Bytes.toBase64UrlUnpadded : Bytes -> Bytes - 22. Bytes.toList : Bytes -> [Nat] - 23. builtin type Char - 24. Char.fromNat : Nat -> Char - 25. Char.toNat : Char -> Nat - 26. Char.toText : Char -> Text - 27. builtin type Code - 28. Code.cache_ : [(Term, Code)] ->{IO} [Term] - 29. Code.dependencies : Code -> [Term] - 30. Code.deserialize : Bytes -> Either Text Code - 31. 
Code.isMissing : Term ->{IO} Boolean - 32. Code.lookup : Term ->{IO} Optional Code - 33. Code.serialize : Code -> Bytes - 34. Debug.watch : Text -> a -> a - 35. unique type Doc - 36. Doc.Blob : Text -> Doc - 37. Doc.Evaluate : Term -> Doc - 38. Doc.Join : [Doc] -> Doc - 39. Doc.Link : Link -> Doc - 40. Doc.Signature : Term -> Doc - 41. Doc.Source : Link -> Doc - 42. type Either a b - 43. Either.Left : a -> Either a b - 44. Either.Right : b -> Either a b - 45. builtin type Float - 46. Float.* : Float -> Float -> Float - 47. Float.+ : Float -> Float -> Float - 48. Float.- : Float -> Float -> Float - 49. Float./ : Float -> Float -> Float - 50. Float.abs : Float -> Float - 51. Float.acos : Float -> Float - 52. Float.acosh : Float -> Float - 53. Float.asin : Float -> Float - 54. Float.asinh : Float -> Float - 55. Float.atan : Float -> Float - 56. Float.atan2 : Float -> Float -> Float - 57. Float.atanh : Float -> Float - 58. Float.ceiling : Float -> Int - 59. Float.cos : Float -> Float - 60. Float.cosh : Float -> Float - 61. Float.eq : Float -> Float -> Boolean - 62. Float.exp : Float -> Float - 63. Float.floor : Float -> Int - 64. Float.fromText : Text -> Optional Float - 65. Float.gt : Float -> Float -> Boolean - 66. Float.gteq : Float -> Float -> Boolean - 67. Float.log : Float -> Float - 68. Float.logBase : Float -> Float -> Float - 69. Float.lt : Float -> Float -> Boolean - 70. Float.lteq : Float -> Float -> Boolean - 71. Float.max : Float -> Float -> Float - 72. Float.min : Float -> Float -> Float - 73. Float.pow : Float -> Float -> Float - 74. Float.round : Float -> Int - 75. Float.sin : Float -> Float - 76. Float.sinh : Float -> Float - 77. Float.sqrt : Float -> Float - 78. Float.tan : Float -> Float - 79. Float.tanh : Float -> Float - 80. Float.toText : Float -> Text - 81. Float.truncate : Float -> Int - 82. builtin type Int - 83. Int.* : Int -> Int -> Int - 84. Int.+ : Int -> Int -> Int - 85. Int.- : Int -> Int -> Int - 86. Int./ : Int -> Int -> Int - 87. 
Int.and : Int -> Int -> Int - 88. Int.complement : Int -> Int - 89. Int.eq : Int -> Int -> Boolean - 90. Int.fromText : Text -> Optional Int - 91. Int.gt : Int -> Int -> Boolean - 92. Int.gteq : Int -> Int -> Boolean - 93. Int.increment : Int -> Int - 94. Int.isEven : Int -> Boolean - 95. Int.isOdd : Int -> Boolean - 96. Int.leadingZeros : Int -> Nat - 97. Int.lt : Int -> Int -> Boolean - 98. Int.lteq : Int -> Int -> Boolean - 99. Int.mod : Int -> Int -> Int - 100. Int.negate : Int -> Int - 101. Int.or : Int -> Int -> Int - 102. Int.popCount : Int -> Nat - 103. Int.pow : Int -> Nat -> Int - 104. Int.shiftLeft : Int -> Nat -> Int - 105. Int.shiftRight : Int -> Nat -> Int - 106. Int.signum : Int -> Int - 107. Int.toFloat : Int -> Float - 108. Int.toText : Int -> Text - 109. Int.trailingZeros : Int -> Nat - 110. Int.truncate0 : Int -> Nat - 111. Int.xor : Int -> Int -> Int - 112. unique type IsPropagated - 113. IsPropagated.IsPropagated : IsPropagated - 114. unique type IsTest - 115. IsTest.IsTest : IsTest - 116. unique type Link - 117. builtin type Link.Term - 118. Link.Term : Term -> Link - 119. builtin type Link.Type - 120. Link.Type : Type -> Link - 121. builtin type List - 122. List.++ : [a] -> [a] -> [a] - 123. List.+: : a -> [a] -> [a] - 124. List.:+ : [a] -> a -> [a] - 125. List.at : Nat -> [a] -> Optional a - 126. List.cons : a -> [a] -> [a] - 127. List.drop : Nat -> [a] -> [a] - 128. List.empty : [a] - 129. List.size : [a] -> Nat - 130. List.snoc : [a] -> a -> [a] - 131. List.take : Nat -> [a] -> [a] - 132. builtin type Nat - 133. Nat.* : Nat -> Nat -> Nat - 134. Nat.+ : Nat -> Nat -> Nat - 135. Nat./ : Nat -> Nat -> Nat - 136. Nat.and : Nat -> Nat -> Nat - 137. Nat.complement : Nat -> Nat - 138. Nat.drop : Nat -> Nat -> Nat - 139. Nat.eq : Nat -> Nat -> Boolean - 140. Nat.fromText : Text -> Optional Nat - 141. Nat.gt : Nat -> Nat -> Boolean - 142. Nat.gteq : Nat -> Nat -> Boolean - 143. Nat.increment : Nat -> Nat - 144. Nat.isEven : Nat -> Boolean - 145. 
Nat.isOdd : Nat -> Boolean - 146. Nat.leadingZeros : Nat -> Nat - 147. Nat.lt : Nat -> Nat -> Boolean - 148. Nat.lteq : Nat -> Nat -> Boolean - 149. Nat.mod : Nat -> Nat -> Nat - 150. Nat.or : Nat -> Nat -> Nat - 151. Nat.popCount : Nat -> Nat - 152. Nat.pow : Nat -> Nat -> Nat - 153. Nat.shiftLeft : Nat -> Nat -> Nat - 154. Nat.shiftRight : Nat -> Nat -> Nat - 155. Nat.sub : Nat -> Nat -> Int - 156. Nat.toFloat : Nat -> Float - 157. Nat.toInt : Nat -> Int - 158. Nat.toText : Nat -> Text - 159. Nat.trailingZeros : Nat -> Nat - 160. Nat.xor : Nat -> Nat -> Nat - 161. type Optional a - 162. Optional.None : Optional a - 163. Optional.Some : a -> Optional a - 164. builtin type Request - 165. type SeqView a b - 166. SeqView.VElem : a -> b -> SeqView a b - 167. SeqView.VEmpty : SeqView a b - 168. unique type Test.Result - 169. Test.Result.Fail : Text -> Result - 170. Test.Result.Ok : Text -> Result - 171. builtin type Text - 172. Text.!= : Text -> Text -> Boolean - 173. Text.++ : Text -> Text -> Text - 174. Text.drop : Nat -> Text -> Text - 175. Text.empty : Text - 176. Text.eq : Text -> Text -> Boolean - 177. Text.fromCharList : [Char] -> Text - 178. Text.fromUtf8.impl : Bytes -> Either Failure Text - 179. Text.gt : Text -> Text -> Boolean - 180. Text.gteq : Text -> Text -> Boolean - 181. Text.lt : Text -> Text -> Boolean - 182. Text.lteq : Text -> Text -> Boolean - 183. Text.repeat : Nat -> Text -> Text - 184. Text.size : Text -> Nat - 185. Text.take : Nat -> Text -> Text - 186. Text.toCharList : Text -> [Char] - 187. Text.toUtf8 : Text -> Bytes - 188. Text.uncons : Text -> Optional (Char, Text) - 189. Text.unsnoc : Text -> Optional (Text, Char) - 190. type Tuple a b - 191. Tuple.Cons : a -> b -> Tuple a b - 192. type Unit - 193. Unit.Unit : () - 194. Universal.< : a -> a -> Boolean - 195. Universal.<= : a -> a -> Boolean - 196. Universal.== : a -> a -> Boolean - 197. Universal.> : a -> a -> Boolean - 198. Universal.>= : a -> a -> Boolean - 199. 
Universal.compare : a -> a -> Int - 200. builtin type Value - 201. Value.dependencies : Value -> [Term] - 202. Value.deserialize : Bytes -> Either Text Value - 203. Value.load : Value ->{IO} Either [Term] a - 204. Value.serialize : Value -> Bytes - 205. Value.value : a -> Value - 206. bug : a -> b - 207. builtin type crypto.HashAlgorithm - 208. crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm - 209. crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm - 210. crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm - 211. crypto.HashAlgorithm.Sha2_256 : HashAlgorithm - 212. crypto.HashAlgorithm.Sha2_512 : HashAlgorithm - 213. crypto.HashAlgorithm.Sha3_256 : HashAlgorithm - 214. crypto.HashAlgorithm.Sha3_512 : HashAlgorithm - 215. crypto.hash : HashAlgorithm -> a -> Bytes - 216. crypto.hashBytes : HashAlgorithm -> Bytes -> Bytes - 217. crypto.hmac : HashAlgorithm -> Bytes -> a -> Bytes - 218. crypto.hmacBytes : HashAlgorithm + 5. bug : a -> b + 6. builtin type Bytes + 7. Bytes.++ : Bytes -> Bytes -> Bytes + 8. Bytes.at : Nat -> Bytes -> Optional Nat + 9. Bytes.decodeNat16be : Bytes -> Optional (Nat, Bytes) + 10. Bytes.decodeNat16le : Bytes -> Optional (Nat, Bytes) + 11. Bytes.decodeNat32be : Bytes -> Optional (Nat, Bytes) + 12. Bytes.decodeNat32le : Bytes -> Optional (Nat, Bytes) + 13. Bytes.decodeNat64be : Bytes -> Optional (Nat, Bytes) + 14. Bytes.decodeNat64le : Bytes -> Optional (Nat, Bytes) + 15. Bytes.drop : Nat -> Bytes -> Bytes + 16. Bytes.empty : Bytes + 17. Bytes.encodeNat16be : Nat -> Bytes + 18. Bytes.encodeNat16le : Nat -> Bytes + 19. Bytes.encodeNat32be : Nat -> Bytes + 20. Bytes.encodeNat32le : Nat -> Bytes + 21. Bytes.encodeNat64be : Nat -> Bytes + 22. Bytes.encodeNat64le : Nat -> Bytes + 23. Bytes.flatten : Bytes -> Bytes + 24. Bytes.fromBase16 : Bytes -> Either Text Bytes + 25. Bytes.fromBase32 : Bytes -> Either Text Bytes + 26. Bytes.fromBase64 : Bytes -> Either Text Bytes + 27. Bytes.fromBase64UrlUnpadded : Bytes -> Either Text Bytes + 28. 
Bytes.fromList : [Nat] -> Bytes + 29. Bytes.size : Bytes -> Nat + 30. Bytes.take : Nat -> Bytes -> Bytes + 31. Bytes.toBase16 : Bytes -> Bytes + 32. Bytes.toBase32 : Bytes -> Bytes + 33. Bytes.toBase64 : Bytes -> Bytes + 34. Bytes.toBase64UrlUnpadded : Bytes -> Bytes + 35. Bytes.toList : Bytes -> [Nat] + 36. builtin type Char + 37. Char.fromNat : Nat -> Char + 38. Char.toNat : Char -> Nat + 39. Char.toText : Char -> Text + 40. builtin type Code + 41. Code.cache_ : [(Term, Code)] ->{IO} [Term] + 42. Code.dependencies : Code -> [Term] + 43. Code.deserialize : Bytes -> Either Text Code + 44. Code.isMissing : Term ->{IO} Boolean + 45. Code.lookup : Term ->{IO} Optional Code + 46. Code.serialize : Code -> Bytes + 47. crypto.hash : HashAlgorithm -> a -> Bytes + 48. builtin type crypto.HashAlgorithm + 49. crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm + 50. crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm + 51. crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm + 52. crypto.HashAlgorithm.Sha2_256 : HashAlgorithm + 53. crypto.HashAlgorithm.Sha2_512 : HashAlgorithm + 54. crypto.HashAlgorithm.Sha3_256 : HashAlgorithm + 55. crypto.HashAlgorithm.Sha3_512 : HashAlgorithm + 56. crypto.hashBytes : HashAlgorithm -> Bytes -> Bytes + 57. crypto.hmac : HashAlgorithm -> Bytes -> a -> Bytes + 58. crypto.hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes - 219. unique type io2.BufferMode - 220. io2.BufferMode.BlockBuffering : BufferMode - 221. io2.BufferMode.LineBuffering : BufferMode - 222. io2.BufferMode.NoBuffering : BufferMode - 223. io2.BufferMode.SizedBlockBuffering : Nat -> BufferMode - 224. unique type io2.Failure - 225. io2.Failure.Failure : Type -> Text -> Any -> Failure - 226. unique type io2.FileMode - 227. io2.FileMode.Append : FileMode - 228. io2.FileMode.Read : FileMode - 229. io2.FileMode.ReadWrite : FileMode - 230. io2.FileMode.Write : FileMode - 231. builtin type io2.Handle - 232. builtin type io2.IO - 233. io2.IO.clientSocket.impl : Text + 59. 
Debug.watch : Text -> a -> a + 60. unique type Doc + 61. Doc.Blob : Text -> Doc + 62. Doc.Evaluate : Term -> Doc + 63. Doc.Join : [Doc] -> Doc + 64. Doc.Link : Link -> Doc + 65. Doc.Signature : Term -> Doc + 66. Doc.Source : Link -> Doc + 67. type Either a b + 68. Either.Left : a -> Either a b + 69. Either.Right : b -> Either a b + 70. ability Exception + 71. Exception.raise : Failure ->{Exception} x + 72. builtin type Float + 73. Float.* : Float -> Float -> Float + 74. Float.+ : Float -> Float -> Float + 75. Float.- : Float -> Float -> Float + 76. Float./ : Float -> Float -> Float + 77. Float.abs : Float -> Float + 78. Float.acos : Float -> Float + 79. Float.acosh : Float -> Float + 80. Float.asin : Float -> Float + 81. Float.asinh : Float -> Float + 82. Float.atan : Float -> Float + 83. Float.atan2 : Float -> Float -> Float + 84. Float.atanh : Float -> Float + 85. Float.ceiling : Float -> Int + 86. Float.cos : Float -> Float + 87. Float.cosh : Float -> Float + 88. Float.eq : Float -> Float -> Boolean + 89. Float.exp : Float -> Float + 90. Float.floor : Float -> Int + 91. Float.fromRepresentation : Nat -> Float + 92. Float.fromText : Text -> Optional Float + 93. Float.gt : Float -> Float -> Boolean + 94. Float.gteq : Float -> Float -> Boolean + 95. Float.log : Float -> Float + 96. Float.logBase : Float -> Float -> Float + 97. Float.lt : Float -> Float -> Boolean + 98. Float.lteq : Float -> Float -> Boolean + 99. Float.max : Float -> Float -> Float + 100. Float.min : Float -> Float -> Float + 101. Float.pow : Float -> Float -> Float + 102. Float.round : Float -> Int + 103. Float.sin : Float -> Float + 104. Float.sinh : Float -> Float + 105. Float.sqrt : Float -> Float + 106. Float.tan : Float -> Float + 107. Float.tanh : Float -> Float + 108. Float.toRepresentation : Float -> Nat + 109. Float.toText : Float -> Text + 110. Float.truncate : Float -> Int + 111. builtin type Int + 112. Int.* : Int -> Int -> Int + 113. Int.+ : Int -> Int -> Int + 114. 
Int.- : Int -> Int -> Int + 115. Int./ : Int -> Int -> Int + 116. Int.and : Int -> Int -> Int + 117. Int.complement : Int -> Int + 118. Int.eq : Int -> Int -> Boolean + 119. Int.fromRepresentation : Nat -> Int + 120. Int.fromText : Text -> Optional Int + 121. Int.gt : Int -> Int -> Boolean + 122. Int.gteq : Int -> Int -> Boolean + 123. Int.increment : Int -> Int + 124. Int.isEven : Int -> Boolean + 125. Int.isOdd : Int -> Boolean + 126. Int.leadingZeros : Int -> Nat + 127. Int.lt : Int -> Int -> Boolean + 128. Int.lteq : Int -> Int -> Boolean + 129. Int.mod : Int -> Int -> Int + 130. Int.negate : Int -> Int + 131. Int.or : Int -> Int -> Int + 132. Int.popCount : Int -> Nat + 133. Int.pow : Int -> Nat -> Int + 134. Int.shiftLeft : Int -> Nat -> Int + 135. Int.shiftRight : Int -> Nat -> Int + 136. Int.signum : Int -> Int + 137. Int.toFloat : Int -> Float + 138. Int.toRepresentation : Int -> Nat + 139. Int.toText : Int -> Text + 140. Int.trailingZeros : Int -> Nat + 141. Int.truncate0 : Int -> Nat + 142. Int.xor : Int -> Int -> Int + 143. unique type io2.BufferMode + 144. io2.BufferMode.BlockBuffering : BufferMode + 145. io2.BufferMode.LineBuffering : BufferMode + 146. io2.BufferMode.NoBuffering : BufferMode + 147. io2.BufferMode.SizedBlockBuffering : Nat -> BufferMode + 148. unique type io2.Failure + 149. io2.Failure.Failure : Type -> Text -> Any -> Failure + 150. unique type io2.FileMode + 151. io2.FileMode.Append : FileMode + 152. io2.FileMode.Read : FileMode + 153. io2.FileMode.ReadWrite : FileMode + 154. io2.FileMode.Write : FileMode + 155. builtin type io2.Handle + 156. builtin type io2.IO + 157. io2.IO.clientSocket.impl : Text -> Text ->{IO} Either Failure Socket - 234. io2.IO.closeFile.impl : Handle ->{IO} Either Failure () - 235. io2.IO.closeSocket.impl : Socket ->{IO} Either Failure () - 236. io2.IO.createDirectory.impl : Text + 158. io2.IO.closeFile.impl : Handle ->{IO} Either Failure () + 159. 
io2.IO.closeSocket.impl : Socket ->{IO} Either Failure () + 160. io2.IO.createDirectory.impl : Text ->{IO} Either Failure () - 237. io2.IO.createTempDirectory.impl : Text + 161. io2.IO.createTempDirectory.impl : Text ->{IO} Either Failure Text - 238. io2.IO.delay.impl : Nat ->{IO} Either Failure () - 239. io2.IO.fileExists.impl : Text + 162. io2.IO.delay.impl : Nat ->{IO} Either Failure () + 163. io2.IO.directoryContents.impl : Text + ->{IO} Either + Failure [Text] + 164. io2.IO.fileExists.impl : Text ->{IO} Either Failure Boolean - 240. io2.IO.forkComp : '{IO} a ->{IO} ThreadId - 241. io2.IO.getBuffering.impl : Handle + 165. io2.IO.forkComp : '{IO} a ->{IO} ThreadId + 166. io2.IO.getBuffering.impl : Handle ->{IO} Either Failure BufferMode - 242. io2.IO.getBytes.impl : Handle + 167. io2.IO.getBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 243. io2.IO.getCurrentDirectory.impl : '{IO} Either + 168. io2.IO.getCurrentDirectory.impl : '{IO} Either Failure Text - 244. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat - 245. io2.IO.getFileTimestamp.impl : Text + 169. io2.IO.getEnv.impl : Text ->{IO} Either Failure Text + 170. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat + 171. io2.IO.getFileTimestamp.impl : Text ->{IO} Either Failure Nat - 246. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text - 247. io2.IO.handlePosition.impl : Handle + 172. io2.IO.getLine.impl : Handle ->{IO} Either Failure Text + 173. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text + 174. io2.IO.handlePosition.impl : Handle ->{IO} Either Failure Nat - 248. io2.IO.isDirectory.impl : Text + 175. io2.IO.isDirectory.impl : Text ->{IO} Either Failure Boolean - 249. io2.IO.isFileEOF.impl : Handle + 176. io2.IO.isFileEOF.impl : Handle ->{IO} Either Failure Boolean - 250. io2.IO.isFileOpen.impl : Handle + 177. io2.IO.isFileOpen.impl : Handle ->{IO} Either Failure Boolean - 251. io2.IO.isSeekable.impl : Handle + 178. 
io2.IO.isSeekable.impl : Handle ->{IO} Either Failure Boolean - 252. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () - 253. io2.IO.listen.impl : Socket ->{IO} Either Failure () - 254. io2.IO.openFile.impl : Text + 179. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () + 180. io2.IO.listen.impl : Socket ->{IO} Either Failure () + 181. io2.IO.openFile.impl : Text -> FileMode ->{IO} Either Failure Handle - 255. io2.IO.putBytes.impl : Handle + 182. io2.IO.putBytes.impl : Handle -> Bytes ->{IO} Either Failure () - 256. io2.IO.removeDirectory.impl : Text + 183. io2.IO.removeDirectory.impl : Text ->{IO} Either Failure () - 257. io2.IO.removeFile.impl : Text ->{IO} Either Failure () - 258. io2.IO.renameDirectory.impl : Text + 184. io2.IO.removeFile.impl : Text ->{IO} Either Failure () + 185. io2.IO.renameDirectory.impl : Text -> Text ->{IO} Either Failure () - 259. io2.IO.renameFile.impl : Text + 186. io2.IO.renameFile.impl : Text -> Text ->{IO} Either Failure () - 260. io2.IO.seekHandle.impl : Handle + 187. io2.IO.seekHandle.impl : Handle -> SeekMode -> Int ->{IO} Either Failure () - 261. io2.IO.serverSocket.impl : Optional Text + 188. io2.IO.serverSocket.impl : Optional Text -> Text ->{IO} Either Failure Socket - 262. io2.IO.setBuffering.impl : Handle + 189. io2.IO.setBuffering.impl : Handle -> BufferMode ->{IO} Either Failure () - 263. io2.IO.setCurrentDirectory.impl : Text + 190. io2.IO.setCurrentDirectory.impl : Text ->{IO} Either Failure () - 264. io2.IO.socketAccept.impl : Socket + 191. io2.IO.socketAccept.impl : Socket ->{IO} Either Failure Socket - 265. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat - 266. io2.IO.socketReceive.impl : Socket + 192. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat + 193. io2.IO.socketReceive.impl : Socket -> Nat ->{IO} Either Failure Bytes - 267. io2.IO.socketSend.impl : Socket + 194. io2.IO.socketSend.impl : Socket -> Bytes ->{IO} Either Failure () - 268. io2.IO.stdHandle : StdHandle -> Handle - 269. 
io2.IO.systemTime.impl : '{IO} Either Failure Nat - 270. unique type io2.IOError - 271. io2.IOError.AlreadyExists : IOError - 272. io2.IOError.EOF : IOError - 273. io2.IOError.IllegalOperation : IOError - 274. io2.IOError.NoSuchThing : IOError - 275. io2.IOError.PermissionDenied : IOError - 276. io2.IOError.ResourceBusy : IOError - 277. io2.IOError.ResourceExhausted : IOError - 278. io2.IOError.UserError : IOError - 279. unique type io2.IOFailure - 280. builtin type io2.MVar - 281. io2.MVar.isEmpty : MVar a ->{IO} Boolean - 282. io2.MVar.new : a ->{IO} MVar a - 283. io2.MVar.newEmpty : '{IO} MVar a - 284. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () - 285. io2.MVar.read.impl : MVar a ->{IO} Either Failure a - 286. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a - 287. io2.MVar.take.impl : MVar a ->{IO} Either Failure a - 288. io2.MVar.tryPut.impl : MVar a + 195. io2.IO.stdHandle : StdHandle -> Handle + 196. io2.IO.systemTime.impl : '{IO} Either Failure Nat + 197. unique type io2.IOError + 198. io2.IOError.AlreadyExists : IOError + 199. io2.IOError.EOF : IOError + 200. io2.IOError.IllegalOperation : IOError + 201. io2.IOError.NoSuchThing : IOError + 202. io2.IOError.PermissionDenied : IOError + 203. io2.IOError.ResourceBusy : IOError + 204. io2.IOError.ResourceExhausted : IOError + 205. io2.IOError.UserError : IOError + 206. unique type io2.IOFailure + 207. builtin type io2.MVar + 208. io2.MVar.isEmpty : MVar a ->{IO} Boolean + 209. io2.MVar.new : a ->{IO} MVar a + 210. io2.MVar.newEmpty : '{IO} MVar a + 211. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () + 212. io2.MVar.read.impl : MVar a ->{IO} Either Failure a + 213. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a + 214. io2.MVar.take.impl : MVar a ->{IO} Either Failure a + 215. io2.MVar.tryPut.impl : MVar a -> a ->{IO} Either Failure Boolean - 289. io2.MVar.tryRead.impl : MVar a + 216. io2.MVar.tryRead.impl : MVar a ->{IO} Either Failure (Optional a) - 290. 
io2.MVar.tryTake : MVar a ->{IO} Optional a - 291. builtin type io2.STM - 292. io2.STM.atomically : '{STM} a ->{IO} a - 293. io2.STM.retry : '{STM} a - 294. unique type io2.SeekMode - 295. io2.SeekMode.AbsoluteSeek : SeekMode - 296. io2.SeekMode.RelativeSeek : SeekMode - 297. io2.SeekMode.SeekFromEnd : SeekMode - 298. builtin type io2.Socket - 299. unique type io2.StdHandle - 300. io2.StdHandle.StdErr : StdHandle - 301. io2.StdHandle.StdIn : StdHandle - 302. io2.StdHandle.StdOut : StdHandle - 303. io2.TLS.ClientConfig.ciphers.set : [Cipher] - -> ClientConfig - -> ClientConfig - 304. builtin type io2.TVar - 305. io2.TVar.new : a ->{STM} TVar a - 306. io2.TVar.newIO : a ->{IO} TVar a - 307. io2.TVar.read : TVar a ->{STM} a - 308. io2.TVar.readIO : TVar a ->{IO} a - 309. io2.TVar.swap : TVar a -> a ->{STM} a - 310. io2.TVar.write : TVar a -> a ->{STM} () - 311. builtin type io2.ThreadId - 312. builtin type io2.Tls - 313. builtin type io2.Tls.Cipher - 314. builtin type io2.Tls.ClientConfig - 315. io2.Tls.ClientConfig.certificates.set : [SignedCert] + 217. io2.MVar.tryTake : MVar a ->{IO} Optional a + 218. unique type io2.SeekMode + 219. io2.SeekMode.AbsoluteSeek : SeekMode + 220. io2.SeekMode.RelativeSeek : SeekMode + 221. io2.SeekMode.SeekFromEnd : SeekMode + 222. builtin type io2.Socket + 223. unique type io2.StdHandle + 224. io2.StdHandle.StdErr : StdHandle + 225. io2.StdHandle.StdIn : StdHandle + 226. io2.StdHandle.StdOut : StdHandle + 227. builtin type io2.STM + 228. io2.STM.atomically : '{STM} a ->{IO} a + 229. io2.STM.retry : '{STM} a + 230. builtin type io2.ThreadId + 231. builtin type io2.Tls + 232. builtin type io2.Tls.Cipher + 233. builtin type io2.Tls.ClientConfig + 234. io2.Tls.ClientConfig.certificates.set : [SignedCert] -> ClientConfig -> ClientConfig - 316. io2.Tls.ClientConfig.default : Text + 235. io2.TLS.ClientConfig.ciphers.set : [Cipher] + -> ClientConfig + -> ClientConfig + 236. io2.Tls.ClientConfig.default : Text -> Bytes -> ClientConfig - 317. 
io2.Tls.ClientConfig.versions.set : [Version] + 237. io2.Tls.ClientConfig.versions.set : [Version] -> ClientConfig -> ClientConfig - 318. builtin type io2.Tls.PrivateKey - 319. builtin type io2.Tls.ServerConfig - 320. io2.Tls.ServerConfig.certificates.set : [SignedCert] + 238. io2.Tls.decodeCert.impl : Bytes + -> Either Failure SignedCert + 239. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] + 240. io2.Tls.encodeCert : SignedCert -> Bytes + 241. io2.Tls.encodePrivateKey : PrivateKey -> Bytes + 242. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () + 243. io2.Tls.newClient.impl : ClientConfig + -> Socket + ->{IO} Either Failure Tls + 244. io2.Tls.newServer.impl : ServerConfig + -> Socket + ->{IO} Either Failure Tls + 245. builtin type io2.Tls.PrivateKey + 246. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes + 247. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () + 248. builtin type io2.Tls.ServerConfig + 249. io2.Tls.ServerConfig.certificates.set : [SignedCert] -> ServerConfig -> ServerConfig - 321. io2.Tls.ServerConfig.ciphers.set : [Cipher] + 250. io2.Tls.ServerConfig.ciphers.set : [Cipher] -> ServerConfig -> ServerConfig - 322. io2.Tls.ServerConfig.default : [SignedCert] + 251. io2.Tls.ServerConfig.default : [SignedCert] -> PrivateKey -> ServerConfig - 323. io2.Tls.ServerConfig.versions.set : [Version] + 252. io2.Tls.ServerConfig.versions.set : [Version] -> ServerConfig -> ServerConfig - 324. builtin type io2.Tls.SignedCert - 325. builtin type io2.Tls.Version - 326. io2.Tls.decodeCert.impl : Bytes - -> Either Failure SignedCert - 327. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] - 328. io2.Tls.encodeCert : SignedCert -> Bytes - 329. io2.Tls.encodePrivateKey : PrivateKey -> Bytes - 330. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () - 331. io2.Tls.newClient.impl : ClientConfig - -> Socket - ->{IO} Either Failure Tls - 332. io2.Tls.newServer.impl : ServerConfig - -> Socket - ->{IO} Either Failure Tls - 333. 
io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes - 334. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () - 335. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () - 336. unique type io2.TlsFailure - 337. metadata.isPropagated : IsPropagated - 338. metadata.isTest : IsTest - 339. todo : a -> b + 253. builtin type io2.Tls.SignedCert + 254. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () + 255. builtin type io2.Tls.Version + 256. unique type io2.TlsFailure + 257. builtin type io2.TVar + 258. io2.TVar.new : a ->{STM} TVar a + 259. io2.TVar.newIO : a ->{IO} TVar a + 260. io2.TVar.read : TVar a ->{STM} a + 261. io2.TVar.readIO : TVar a ->{IO} a + 262. io2.TVar.swap : TVar a -> a ->{STM} a + 263. io2.TVar.write : TVar a -> a ->{STM} () + 264. unique type IsPropagated + 265. IsPropagated.IsPropagated : IsPropagated + 266. unique type IsTest + 267. IsTest.IsTest : IsTest + 268. unique type Link + 269. builtin type Link.Term + 270. Link.Term : Term -> Link + 271. builtin type Link.Type + 272. Link.Type : Type -> Link + 273. builtin type List + 274. List.++ : [a] -> [a] -> [a] + 275. List.+: : a -> [a] -> [a] + 276. List.:+ : [a] -> a -> [a] + 277. List.at : Nat -> [a] -> Optional a + 278. List.cons : a -> [a] -> [a] + 279. List.drop : Nat -> [a] -> [a] + 280. List.empty : [a] + 281. List.size : [a] -> Nat + 282. List.snoc : [a] -> a -> [a] + 283. List.take : Nat -> [a] -> [a] + 284. metadata.isPropagated : IsPropagated + 285. metadata.isTest : IsTest + 286. builtin type Nat + 287. Nat.* : Nat -> Nat -> Nat + 288. Nat.+ : Nat -> Nat -> Nat + 289. Nat./ : Nat -> Nat -> Nat + 290. Nat.and : Nat -> Nat -> Nat + 291. Nat.complement : Nat -> Nat + 292. Nat.drop : Nat -> Nat -> Nat + 293. Nat.eq : Nat -> Nat -> Boolean + 294. Nat.fromText : Text -> Optional Nat + 295. Nat.gt : Nat -> Nat -> Boolean + 296. Nat.gteq : Nat -> Nat -> Boolean + 297. Nat.increment : Nat -> Nat + 298. Nat.isEven : Nat -> Boolean + 299. Nat.isOdd : Nat -> Boolean + 300. 
Nat.leadingZeros : Nat -> Nat + 301. Nat.lt : Nat -> Nat -> Boolean + 302. Nat.lteq : Nat -> Nat -> Boolean + 303. Nat.mod : Nat -> Nat -> Nat + 304. Nat.or : Nat -> Nat -> Nat + 305. Nat.popCount : Nat -> Nat + 306. Nat.pow : Nat -> Nat -> Nat + 307. Nat.shiftLeft : Nat -> Nat -> Nat + 308. Nat.shiftRight : Nat -> Nat -> Nat + 309. Nat.sub : Nat -> Nat -> Int + 310. Nat.toFloat : Nat -> Float + 311. Nat.toInt : Nat -> Int + 312. Nat.toText : Nat -> Text + 313. Nat.trailingZeros : Nat -> Nat + 314. Nat.xor : Nat -> Nat -> Nat + 315. type Optional a + 316. Optional.None : Optional a + 317. Optional.Some : a -> Optional a + 318. builtin type Request + 319. type SeqView a b + 320. SeqView.VElem : a -> b -> SeqView a b + 321. SeqView.VEmpty : SeqView a b + 322. unique type Test.Result + 323. Test.Result.Fail : Text -> Result + 324. Test.Result.Ok : Text -> Result + 325. builtin type Text + 326. Text.!= : Text -> Text -> Boolean + 327. Text.++ : Text -> Text -> Text + 328. Text.drop : Nat -> Text -> Text + 329. Text.empty : Text + 330. Text.eq : Text -> Text -> Boolean + 331. Text.fromCharList : [Char] -> Text + 332. Text.fromUtf8.impl : Bytes -> Either Failure Text + 333. Text.gt : Text -> Text -> Boolean + 334. Text.gteq : Text -> Text -> Boolean + 335. Text.lt : Text -> Text -> Boolean + 336. Text.lteq : Text -> Text -> Boolean + 337. Text.repeat : Nat -> Text -> Text + 338. Text.size : Text -> Nat + 339. Text.take : Nat -> Text -> Text + 340. Text.toCharList : Text -> [Char] + 341. Text.toUtf8 : Text -> Bytes + 342. Text.uncons : Text -> Optional (Char, Text) + 343. Text.unsnoc : Text -> Optional (Text, Char) + 344. todo : a -> b + 345. type Tuple a b + 346. Tuple.Cons : a -> b -> Tuple a b + 347. type Unit + 348. Unit.Unit : () + 349. Universal.< : a -> a -> Boolean + 350. Universal.<= : a -> a -> Boolean + 351. Universal.== : a -> a -> Boolean + 352. Universal.> : a -> a -> Boolean + 353. Universal.>= : a -> a -> Boolean + 354. 
Universal.compare : a -> a -> Int + 355. builtin type Value + 356. Value.dependencies : Value -> [Term] + 357. Value.deserialize : Bytes -> Either Text Value + 358. Value.load : Value ->{IO} Either [Term] a + 359. Value.serialize : Value -> Bytes + 360. Value.value : a -> Value .builtin> alias.many 94-104 .mylib @@ -433,17 +456,17 @@ Let's try it! Added definitions: - 1. Int.isEven : Int -> Boolean - 2. Int.isOdd : Int -> Boolean - 3. Int.leadingZeros : Int -> Nat - 4. Int.lt : Int -> Int -> Boolean - 5. Int.lteq : Int -> Int -> Boolean - 6. Int.mod : Int -> Int -> Int - 7. Int.negate : Int -> Int - 8. Int.or : Int -> Int -> Int - 9. Int.popCount : Int -> Nat - 10. Int.pow : Int -> Nat -> Int - 11. Int.shiftLeft : Int -> Nat -> Int + 1. Float.gteq : Float -> Float -> Boolean + 2. Float.log : Float -> Float + 3. Float.logBase : Float -> Float -> Float + 4. Float.lt : Float -> Float -> Boolean + 5. Float.lteq : Float -> Float -> Boolean + 6. Float.max : Float -> Float -> Float + 7. Float.min : Float -> Float -> Float + 8. Float.pow : Float -> Float -> Float + 9. Float.round : Float -> Int + 10. Float.sin : Float -> Float + 11. Float.sinh : Float -> Float Tip: You can use `undo` or `reflog` to undo this change. @@ -455,11 +478,11 @@ I want to incorporate a few more from another namespace: .runar> find 1. List.adjacentPairs : [a] -> [(a, a)] - 2. List.all : (a ->{g} Boolean) ->{g} [a] ->{g} Boolean - 3. List.any : (a ->{g} Boolean) ->{g} [a] ->{g} Boolean + 2. List.all : (a ->{g} Boolean) -> [a] ->{g} Boolean + 3. List.any : (a ->{g} Boolean) -> [a] ->{g} Boolean 4. List.chunk : Nat -> [a] -> [[a]] 5. List.chunksOf : Nat -> [a] -> [[a]] - 6. List.dropWhile : (a ->{g} Boolean) ->{g} [a] ->{g} [a] + 6. List.dropWhile : (a ->{g} Boolean) -> [a] ->{g} [a] 7. List.first : [a] -> Optional a 8. List.init : [a] -> Optional [a] 9. List.intersperse : a -> [a] -> [a] @@ -479,16 +502,14 @@ I want to incorporate a few more from another namespace: 1. 
List.adjacentPairs : [a] -> [(a, a)] 2. List.all : (a ->{g} Boolean) - ->{g} [a] + -> [a] ->{g} Boolean 3. List.any : (a ->{g} Boolean) - ->{g} [a] + -> [a] ->{g} Boolean 4. List.chunk : Nat -> [a] -> [[a]] 5. List.chunksOf : Nat -> [a] -> [[a]] - 6. List.dropWhile : (a ->{g} Boolean) - ->{g} [a] - ->{g} [a] + 6. List.dropWhile : (a ->{g} Boolean) -> [a] ->{g} [a] 7. List.first : [a] -> Optional a 8. List.init : [a] -> Optional [a] 9. List.intersperse : a -> [a] -> [a] @@ -505,23 +526,23 @@ I want to incorporate a few more from another namespace: .mylib> find - 1. Int.isEven : Int -> Boolean - 2. Int.isOdd : Int -> Boolean - 3. Int.leadingZeros : Int -> Nat - 4. Int.lt : Int -> Int -> Boolean - 5. Int.lteq : Int -> Int -> Boolean - 6. Int.mod : Int -> Int -> Int - 7. Int.negate : Int -> Int - 8. Int.or : Int -> Int -> Int - 9. Int.popCount : Int -> Nat - 10. Int.pow : Int -> Nat -> Int - 11. Int.shiftLeft : Int -> Nat -> Int + 1. Float.gteq : Float -> Float -> Boolean + 2. Float.log : Float -> Float + 3. Float.logBase : Float -> Float -> Float + 4. Float.lt : Float -> Float -> Boolean + 5. Float.lteq : Float -> Float -> Boolean + 6. Float.max : Float -> Float -> Float + 7. Float.min : Float -> Float -> Float + 8. Float.pow : Float -> Float -> Float + 9. Float.round : Float -> Int + 10. Float.sin : Float -> Float + 11. Float.sinh : Float -> Float 12. List.adjacentPairs : [a] -> [(a, a)] - 13. List.all : (a ->{g} Boolean) ->{g} [a] ->{g} Boolean - 14. List.any : (a ->{g} Boolean) ->{g} [a] ->{g} Boolean + 13. List.all : (a ->{g} Boolean) -> [a] ->{g} Boolean + 14. List.any : (a ->{g} Boolean) -> [a] ->{g} Boolean 15. List.chunk : Nat -> [a] -> [[a]] 16. List.chunksOf : Nat -> [a] -> [[a]] - 17. List.dropWhile : (a ->{g} Boolean) ->{g} [a] ->{g} [a] + 17. List.dropWhile : (a ->{g} Boolean) -> [a] ->{g} [a] 18. List.first : [a] -> Optional a 19. List.init : [a] -> Optional [a] 20. 
List.intersperse : a -> [a] -> [a] diff --git a/unison-src/transcripts/ambiguous-metadata.output.md b/unison-src/transcripts/ambiguous-metadata.output.md index c0d958fc6d..6b5f26b1db 100644 --- a/unison-src/transcripts/ambiguous-metadata.output.md +++ b/unison-src/transcripts/ambiguous-metadata.output.md @@ -32,7 +32,7 @@ x = 1 there are multiple matches: foo.doc - boo.doc#tj3gfqdnje + doc#tj3gfqdnje Tip: Try again and supply one of the above definitions explicitly. diff --git a/unison-src/transcripts/builtins-merge.output.md b/unison-src/transcripts/builtins-merge.output.md index aa474a06f3..9ec2694f10 100644 --- a/unison-src/transcripts/builtins-merge.output.md +++ b/unison-src/transcripts/builtins-merge.output.md @@ -14,7 +14,7 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace 3. Boolean (builtin type) 4. Boolean/ (1 definition) 5. Bytes (builtin type) - 6. Bytes/ (17 definitions) + 6. Bytes/ (29 definitions) 7. Char (builtin type) 8. Char/ (3 definitions) 9. Code (builtin type) @@ -24,39 +24,41 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace 13. Doc/ (6 definitions) 14. Either (type) 15. Either/ (2 definitions) - 16. Float (builtin type) - 17. Float/ (36 definitions) - 18. Int (builtin type) - 19. Int/ (29 definitions) - 20. IsPropagated (type) - 21. IsPropagated/ (1 definition) - 22. IsTest (type) - 23. IsTest/ (1 definition) - 24. Link (type) - 25. Link/ (4 definitions) - 26. List (builtin type) - 27. List/ (10 definitions) - 28. Nat (builtin type) - 29. Nat/ (28 definitions) - 30. Optional (type) - 31. Optional/ (2 definitions) - 32. Request (builtin type) - 33. SeqView (type) - 34. SeqView/ (2 definitions) - 35. Test/ (3 definitions) - 36. Text (builtin type) - 37. Text/ (18 definitions) - 38. Tuple (type) - 39. Tuple/ (1 definition) - 40. Unit (type) - 41. Unit/ (1 definition) - 42. Universal/ (6 definitions) - 43. Value (builtin type) - 44. Value/ (5 definitions) - 45. 
bug (a -> b) - 46. crypto/ (12 definitions) - 47. io2/ (118 definitions) - 48. metadata/ (2 definitions) - 49. todo (a -> b) + 16. Exception (type) + 17. Exception/ (1 definition) + 18. Float (builtin type) + 19. Float/ (38 definitions) + 20. Int (builtin type) + 21. Int/ (31 definitions) + 22. IsPropagated (type) + 23. IsPropagated/ (1 definition) + 24. IsTest (type) + 25. IsTest/ (1 definition) + 26. Link (type) + 27. Link/ (4 definitions) + 28. List (builtin type) + 29. List/ (10 definitions) + 30. Nat (builtin type) + 31. Nat/ (28 definitions) + 32. Optional (type) + 33. Optional/ (2 definitions) + 34. Request (builtin type) + 35. SeqView (type) + 36. SeqView/ (2 definitions) + 37. Test/ (3 definitions) + 38. Text (builtin type) + 39. Text/ (18 definitions) + 40. Tuple (type) + 41. Tuple/ (1 definition) + 42. Unit (type) + 43. Unit/ (1 definition) + 44. Universal/ (6 definitions) + 45. Value (builtin type) + 46. Value/ (5 definitions) + 47. bug (a -> b) + 48. crypto/ (12 definitions) + 49. io2/ (121 definitions) + 50. metadata/ (2 definitions) + 51. todo (a -> b) ``` diff --git a/unison-src/transcripts-using-base/builtins.md b/unison-src/transcripts/builtins.md similarity index 95% rename from unison-src/transcripts-using-base/builtins.md rename to unison-src/transcripts/builtins.md index 154e2ef226..8352af1586 100644 --- a/unison-src/transcripts-using-base/builtins.md +++ b/unison-src/transcripts/builtins.md @@ -1,5 +1,12 @@ # Unit tests for builtin functions +```ucm:hide +.> builtins.mergeio +.> cd builtin +.> load unison-src/transcripts-using-base/base.u +.> add +``` + This transcript defines unit tests for builtin functions. There's a single `.> test` execution at the end that will fail the transcript with a nice report if any of the tests fail. 
## `Int` functions @@ -95,6 +102,8 @@ test> Nat.tests.arithmetic = 10 / 5 == 2, 10 `mod` 3 == 1, 10 `mod` 2 == 0, + 18446744073709551615 / 2 == 9223372036854775807, + 18446744073709551615 `mod` 2 == 1, increment 99 == 100, not (isEven 99), isEven 100, @@ -105,7 +114,7 @@ test> Nat.tests.arithmetic = 43 `lteq` 43, 43 `lteq` 44, 43 `gteq` 43, - 43 `gteq` 41 + 43 `gteq` 41, ] test> Nat.tests.bitTwiddling = diff --git a/unison-src/transcripts-using-base/builtins.output.md b/unison-src/transcripts/builtins.output.md similarity index 98% rename from unison-src/transcripts-using-base/builtins.output.md rename to unison-src/transcripts/builtins.output.md index c981a5445f..b83798518a 100644 --- a/unison-src/transcripts-using-base/builtins.output.md +++ b/unison-src/transcripts/builtins.output.md @@ -91,6 +91,8 @@ test> Nat.tests.arithmetic = 10 / 5 == 2, 10 `mod` 3 == 1, 10 `mod` 2 == 0, + 18446744073709551615 / 2 == 9223372036854775807, + 18446744073709551615 `mod` 2 == 1, increment 99 == 100, not (isEven 99), isEven 100, @@ -101,7 +103,7 @@ test> Nat.tests.arithmetic = 43 `lteq` 43, 43 `lteq` 44, 43 `gteq` 43, - 43 `gteq` 41 + 43 `gteq` 41, ] test> Nat.tests.bitTwiddling = diff --git a/unison-src/transcripts/command-replace.md b/unison-src/transcripts/command-replace.md new file mode 100644 index 0000000000..2117e67851 --- /dev/null +++ b/unison-src/transcripts/command-replace.md @@ -0,0 +1,58 @@ +# Replace with terms and types + +Let's set up some definitions to start: + +```ucm:hide +.> builtins.merge +``` + +```unison +x = 1 +y = 2 + +type X = One Nat +type Y = Two Nat Nat +``` + +```ucm +.scratch> add +``` + +Test that replace works with terms +```ucm +.scratch> replace x y +.scratch> view x +``` + +Test that replace works with types +```ucm +.scratch> replace X Y +.scratch> find +.scratch> view.patch patch +.scratch> view X +``` + +Try with a type/term mismatch +```ucm:error +.scratch> replace X x +``` +```ucm:error +.scratch> replace y Y +``` + +Try with missing 
references +```ucm:error +.scratch> replace X NOPE +``` +```ucm:error +.scratch> replace y nope +``` +```ucm:error +.scratch> replace nope X +``` +```ucm:error +.scratch> replace nope y +``` +```ucm:error +.scratch> replace nope nope +``` diff --git a/unison-src/transcripts/command-replace.output.md b/unison-src/transcripts/command-replace.output.md new file mode 100644 index 0000000000..8957f570e3 --- /dev/null +++ b/unison-src/transcripts/command-replace.output.md @@ -0,0 +1,146 @@ +# Replace with terms and types + +Let's set up some definitions to start: + +```unison +x = 1 +y = 2 + +type X = One Nat +type Y = Two Nat Nat +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + type X + type Y + x : Nat + y : Nat + +``` +```ucm + ☝️ The namespace .scratch is empty. + +.scratch> add + + ⍟ I've added these definitions: + + type X + type Y + x : Nat + y : Nat + +``` +Test that replace works with terms +```ucm +.scratch> replace x y + + Done. + +.scratch> view x + + x : Nat + x = 2 + +``` +Test that replace works with types +```ucm +.scratch> replace X Y + + Done. + +.scratch> find + + 1. type X + 2. x : Nat + 3. X.One : Nat -> Nat -> X + 4. type Y + 5. y : Nat + 6. Y.Two : Nat -> Nat -> X + + +.scratch> view.patch patch + + Edited Types: X#d97e0jhkmd -> X + + Edited Terms: #jk19sm5bf8 -> x + + Tip: To remove entries from a patch, use + delete.term-replacement or delete.type-replacement, as + appropriate. + +.scratch> view X + + type X = One Nat Nat + +``` +Try with a type/term mismatch +```ucm +.scratch> replace X x + + ⚠️ + + I was expecting either two types or two terms but was given a type X and a term x. + +``` +```ucm +.scratch> replace y Y + + ⚠️ + + I was expecting either two types or two terms but was given a type Y and a term y. 
+ +``` +Try with missing references +```ucm +.scratch> replace X NOPE + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + NOPE + +``` +```ucm +.scratch> replace y nope + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + nope + +``` +```ucm +.scratch> replace nope X + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + nope + +``` +```ucm +.scratch> replace nope y + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + nope + +``` +```ucm +.scratch> replace nope nope + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + nope + nope + +``` diff --git a/unison-src/transcripts/contrabilities.md b/unison-src/transcripts/contrabilities.md new file mode 100644 index 0000000000..cf409cc49c --- /dev/null +++ b/unison-src/transcripts/contrabilities.md @@ -0,0 +1,8 @@ +```ucm:hide +.> builtins.merge +``` + +```unison +f : 'a -> Nat +f x = 42 +``` diff --git a/unison-src/transcripts/contrabilities.output.md b/unison-src/transcripts/contrabilities.output.md new file mode 100644 index 0000000000..079957c561 --- /dev/null +++ b/unison-src/transcripts/contrabilities.output.md @@ -0,0 +1,16 @@ +```unison +f : 'a -> Nat +f x = 42 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + f : '{g} a -> Nat + +``` diff --git a/unison-src/transcripts/create-author.md b/unison-src/transcripts/create-author.md index 69b45b19cd..9a8a7dfb8b 100644 --- a/unison-src/transcripts/create-author.md +++ b/unison-src/transcripts/create-author.md @@ -12,6 +12,6 @@ def2 = 2 ```ucm .foo> add .foo> create.author alicecoder "Alice McGee" -.foo> view 3 +.foo> view 2 .foo> link metadata.authors.alicecoder def1 def2 ``` diff --git a/unison-src/transcripts/create-author.output.md b/unison-src/transcripts/create-author.output.md index 69d0df22cd..41569526b7 100644 --- a/unison-src/transcripts/create-author.output.md +++ b/unison-src/transcripts/create-author.output.md @@ -20,12 +20,12 @@ def2 = 2 Added definitions: 1. metadata.authors.alicecoder : Author - 2. metadata.authors.alicecoder.guid : GUID - 3. metadata.copyrightHolders.alicecoder : CopyrightHolder + 2. metadata.copyrightHolders.alicecoder : CopyrightHolder + 3. metadata.authors.alicecoder.guid : GUID Tip: Add License values for alicecoder under metadata. -.foo> view 3 +.foo> view 2 metadata.copyrightHolders.alicecoder : CopyrightHolder metadata.copyrightHolders.alicecoder = diff --git a/unison-src/transcripts/delete.output.md b/unison-src/transcripts/delete.output.md index 61af13bc5f..45fba4a36e 100644 --- a/unison-src/transcripts/delete.output.md +++ b/unison-src/transcripts/delete.output.md @@ -113,8 +113,8 @@ A delete should remove both versions of the term. Name changes: Original Changes - 2. a.foo#0ja1qfpej6 ┐ 3. a.foo#0ja1qfpej6 (removed) - 4. b.foo ┘ + 2. b.foo ┐ 3. a.foo#0ja1qfpej6 (removed) + 4. a.foo#0ja1qfpej6 ┘ Tip: You can use `undo` or `reflog` to undo this change. @@ -186,8 +186,8 @@ type Foo = Foo Boolean Name changes: Original Changes - 2. a.Foo#gq9inhvg9h ┐ 3. a.Foo#gq9inhvg9h (removed) - 4. b.Foo ┘ + 2. b.Foo ┐ 3. a.Foo#gq9inhvg9h (removed) + 4. 
a.Foo#gq9inhvg9h ┘ Tip: You can use `undo` or `reflog` to undo this change. @@ -202,8 +202,8 @@ type Foo = Foo Boolean Name changes: Original Changes - 2. a.Foo.Foo#gq9inhvg9h#0 ┐ 3. a.Foo.Foo#gq9inhvg9h#0 (removed) - 4. b.Foo.Foo ┘ + 2. b.Foo.Foo ┐ 3. a.Foo.Foo#gq9inhvg9h#0 (removed) + 4. a.Foo.Foo#gq9inhvg9h#0 ┘ Tip: You can use `undo` or `reflog` to undo this change. diff --git a/unison-src/transcripts/deleteReplacements.md b/unison-src/transcripts/deleteReplacements.md index 1f4086a29b..dd4520b00c 100644 --- a/unison-src/transcripts/deleteReplacements.md +++ b/unison-src/transcripts/deleteReplacements.md @@ -23,7 +23,7 @@ x = 2 ``` ```unison -type Foo = Foo +unique[a] type Foo = Foo ``` ```ucm @@ -31,7 +31,7 @@ type Foo = Foo ``` ```unison -type Foo = Foo | Bar +unique[b] type Foo = Foo | Bar ``` ```ucm @@ -40,13 +40,13 @@ type Foo = Foo | Bar ``` ```ucm -.> delete.type-replacement #568rsi7o3g +.> delete.type-replacement #hsk1l8232e .> view.patch ``` ```unison bar = 3 -type bar = Foo +unique[aa] type bar = Foo ``` ```ucm @@ -54,12 +54,12 @@ type bar = Foo ``` ```unison -type bar = Foo | Bar +unique[bb] type bar = Foo | Bar ``` ```ucm .> update .> view.patch -.> delete.type-replacement bar +.> delete.type-replacement #b1ct5ub6du .> view.patch ``` diff --git a/unison-src/transcripts/deleteReplacements.output.md b/unison-src/transcripts/deleteReplacements.output.md index 4fb2894429..89f7d71d40 100644 --- a/unison-src/transcripts/deleteReplacements.output.md +++ b/unison-src/transcripts/deleteReplacements.output.md @@ -66,7 +66,7 @@ x = 2 ``` ```unison -type Foo = Foo +unique[a] type Foo = Foo ``` ```ucm @@ -77,7 +77,7 @@ type Foo = Foo ⍟ These new definitions are ok to `add`: - type Foo + unique type Foo ``` ```ucm @@ -85,11 +85,11 @@ type Foo = Foo ⍟ I've added these definitions: - type Foo + unique type Foo ``` ```unison -type Foo = Foo | Bar +unique[b] type Foo = Foo | Bar ``` ```ucm @@ -101,7 +101,7 @@ type Foo = Foo | Bar ⍟ These names already exist. 
You can `update` them to your new definition: - type Foo + unique type Foo ``` ```ucm @@ -109,11 +109,11 @@ type Foo = Foo | Bar ⍟ I've updated these names to your new definition: - type Foo + unique type Foo .> view.patch - Edited Types: Foo#568rsi7o3g -> Foo + Edited Types: Foo#hsk1l8232e -> Foo Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -121,7 +121,7 @@ type Foo = Foo | Bar ``` ```ucm -.> delete.type-replacement #568rsi7o3g +.> delete.type-replacement #hsk1l8232e Done. @@ -132,7 +132,7 @@ type Foo = Foo | Bar ``` ```unison bar = 3 -type bar = Foo +unique[aa] type bar = Foo ``` ```ucm @@ -143,7 +143,7 @@ type bar = Foo ⍟ These new definitions are ok to `add`: - type bar + unique type bar bar : ##Nat ``` @@ -152,12 +152,12 @@ type bar = Foo ⍟ I've added these definitions: - type bar + unique type bar bar : ##Nat ``` ```unison -type bar = Foo | Bar +unique[bb] type bar = Foo | Bar ``` ```ucm @@ -169,8 +169,7 @@ type bar = Foo | Bar ⍟ These names already exist. You can `update` them to your new definition: - type bar - (also named Foo) + unique type bar ``` ```ucm @@ -178,27 +177,22 @@ type bar = Foo | Bar ⍟ I've updated these names to your new definition: - type bar - (also named Foo) + unique type bar .> view.patch - Edited Types: bar#568rsi7o3g -> Foo + Edited Types: bar#b1ct5ub6du -> bar Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as appropriate. -.> delete.type-replacement bar +.> delete.type-replacement #b1ct5ub6du Done. .> view.patch - Edited Types: bar#568rsi7o3g -> Foo - - Tip: To remove entries from a patch, use - delete.term-replacement or delete.type-replacement, as - appropriate. + This patch is empty. 
``` diff --git a/unison-src/transcripts/dependents-dependencies-debugfile.output.md b/unison-src/transcripts/dependents-dependencies-debugfile.output.md index b86c321eb4..2ee6390d5c 100644 --- a/unison-src/transcripts/dependents-dependencies-debugfile.output.md +++ b/unison-src/transcripts/dependents-dependencies-debugfile.output.md @@ -68,8 +68,8 @@ But wait, there's more. I can check the dependencies and dependents of a defini Dependencies of #muulibntaq#0: Reference Name - 1. ##Int builtin.Int - 2. #muulibntaq outside.B + 1. #muulibntaq outside.B + 2. ##Int builtin.Int .> dependencies d @@ -79,8 +79,8 @@ But wait, there's more. I can check the dependencies and dependents of a defini 1. ##Nat builtin.Nat 2. ##Nat.+ builtin.Nat.+ 3. ##Universal.< builtin.Universal.< - 4. #fiupm7pl7o inside.p - 5. #msp7bv40rv outside.c + 4. #msp7bv40rv outside.c + 5. #fiupm7pl7o inside.p .> dependents d diff --git a/unison-src/transcripts/diff.md b/unison-src/transcripts/diff.md index 9183a695f8..5846af90c1 100644 --- a/unison-src/transcripts/diff.md +++ b/unison-src/transcripts/diff.md @@ -19,6 +19,7 @@ fslkdjflskdjflksjdf = 663 .b0> add .> merge b0 b1 .> diff.namespace b1 b2 +.b2> diff.namespace .b1 ``` Things we want to test: diff --git a/unison-src/transcripts/diff.output.md b/unison-src/transcripts/diff.output.md index fa59b9b8a0..cda20f1fcb 100644 --- a/unison-src/transcripts/diff.output.md +++ b/unison-src/transcripts/diff.output.md @@ -65,8 +65,24 @@ fslkdjflskdjflksjdf = 663 Name changes: Original Changes - 4. fslkdjflskdjflksjdf#4kipsv2tm6 ┐ 5. abc (added) - 6. x ┘ 7. fslkdjflskdjflksjdf (added) + 4. x ┐ 5. abc (added) + 6. fslkdjflskdjflksjdf#4kipsv2tm6 ┘ 7. fslkdjflskdjflksjdf (added) + 8. fslkdjflskdjflksjdf#4kipsv2tm6 (removed) + +.b2> diff.namespace .b1 + + Resolved name conflicts: + + 1. ┌ fslkdjflskdjflksjdf#4kipsv2tm6 : Nat + 2. └ fslkdjflskdjflksjdf#s5tu4n7rlb : Nat + ↓ + 3. fslkdjflskdjflksjdf#4kipsv2tm6 : Nat + + Name changes: + + Original Changes + 4. x ┐ 5. 
abc (added) + 6. fslkdjflskdjflksjdf#4kipsv2tm6 ┘ 7. fslkdjflskdjflksjdf (added) 8. fslkdjflskdjflksjdf#4kipsv2tm6 (removed) ``` diff --git a/unison-src/transcripts/emptyCodebase.output.md b/unison-src/transcripts/emptyCodebase.output.md index 5b08e5dd4f..27574aefb3 100644 --- a/unison-src/transcripts/emptyCodebase.output.md +++ b/unison-src/transcripts/emptyCodebase.output.md @@ -23,7 +23,7 @@ Technically, the definitions all exist, but they have no names. `builtins.merge` .foo> ls - 1. builtin/ (339 definitions) + 1. builtin/ (360 definitions) ``` And for a limited time, you can get even more builtin goodies: @@ -35,7 +35,7 @@ And for a limited time, you can get even more builtin goodies: .foo> ls - 1. builtin/ (507 definitions) + 1. builtin/ (528 definitions) ``` More typically, you'd start out by pulling `base. diff --git a/unison-src/transcripts/fix1063.output.md b/unison-src/transcripts/fix1063.output.md index 8c4a03a917..38065343b1 100644 --- a/unison-src/transcripts/fix1063.output.md +++ b/unison-src/transcripts/fix1063.output.md @@ -14,8 +14,8 @@ noop = not . not ⍟ These new definitions are ok to `add`: - . : ∀ o g i1 i. - (i1 ->{g} o) -> (i ->{g} i1) -> i ->{g} o + . : ∀ o g1 i1 g i. + (i1 ->{g1} o) -> (i ->{g} i1) -> i ->{g1, g} o noop : Boolean -> Boolean ``` @@ -24,7 +24,8 @@ noop = not . not ⍟ I've added these definitions: - . : ∀ o g i1 i. (i1 ->{g} o) -> (i ->{g} i1) -> i ->{g} o + . : ∀ o g1 i1 g i. 
+ (i1 ->{g1} o) -> (i ->{g} i1) -> i ->{g1, g} o noop : Boolean -> Boolean .> view noop diff --git a/unison-src/transcripts/fix1334.md b/unison-src/transcripts/fix1334.md index 4f01d6adc8..6de7566843 100644 --- a/unison-src/transcripts/fix1334.md +++ b/unison-src/transcripts/fix1334.md @@ -2,6 +2,8 @@ Previously, the `alias.term` and `alias.type` would fail if the source argument With this PR, the source of an alias can be a short hash (even of a definition that doesn't currently have a name in the namespace) along with a name or hash-qualified name from the current namespace as usual, and the arguments to `replace.term` and `replace.type` can be a short hash, a name, or a hash-qualified name. +Note: `replace.term` and `replace.type` have since been replaced with just `replace`. + Let's make some hash-only aliases, now that we can. :mad-with-power-emoji: ```ucm @@ -22,10 +24,10 @@ h = f + 1 .> add ``` -We used to have to know the full hash for a definition to be able to use the `replace.*` commands, but now we don't: +We used to have to know the full hash for a definition to be able to use the `replace` commands, but now we don't: ```ucm .> names g -.> replace.term f g +.> replace f g .> names g .> view.patch ``` diff --git a/unison-src/transcripts/fix1334.output.md b/unison-src/transcripts/fix1334.output.md index f846f2acbf..9fdc30def6 100644 --- a/unison-src/transcripts/fix1334.output.md +++ b/unison-src/transcripts/fix1334.output.md @@ -2,6 +2,8 @@ Previously, the `alias.term` and `alias.type` would fail if the source argument With this PR, the source of an alias can be a short hash (even of a definition that doesn't currently have a name in the namespace) along with a name or hash-qualified name from the current namespace as usual, and the arguments to `replace.term` and `replace.type` can be a short hash, a name, or a hash-qualified name. +Note: `replace.term` and `replace.type` have since been replaced with just `replace`. 
+ Let's make some hash-only aliases, now that we can. :mad-with-power-emoji: ```ucm @@ -53,7 +55,7 @@ h = f + 1 h : Cat ``` -We used to have to know the full hash for a definition to be able to use the `replace.*` commands, but now we don't: +We used to have to know the full hash for a definition to be able to use the `replace` commands, but now we don't: ```ucm .> names g @@ -61,7 +63,7 @@ We used to have to know the full hash for a definition to be able to use the `re Hash: #52addbrohu Names: g -.> replace.term f g +.> replace f g Done. diff --git a/unison-src/transcripts/fix1390.output.md b/unison-src/transcripts/fix1390.output.md index dc768dfdec..d1d852a1bf 100644 --- a/unison-src/transcripts/fix1390.output.md +++ b/unison-src/transcripts/fix1390.output.md @@ -22,7 +22,7 @@ List.map f = ⍟ These new definitions are ok to `add`: - List.map : (i ->{g} o) ->{g} [i] ->{g} [o] + List.map : (i ->{g} o) -> [i] ->{g} [o] ``` ```ucm @@ -30,11 +30,11 @@ List.map f = ⍟ I've added these definitions: - List.map : (i ->{g} o) ->{g} [i] ->{g} [o] + List.map : (i ->{g} o) -> [i] ->{g} [o] .> view List.map - List.map : (i ->{g} o) ->{g} [i] ->{g} [o] + List.map : (i ->{g} o) -> [i] ->{g} [o] List.map f = go acc = cases [] -> acc @@ -60,6 +60,6 @@ List.map2 f = ⍟ These new definitions are ok to `add`: - List.map2 : (g ->{h} g2) ->{h} [g] ->{h} [g2] + List.map2 : (g ->{h} g2) -> [g] ->{h} [g2] ``` diff --git a/unison-src/transcripts/fix1696.output.md b/unison-src/transcripts/fix1696.output.md index 8e28b3d1ac..d930247073 100644 --- a/unison-src/transcripts/fix1696.output.md +++ b/unison-src/transcripts/fix1696.output.md @@ -19,7 +19,7 @@ dialog = Ask.provide 'zoot '("Awesome number: " ++ Nat.toText Ask.ask ++ "!") ```ucm - The expression in red needs these abilities: {Zoot, 𝕖46}, but this location does not have access to any abilities. + The expression in red needs the {Zoot} ability, but this location does not have access to any abilities. 
13 | dialog = Ask.provide 'zoot '("Awesome number: " ++ Nat.toText Ask.ask ++ "!") diff --git a/unison-src/transcripts/fix1800.output.md b/unison-src/transcripts/fix1800.output.md index 5fb2ddfd0f..eb69a2fe98 100644 --- a/unison-src/transcripts/fix1800.output.md +++ b/unison-src/transcripts/fix1800.output.md @@ -83,7 +83,7 @@ This shouldn't work since `main4` and `main5` don't have the right type. but in order for me to `run` it it needs to have the type: - main4 : '{IO} () + main4 : '{IO, Exception} () ``` ```ucm @@ -97,6 +97,6 @@ This shouldn't work since `main4` and `main5` don't have the right type. but in order for me to `run` it it needs to have the type: - main5 : '{IO} () + main5 : '{IO, Exception} () ``` diff --git a/unison-src/transcripts/fix2004.output.md b/unison-src/transcripts/fix2004.output.md index d891ca1aaa..ea340bbed2 100644 --- a/unison-src/transcripts/fix2004.output.md +++ b/unison-src/transcripts/fix2004.output.md @@ -164,10 +164,10 @@ Meanwhile, `a2` adds some other unrelated terms, some via merging in namespaces .a2> find - 1. builtin type Delete4 - 2. delete1 : Delete4 -> Delete4 -> Delete4 - 3. delete2 : Delete4 -> Delete4 -> Delete4 - 4. delete3 : Delete4 -> Delete4 -> Delete4 + 1. delete1 : Delete4 -> Delete4 -> Delete4 + 2. delete2 : Delete4 -> Delete4 -> Delete4 + 3. delete3 : Delete4 -> Delete4 -> Delete4 + 4. builtin type Delete4 5. keep1 : Delete4 -> Text -> Text 6. keep2 : Delete4 -> Text -> Text 7. 
keep3 : Delete4 -> Text -> Text diff --git a/unison-src/transcripts/fix2026.md b/unison-src/transcripts/fix2026.md index b1470b0409..56ddc81674 100644 --- a/unison-src/transcripts/fix2026.md +++ b/unison-src/transcripts/fix2026.md @@ -15,7 +15,7 @@ printLine t = stdOut : Handle stdOut = stdHandle StdOut -compose2 : (c ->{𝕖} d) -> (a ->{𝕖} b ->{𝕖} c) -> a -> b ->{𝕖} d +compose2 : (c ->{𝕖1} d) -> (a ->{𝕖2} b ->{𝕖3} c) -> a -> b ->{𝕖1,𝕖2,𝕖3} d compose2 f g x y = f (g x y) putBytes : Handle -> Bytes ->{IO, Exception} () diff --git a/unison-src/transcripts/fix2026.output.md b/unison-src/transcripts/fix2026.output.md index 3d853a903e..e28df2509d 100644 --- a/unison-src/transcripts/fix2026.output.md +++ b/unison-src/transcripts/fix2026.output.md @@ -11,7 +11,7 @@ printLine t = stdOut : Handle stdOut = stdHandle StdOut -compose2 : (c ->{𝕖} d) -> (a ->{𝕖} b ->{𝕖} c) -> a -> b ->{𝕖} d +compose2 : (c ->{𝕖1} d) -> (a ->{𝕖2} b ->{𝕖3} c) -> a -> b ->{𝕖1,𝕖2,𝕖3} d compose2 f g x y = f (g x y) putBytes : Handle -> Bytes ->{IO, Exception} () @@ -44,12 +44,13 @@ Exception.unsafeRun! e _ = ⍟ These new definitions are ok to `add`: ability Exception + (also named builtin.Exception) Exception.unsafeRun! 
: '{g, Exception} a -> '{g} a - compose2 : (c ->{𝕖} d) - -> (a ->{𝕖} b ->{𝕖} c) + compose2 : (c ->{𝕖1} d) + -> (a ->{𝕖2} b ->{𝕖3} c) -> a -> b - ->{𝕖} d + ->{𝕖1, 𝕖2, 𝕖3} d ex : '{IO} () printLine : Text ->{IO, Exception} () putBytes : Handle diff --git a/unison-src/transcripts/fix2053.md b/unison-src/transcripts/fix2053.md new file mode 100644 index 0000000000..7309d80b62 --- /dev/null +++ b/unison-src/transcripts/fix2053.md @@ -0,0 +1,7 @@ +```ucm:hide +.> builtins.mergeio +``` + +```ucm +.> display List.map +``` \ No newline at end of file diff --git a/unison-src/transcripts/fix2053.output.md b/unison-src/transcripts/fix2053.output.md new file mode 100644 index 0000000000..60fe87aa41 --- /dev/null +++ b/unison-src/transcripts/fix2053.output.md @@ -0,0 +1,13 @@ +```ucm +.> display List.map + + go f i as acc = + _pattern = List.at i as + match _pattern with + None -> acc + Some _pattern1 -> + use Nat + + go f (i + 1) as (acc :+ f _pattern) + f a -> go f 0 a [] + +``` diff --git a/unison-src/transcripts/fix2167.md b/unison-src/transcripts/fix2167.md new file mode 100644 index 0000000000..cb5a64f302 --- /dev/null +++ b/unison-src/transcripts/fix2167.md @@ -0,0 +1,28 @@ +```ucm:hide +.> builtins.merge +``` + +This is just a simple transcript to regression check an ability +inference/checking issue. + +```unison +ability R t where + die : () -> x + near.impl : Nat -> Either () [Nat] + +R.near n = match near.impl n with + Left e -> die () + Right a -> a + +R.near1 region loc = match R.near 42 with + [loc] -> loc + ls -> R.die () +``` + +The issue was that abilities with parameters like this were sometimes +causing failures like this because the variable in the parameter would +escape to a scope where it no longer made sense. Then solving would +fail because the type was invalid. + +The fix was to avoid dropping certain existential variables out of +scope. 
diff --git a/unison-src/transcripts/fix2167.output.md b/unison-src/transcripts/fix2167.output.md new file mode 100644 index 0000000000..4a6f3de654 --- /dev/null +++ b/unison-src/transcripts/fix2167.output.md @@ -0,0 +1,37 @@ +This is just a simple transcript to regression check an ability +inference/checking issue. + +```unison +ability R t where + die : () -> x + near.impl : Nat -> Either () [Nat] + +R.near n = match near.impl n with + Left e -> die () + Right a -> a + +R.near1 region loc = match R.near 42 with + [loc] -> loc + ls -> R.die () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + ability R t + R.near : Nat ->{R t} [Nat] + R.near1 : region -> loc ->{R t} Nat + +``` +The issue was that abilities with parameters like this were sometimes +causing failures like this because the variable in the parameter would +escape to a scope where it no longer made sense. Then solving would +fail because the type was invalid. + +The fix was to avoid dropping certain existential variables out of +scope. 
diff --git a/unison-src/transcripts/fix2187.md b/unison-src/transcripts/fix2187.md new file mode 100644 index 0000000000..5468b2a247 --- /dev/null +++ b/unison-src/transcripts/fix2187.md @@ -0,0 +1,19 @@ +```ucm:hide +.> builtins.mergeio +``` + +```unison + +lexicalScopeEx: [Text] +lexicalScopeEx = + parent = "outer" + inner1 = let + child1 = "child1" + inner2 : [Text] + inner2 = let + child2 = "child2" + [parent, child1, child2] + inner2 + inner1 + +``` \ No newline at end of file diff --git a/unison-src/transcripts/fix2187.output.md b/unison-src/transcripts/fix2187.output.md new file mode 100644 index 0000000000..e314ac66e2 --- /dev/null +++ b/unison-src/transcripts/fix2187.output.md @@ -0,0 +1,26 @@ +```unison +lexicalScopeEx: [Text] +lexicalScopeEx = + parent = "outer" + inner1 = let + child1 = "child1" + inner2 : [Text] + inner2 = let + child2 = "child2" + [parent, child1, child2] + inner2 + inner1 + +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + lexicalScopeEx : [Text] + +``` diff --git a/unison-src/transcripts/fix2231.md b/unison-src/transcripts/fix2231.md new file mode 100644 index 0000000000..2d217e1db7 --- /dev/null +++ b/unison-src/transcripts/fix2231.md @@ -0,0 +1,29 @@ +This transcript contains some cases that were problematic with the new +type checker. They were likely not discovered earlier because they +involve combining types inferred with the older strategy with the new +inference algorithm. Some code can be given multiple possible types, +and while they are all valid and some may be equivalently general, +the choices may not work equally well with the type checking +strategies. + +```ucm:hide +.> builtins.merge +``` + +```unison +(.) : (b ->{e} c) -> (a ->{e} b) -> a ->{e} c +(.) f g x = f (g x) + +f = atan . 
tan + +foldl : (b ->{e} a ->{e} b) -> b -> [a] ->{e} b +foldl f a = cases + [] -> a + x +: xs -> foldl f (f a x) xs + +txt = foldl (Text.++) "" ["a", "b", "c"] +``` + +```ucm +.> add +``` diff --git a/unison-src/transcripts/fix2231.output.md b/unison-src/transcripts/fix2231.output.md new file mode 100644 index 0000000000..abdd3d22aa --- /dev/null +++ b/unison-src/transcripts/fix2231.output.md @@ -0,0 +1,47 @@ +This transcript contains some cases that were problematic with the new +type checker. They were likely not discovered earlier because they +involve combining types inferred with the older strategy with the new +inference algorithm. Some code can be given multiple possible types, +and while they are all valid and some may be equivalently general, +the choices may not work equally well with the type checking +strategies. + +```unison +(.) : (b ->{e} c) -> (a ->{e} b) -> a ->{e} c +(.) f g x = f (g x) + +f = atan . tan + +foldl : (b ->{e} a ->{e} b) -> b -> [a] ->{e} b +foldl f a = cases + [] -> a + x +: xs -> foldl f (f a x) xs + +txt = foldl (Text.++) "" ["a", "b", "c"] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + . : (b ->{e} c) -> (a ->{e} b) -> a ->{e} c + f : Float -> Float + foldl : (b ->{e} a ->{e} b) -> b -> [a] ->{e} b + txt : Text + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + . : (b ->{e} c) -> (a ->{e} b) -> a ->{e} c + f : Float -> Float + foldl : (b ->{e} a ->{e} b) -> b -> [a] ->{e} b + txt : Text + +``` diff --git a/unison-src/transcripts/fix2238.md b/unison-src/transcripts/fix2238.md new file mode 100644 index 0000000000..eaacb39b43 --- /dev/null +++ b/unison-src/transcripts/fix2238.md @@ -0,0 +1,18 @@ + +```ucm:hide +.> builtins.mergeio +``` + +This should not typecheck - the inline `@eval` expression uses abilities. 
+ +```unison:error +ability Abort where abort : x + +ex = {{ @eval{abort} }} +``` + +This file should also not typecheck - it has a triple backticks block that uses abilities. + +```ucm:error +.> load unison-src/transcripts/fix2238.u +``` diff --git a/unison-src/transcripts/fix2238.output.md b/unison-src/transcripts/fix2238.output.md new file mode 100644 index 0000000000..595118463b --- /dev/null +++ b/unison-src/transcripts/fix2238.output.md @@ -0,0 +1,28 @@ + +This should not typecheck - the inline `@eval` expression uses abilities. + +```unison +ability Abort where abort : x + +ex = {{ @eval{abort} }} +``` + +```ucm + + The expression in red needs the {Abort} ability, but this location does not have access to any abilities. + + 3 | ex = {{ @eval{abort} }} + + +``` +This file should also not typecheck - it has a triple backticks block that uses abilities. + +```ucm +.> load unison-src/transcripts/fix2238.u + + The expression in red needs the {Abort} ability, but this location does not have access to any abilities. + + 7 | abort + 1 + + +``` diff --git a/unison-src/transcripts/fix2238.u b/unison-src/transcripts/fix2238.u new file mode 100644 index 0000000000..01fcf7cc38 --- /dev/null +++ b/unison-src/transcripts/fix2238.u @@ -0,0 +1,9 @@ + +ability Abort where abort : x + +ex = {{ + +``` +abort + 1 +``` +}} diff --git a/unison-src/transcripts/fix2254.md b/unison-src/transcripts/fix2254.md new file mode 100644 index 0000000000..7e2559f375 --- /dev/null +++ b/unison-src/transcripts/fix2254.md @@ -0,0 +1,88 @@ + +```ucm:hide +.> builtins.merge +``` + +This transcript checks that updates to data types propagate successfully to dependent types and dependent terms that do pattern matching. 
First let's create some types and terms: + +```unison:hide +unique type A a b c d + = A a + | B b + | C c + | D d + +type NeedsA a b = NeedsA (A a b Nat Nat) + | Zoink Text + +f : A Nat Nat Nat Nat -> Nat +f = cases + A n -> n + _ -> 42 + +f2 a = + n = f a + n + 1 + +f3 : NeedsA Nat Nat -> Nat +f3 = cases + NeedsA a -> f a + 20 + _ -> 0 + +g : A Nat Nat Nat Nat -> Nat +g = cases + D n -> n + _ -> 43 +``` + +We'll make our edits in a fork of the `a` namespace: + +```ucm +.a> add +.> fork a a2 +``` + +First let's edit the `A` type, adding another constructor `E`. Note that the functions written against the old type have a wildcard in their pattern match, so they should work fine after the update. + +```unison:hide +unique type A a b c d + = A a + | B b + | C c + | D d + | E a d +``` + +Let's do the update now, and verify that the definitions all look good and there's nothing `todo`: + +```ucm +.a2> update +.a2> view A NeedsA f f2 f3 g +.a2> todo +``` + +## Record updates + +Here's a test of updating a record: + +```unison +type Rec = { uno : Nat, dos : Nat } + +combine r = uno r + dos r +``` + +```ucm +.a3> add +``` + +```unison +type Rec = { uno : Nat, dos : Nat, tres : Text } +``` + +And checking that after updating this record, there's nothing `todo`: + +```ucm +.> fork a3 a4 +.a4> update +.a4> todo +``` diff --git a/unison-src/transcripts/fix2254.output.md b/unison-src/transcripts/fix2254.output.md new file mode 100644 index 0000000000..3f53636f61 --- /dev/null +++ b/unison-src/transcripts/fix2254.output.md @@ -0,0 +1,215 @@ + +This transcript checks that updates to data types propagate successfully to dependent types and dependent terms that do pattern matching. 
First let's create some types and terms: + +```unison +unique type A a b c d + = A a + | B b + | C c + | D d + +type NeedsA a b = NeedsA (A a b Nat Nat) + | Zoink Text + +f : A Nat Nat Nat Nat -> Nat +f = cases + A n -> n + _ -> 42 + +f2 a = + n = f a + n + 1 + +f3 : NeedsA Nat Nat -> Nat +f3 = cases + NeedsA a -> f a + 20 + _ -> 0 + +g : A Nat Nat Nat Nat -> Nat +g = cases + D n -> n + _ -> 43 +``` + +We'll make our edits in a fork of the `a` namespace: + +```ucm + ☝️ The namespace .a is empty. + +.a> add + + ⍟ I've added these definitions: + + unique type A a b c d + type NeedsA a b + f : A Nat Nat Nat Nat -> Nat + f2 : A Nat Nat Nat Nat -> Nat + f3 : NeedsA Nat Nat -> Nat + g : A Nat Nat Nat Nat -> Nat + +.> fork a a2 + + Done. + +``` +First let's edit the `A` type, adding another constructor `E`. Note that the functions written against the old type have a wildcard in their pattern match, so they should work fine after the update. + +```unison +unique type A a b c d + = A a + | B b + | C c + | D d + | E a d +``` + +Let's do the update now, and verify that the definitions all look good and there's nothing `todo`: + +```ucm +.a2> update + + ⍟ I've updated these names to your new definition: + + unique type A a b c d + +.a2> view A NeedsA f f2 f3 g + + unique type A a b c d = E a d | C c | A a | B b | D d + + type NeedsA a b = Zoink Text | NeedsA (A a b Nat Nat) + + f : A Nat Nat Nat Nat -> Nat + f = cases + A.A n -> n + _ -> 42 + + f2 : A Nat Nat Nat Nat -> Nat + f2 a = + use Nat + + n = f a + n + 1 + + f3 : NeedsA Nat Nat -> Nat + f3 = cases + NeedsA.NeedsA a -> + use Nat + + f a + 20 + _ -> 0 + + g : A Nat Nat Nat Nat -> Nat + g = cases + A.D n -> n + _ -> 43 + +.a2> todo + + ✅ + + No conflicts or edits in progress. + +``` +## Record updates + +Here's a test of updating a record: + +```unison +type Rec = { uno : Nat, dos : Nat } + +combine r = uno r + dos r +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + type Rec + Rec.dos : Rec -> Nat + Rec.dos.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.dos.set : Nat -> Rec -> Rec + Rec.uno : Rec -> Nat + Rec.uno.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.uno.set : Nat -> Rec -> Rec + combine : Rec -> Nat + +``` +```ucm + ☝️ The namespace .a3 is empty. + +.a3> add + + ⍟ I've added these definitions: + + type Rec + Rec.dos : Rec -> Nat + Rec.dos.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.dos.set : Nat -> Rec -> Rec + Rec.uno : Rec -> Nat + Rec.uno.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.uno.set : Nat -> Rec -> Rec + combine : Rec -> Nat + +``` +```unison +type Rec = { uno : Nat, dos : Nat, tres : Text } +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Rec.tres : Rec -> Text + Rec.tres.modify : (Text ->{g} Text) -> Rec ->{g} Rec + Rec.tres.set : Text -> Rec -> Rec + + ⍟ These names already exist. You can `update` them to your + new definition: + + type Rec + Rec.dos : Rec -> Nat + Rec.dos.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.dos.set : Nat -> Rec -> Rec + Rec.uno : Rec -> Nat + Rec.uno.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.uno.set : Nat -> Rec -> Rec + +``` +And checking that after updating this record, there's nothing `todo`: + +```ucm +.> fork a3 a4 + + Done. 
+ +.a4> update + + ⍟ I've added these definitions: + + Rec.tres : Rec -> Text + Rec.tres.modify : (Text ->{g} Text) -> Rec ->{g} Rec + Rec.tres.set : Text -> Rec -> Rec + + ⍟ I've updated these names to your new definition: + + type Rec + Rec.dos : Rec -> Nat + Rec.dos.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.dos.set : Nat -> Rec -> Rec + Rec.uno : Rec -> Nat + Rec.uno.modify : (Nat ->{g} Nat) -> Rec ->{g} Rec + Rec.uno.set : Nat -> Rec -> Rec + +.a4> todo + + ✅ + + No conflicts or edits in progress. + +``` diff --git a/unison-src/transcripts/fix2268.md b/unison-src/transcripts/fix2268.md new file mode 100644 index 0000000000..5886fd6f5f --- /dev/null +++ b/unison-src/transcripts/fix2268.md @@ -0,0 +1,20 @@ +Tests for a TDNR case that wasn't working. The code wasn't 'relaxing' +inferred types that didn't contain arrows, so effects that just yield +a value weren't getting disambiguated. + +```ucm:hide +.> builtins.merge +``` + +```unison +unique ability A where + a : Nat + +unique ability B where + a : Char + +test : 'Nat +test _ = + x = a + toNat x +``` diff --git a/unison-src/transcripts/fix2268.output.md b/unison-src/transcripts/fix2268.output.md new file mode 100644 index 0000000000..7e48d76214 --- /dev/null +++ b/unison-src/transcripts/fix2268.output.md @@ -0,0 +1,30 @@ +Tests for a TDNR case that wasn't working. The code wasn't 'relaxing' +inferred types that didn't contain arrows, so effects that just yield +a value weren't getting disambiguated. + +```unison +unique ability A where + a : Nat + +unique ability B where + a : Char + +test : 'Nat +test _ = + x = a + toNat x +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique ability A + unique ability B + test : '{B} Nat + +``` diff --git a/unison-src/transcripts/fix2334.md b/unison-src/transcripts/fix2334.md new file mode 100644 index 0000000000..efddd5c521 --- /dev/null +++ b/unison-src/transcripts/fix2334.md @@ -0,0 +1,20 @@ + +Tests an issue where pattern matching matrices involving built-in +types was discarding default cases in some branches. + +```ucm:hide +.> builtins.mergeio +``` + +```unison +f = cases + 0, 0 -> 0 + _, 1 -> 2 + 1, _ -> 3 + _, _ -> 1 + +> f 0 0 +> f 1 0 +> f 0 1 +> f 1 1 +``` diff --git a/unison-src/transcripts/fix2334.output.md b/unison-src/transcripts/fix2334.output.md new file mode 100644 index 0000000000..1c59faa2de --- /dev/null +++ b/unison-src/transcripts/fix2334.output.md @@ -0,0 +1,47 @@ + +Tests an issue where pattern matching matrices involving built-in +types was discarding default cases in some branches. + +```unison +f = cases + 0, 0 -> 0 + _, 1 -> 2 + 1, _ -> 3 + _, _ -> 1 + +> f 0 0 +> f 1 0 +> f 0 1 +> f 1 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + f : Nat -> Nat -> Nat + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 7 | > f 0 0 + ⧩ + 0 + + 8 | > f 1 0 + ⧩ + 3 + + 9 | > f 0 1 + ⧩ + 2 + + 10 | > f 1 1 + ⧩ + 2 + +``` diff --git a/unison-src/transcripts/io.md b/unison-src/transcripts/io.md index 7e2e2a531c..94b2169310 100644 --- a/unison-src/transcripts/io.md +++ b/unison-src/transcripts/io.md @@ -17,15 +17,10 @@ You can skip the section which is just needed to make the transcript self-contai TempDirs/autoCleaned is an ability/hanlder which allows you to easily create a scratch directory which will automatically get cleaned up. 
-```unison:hide - -``` - ```ucm:hide .> add ``` - ## Basic File Functions ### Creating/Deleting/Renaming Directories @@ -60,6 +55,7 @@ testCreateRename _ = runTest test ``` + ```ucm .> add .> io.test testCreateRename @@ -108,6 +104,7 @@ testOpenClose _ = runTest test ``` + ```ucm .> add .> io.test testOpenClose @@ -122,6 +119,7 @@ Tests: openFile isFileEOF seekHandle getBytes + getLine ```unison testSeek : '{io2.IO} [Result] @@ -146,6 +144,15 @@ testSeek _ = expectU "should be able to read our temporary file after seeking" "2345678" text3a closeFile handle3 + barFile = tempDir ++ "/bar" + handle4 = openFile barFile FileMode.Append + putBytes handle4 (toUtf8 "foobar\n") + closeFile handle4 + + handle5 = openFile barFile FileMode.Read + expectU "getLine should get a line" "foobar" (getLine handle5) + closeFile handle5 + runTest test testAppend : '{io2.IO} [Result] @@ -171,6 +178,7 @@ testAppend _ = runTest test ``` + ```ucm .> add .> io.test testSeek @@ -187,7 +195,45 @@ testSystemTime _ = runTest test ``` + ```ucm .> add .> io.test testSystemTime ``` + +### Get directory contents + +```unison:hide +testDirContents : '{io2.IO} [Result] +testDirContents _ = + test = 'let + tempDir = newTempDir "dircontents" + c = reraise (directoryContents.impl tempDir) + check "directory size should be" (size c == 2) + check "directory contents should have current directory and parent" let + (c == [".", ".."]) || (c == ["..", "."]) + runTest test +``` + +```ucm +.> add +.> io.test testDirContents +``` + +### Read environment variables + +```unison:hide +testHomeEnvVar : '{io2.IO} [Result] +testHomeEnvVar _ = + test = 'let + home = reraise (getEnv.impl "HOME") + check "HOME environent variable should be set" (size home > 0) + match getEnv.impl "DOESNTEXIST" with + Right _ -> emit (Fail "env var shouldn't exist") + Left _ -> emit (Ok "DOESNTEXIST didn't exist") + runTest test +``` +```ucm +.> add +.> io.test testHomeEnvVar +``` diff --git a/unison-src/transcripts/io.output.md 
b/unison-src/transcripts/io.output.md index a8cb884d82..fb7bac6b7d 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -9,9 +9,6 @@ You can skip the section which is just needed to make the transcript self-contai TempDirs/autoCleaned is an ability/hanlder which allows you to easily create a scratch directory which will automatically get cleaned up. -```unison -``` - ## Basic File Functions ### Creating/Deleting/Renaming Directories @@ -168,6 +165,7 @@ Tests: openFile isFileEOF seekHandle getBytes + getLine ```unison testSeek : '{io2.IO} [Result] @@ -192,6 +190,15 @@ testSeek _ = expectU "should be able to read our temporary file after seeking" "2345678" text3a closeFile handle3 + barFile = tempDir ++ "/bar" + handle4 = openFile barFile FileMode.Append + putBytes handle4 (toUtf8 "foobar\n") + closeFile handle4 + + handle5 = openFile barFile FileMode.Read + expectU "getLine should get a line" "foobar" (getLine handle5) + closeFile handle5 + runTest test testAppend : '{io2.IO} [Result] @@ -248,8 +255,9 @@ testAppend _ = ◉ testSeek we should be at position 0 ◉ testSeek we should be at position 1 ◉ testSeek should be able to read our temporary file after seeking + ◉ testSeek getLine should get a line - ✅ 6 test(s) passing + ✅ 7 test(s) passing Tip: Use view testSeek to view the source of a test. @@ -304,3 +312,69 @@ testSystemTime _ = Tip: Use view testSystemTime to view the source of a test. 
``` +### Get directory contents + +```unison +testDirContents : '{io2.IO} [Result] +testDirContents _ = + test = 'let + tempDir = newTempDir "dircontents" + c = reraise (directoryContents.impl tempDir) + check "directory size should be" (size c == 2) + check "directory contents should have current directory and parent" let + (c == [".", ".."]) || (c == ["..", "."]) + runTest test +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + testDirContents : '{IO} [Result] + +.> io.test testDirContents + + New test results: + + ◉ testDirContents directory size should be + ◉ testDirContents directory contents should have current directory and parent + + ✅ 2 test(s) passing + + Tip: Use view testDirContents to view the source of a test. + +``` +### Read environment variables + +```unison +testHomeEnvVar : '{io2.IO} [Result] +testHomeEnvVar _ = + test = 'let + home = reraise (getEnv.impl "HOME") + check "HOME environent variable should be set" (size home > 0) + match getEnv.impl "DOESNTEXIST" with + Right _ -> emit (Fail "env var shouldn't exist") + Left _ -> emit (Ok "DOESNTEXIST didn't exist") + runTest test +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + testHomeEnvVar : '{IO} [Result] + +.> io.test testHomeEnvVar + + New test results: + + ◉ testHomeEnvVar HOME environent variable should be set + ◉ testHomeEnvVar DOESNTEXIST didn't exist + + ✅ 2 test(s) passing + + Tip: Use view testHomeEnvVar to view the source of a test. 
+ +``` diff --git a/unison-src/transcripts/lambdacase.md b/unison-src/transcripts/lambdacase.md index 65b36bea13..df546e7289 100644 --- a/unison-src/transcripts/lambdacase.md +++ b/unison-src/transcripts/lambdacase.md @@ -87,3 +87,32 @@ blorf = cases > blah F F > blorf T F ``` + +## Patterns with multiple guards + +```unison +merge3 : [a] -> [a] -> [a] +merge3 = cases + [], ys -> ys + xs, [] -> xs + h +: t, h2 +: t2 | h <= h2 -> h +: merge3 t (h2 +: t2) + | otherwise -> h2 +: merge3 (h +: t) t2 +``` + +```ucm +.> add +.> view merge3 +``` + +This is the same definition written with multiple patterns and not using the `cases` syntax; notice it is considered an alias of `merge3` above. + +```unison +merge4 : [a] -> [a] -> [a] +merge4 a b = match (a,b) with + [], ys -> ys + xs, [] -> xs + h +: t, h2 +: t2 | h <= h2 -> h +: merge4 t (h2 +: t2) + h +: t, h2 +: t2 | otherwise -> h2 +: merge4 (h +: t) t2 +``` + + diff --git a/unison-src/transcripts/lambdacase.output.md b/unison-src/transcripts/lambdacase.output.md index f56dd9bab3..79727baed4 100644 --- a/unison-src/transcripts/lambdacase.output.md +++ b/unison-src/transcripts/lambdacase.output.md @@ -71,7 +71,7 @@ merge xs ys = match (xs, ys) with ⍟ I've added these definitions: - merge : [a] ->{g} [a] ->{g} [a] + merge : [a] -> [a] -> [a] ``` And here's a version using `cases`. The patterns are separated by commas: @@ -94,7 +94,7 @@ merge2 = cases ⍟ These new definitions are ok to `add`: - merge2 : [a] ->{g} [a] ->{g} [a] + merge2 : [a] -> [a] -> [a] (also named merge) ``` @@ -160,3 +160,66 @@ blorf = cases F ``` +## Patterns with multiple guards + +```unison +merge3 : [a] -> [a] -> [a] +merge3 = cases + [], ys -> ys + xs, [] -> xs + h +: t, h2 +: t2 | h <= h2 -> h +: merge3 t (h2 +: t2) + | otherwise -> h2 +: merge3 (h +: t) t2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + merge3 : [a] -> [a] -> [a] + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + merge3 : [a] -> [a] -> [a] + +.> view merge3 + + merge3 : [a] -> [a] -> [a] + merge3 = cases + [], ys -> ys + xs, [] -> xs + h +: t, h2 +: t2 + | h <= h2 -> h +: merge3 t (h2 +: t2) + | otherwise -> h2 +: merge3 (h +: t) t2 + +``` +This is the same definition written with multiple patterns and not using the `cases` syntax; notice it is considered an alias of `merge3` above. + +```unison +merge4 : [a] -> [a] -> [a] +merge4 a b = match (a,b) with + [], ys -> ys + xs, [] -> xs + h +: t, h2 +: t2 | h <= h2 -> h +: merge4 t (h2 +: t2) + h +: t, h2 +: t2 | otherwise -> h2 +: merge4 (h +: t) t2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + merge4 : [a] -> [a] -> [a] + (also named merge3) + +``` diff --git a/unison-src/transcripts/merge.md b/unison-src/transcripts/merge.md index 51633493b5..62ef1b4213 100644 --- a/unison-src/transcripts/merge.md +++ b/unison-src/transcripts/merge.md @@ -99,3 +99,51 @@ These test that things we expect to be deleted are still deleted. ```ucm:error .> view P0.quux.x ``` + +### Corner cases + +We're going to now do two concurrent edits with an update on one side to make sure 3-way merge behaves as expected. + +Here's the starting namespace, which will be the LCA. + +```unison:hide +a = 1 + +f = (x y -> y) a "woot!" +``` + +```ucm +.c1> add +.> fork c1 c1a +.> fork c1 c1b +``` + +```unison:hide +oog.b = 230948 +oog.c = 339249 +``` + +In `c1a`, we add new definitions, `b` and `c`. + +```ucm +.c1a> add +``` + +In `c1b`, we update the definition `a`, which is used by `f`. + +```unison:hide +a = "hello world!" 
+``` + +```ucm +.c1b> update +``` + +Now merging `c1b` into `c1a` should result in the updated version of `a` and `f`, and the new definitions `b` and `c`: + +```ucm +.> merge c1b c1a +.c1a> todo .c1b.patch +.c1a> find +.c1a> view 1-4 +``` diff --git a/unison-src/transcripts/merge.output.md b/unison-src/transcripts/merge.output.md index ff64e189bb..9767d4ca2a 100644 --- a/unison-src/transcripts/merge.output.md +++ b/unison-src/transcripts/merge.output.md @@ -225,3 +225,117 @@ These test that things we expect to be deleted are still deleted. P0.quux.x ``` +### Corner cases + +We're going to now do two concurrent edits with an update on one side to make sure 3-way merge behaves as expected. + +Here's the starting namespace, which will be the LCA. + +```unison +a = 1 + +f = (x y -> y) a "woot!" +``` + +```ucm + ☝️ The namespace .c1 is empty. + +.c1> add + + ⍟ I've added these definitions: + + a : Nat + f : Text + +.> fork c1 c1a + + Done. + +.> fork c1 c1b + + Done. + +``` +```unison +oog.b = 230948 +oog.c = 339249 +``` + +In `c1a`, we add new definitions, `b` and `c`. + +```ucm +.c1a> add + + ⍟ I've added these definitions: + + oog.b : Nat + oog.c : Nat + +``` +In `c1b`, we update the definition `a`, which is used by `f`. + +```unison +a = "hello world!" +``` + +```ucm +.c1b> update + + ⍟ I've updated these names to your new definition: + + a : Text + +``` +Now merging `c1b` into `c1a` should result in the updated version of `a` and `f`, and the new definitions `b` and `c`: + +```ucm +.> merge c1b c1a + + Here's what's changed in c1a after the merge: + + Updates: + + 1. a : Nat + ↓ + 2. a : Text + + There were 1 auto-propagated updates. + + Added definitions: + + 3. patch patch (added 1 updates) + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +.c1a> todo .c1b.patch + + ✅ + + No conflicts or edits in progress. 
+ +.c1a> find + + 1. a : Text + 2. f : Text + 3. oog.b : Nat + 4. oog.c : Nat + + +.c1a> view 1-4 + + a : Text + a = "hello world!" + + f : Text + f = (x y -> y) a "woot!" + + oog.b : Nat + oog.b = 230948 + + oog.c : Nat + oog.c = 339249 + +``` diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index 8b51bd4a85..936e112548 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,13 +112,13 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #suu4oq8rrh + ⊙ #9bfm3siukb - Deletes: feature1.y - ⊙ #4i78gb0s9r + ⊙ #1mbl4b4t5g + Adds / updates: @@ -129,26 +129,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #qer1fcccts + ⊙ #fh9g1m1add + Adds / updates: feature1.y - ⊙ #548hfmj536 + ⊙ #ahlnio3eom > Moves: Original name New name x master.x - ⊙ #nm19spfvg0 + ⊙ #o2lqjr91e1 + Adds / updates: x - □ #f40nqgbsui (start of history) + □ #9ied0t98hk (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/name-selection.md b/unison-src/transcripts/name-selection.md new file mode 100644 index 0000000000..933a6b019b --- /dev/null +++ b/unison-src/transcripts/name-selection.md @@ -0,0 +1,56 @@ +This transcript shows how the pretty-printer picks names for a hash when multiple are available. The algorithm is: + +1. Names that are "name-only" come before names that are hash qualified. So `List.map` comes before `List.map#2384a` and also `aaaa#xyz`. +2. Shorter names (in terms of segment count) come before longer ones, for instance `base.List.map` comes before `somelibrary.external.base.List.map`. +3. 
Otherwise if there are multiple names with a minimal number of segments, compare the names alphabetically. + +```ucm:hide +.> alias.type ##Nat Nat +.> alias.term ##Nat.+ Nat.+ +``` + +```unison:hide +a = b + 1 +b = 0 + 1 +``` + +Will add `a` and `b` to the codebase and give `b` a longer (in terms of segment length alias), and show that it isn't used when viewing `a`: + +```ucm +.a> add +.a> alias.term b aaa.but.more.segments +.a> view a +``` + +Next let's introduce a conflicting symbol and show that its hash qualified name isn't used when it has an unconflicted name: + +``` +.> fork a a2 +.> fork a a3 +``` + +```unison:hide +c = 1 +d = c + 10 +``` + +```ucm +.a2> add +.a2> alias.term c aaaa.tooManySegments +``` + +```unison:hide +c = 2 +d = c + 10 +``` + +```ucm +.a3> add +.a3> merge .a2 .a3 +``` + +At this point, `a3` is conflicted for symbols `c` and `d`, but the original `a2` namespace has an unconflicted definition for `c` and `d`, so those are preferred. + +```ucm +.> view a b c d +``` diff --git a/unison-src/transcripts/name-selection.output.md b/unison-src/transcripts/name-selection.output.md new file mode 100644 index 0000000000..4cb92fe524 --- /dev/null +++ b/unison-src/transcripts/name-selection.output.md @@ -0,0 +1,127 @@ +This transcript shows how the pretty-printer picks names for a hash when multiple are available. The algorithm is: + +1. Names that are "name-only" come before names that are hash qualified. So `List.map` comes before `List.map#2384a` and also `aaaa#xyz`. +2. Shorter names (in terms of segment count) come before longer ones, for instance `base.List.map` comes before `somelibrary.external.base.List.map`. +3. Otherwise if there are multiple names with a minimal number of segments, compare the names alphabetically. + +```unison +a = b + 1 +b = 0 + 1 +``` + +Will add `a` and `b` to the codebase and give `b` a longer (in terms of segment length alias), and show that it isn't used when viewing `a`: + +```ucm + ☝️ The namespace .a is empty. 
+ +.a> add + + ⍟ I've added these definitions: + + a : Nat + b : Nat + +.a> alias.term b aaa.but.more.segments + + Done. + +.a> view a + + a : Nat + a = b + 1 + +``` +Next let's introduce a conflicting symbol and show that its hash qualified name isn't used when it has an unconflicted name: + +``` +.> fork a a2 +.> fork a a3 + +``` + +```unison +c = 1 +d = c + 10 +``` + +```ucm + ☝️ The namespace .a2 is empty. + +.a2> add + + ⍟ I've added these definitions: + + c : Nat + d : Nat + +.a2> alias.term c aaaa.tooManySegments + + Done. + +``` +```unison +c = 2 +d = c + 10 +``` + +```ucm + ☝️ The namespace .a3 is empty. + +.a3> add + + ⍟ I've added these definitions: + + c : Nat + d : Nat + +.a3> merge .a2 .a3 + + Here's what's changed in .a3 after the merge: + + New name conflicts: + + 1. c#0ja1qfpej6 : Nat + ↓ + 2. ┌ c#0ja1qfpej6 : Nat + 3. └ c#jk19sm5bf8 : Nat + + 4. d#gk1aqtfmh6 : Nat + ↓ + 5. ┌ d#gk1aqtfmh6 : Nat + 6. └ d#qk9ub6bngd : Nat + + Added definitions: + + 7. ┌ c#jk19sm5bf8 : Nat + 8. └ aaaa.tooManySegments : Nat + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +``` +At this point, `a3` is conflicted for symbols `c` and `d`, but the original `a2` namespace has an unconflicted definition for `c` and `d`, so those are preferred. + +```ucm +.> view a b c d + + a.a : Nat + a.a = b + 1 + + a.b : Nat + a.b = 0 + 1 + + a2.c : Nat + a2.c = 1 + + a2.d : Nat + a2.d = a2.c + 10 + + a3.c#0ja1qfpej6 : Nat + a3.c#0ja1qfpej6 = 2 + + a3.d#gk1aqtfmh6 : Nat + a3.d#gk1aqtfmh6 = c#0ja1qfpej6 + 10 + +``` diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index 40694a59ee..cfb916f635 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,16 +59,16 @@ y = 2 most recent, along with the command that got us there. 
Try: `fork 2 .old` - `fork #agc102aln4 .old` to make an old namespace + `fork #h5ii1cefto .old` to make an old namespace accessible again, - `reset-root #agc102aln4` to reset the root namespace and + `reset-root #h5ii1cefto` to reset the root namespace and its history to that of the specified namespace. - 1. #ru6bh9leo4 : add - 2. #agc102aln4 : add - 3. #f40nqgbsui : builtins.merge + 1. #a34n023ojd : add + 2. #h5ii1cefto : add + 3. #9ied0t98hk : builtins.merge 4. #sjg2v58vn2 : (initial reflogged namespace) ``` diff --git a/unison-src/transcripts/resolve.md b/unison-src/transcripts/resolve.md index 2d03b9dfd3..29f5444794 100644 --- a/unison-src/transcripts/resolve.md +++ b/unison-src/transcripts/resolve.md @@ -4,7 +4,7 @@ .> builtins.merge ``` -The `ucm` tool tracks edits to hashes in an object called a _patch_. When patches get merged, sometimes those patches will have conflicting edits. The `replace.term` command helps resolve such conflicts. +The `ucm` tool tracks edits to hashes in an object called a _patch_. When patches get merged, sometimes those patches will have conflicting edits. The `replace` command helps resolve such conflicts. First, let's make a new namespace, `example.resolve`: @@ -81,7 +81,7 @@ Let's now merge these namespaces into `c`: The namespace `c` now has an edit conflict, since the term `foo` was edited in two different ways. 
-```ucm +```ucm:error .example.resolve> cd c .example.resolve.c> todo ``` @@ -91,7 +91,7 @@ We see that `#44954ulpdf` (the original hash of `a.foo`) got replaced with _both We can resolve this conflict by picking one of the terms as the "winner": ```ucm -.example.resolve.c> replace.term #44954ulpdf #8e68dvpr0a +.example.resolve.c> replace #44954ulpdf #8e68dvpr0a ``` This changes the merged `c.patch` so that only the edit from #44954ulpdf to #8e68dvpr0a remains: @@ -102,7 +102,7 @@ This changes the merged `c.patch` so that only the edit from #44954ulpdf to #8e We still have a remaining _name conflict_ since it just so happened that both of the definitions in the edits were named `foo`. -```ucm +```ucm:error .example.resolve.c> todo ``` @@ -110,6 +110,7 @@ We can resolve the name conflict by deleting one of the names. ```ucm .example.resolve.c> delete.term foo#jdqoenu794 +.example.resolve.c> todo ``` And that's how you resolve edit conflicts with UCM. diff --git a/unison-src/transcripts/resolve.output.md b/unison-src/transcripts/resolve.output.md index bd319c1ed0..3ee8ea89ba 100644 --- a/unison-src/transcripts/resolve.output.md +++ b/unison-src/transcripts/resolve.output.md @@ -1,6 +1,6 @@ # Resolving edit conflicts in `ucm` -The `ucm` tool tracks edits to hashes in an object called a _patch_. When patches get merged, sometimes those patches will have conflicting edits. The `replace.term` command helps resolve such conflicts. +The `ucm` tool tracks edits to hashes in an object called a _patch_. When patches get merged, sometimes those patches will have conflicting edits. The `replace` command helps resolve such conflicts. 
First, let's make a new namespace, `example.resolve`: @@ -203,7 +203,7 @@ We see that `#44954ulpdf` (the original hash of `a.foo`) got replaced with _both We can resolve this conflict by picking one of the terms as the "winner": ```ucm -.example.resolve.c> replace.term #44954ulpdf #8e68dvpr0a +.example.resolve.c> replace #44954ulpdf #8e68dvpr0a Done. @@ -231,7 +231,7 @@ We still have a remaining _name conflict_ since it just so happened that both of Tip: This occurs when merging branches that both independently introduce the same name. Use `view foo` to see the - conflicting defintions, then use `move.term` to resolve + conflicting definitions, then use `move.term` to resolve the conflicts. ``` @@ -255,5 +255,11 @@ We can resolve the name conflict by deleting one of the names. Tip: You can use `undo` or `reflog` to undo this change. +.example.resolve.c> todo + + ✅ + + No conflicts or edits in progress. + ``` And that's how you resolve edit conflicts with UCM. diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index f386b785a6..47d97fd6df 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #73u4s1s881 (start of history) + □ #u5rtlqmmgp (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. 
- ⊙ #8i1dld2kqq + ⊙ #3ia3q75nlj > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #c5n4kp25q0 + ⊙ #2gaqk05una > Moves: Original name New name Nat.+ Nat.frobnicate - □ #73u4s1s881 (start of history) + □ #u5rtlqmmgp (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #8i1dld2kqq + ⊙ #3ia3q75nlj > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #c5n4kp25q0 + ⊙ #2gaqk05una > Moves: Original name New name Nat.+ Nat.frobnicate - □ #73u4s1s881 (start of history) + □ #u5rtlqmmgp (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #73u4s1s881 (start of history) + □ #u5rtlqmmgp (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #j321fql2c3 + ⊙ #a6t7mgqv2m - Deletes: Nat.* Nat.+ - □ #73u4s1s881 (start of history) + □ #u5rtlqmmgp (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. 
diff --git a/unison-src/transcripts/suffixes.md b/unison-src/transcripts/suffixes.md index 359e40b7ed..757ed6f098 100644 --- a/unison-src/transcripts/suffixes.md +++ b/unison-src/transcripts/suffixes.md @@ -38,3 +38,35 @@ Type-based search also benefits from this, we can just say `Nat` rather than `.b ```ucm .> find : Nat -> [a] -> [a] ``` + +## Corner cases + +If a definition is given in a scratch file, its suffixes shadow existing definitions that exist in the codebase with the same suffixes. For example: + +```unison:hide +unique type A = Thing1 Nat | thing2 Nat + +foo.a = 23 +bar = 100 +``` + +```ucm +.> add +``` + +```unison +unique type B = Thing1 Text | thing2 Text | Thing3 Text + +zoink.a = "hi" + +-- verifying that the `a` here references `zoink.a` +foo.baz.qux.bar : Text +foo.baz.qux.bar = a + +-- verifying that the `bar` is resolving to `foo.baz.qux.bar` +-- and that `Thing1` references `B.Thing1` from the current file +fn = cases + Thing1 msg -> msg Text.++ bar + thing2 msg -> msg Text.++ bar + _ -> todo "hmm" +``` diff --git a/unison-src/transcripts/suffixes.output.md b/unison-src/transcripts/suffixes.output.md index 5f35e9a053..12c6e31513 100644 --- a/unison-src/transcripts/suffixes.output.md +++ b/unison-src/transcripts/suffixes.output.md @@ -38,7 +38,7 @@ The `view` and `display` commands also benefit from this: ```ucm .> view List.drop - -- builtin.List.drop is built-in. + builtin builtin.List.drop : Nat -> [a] -> [a] .> display bar.a @@ -57,3 +57,55 @@ Type-based search also benefits from this, we can just say `Nat` rather than `.b ``` +## Corner cases + +If a definition is given in a scratch file, its suffixes shadow existing definitions that exist in the codebase with the same suffixes. 
For example: + +```unison +unique type A = Thing1 Nat | thing2 Nat + +foo.a = 23 +bar = 100 +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + unique type A + bar : Nat + foo.a : Nat + +``` +```unison +unique type B = Thing1 Text | thing2 Text | Thing3 Text + +zoink.a = "hi" + +-- verifying that the `a` here references `zoink.a` +foo.baz.qux.bar : Text +foo.baz.qux.bar = a + +-- verifying that the `bar` is resolving to `foo.baz.qux.bar` +-- and that `Thing1` references `B.Thing1` from the current file +fn = cases + Thing1 msg -> msg Text.++ bar + thing2 msg -> msg Text.++ bar + _ -> todo "hmm" +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type B + fn : B -> Text + foo.baz.qux.bar : Text + zoink.a : Text + +``` diff --git a/unison-src/transcripts/top-level-exceptions.md b/unison-src/transcripts/top-level-exceptions.md new file mode 100644 index 0000000000..8749984744 --- /dev/null +++ b/unison-src/transcripts/top-level-exceptions.md @@ -0,0 +1,46 @@ + +A simple transcript to test the use of exceptions that bubble to the top level. + +```ucm:hide +.> builtins.merge +``` + +FYI, here are the `Exception` and `Failure` types: + +```ucm +.> view Exception Failure +``` + +Here's a sample program just to verify that the typechecker allows `run` to throw exceptions: + +```unison +use builtin IO Exception Test.Result + +main : '{IO, Exception} () +main _ = () + +mytest : '{IO, Exception} [Test.Result] +mytest _ = [Ok "Great"] +``` + +```ucm +.> run main +.> add +.> io.test mytest +``` + +Now a test to show the handling of uncaught exceptions: + +```unison +main2 = '(error "oh noes!" 
()) + +error : Text -> a ->{Exception} x +error msg a = + builtin.Exception.raise (Failure (typeLink RuntimeError) msg (Any a)) + +unique type RuntimeError = +``` + +```ucm:error +.> run main2 +``` diff --git a/unison-src/transcripts/top-level-exceptions.output.md b/unison-src/transcripts/top-level-exceptions.output.md new file mode 100644 index 0000000000..9c052d3f7f --- /dev/null +++ b/unison-src/transcripts/top-level-exceptions.output.md @@ -0,0 +1,95 @@ + +A simple transcript to test the use of exceptions that bubble to the top level. + +FYI, here are the `Exception` and `Failure` types: + +```ucm +.> view Exception Failure + + ability builtin.Exception where + raise : Failure ->{builtin.Exception} x + + unique type builtin.io2.Failure + = Failure Type Text Any + +``` +Here's a sample program just to verify that the typechecker allows `run` to throw exceptions: + +```unison +use builtin IO Exception Test.Result + +main : '{IO, Exception} () +main _ = () + +mytest : '{IO, Exception} [Test.Result] +mytest _ = [Ok "Great"] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + main : '{IO, Exception} () + mytest : '{IO, Exception} [Result] + +``` +```ucm +.> run main + +.> add + + ⍟ I've added these definitions: + + main : '{IO, Exception} () + mytest : '{IO, Exception} [Result] + +.> io.test mytest + + New test results: + + ◉ mytest Great + + ✅ 1 test(s) passing + + Tip: Use view mytest to view the source of a test. + +``` +Now a test to show the handling of uncaught exceptions: + +```unison +main2 = '(error "oh noes!" ()) + +error : Text -> a ->{Exception} x +error msg a = + builtin.Exception.raise (Failure (typeLink RuntimeError) msg (Any a)) + +unique type RuntimeError = +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type RuntimeError + error : Text -> a ->{Exception} x + main2 : '{Exception} r + +``` +```ucm +.> run main2 + + 💔💥 + + The program halted with an unhandled exception: + + Failure (typeLink RuntimeError) "oh noes!" !Any + +``` diff --git a/yaks/easytest/src/EasyTest.hs b/yaks/easytest/src/EasyTest.hs index 5ff22f41f3..76e0b678b5 100644 --- a/yaks/easytest/src/EasyTest.hs +++ b/yaks/easytest/src/EasyTest.hs @@ -61,11 +61,17 @@ expect :: HasCallStack => Bool -> Test () expect False = crash "unexpected" expect True = ok -expectEqual :: (Eq a, Show a) => a -> a -> Test () -expectEqual expected actual = if expected == actual then ok - else crash $ unlines ["", show actual, "** did not equal expected value **", show expected] +expectEqual' :: (HasCallStack, Eq a, Show a) => a -> a -> Test () +expectEqual' expected actual = + if expected == actual then pure () + else crash $ unlines ["", show actual, "** did not equal expected value **", show expected] -expectNotEqual :: (Eq a, Show a) => a -> a -> Test () +expectEqual :: (HasCallStack, Eq a, Show a) => a -> a -> Test () +expectEqual expected actual = + if expected == actual then ok + else crash $ unlines ["", show actual, "** did not equal expected value **", show expected] + +expectNotEqual :: (HasCallStack, Eq a, Show a) => a -> a -> Test () expectNotEqual forbidden actual = if forbidden /= actual then ok else crash $ unlines ["", show actual, "** did equal the forbidden value **", show forbidden] @@ -261,7 +267,7 @@ word8' = random' -- | Sample uniformly from the given list of possibilities pick :: [a] -> Test a pick as = let n = length as; ind = picker n as in do - _ <- if (n > 0) then ok else crash "pick called with empty list" + _ <- if (n > 0) then pure () else crash "pick called with empty list" i <- int' 0 (n - 1) Just a <- pure (ind i) pure a